changeset 5344:cc34be74eeec

Merge with crew-stable.
author Bryan O'Sullivan <bos@serpentine.com>
date Sat, 29 Sep 2007 21:10:54 -0700
parents 26692d08c2f9 (current diff) c8d6f8510bf4 (diff)
children 9c0f864fddca f0931c0240b4
files hgext/convert/hg.py
diffstat 176 files changed, 9038 insertions(+), 3071 deletions(-)
--- a/.hgignore
+++ b/.hgignore
@@ -4,6 +4,7 @@ syntax: glob
 *.orig
 *.rej
 *~
+*.mergebackup
 *.o
 *.so
 *.pyc
--- a/contrib/buildrpm
+++ b/contrib/buildrpm
@@ -29,7 +29,7 @@ tmpspec=/tmp/`basename "$specfile"`.$$
 version=`hg tags | perl -e 'while(<STDIN>){if(/^(\d\S+)/){print$1;exit}}'`
 # Compute the release number as the difference in revision numbers
 # between the tip and the most recent tag.
-release=`hg tags | perl -e 'while(<STDIN>){/^(\S+)\s+(\d+)/;if($1eq"tip"){$t=$2}else{print$t-$2+1;exit}}'`
+release=`hg tags | perl -e 'while(<STDIN>){($tag,$id)=/^(\S+)\s+(\d+)/;if($tag eq "tip"){$tip = $id}elsif($tag=~/^\d/){print $tip-$id+1;exit}}'`
 tip=`hg -q tip`
 
 # Beat up the spec file
@@ -40,6 +40,19 @@ sed -e 's,^Source:.*,Source: /dev/null,'
     -e 's,^%setup.*,,' \
     $specfile > $tmpspec
 
+cat <<EOF >> $tmpspec
+%changelog
+* `date +'%a %b %d %Y'` `hg showconfig ui.username` $version-$release
+- Automatically built via $0
+
+EOF
+hg log \
+     --template '* {date|rfc822date} {author}\n- {desc|firstline}\n\n' \
+     .hgtags \
+  | sed -e 's/^\(\* [MTWFS][a-z][a-z]\), \([0-3][0-9]\) \([A-Z][a-z][a-z]\) /\1 \3 \2 /' \
+        -e '/^\* [MTWFS][a-z][a-z] /{s/ [012][0-9]:[0-9][0-9]:[0-9][0-9] [+-][0-9]\{4\}//}' \
+   >> $tmpspec
+
 rpmbuild --define "_topdir $rpmdir" -bb $tmpspec
 if [ $? = 0 ]; then
     rm -rf $tmpspec $rpmdir/BUILD
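
The release-number one-liner above is easier to follow outside of perl. Below is a minimal sketch of the same computation in Python: remember the tip revision, then use the first tag whose name starts with a digit, so non-version tags no longer break the count. The sample "hg tags" lines are illustrative only.

# Sketch of the release-number computation done by the perl one-liner
# above.  Input mimics "hg tags" output ("<tag>  <rev>:<node>"); the
# sample values are illustrative, not taken from a real repository.
def release_number(tag_lines):
    tip_rev = None
    for line in tag_lines:
        name, rest = line.split(None, 1)
        rev = int(rest.split(':', 1)[0])
        if name == 'tip':
            tip_rev = rev               # "hg tags" lists tip first
        elif name[:1].isdigit():
            # first real version tag below tip: release is its distance to tip
            return tip_rev - rev + 1
    return None

print release_number(['tip      5344:cc34be74eeec',
                      '0.9.4    5115:0123456789ab'])    # -> 230
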
--- a/contrib/churn.py
+++ b/contrib/churn.py
@@ -11,9 +11,34 @@
 #
 # <alias email> <actual email>
 
-import sys
 from mercurial.i18n import gettext as _
 from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
+import os, sys
+
+def get_tty_width():
+    if 'COLUMNS' in os.environ:
+        try:
+            return int(os.environ['COLUMNS'])
+        except ValueError:
+            pass
+    try:
+        import termios, fcntl, struct
+        buf = 'abcd'
+        for dev in (sys.stdout, sys.stdin):
+            try:
+                if buf != 'abcd':
+                    break
+                fd = dev.fileno()
+                if not os.isatty(fd):
+                    continue
+                buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf)
+            except ValueError:
+                pass
+        if buf != 'abcd':
+           return struct.unpack('hh', buf)[1]
+    except ImportError:
+        pass
+    return 80
 
 def __gather(ui, repo, node1, node2):
     def dirtywork(f, mmap1, mmap2):
@@ -159,8 +184,9 @@ def churn(ui, repo, **opts):
 
     maximum = ordered[0][1]
 
-    ui.note("Assuming 80 character terminal\n")
-    width = 80 - 1
+    width = get_tty_width()
+    ui.note(_("assuming %i character terminal\n") % width)
+    width -= 1
 
     for i in ordered:
         person = i[0]
--- a/contrib/darcs2hg.py
+++ b/contrib/darcs2hg.py
@@ -15,6 +15,7 @@ import os, sys
 import tempfile
 import xml.dom.minidom as xml_dom
 from time import strptime, mktime
+import re
 
 DARCS_REPO = None
 HG_REPO    = None
@@ -93,11 +94,50 @@ def darcs_tip(darcs_repo):
 def darcs_pull(hg_repo, darcs_repo, chash):
 	old_tip = darcs_tip(darcs_repo)
 	res     = cmd("darcs pull \"%s\" --all --match=\"hash %s\"" % (darcs_repo, chash), hg_repo)
+	if re.search('^We have conflicts in the following files:$', res, re.MULTILINE):
+		print "Trying to revert files to work around conflict..."
+		rev_res = cmd ("darcs revert --all", hg_repo)
+		print rev_res
 	print res
 	new_tip = darcs_tip(darcs_repo)
 	if not new_tip != old_tip + 1:
 		error("Darcs pull did not work as expected: " + res)
 
+def darcs_changes_summary(darcs_repo, chash):
+	"""Gets the changes from the darcs summary. This returns the chronological
+	list of changes as (change_type, args). Eg. ('add_file', 'foo.txt') or
+	('move', ['foo.txt','bar.txt'])."""
+	change = cmd("darcs changes --summary --xml-output --match=\"hash %s\"" % (chash), darcs_repo)
+	doc = xml_dom.parseString(change)
+	for patch_node in doc.childNodes[0].childNodes:
+		summary_nodes = filter(lambda n: n.nodeName == "summary" and n.nodeType == n.ELEMENT_NODE, patch_node.childNodes)
+		for summary_node in summary_nodes:
+			change_nodes = filter(lambda n: n.nodeType == n.ELEMENT_NODE, summary_node.childNodes)
+			if len(change_nodes) == 0:
+				name = filter(lambda n: n.nodeName == "name", patch_node.childNodes)
+				if not name:
+					error("Darcs patch has an empty summary node and no name: " + patch_node.toxml())
+				name = name[0].childNodes[0].data.strip()
+				(tag, sub_count) = re.subn('^TAG ', '', name, 1)
+				if sub_count != 1:
+					error("Darcs patch has an empty summary node but doesn't look like a tag: " + patch_node.toxml());
+			for change_node in change_nodes:
+				change = change_node.nodeName
+				if change == 'modify_file':
+					yield change, change_node.childNodes[0].data.strip()
+				elif change == 'add_file':
+					yield change, change_node.childNodes[0].data.strip()
+				elif change == 'remove_file':
+					yield change, change_node.childNodes[0].data.strip()
+				elif change == 'add_directory':
+					yield change, change_node.childNodes[0].data.strip()
+				elif change == 'remove_directory':
+					yield change, change_node.childNodes[0].data.strip()
+				elif change == 'move':
+					yield change, (change_node.getAttribute('from'), change_node.getAttribute('to'))
+				else:
+					error('Problem parsing summary xml: Unexpected element: ' + change_node.toxml())
+
 # ------------------------------------------------------------------------------
 #
 # Mercurial interface
@@ -127,6 +167,36 @@ def hg_tip( hg_repo ):
 	tip = tip.split("\n")[0].split(":")[1].strip()
 	return int(tip)
 
+def hg_rename( hg_repo, from_file, to_file ):
+	cmd("hg rename --after \"%s\" \"%s\"" % (from_file, to_file), hg_repo);
+	
+def hg_tag ( hg_repo, text, author, date ):
+	old_tip = hg_tip(hg_repo)
+	res = cmd("hg tag -u \"%s\" -d \"%s 0\" \"%s\""	 % (author, date, text), hg_repo)
+	new_tip = hg_tip(hg_repo)
+	if not new_tip == old_tip + 1:
+		error("Mercurial tag did not work as expected: " + res)
+
+def hg_handle_change( hg_repo, author, date, change, arg ):
+	"""Processes a change event as output by darcs_changes_summary. These
+	consist of file move/rename/add/delete commands."""
+	if change == 'modify_file':
+		pass
+	elif change == 'add_file':
+		pass
+	elif change =='remove_file':
+		pass
+	elif change == 'add_directory':
+		pass
+	elif change == 'remove_directory':
+		pass
+	elif change == 'move':
+		hg_rename(hg_repo, arg[0], arg[1])
+	elif change == 'tag':
+		hg_tag(hg_repo, arg, author, date)
+	else:
+		error('Unknown change type ' + change + ': ' + arg)
+
 # ------------------------------------------------------------------------------
 #
 # Main
@@ -167,11 +237,13 @@ if __name__ == "__main__":
 			print "(skipping)"
 		else:
 			text = summary + "\n" + description
-			darcs_pull(hg_repo, darcs_repo, chash)
 			# The commit hash has a date like 20021020201112
 			# --------------------------------YYYYMMDDHHMMSS
 			date = chash.split("-")[0]
 			epoch = int(mktime(strptime(date, '%Y%m%d%H%M%S')))
+			darcs_pull(hg_repo, darcs_repo, chash)
+			for change, arg in darcs_changes_summary(darcs_repo, chash):
+				hg_handle_change(hg_repo, author, epoch, change, arg)
 			hg_commit(hg_repo, text, author, epoch)
 		change_number += 1
 	print "Darcs repository (_darcs) was not deleted. You can keep or remove it."
--- a/contrib/hg-ssh
+++ b/contrib/hg-ssh
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2005, 2006 by Intevation GmbH <intevation@intevation.de>
+# Copyright 2005-2007 by Intevation GmbH <intevation@intevation.de>
 # Author(s):
 # Thomas Arendsen Hein <thomas@intevation.de>
 #
@@ -25,7 +25,10 @@ You can use pattern matching of your nor
 command="cd repos && hg-ssh user/thomas/* projects/{mercurial,foo}"
 """
 
-from mercurial import commands
+# enable importing on demand to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
+from mercurial import dispatch
 
 import sys, os
 
@@ -38,7 +41,7 @@ if orig_cmd.startswith('hg -R ') and ori
     path = orig_cmd[6:-14]
     repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
     if repo in allowed_paths:
-        commands.dispatch(['-R', repo, 'serve', '--stdio'])
+        dispatch.dispatch(['-R', repo, 'serve', '--stdio'])
     else:
         sys.stderr.write("Illegal repository %r\n" % repo)
         sys.exit(-1)
--- a/contrib/hgk
+++ b/contrib/hgk
@@ -5,6 +5,74 @@
 # and distributed under the terms of the GNU General Public Licence,
 # either version 2, or (at your option) any later version.
 
+
+# Modified version of Tip 171:
+# http://www.tcl.tk/cgi-bin/tct/tip/171.html
+#
+# The in_mousewheel global was added to fix strange reentrancy issues.
+# The whole snippet is activated only under Windows; mouse wheel
+# bindings already work under MacOSX and Linux.
+
+if {[tk windowingsystem] eq "win32"} {
+
+set mw_classes [list Text Listbox Table TreeCtrl]
+   foreach class $mw_classes { bind $class <MouseWheel> {} }
+
+set in_mousewheel 0
+
+proc ::tk::MouseWheel {wFired X Y D {shifted 0}} {
+    global in_mousewheel
+    if { $in_mousewheel != 0 } { return }
+    # Set event to check based on call
+    set evt "<[expr {$shifted?{Shift-}:{}}]MouseWheel>"
+    # do not double-fire in case the class already has a binding
+    if {[bind [winfo class $wFired] $evt] ne ""} { return }
+    # obtain the window the mouse is over
+    set w [winfo containing $X $Y]
+    # if we are outside the app, try and scroll the focus widget
+    if {![winfo exists $w]} { catch {set w [focus]} }
+    if {[winfo exists $w]} {
+
+        if {[bind $w $evt] ne ""} {
+            # Awkward ... this widget has a MouseWheel binding, but to
+            # trigger successfully in it, we must give it focus.
+            catch {focus} old
+            if {$w ne $old} { focus $w }
+            set in_mousewheel 1
+            event generate $w $evt -rootx $X -rooty $Y -delta $D
+            set in_mousewheel 0
+            if {$w ne $old} { focus $old }
+            return
+        }
+
+        # aqua and x11/win32 have different delta handling
+        if {[tk windowingsystem] ne "aqua"} {
+            set delta [expr {- ($D / 30)}]
+        } else {
+            set delta [expr {- ($D)}]
+        }
+        # scrollbars have different call conventions
+        if {[string match "*Scrollbar" [winfo class $w]]} {
+            catch {tk::ScrollByUnits $w \
+                       [string index [$w cget -orient] 0] $delta}
+        } else {
+            set cmd [list $w [expr {$shifted ? "xview" : "yview"}] \
+                         scroll $delta units]
+            # Walking up to find the proper widget (handles cases like
+            # embedded widgets in a canvas)
+            while {[catch $cmd] && [winfo toplevel $w] ne $w} {
+                set w [winfo parent $w]
+            }
+        }
+    }
+}
+
+bind all <MouseWheel> [list ::tk::MouseWheel %W %X %Y %D 0]
+
+# end of win32 section
+}
+
+
 proc gitdir {} {
     global env
     if {[info exists env(GIT_DIR)]} {
@@ -299,6 +367,11 @@ proc readotherrefs {base dname excl} {
     }
 }
 
+proc allcansmousewheel {delta} {
+    set delta [expr -5*(int($delta)/abs($delta))]
+    allcanvs yview scroll $delta units
+}
+
 proc error_popup msg {
     set w .error
     toplevel $w
@@ -470,6 +543,7 @@ proc makewindow {} {
 
     bindall <1> {selcanvline %W %x %y}
     #bindall <B1-Motion> {selcanvline %W %x %y}
+    bindall <MouseWheel> "allcansmousewheel %D"
     bindall <ButtonRelease-4> "allcanvs yview scroll -5 units"
     bindall <ButtonRelease-5> "allcanvs yview scroll 5 units"
     bindall <2> "allcanvs scan mark 0 %y"
@@ -3681,4 +3755,6 @@ set patchnum 0
 setcoords
 makewindow
 readrefs
+set hgroot [exec $env(HG) root]
+wm title . "hgk $hgroot"
 getcommits $revtreeargs
--- a/contrib/hgwebdir.fcgi
+++ b/contrib/hgwebdir.fcgi
@@ -2,14 +2,17 @@
 #
 # An example CGI script to export multiple hgweb repos, edit as necessary
 
+# adjust python path if not a system-wide install:
+#import sys
+#sys.path.insert(0, "/path/to/python/lib")
+
+# enable demandloading to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
 # send python tracebacks to the browser if an error occurs:
 import cgitb
 cgitb.enable()
 
-# adjust python path if not a system-wide install:
-#import sys
-#sys.path.insert(0, "/path/to/python/lib")
-
 # If you'd like to serve pages with UTF-8 instead of your default
 # locale charset, you can do so by uncommenting the following lines.
 # Note that this will cause your .hgrc files to be interpreted in
--- a/contrib/macosx/Readme.html
+++ b/contrib/macosx/Readme.html
@@ -19,10 +19,14 @@
 <p class="p2"><br></p>
 <p class="p3">This is <i>not</i> a stand-alone version of Mercurial.</p>
 <p class="p2"><br></p>
-<p class="p3">To use it, you must have the Universal MacPython 2.4.3 from <a href="http://www.python.org">www.python.org</a> installed.</p>
+<p class="p3">To use it, you must have the appropriate version of Universal MacPython from <a href="http://www.python.org">www.python.org</a> installed.</p>
 <p class="p2"><br></p>
-<p class="p3">You can download MacPython 2.4.3 from here:</p>
-<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.4.3/Universal-MacPython-2.4.3-2006-04-07.dmg">http://www.python.org/ftp/python/2.4.3/Universal-MacPython-2.4.3-2006-04-07.dmg</a></span></p>
+<p class="p3">You can find more information and download MacPython from here:</p>
+<p class="p4"><span class="s1"><a href="http://www.python.org/download">http://www.python.org/download</a></span></p>
+<p class="p2"><br></p>
+<p class="p3">Or direct links to the latest version are:</p>
+<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.5.1/python-2.5.1-macosx.dmg">Python 2.5.1 for Macintosh OS X</a></span></p>
+<p class="p4"><span class="s1"><a href="http://www.python.org/ftp/python/2.4.4/python-2.4.4-macosx2006-10-18.dmg">Python 2.4.4 for Macintosh OS X</a></span></p>
 <p class="p2"><br></p>
 <p class="p1"><b>After you install</b></p>
 <p class="p2"><br></p>
--- a/contrib/mercurial.el
+++ b/contrib/mercurial.el
@@ -1261,9 +1261,22 @@ Names are displayed relative to the repo
   (interactive)
   (error "not implemented"))
 
-(defun hg-version-other-window ()
-  (interactive)
-  (error "not implemented"))
+(defun hg-version-other-window (rev)
+  "Visit version REV of the current file in another window.
+If the current file is named `F', the version is named `F.~REV~'.
+If `F.~REV~' already exists, use it instead of checking it out again."
+  (interactive "sVersion to visit (default is workfile version): ")
+  (let* ((file buffer-file-name)
+       	 (version (if (string-equal rev "")
+		       "tip"
+		        rev))
+ 	 (automatic-backup (vc-version-backup-file-name file version))
+          (manual-backup (vc-version-backup-file-name file version 'manual)))
+     (unless (file-exists-p manual-backup)
+       (if (file-exists-p automatic-backup)
+           (rename-file automatic-backup manual-backup nil)
+         (hg-run0 "-q" "cat" "-r" version "-o" manual-backup file)))
+     (find-file-other-window manual-backup)))
 
 
 (provide 'mercurial)
old mode 100644
new mode 100755
--- a/contrib/mercurial.spec
+++ b/contrib/mercurial.spec
@@ -8,6 +8,17 @@ Source: http://www.selenic.com/mercurial
 URL: http://www.selenic.com/mercurial
 BuildRoot: /tmp/build.%{name}-%{version}-%{release}
 
+# From the README:
+#
+#   Note: some distributions fails to include bits of distutils by
+#   default, you'll need python-dev to install. You'll also need a C
+#   compiler and a 3-way merge tool like merge, tkdiff, or kdiff3.
+#
+# python-devel provides an adequate python-dev.  The merge tool is a
+# run-time dependency.
+#
+BuildRequires: python >= 2.3, python-devel, make, gcc, asciidoc, xmlto
+
 %define pythonver %(python -c 'import sys;print ".".join(map(str, sys.version_info[:2]))')
 %define pythonlib %{_libdir}/python%{pythonver}/site-packages/%{name}
 %define hgext %{_libdir}/python%{pythonver}/site-packages/hgext
@@ -21,23 +32,51 @@ rm -rf $RPM_BUILD_ROOT
 %setup -q
 
 %build
-python setup.py build
+make all
 
 %install
-python setup.py install --root $RPM_BUILD_ROOT
+python setup.py install --root $RPM_BUILD_ROOT --prefix %{_prefix}
+make install-doc DESTDIR=$RPM_BUILD_ROOT MANDIR=%{_mandir}
+
+install contrib/hgk          $RPM_BUILD_ROOT%{_bindir}
+install contrib/convert-repo $RPM_BUILD_ROOT%{_bindir}/mercurial-convert-repo
+install contrib/hg-ssh       $RPM_BUILD_ROOT%{_bindir}
+install contrib/git-viz/{hg-viz,git-rev-tree} $RPM_BUILD_ROOT%{_bindir}
+
+bash_completion_dir=$RPM_BUILD_ROOT%{_sysconfdir}/bash_completion.d
+mkdir -p $bash_completion_dir
+install contrib/bash_completion $bash_completion_dir/mercurial.sh
+
+zsh_completion_dir=$RPM_BUILD_ROOT%{_datadir}/zsh/site-functions
+mkdir -p $zsh_completion_dir
+install contrib/zsh_completion $zsh_completion_dir/_mercurial
+
+lisp_dir=$RPM_BUILD_ROOT%{_datadir}/emacs/site-lisp
+mkdir -p $lisp_dir
+install contrib/mercurial.el $lisp_dir
 
 %clean
 rm -rf $RPM_BUILD_ROOT
 
 %files
 %defattr(-,root,root,-)
-%doc doc/* *.cgi
+%doc CONTRIBUTORS COPYING doc/README doc/hg*.txt doc/hg*.html doc/ja *.cgi
+%{_mandir}/man?/hg*.gz
 %dir %{pythonlib}
 %dir %{hgext}
+%{_sysconfdir}/bash_completion.d/mercurial.sh
+%{_datadir}/zsh/site-functions/_mercurial
+%{_datadir}/emacs/site-lisp/mercurial.el
+%{_bindir}/hg
+%{_bindir}/hgk
 %{_bindir}/hgmerge
-%{_bindir}/hg
+%{_bindir}/hg-ssh
+%{_bindir}/hg-viz
+%{_bindir}/git-rev-tree
+%{_bindir}/mercurial-convert-repo
 %{pythonlib}/templates
 %{pythonlib}/*.py*
 %{pythonlib}/hgweb/*.py*
 %{pythonlib}/*.so
 %{hgext}/*.py*
+%{hgext}/convert/*.py*
--- a/contrib/win32/mercurial.ini
+++ b/contrib/win32/mercurial.ini
@@ -1,41 +1,41 @@
-; System-wide Mercurial config file.  To override these settings on a
-; per-user basis, please edit the following file instead, where
-; USERNAME is your Windows user name:
-;   C:\Documents and Settings\USERNAME\Mercurial.ini
-
-[ui]
-editor = notepad
-
-; By default, we try to encode and decode all files that do not
-; contain ASCII NUL characters.  What this means is that we try to set
-; line endings to Windows style on update, and to Unix style on
-; commit.  This lets us cooperate with Linux and Unix users, so
-; everybody sees files with their native line endings.
-
-[extensions]
-; The win32text extension is available and installed by default.  It
-; provides built-in Python hooks to perform line ending conversions.
-; This is normally much faster than running an external program.
-hgext.win32text =
-
-
-[encode]
-; Encode files that don't contain NUL characters.
-
-; ** = cleverencode:
-
-; Alternatively, you can explicitly specify each file extension that
-; you want encoded (any you omit will be left untouched), like this:
-
-; *.txt = dumbencode:
-
-
-[decode]
-; Decode files that don't contain NUL characters.
-
-; ** = cleverdecode:
-
-; Alternatively, you can explicitly specify each file extension that
-; you want decoded (any you omit will be left untouched), like this:
-
-; **.txt = dumbdecode:
+; System-wide Mercurial config file.  To override these settings on a
+; per-user basis, please edit the following file instead, where
+; USERNAME is your Windows user name:
+;   C:\Documents and Settings\USERNAME\Mercurial.ini
+
+[ui]
+editor = notepad
+
+; By default, we try to encode and decode all files that do not
+; contain ASCII NUL characters.  What this means is that we try to set
+; line endings to Windows style on update, and to Unix style on
+; commit.  This lets us cooperate with Linux and Unix users, so
+; everybody sees files with their native line endings.
+
+[extensions]
+; The win32text extension is available and installed by default.  It
+; provides built-in Python hooks to perform line ending conversions.
+; This is normally much faster than running an external program.
+hgext.win32text =
+
+
+[encode]
+; Encode files that don't contain NUL characters.
+
+; ** = cleverencode:
+
+; Alternatively, you can explicitly specify each file extension that
+; you want encoded (any you omit will be left untouched), like this:
+
+; *.txt = dumbencode:
+
+
+[decode]
+; Decode files that don't contain NUL characters.
+
+; ** = cleverdecode:
+
+; Alternatively, you can explicitly specify each file extension that
+; you want decoded (any you omit will be left untouched), like this:
+
+; **.txt = dumbdecode:
--- a/contrib/zsh_completion
+++ b/contrib/zsh_completion
@@ -200,6 +200,13 @@ typeset -A _hg_cmd_globals
   _wanted files expl 'modified files' _multi_parts / status_files
 }
 
+_hg_config() {
+    typeset -a items
+    local line
+    items=(${${(%f)"$(_hg_cmd showconfig)"}%%\=*})
+    (( $#items )) && _describe -t config 'config item' items
+}
+
 _hg_addremove() {
   _alternative 'files:unknown files:_hg_unknown' \
     'files:missing files:_hg_missing'
@@ -352,6 +359,17 @@ typeset -A _hg_cmd_globals
   '*:destination:_files'
 }
 
+_hg_cmd_backout() {
+  _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
+    '--merge[merge with old dirstate parent after backout]' \
+    '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
+    '--parent[parent to choose when backing out merge]' \
+    '(--user -u)'{-u+,--user}'[record user as committer]:user:' \
+    '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_tags' \
+    '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
+    '(--logfile -l)'{-l+,--logfile}'[read commit message from <file>]:log file:_files -g \*.txt'
+}
+
 _hg_cmd_bundle() {
   _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
   '(--force -f)'{-f,--force}'[run even when remote repository is unrelated]' \
@@ -431,7 +449,8 @@ typeset -A _hg_cmd_globals
   '(--line-number -n)'{-n,--line-number}'[print matching line numbers]' \
   '*'{-r+,--rev}'[search in given revision range]:revision:_hg_revrange' \
   '(--user -u)'{-u,--user}'[print user who committed change]' \
-  '*:search pattern:_hg_files'
+  '1:search pattern:' \
+  '*:files:_hg_files'
 }
 
 _hg_cmd_heads() {
@@ -444,6 +463,15 @@ typeset -A _hg_cmd_globals
   '*:mercurial command:_hg_commands'
 }
 
+_hg_cmd_identify() {
+  _arguments -s -w : $_hg_global_opts \
+  '(--rev -r)'{-r+,--rev}'[identify the specified rev]:revision:_hg_tags' \
+  '(--num -n)'{-n+,--num}'[show local revision number]' \
+  '(--id -i)'{-i+,--id}'[show global revision id]' \
+  '(--branch -b)'{-b+,--branch}'[show branch]' \
+  '(--tags -t)'{-t+,--tags}'[show tags]'
+}
+
 _hg_cmd_import() {
   _arguments -s -w : $_hg_global_opts \
   '(--strip -p)'{-p+,--strip}'[directory strip option for patch (default: 1)]:count:' \
@@ -457,7 +485,7 @@ typeset -A _hg_cmd_globals
   '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \
   '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
   '(--patch -p)'{-p,--patch}'[show patch]' \
-  '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]' \
+  '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:_hg_tags' \
   '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \
   '--bundle[file to store the bundles into]:bundle file:_files' \
   ':source:_hg_remote'
@@ -509,7 +537,7 @@ typeset -A _hg_cmd_globals
 _hg_cmd_parents() {
   _arguments -s -w : $_hg_global_opts $_hg_style_opts \
   '(--rev -r)'{-r+,--rev}'[show parents of the specified rev]:revision:_hg_tags' \
-  ':revision:_hg_tags'
+  ':last modified file:_hg_files'
 }
 
 _hg_cmd_paths() {
@@ -521,13 +549,14 @@ typeset -A _hg_cmd_globals
   _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
   '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
   '(--update -u)'{-u,--update}'[update to new tip if changesets were pulled]' \
+  '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:' \
   ':source:_hg_remote'
 }
 
 _hg_cmd_push() {
   _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
   '(--force -f)'{-f,--force}'[force push]' \
-  '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]' \
+  '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]:revision:_hg_tags' \
   ':destination:_hg_remote'
 }
 
@@ -579,6 +608,12 @@ typeset -A _hg_cmd_globals
   '(--ipv6 -6)'{-6,--ipv6}'[use IPv6 in addition to IPv4]'
 }
 
+_hg_cmd_showconfig() {
+  _arguments -s -w : $_hg_global_opts \
+  '(--untrusted -u)'{-u+,--untrusted}'[show untrusted configuration options]' \
+  ':config item:_hg_config'
+}
+
 _hg_cmd_status() {
   _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
   '(--all -A)'{-A,--all}'[show status of all files]' \
@@ -620,9 +655,15 @@ typeset -A _hg_cmd_globals
 _hg_cmd_update() {
   _arguments -s -w : $_hg_global_opts \
   '(--clean -C)'{-C,--clean}'[overwrite locally modified files]' \
+  '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_tags' \
   ':revision:_hg_tags'
 }
 
+# bisect extension
+_hg_cmd_bisect() {
+  _arguments -s -w : $_hg_global_opts ':evaluation:(help init reset next good bad)'
+}
+
 # HGK
 _hg_cmd_view() {
   _arguments -s -w : $_hg_global_opts \
--- a/doc/gendoc.py
+++ b/doc/gendoc.py
@@ -1,6 +1,7 @@
 import sys, textwrap
 # import from the live mercurial repo
 sys.path.insert(0, "..")
+from mercurial import demandimport; demandimport.enable()
 from mercurial.commands import table, globalopts
 from mercurial.i18n import gettext as _
 from mercurial.help import helptable
--- a/hg
+++ b/hg
@@ -7,5 +7,8 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import mercurial.commands
-mercurial.commands.run()
+# enable importing on demand to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
+import mercurial.dispatch
+mercurial.dispatch.run()
new file mode 100644
--- /dev/null
+++ b/hgext/alias.py
@@ -0,0 +1,76 @@
+# Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
+# This file is published under the GNU GPL.
+
+'''allow user-defined command aliases
+
+To use, create entries in your hgrc of the form
+
+[alias]
+mycmd = cmd --args
+'''
+
+from mercurial.cmdutil import findcmd, UnknownCommand, AmbiguousCommand
+from mercurial import commands
+
+cmdtable = {}
+
+class RecursiveCommand(Exception): pass
+
+class lazycommand(object):
+    '''defer command lookup until needed, so that extensions loaded
+    after alias can be aliased'''
+    def __init__(self, ui, name, target):
+        self._ui = ui
+        self._name = name
+        self._target = target
+        self._cmd = None
+
+    def __len__(self):
+        self._resolve()
+        return len(self._cmd)
+
+    def __getitem__(self, key):
+        self._resolve()
+        return self._cmd[key]
+
+    def __iter__(self):
+        self._resolve()
+        return self._cmd.__iter__()
+
+    def _resolve(self):
+        if self._cmd is not None:
+            return
+
+        try:
+            self._cmd = findcmd(self._ui, self._target, commands.table)[1]
+            if self._cmd == self:
+                raise RecursiveCommand()
+            if self._target in commands.norepo.split(' '):
+                commands.norepo += ' %s' % self._name
+            return
+        except UnknownCommand:
+            msg = '*** [alias] %s: command %s is unknown' % \
+                  (self._name, self._target)
+        except AmbiguousCommand:
+            msg = '*** [alias] %s: command %s is ambiguous' % \
+                  (self._name, self._target)
+        except RecursiveCommand:
+            msg = '*** [alias] %s: circular dependency on %s' % \
+                  (self._name, self._target)
+        def nocmd(*args, **opts):
+            self._ui.warn(msg + '\n')
+            return 1
+        nocmd.__doc__ = msg
+        self._cmd = (nocmd, [], '')
+        commands.norepo += ' %s' % self._name
+
+def uisetup(ui):
+    for cmd, target in ui.configitems('alias'):
+        if not target:
+            ui.warn('*** [alias] %s: no definition\n' % cmd)
+            continue
+        args = target.split(' ')
+        tcmd = args.pop(0)
+        if args:
+            ui.setconfig('defaults', cmd, ' '.join(args))
+        cmdtable[cmd] = lazycommand(ui, cmd, tcmd)
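
To make the [alias] mechanism above concrete, here is a sketch of what uisetup() does with a hypothetical hgrc entry (the alias name and options are made up): the first token of the value becomes the wrapped command, and any remaining options are stored in the [defaults] section so they apply whenever the alias runs.

# What uisetup() above does with a hypothetical hgrc containing:
#
#   [alias]
#   latest = log --limit 5
#
target = 'log --limit 5'
args = target.split(' ')
tcmd = args.pop(0)
print tcmd             # -> 'log'  (resolved lazily via findcmd at first use)
print ' '.join(args)   # -> '--limit 5', stored via ui.setconfig('defaults', 'latest', ...)
# cmdtable['latest'] then holds lazycommand(ui, 'latest', 'log'), so the
# command table lookup only happens when the alias is actually invoked.
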
new file mode 100644
--- /dev/null
+++ b/hgext/children.py
@@ -0,0 +1,41 @@
+# Mercurial extension to provide the 'hg children' command
+#
+# Copyright 2007 by Intevation GmbH <intevation@intevation.de>
+# Author(s):
+# Thomas Arendsen Hein <thomas@intevation.de>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial import cmdutil
+from mercurial.i18n import _
+
+
+def children(ui, repo, file_=None, **opts):
+    """show the children of the given or working dir revision
+
+    Print the children of the working directory's revisions.
+    If a revision is given via --rev, the children of that revision
+    will be printed. If a file argument is given, the revision in
+    which the file was last changed (after the working directory
+    revision or the argument to --rev if given) is printed.
+    """
+    rev = opts.get('rev')
+    if file_:
+        ctx = repo.filectx(file_, changeid=rev)
+    else:
+        ctx = repo.changectx(rev)
+
+    displayer = cmdutil.show_changeset(ui, repo, opts)
+    for node in [cp.node() for cp in ctx.children()]:
+        displayer.show(changenode=node)
+
+
+cmdtable = {
+    "children":
+        (children,
+         [('r', 'rev', '', _('show children of the specified rev')),
+          ('', 'style', '', _('display using template map file')),
+          ('', 'template', '', _('display with template'))],
+         _('hg children [-r REV] [FILE]')),
+}
--- a/hgext/convert/__init__.py
+++ b/hgext/convert/__init__.py
@@ -5,48 +5,60 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-from common import NoRepo
+from common import NoRepo, converter_source, converter_sink
 from cvs import convert_cvs
 from git import convert_git
-from hg import convert_mercurial
+from hg import mercurial_source, mercurial_sink
+from subversion import convert_svn, debugsvnlog
 
-import os
+import os, shlex, shutil
 from mercurial import hg, ui, util, commands
+from mercurial.i18n import _
+
+commands.norepo += " convert debugsvnlog"
 
-commands.norepo += " convert"
+converters = [convert_cvs, convert_git, convert_svn, mercurial_source,
+              mercurial_sink]
 
-converters = [convert_cvs, convert_git, convert_mercurial]
+def convertsource(ui, path, **opts):
+    for c in converters:
+        try:
+            return c.getcommit and c(ui, path, **opts)
+        except (AttributeError, NoRepo):
+            pass
+    raise util.Abort('%s: unknown repository type' % path)
 
-def converter(ui, path):
+def convertsink(ui, path):
     if not os.path.isdir(path):
         raise util.Abort("%s: not a directory" % path)
     for c in converters:
         try:
-            return c(ui, path)
-        except NoRepo:
+            return c.putcommit and c(ui, path)
+        except (AttributeError, NoRepo):
             pass
-    raise util.Abort("%s: unknown repository type" % path)
+    raise util.Abort('%s: unknown repository type' % path)
 
-class convert(object):
-    def __init__(self, ui, source, dest, mapfile, opts):
+class converter(object):
+    def __init__(self, ui, source, dest, revmapfile, filemapper, opts):
 
         self.source = source
         self.dest = dest
         self.ui = ui
         self.opts = opts
         self.commitcache = {}
-        self.mapfile = mapfile
-        self.mapfilefd = None
+        self.revmapfile = revmapfile
+        self.revmapfilefd = None
         self.authors = {}
         self.authorfile = None
+        self.mapfile = filemapper
 
         self.map = {}
         try:
-            origmapfile = open(self.mapfile, 'r')
-            for l in origmapfile:
+            origrevmapfile = open(self.revmapfile, 'r')
+            for l in origrevmapfile:
                 sv, dv = l[:-1].split()
                 self.map[sv] = dv
-            origmapfile.close()
+            origrevmapfile.close()
         except IOError:
             pass
 
@@ -69,10 +81,9 @@ class convert(object):
             n = visit.pop(0)
             if n in known or n in self.map: continue
             known[n] = 1
-            self.commitcache[n] = self.source.getcommit(n)
-            cp = self.commitcache[n].parents
+            commit = self.cachecommit(n)
             parents[n] = []
-            for p in cp:
+            for p in commit.parents:
                 parents[n].append(p)
                 visit.append(p)
 
@@ -138,14 +149,14 @@ class convert(object):
         return s
 
     def mapentry(self, src, dst):
-        if self.mapfilefd is None:
+        if self.revmapfilefd is None:
             try:
-                self.mapfilefd = open(self.mapfile, "a")
+                self.revmapfilefd = open(self.revmapfile, "a")
             except IOError, (errno, strerror):
-                raise util.Abort("Could not open map file %s: %s, %s\n" % (self.mapfile, errno, strerror))
+                raise util.Abort("Could not open map file %s: %s, %s\n" % (self.revmapfile, errno, strerror))
         self.map[src] = dst
-        self.mapfilefd.write("%s %s\n" % (src, dst))
-        self.mapfilefd.flush()
+        self.revmapfilefd.write("%s %s\n" % (src, dst))
+        self.revmapfilefd.flush()
 
     def writeauthormap(self):
         authorfile = self.authorfile
@@ -176,26 +187,56 @@ class convert(object):
                     % (authorfile, line))
         afile.close()
 
+    def cachecommit(self, rev):
+        commit = self.source.getcommit(rev)
+        commit.author = self.authors.get(commit.author, commit.author)
+        self.commitcache[rev] = commit
+        return commit
+
     def copy(self, rev):
-        c = self.commitcache[rev]
-        files = self.source.getchanges(rev)
+        commit = self.commitcache[rev]
+        do_copies = hasattr(self.dest, 'copyfile')
+        filenames = []
 
+        files, copies = self.source.getchanges(rev)
+        parents = [self.map[r] for r in commit.parents]
+        if commit.parents:
+            prev = commit.parents[0]
+            if prev not in self.commitcache:
+                self.cachecommit(prev)
+            pbranch = self.commitcache[prev].branch
+        else:
+            pbranch = None
+        self.dest.setbranch(commit.branch, pbranch, parents)
         for f, v in files:
+            newf = self.mapfile(f)
+            if not newf:
+                continue
+            filenames.append(newf)
             try:
                 data = self.source.getfile(f, v)
             except IOError, inst:
-                self.dest.delfile(f)
+                self.dest.delfile(newf)
             else:
                 e = self.source.getmode(f, v)
-                self.dest.putfile(f, e, data)
+                self.dest.putfile(newf, e, data)
+                if do_copies:
+                    if f in copies:
+                        copyf = self.mapfile(copies[f])
+                        if copyf:
+                            # Merely marks that a copy happened.
+                            self.dest.copyfile(copyf, newf)
 
-        r = [self.map[v] for v in c.parents]
-        f = [f for f, v in files]
-        newnode = self.dest.putcommit(f, r, c)
+        if not filenames and self.mapfile.active():
+            newnode = parents[0]
+        else:
+            newnode = self.dest.putcommit(filenames, parents, commit)
         self.mapentry(rev, newnode)
 
     def convert(self):
         try:
+            self.dest.before()
+            self.source.setrevmap(self.map)
             self.ui.status("scanning source...\n")
             heads = self.source.getheads()
             parents = self.walktree(heads)
@@ -210,9 +251,6 @@ class convert(object):
                 desc = self.commitcache[c].desc
                 if "\n" in desc:
                     desc = desc.splitlines()[0]
-                author = self.commitcache[c].author
-                author = self.authors.get(author, author)
-                self.commitcache[c].author = author
                 self.ui.status("%d %s\n" % (num, desc))
                 self.copy(c)
 
@@ -235,25 +273,117 @@ class convert(object):
             self.cleanup()
 
     def cleanup(self):
-       if self.mapfilefd:
-           self.mapfilefd.close()
+        self.dest.after()
+        if self.revmapfilefd:
+            self.revmapfilefd.close()
+
+def rpairs(name):
+    e = len(name)
+    while e != -1:
+        yield name[:e], name[e+1:]
+        e = name.rfind('/', 0, e)
+
+class filemapper(object):
+    '''Map and filter filenames when importing.
+    A name can be mapped to itself, a new name, or None (omit from new
+    repository).'''
+
+    def __init__(self, ui, path=None):
+        self.ui = ui
+        self.include = {}
+        self.exclude = {}
+        self.rename = {}
+        if path:
+            if self.parse(path):
+                raise util.Abort(_('errors in filemap'))
 
-def _convert(ui, src, dest=None, mapfile=None, **opts):
-    '''Convert a foreign SCM repository to a Mercurial one.
+    def parse(self, path):
+        errs = 0
+        def check(name, mapping, listname):
+            if name in mapping:
+                self.ui.warn(_('%s:%d: %r already in %s list\n') %
+                             (lex.infile, lex.lineno, name, listname))
+                return 1
+            return 0
+        lex = shlex.shlex(open(path), path, True)
+        lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
+        cmd = lex.get_token()
+        while cmd:
+            if cmd == 'include':
+                name = lex.get_token()
+                errs += check(name, self.exclude, 'exclude')
+                self.include[name] = name
+            elif cmd == 'exclude':
+                name = lex.get_token()
+                errs += check(name, self.include, 'include')
+                errs += check(name, self.rename, 'rename')
+                self.exclude[name] = name
+            elif cmd == 'rename':
+                src = lex.get_token()
+                dest = lex.get_token()
+                errs += check(src, self.exclude, 'exclude')
+                self.rename[src] = dest
+            elif cmd == 'source':
+                errs += self.parse(lex.get_token())
+            else:
+                self.ui.warn(_('%s:%d: unknown directive %r\n') %
+                             (lex.infile, lex.lineno, cmd))
+                errs += 1
+            cmd = lex.get_token()
+        return errs
+
+    def lookup(self, name, mapping):
+        for pre, suf in rpairs(name):
+            try:
+                return mapping[pre], pre, suf
+            except KeyError, err:
+                pass
+        return '', name, ''
+
+    def __call__(self, name):
+        if self.include:
+            inc = self.lookup(name, self.include)[0]
+        else:
+            inc = name
+        if self.exclude:
+            exc = self.lookup(name, self.exclude)[0]
+        else:
+            exc = ''
+        if not inc or exc:
+            return None
+        newpre, pre, suf = self.lookup(name, self.rename)
+        if newpre:
+            if newpre == '.':
+                return suf
+            if suf:
+                return newpre + '/' + suf
+            return newpre
+        return name
+
+    def active(self):
+        return bool(self.include or self.exclude or self.rename)
+
+def convert(ui, src, dest=None, revmapfile=None, **opts):
+    """Convert a foreign SCM repository to a Mercurial one.
 
     Accepted source formats:
     - GIT
     - CVS
+    - SVN
 
     Accepted destination formats:
     - Mercurial
 
+    If no revision is given, all revisions will be converted. Otherwise,
+    convert will only import up to the named revision (given in a format
+    understood by the source).
+
     If no destination directory name is specified, it defaults to the
     basename of the source with '-hg' appended.  If the destination
     repository doesn't exist, it will be created.
 
-    If <mapfile> isn't given, it will be put in a default location
-    (<dest>/.hg/shamap by default).  The <mapfile> is a simple text
+    If <revmapfile> isn't given, it will be put in a default location
+    (<dest>/.hg/shamap by default).  The <revmapfile> is a simple text
     file that maps each source commit ID to the destination ID for
     that revision, like so:
     <source ID> <destination ID>
@@ -267,19 +397,33 @@ def _convert(ui, src, dest=None, mapfile
     that use unix logins to identify authors (eg: CVS). One line per author
     mapping and the line format is:
     srcauthor=whatever string you want
-    '''
+
+    The filemap is a file that allows filtering and remapping of files
+    and directories.  Comment lines start with '#'.  Each line can
+    contain one of the following directives:
+
+      include path/to/file
+
+      exclude path/to/file
+
+      rename from/file to/file
+    
+    The 'include' directive causes a file, or all files under a
+    directory, to be included in the destination repository.  The
+    'exclude' directive causes files or directories to be omitted.
+    The 'rename' directive renames a file or directory.  To rename
+    from a subdirectory into the root of the repository, use '.' as
+    the path to rename to.
+    """
 
     util._encoding = 'UTF-8'
 
-    srcc = converter(ui, src)
-    if not hasattr(srcc, "getcommit"):
-        raise util.Abort("%s: can't read from this repo type" % src)
-
     if not dest:
         dest = hg.defaultdest(src) + "-hg"
         ui.status("assuming destination %s\n" % dest)
 
     # Try to be smart and initalize things when required
+    created = False
     if os.path.isdir(dest):
         if len(os.listdir(dest)) > 0:
             try:
@@ -294,29 +438,46 @@ def _convert(ui, src, dest=None, mapfile
         else:
             ui.status("initializing destination %s repository\n" % dest)
             hg.repository(ui, dest, create=True)
+            created = True
     elif os.path.exists(dest):
         raise util.Abort("destination %s exists and is not a directory" % dest)
     else:
         ui.status("initializing destination %s repository\n" % dest)
         hg.repository(ui, dest, create=True)
+        created = True
 
-    destc = converter(ui, dest)
-    if not hasattr(destc, "putcommit"):
-        raise util.Abort("%s: can't write to this repo type" % src)
+    destc = convertsink(ui, dest)
 
-    if not mapfile:
+    try:
+        srcc = convertsource(ui, src, rev=opts.get('rev'))
+    except Exception:
+        if created:
+            shutil.rmtree(dest, True)
+        raise
+
+    if not revmapfile:
         try:
-            mapfile = destc.mapfile()
+            revmapfile = destc.revmapfile()
         except:
-            mapfile = os.path.join(destc, "map")
+            revmapfile = os.path.join(destc, "map")
+
 
-    c = convert(ui, srcc, destc, mapfile, opts)
+    c = converter(ui, srcc, destc, revmapfile, filemapper(ui, opts['filemap']),
+                  opts)
     c.convert()
 
+
 cmdtable = {
     "convert":
-        (_convert,
+        (convert,
          [('A', 'authors', '', 'username mapping filename'),
+          ('', 'filemap', '', 'remap file names using contents of file'),
+          ('r', 'rev', '', 'import up to target revision REV'),
           ('', 'datesort', None, 'try to sort changesets by date')],
          'hg convert [OPTION]... SOURCE [DEST [MAPFILE]]'),
+    "debugsvnlog":
+        (debugsvnlog,
+         [],
+         'hg debugsvnlog'),
 }
+
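
A worked example may help with the filemap machinery above: rpairs() walks a path from its longest prefix to its shortest, so lookup() lets the deepest matching rule win, and filemapper() combines include, exclude and rename rules as the docstring describes. The sketch below pokes the rule dictionaries directly instead of parsing a filemap file, uses hypothetical paths, and assumes it is run from a Mercurial checkout so that hgext.convert and mercurial are importable.

# Equivalent to a filemap file containing:
#
#   include src
#   exclude src/tests
#   rename src/lib .
#
from mercurial import ui
from hgext.convert import filemapper, rpairs

print list(rpairs('src/lib/util.c'))
# -> [('src/lib/util.c', ''), ('src/lib', 'util.c'), ('src', 'lib/util.c')]

fm = filemapper(ui.ui())              # no filemap file given
fm.include['src'] = 'src'
fm.exclude['src/tests'] = 'src/tests'
fm.rename['src/lib'] = '.'

print fm('src/lib/util.c')            # -> 'util.c'  (renamed into the root)
print fm('src/tests/t1.py')           # -> None      (excluded)
print fm('README')                    # -> None      (not under any include)
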
--- a/hgext/convert/common.py
+++ b/hgext/convert/common.py
@@ -1,21 +1,46 @@
 # common code for the convert extension
+import base64
+import cPickle as pickle
+
+def encodeargs(args):
+    def encodearg(s):
+        lines = base64.encodestring(s)
+        lines = [l.splitlines()[0] for l in lines]
+        return ''.join(lines)
+
+    s = pickle.dumps(args)
+    return encodearg(s)
+
+def decodeargs(s):
+    s = base64.decodestring(s)
+    return pickle.loads(s)
 
 class NoRepo(Exception): pass
 
 class commit(object):
-    def __init__(self, **parts):
-        for x in "author date desc parents".split():
-            if not x in parts:
-                raise util.Abort("commit missing field %s" % x)
-        self.__dict__.update(parts)
+    def __init__(self, author, date, desc, parents, branch=None, rev=None):
+        self.author = author
+        self.date = date
+        self.desc = desc
+        self.parents = parents
+        self.branch = branch
+        self.rev = rev
 
 class converter_source(object):
     """Conversion source interface"""
 
-    def __init__(self, ui, path):
+    def __init__(self, ui, path, rev=None):
         """Initialize conversion source (or raise NoRepo("message")
         exception if path is not a valid repository)"""
-        raise NotImplementedError()
+        self.ui = ui
+        self.path = path
+        self.rev = rev
+
+        self.encoding = 'utf-8'
+
+    def setrevmap(self, revmap):
+        """set the map of already-converted revisions"""
+        pass
 
     def getheads(self):
         """Return a list of this repository's heads"""
@@ -30,10 +55,12 @@ class converter_source(object):
         raise NotImplementedError()
 
     def getchanges(self, version):
-        """Return sorted list of (filename, id) tuples for all files changed in rev.
+        """Returns a tuple of (files, copies)
+        Files is a sorted list of (filename, id) tuples for all files changed
+        in version, where id is the source revision id of the file.
 
-        id just tells us which revision to return in getfile(), e.g. in
-        git it's an object hash."""
+        copies is a dictionary of dest: source
+        """
         raise NotImplementedError()
 
     def getcommit(self, version):
@@ -44,6 +71,20 @@ class converter_source(object):
         """Return the tags as a dictionary of name: revision"""
         raise NotImplementedError()
 
+    def recode(self, s, encoding=None):
+        if not encoding:
+            encoding = self.encoding or 'utf-8'
+
+        if isinstance(s, unicode):
+            return s.encode("utf-8")
+        try:
+            return s.decode(encoding).encode("utf-8")
+        except:
+            try:
+                return s.decode("latin-1").encode("utf-8")
+            except:
+                return s.decode(encoding, "replace").encode("utf-8")
+
 class converter_sink(object):
     """Conversion sink (target) interface"""
 
@@ -56,7 +97,7 @@ class converter_sink(object):
         """Return a list of this repository's heads"""
         raise NotImplementedError()
 
-    def mapfile(self):
+    def revmapfile(self):
         """Path to a file that will contain lines
         source_rev_id sink_rev_id
         mapping equivalent revision identifiers for each system."""
@@ -94,3 +135,11 @@ class converter_sink(object):
         """Put tags into sink.
         tags: {tagname: sink_rev_id, ...}"""
         raise NotImplementedError()
+
+    def setbranch(self, branch, pbranch, parents):
+        """Set the current branch name. Called before the first putfile
+        on the branch.
+        branch: branch name for subsequent commits
+        pbranch: branch name of parent commit
+        parents: destination revisions of parent"""
+        pass
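
The encodeargs()/decodeargs() helpers above turn an arbitrary picklable argument list into a single newline-free ASCII string and back, which is presumably how convert hands arguments to the debugsvnlog child command on one line. A small round-trip check, assuming hgext.convert is importable and using made-up argument values:

# Round-trip check for the helpers above; the argument values are made up.
from hgext.convert.common import encodeargs, decodeargs

args = ['svn://example.org/repo', None, 42]
blob = encodeargs(args)
assert '\n' not in blob          # base64 output with the newlines stripped
assert decodeargs(blob) == args
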
--- a/hgext/convert/cvs.py
+++ b/hgext/convert/cvs.py
@@ -6,9 +6,9 @@ from mercurial import util
 from common import NoRepo, commit, converter_source
 
 class convert_cvs(converter_source):
-    def __init__(self, ui, path):
-        self.path = path
-        self.ui = ui
+    def __init__(self, ui, path, rev=None):
+        super(convert_cvs, self).__init__(ui, path, rev=rev)
+
         cvs = os.path.join(path, "CVS")
         if not os.path.exists(cvs):
             raise NoRepo("couldn't open CVS repo %s" % path)
@@ -29,15 +29,33 @@ class convert_cvs(converter_source):
         if self.changeset:
             return
 
+        maxrev = 0
+        cmd = 'cvsps -A -u --cvs-direct -q'
+        if self.rev:
+            # TODO: handle tags
+            try:
+                # patchset number?
+                maxrev = int(self.rev)
+            except ValueError:
+                try:
+                    # date
+                    util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
+                    cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
+                except util.Abort:
+                    raise util.Abort('revision %s is not a patchset number or date' % self.rev)
+        cmd += " 2>&1"
+
         d = os.getcwd()
         try:
             os.chdir(self.path)
             id = None
             state = 0
-            for l in os.popen("cvsps -A -u --cvs-direct -q 2>&1"):
+            for l in os.popen(cmd):
                 if state == 0: # header
                     if l.startswith("PatchSet"):
                         id = l[9:-2]
+                        if maxrev and int(id) > maxrev:
+                            state = 3
                     elif l.startswith("Date"):
                         date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
                         date = util.datestr(date)
@@ -62,8 +80,6 @@ class convert_cvs(converter_source):
                     if l == "Members: \n":
                         files = {}
                         log = self.recode(log[:-1])
-                        if log.isspace():
-                            log = "*** empty log message ***\n"
                         state = 2
                     else:
                         log += l
@@ -85,6 +101,8 @@ class convert_cvs(converter_source):
                         rev = l[colon+1:-2]
                         rev = rev.split("->")[1]
                         files[file] = rev
+                elif state == 3:
+                    continue
 
             self.heads = self.lastbranch.values()
         finally:
@@ -136,7 +154,7 @@ class convert_cvs(converter_source):
                 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
                                     "END AUTH REQUEST", ""]))
                 if sck.recv(128) != "I LOVE YOU\n":
-                    raise NoRepo("CVS pserver authentication failed")
+                    raise util.Abort("CVS pserver authentication failed")
 
                 self.writep = self.readp = sck.makefile('r+')
 
@@ -241,10 +259,7 @@ class convert_cvs(converter_source):
         files = self.files[rev]
         cl = files.items()
         cl.sort()
-        return cl
-
-    def recode(self, text):
-        return text.decode(self.encoding, "replace").encode("utf-8")
+        return (cl, {})
 
     def getcommit(self, rev):
         return self.changeset[rev]
--- a/hgext/convert/git.py
+++ b/hgext/convert/git.py
@@ -5,15 +5,6 @@ from mercurial import util
 
 from common import NoRepo, commit, converter_source
 
-def recode(s):
-    try:
-        return s.decode("utf-8").encode("utf-8")
-    except:
-        try:
-            return s.decode("latin-1").encode("utf-8")
-        except:
-            return s.decode("utf-8", "replace").encode("utf-8")
-
 class convert_git(converter_source):
     # Windows does not support GIT_DIR= construct while other systems
     # cannot remove environment variable. Just assume none have
@@ -32,18 +23,22 @@ class convert_git(converter_source):
     else:
         def gitcmd(self, s):
             return os.popen('GIT_DIR=%s %s' % (self.path, s))
-    
-    def __init__(self, ui, path):
+
+    def __init__(self, ui, path, rev=None):
+        super(convert_git, self).__init__(ui, path, rev=rev)
+
         if os.path.isdir(path + "/.git"):
             path += "/.git"
-        self.path = path
-        self.ui = ui
         if not os.path.exists(path + "/objects"):
             raise NoRepo("couldn't open GIT repo %s" % path)
+        self.path = path
 
     def getheads(self):
-        fh = self.gitcmd("git-rev-parse --verify HEAD")
-        return [fh.read()[:-1]]
+        if not self.rev:
+            return self.gitcmd('git-rev-parse --branches').read().splitlines()
+        else:
+            fh = self.gitcmd("git-rev-parse --verify %s" % self.rev)
+            return [fh.read()[:-1]]
 
     def catfile(self, rev, type):
         if rev == "0" * 40: raise IOError()
@@ -75,13 +70,13 @@ class convert_git(converter_source):
             s = (m[1] == "120000")
             self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
             changes.append((f, h))
-        return changes
+        return (changes, {})
 
     def getcommit(self, version):
         c = self.catfile(version, "commit") # read the commit hash
         end = c.find("\n\n")
         message = c[end+2:]
-        message = recode(message)
+        message = self.recode(message)
         l = c[:end].splitlines()
         manifest = l[0].split()[1]
         parents = []
@@ -92,13 +87,13 @@ class convert_git(converter_source):
                 tm, tz = p[-2:]
                 author = " ".join(p[:-2])
                 if author[0] == "<": author = author[1:-1]
-                author = recode(author)
+                author = self.recode(author)
             if n == "committer":
                 p = v.split()
                 tm, tz = p[-2:]
                 committer = " ".join(p[:-2])
                 if committer[0] == "<": committer = committer[1:-1]
-                committer = recode(committer)
+                committer = self.recode(committer)
                 message += "\ncommitter: %s\n" % committer
             if n == "parent": parents.append(v)
 
@@ -107,7 +102,8 @@ class convert_git(converter_source):
         date = tm + " " + str(tz)
         author = author or "unknown"
 
-        c = commit(parents=parents, date=date, author=author, desc=message)
+        c = commit(parents=parents, date=date, author=author, desc=message,
+                   rev=version)
         return c
 
     def gettags(self):
--- a/hgext/convert/hg.py
+++ b/hgext/convert/hg.py
@@ -1,20 +1,45 @@
 # hg backend for convert extension
 
+# Note for hg->hg conversion: Old versions of Mercurial didn't trim
+# the whitespace from the ends of commit messages, but new versions
+# do.  Changesets created by those older versions, then converted, may
+# thus have different hashes for changesets that are otherwise
+# identical.
+
+
 import os, time
-from mercurial import hg
+from mercurial.i18n import _
+from mercurial.node import *
+from mercurial import hg, lock, revlog, util
 
-from common import NoRepo, converter_sink
+from common import NoRepo, commit, converter_source, converter_sink
 
-class convert_mercurial(converter_sink):
+class mercurial_sink(converter_sink):
     def __init__(self, ui, path):
         self.path = path
         self.ui = ui
+        self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
+        self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
+        self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
+        self.lastbranch = None
         try:
             self.repo = hg.repository(self.ui, path)
         except:
-            raise NoRepo("could open hg repo %s" % path)
+            raise NoRepo("could not open hg repo %s as sink" % path)
+        self.lock = None
+        self.wlock = None
 
-    def mapfile(self):
+    def before(self):
+        self.wlock = self.repo.wlock()
+        self.lock = self.repo.lock()
+        self.repo.dirstate.clear()
+
+    def after(self):
+        self.repo.dirstate.invalidate()
+        self.lock = None
+        self.wlock = None
+
+    def revmapfile(self):
         return os.path.join(self.path, ".hg", "shamap")
 
     def authorfile(self):
@@ -22,12 +47,15 @@ class convert_mercurial(converter_sink):
 
     def getheads(self):
         h = self.repo.changelog.heads()
-        return [ hg.hex(x) for x in h ]
+        return [ hex(x) for x in h ]
 
     def putfile(self, f, e, data):
         self.repo.wwrite(f, data, e)
-        if self.repo.dirstate.state(f) == '?':
-            self.repo.dirstate.update([f], "a")
+        if f not in self.repo.dirstate:
+            self.repo.dirstate.normallookup(f)
+
+    def copyfile(self, source, dest):
+        self.repo.copy(source, dest)
 
     def delfile(self, f):
         try:
@@ -36,6 +64,30 @@ class convert_mercurial(converter_sink):
         except:
             pass
 
+    def setbranch(self, branch, pbranch, parents):
+        if (not self.clonebranches) or (branch == self.lastbranch):
+            return
+
+        self.lastbranch = branch
+        self.after()
+        if not branch:
+            branch = 'default'
+        if not pbranch:
+            pbranch = 'default'
+
+        branchpath = os.path.join(self.path, branch)
+        try:
+            self.repo = hg.repository(self.ui, branchpath)
+        except:
+            if not parents:
+                self.repo = hg.repository(self.ui, branchpath, create=True)
+            else:
+                self.ui.note(_('cloning branch %s to %s\n') % (pbranch, branch))
+                hg.clone(self.ui, os.path.join(self.path, pbranch),
+                         branchpath, rev=parents, update=False,
+                         stream=True)
+                self.repo = hg.repository(self.ui, branchpath)
+
     def putcommit(self, files, parents, commit):
         seen = {}
         pl = []
@@ -51,16 +103,17 @@ class convert_mercurial(converter_sink):
 
         text = commit.desc
         extra = {}
-        try:
-            extra["branch"] = commit.branch
-        except AttributeError:
-            pass
+        if self.branchnames and commit.branch:
+            extra['branch'] = commit.branch
+        if commit.rev:
+            extra['convert_revision'] = commit.rev
 
         while parents:
             p1 = p2
             p2 = parents.pop(0)
             a = self.repo.rawcommit(files, text, commit.author, commit.date,
-                                    hg.bin(p1), hg.bin(p2), extra=extra)
+                                    bin(p1), bin(p2), extra=extra)
+            self.repo.dirstate.clear()
             text = "(octopus merge fixup)\n"
             p2 = hg.hex(self.repo.changelog.tip())
 
@@ -89,6 +142,69 @@ class convert_mercurial(converter_sink):
             f.close()
             if not oldlines: self.repo.add([".hgtags"])
             date = "%s 0" % int(time.mktime(time.gmtime()))
+            extra = {}
+            if self.tagsbranch != 'default':
+                extra['branch'] = self.tagsbranch
+            try:
+                tagparent = self.repo.changectx(self.tagsbranch).node()
+            except hg.RepoError, inst:
+                tagparent = nullid
             self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
-                                date, self.repo.changelog.tip(), hg.nullid)
-            return hg.hex(self.repo.changelog.tip())
+                                date, tagparent, nullid)
+            return hex(self.repo.changelog.tip())
+
+class mercurial_source(converter_source):
+    def __init__(self, ui, path, rev=None):
+        converter_source.__init__(self, ui, path, rev)
+        self.repo = hg.repository(self.ui, path)
+        self.lastrev = None
+        self.lastctx = None
+
+    def changectx(self, rev):
+        if self.lastrev != rev:
+            self.lastctx = self.repo.changectx(rev)
+            self.lastrev = rev
+        return self.lastctx
+
+    def getheads(self):
+        if self.rev:
+            return [hex(self.repo.changectx(self.rev).node())]
+        else:
+            return [hex(node) for node in self.repo.heads()]
+
+    def getfile(self, name, rev):
+        try:
+            return self.changectx(rev).filectx(name).data()
+        except revlog.LookupError, err:
+            raise IOError(err)
+
+    def getmode(self, name, rev):
+        m = self.changectx(rev).manifest()
+        return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
+
+    def getchanges(self, rev):
+        ctx = self.changectx(rev)
+        m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
+        changes = [(name, rev) for name in m + a + r]
+        changes.sort()
+        return (changes, self.getcopies(ctx, m + a))
+
+    def getcopies(self, ctx, files):
+        copies = {}
+        for name in files:
+            try:
+                copies[name] = ctx.filectx(name).renamed()[0]
+            except TypeError:
+                pass
+        return copies
+
+    def getcommit(self, rev):
+        ctx = self.changectx(rev)
+        parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
+        return commit(author=ctx.user(), date=util.datestr(ctx.date()),
+                      desc=ctx.description(), parents=parents,
+                      branch=ctx.branch())
+
+    def gettags(self):
+        tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
+        return dict([(name, hex(node)) for name, node in tags])
new file mode 100644
--- /dev/null
+++ b/hgext/convert/subversion.py
@@ -0,0 +1,646 @@
+# Subversion 1.4/1.5 Python API backend
+#
+# Copyright(C) 2007 Daniel Holth et al
+#
+# Configuration options:
+#
+# convert.svn.trunk
+#   Relative path to the trunk (default: "trunk")
+# convert.svn.branches
+#   Relative path to tree of branches (default: "branches")
+#
+# Set these in a hgrc, or on the command line as follows:
+#
+#   hg convert --config convert.svn.trunk=wackoname [...]
+
+import locale
+import os
+import sys
+import cPickle as pickle
+from mercurial import util
+
+# Subversion stuff. Works best with very recent Python SVN bindings
+# e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
+# these bindings.
+
+from cStringIO import StringIO
+
+from common import NoRepo, commit, converter_source, encodeargs, decodeargs
+
+try:
+    from svn.core import SubversionException, Pool
+    import svn
+    import svn.client
+    import svn.core
+    import svn.ra
+    import svn.delta
+    import transport
+except ImportError:
+    pass
+
+def geturl(path):
+    try:
+        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
+    except SubversionException:
+        pass
+    if os.path.isdir(path):
+        return 'file://%s' % os.path.normpath(os.path.abspath(path))
+    return path
+
+def optrev(number):
+    optrev = svn.core.svn_opt_revision_t()
+    optrev.kind = svn.core.svn_opt_revision_number
+    optrev.value.number = number
+    return optrev
+
+class changedpath(object):
+    def __init__(self, p):
+        self.copyfrom_path = p.copyfrom_path
+        self.copyfrom_rev = p.copyfrom_rev
+        self.action = p.action
+
+def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
+                    strict_node_history=False):
+    protocol = -1
+    def receiver(orig_paths, revnum, author, date, message, pool):
+        if orig_paths is not None:
+            for k, v in orig_paths.iteritems():
+                orig_paths[k] = changedpath(v)
+        pickle.dump((orig_paths, revnum, author, date, message),
+                    fp, protocol)
+
+    try:
+        # Use an ra of our own so that our parent can consume
+        # our results without confusing the server.
+        t = transport.SvnRaTransport(url=url)
+        svn.ra.get_log(t.ra, paths, start, end, limit,
+                       discover_changed_paths,
+                       strict_node_history,
+                       receiver)
+    except SubversionException, (inst, num):
+        pickle.dump(num, fp, protocol)
+    else:
+        pickle.dump(None, fp, protocol)
+    fp.close()
+
+def debugsvnlog(ui, **opts):
+    """Fetch the SVN log in a subprocess and channel it back to the parent,
+    to avoid memory collection issues.
+    """
+    util.set_binary(sys.stdin)
+    util.set_binary(sys.stdout)
+    args = decodeargs(sys.stdin.read())
+    get_log_child(sys.stdout, *args)
+
+# SVN conversion code stolen from bzr-svn and tailor
+class convert_svn(converter_source):
+    def __init__(self, ui, url, rev=None):
+        super(convert_svn, self).__init__(ui, url, rev=rev)
+
+        try:
+            SubversionException
+        except NameError:
+            msg = 'subversion python bindings could not be loaded\n'
+            ui.warn(msg)
+            raise NoRepo(msg)
+
+        self.encoding = locale.getpreferredencoding()
+        self.lastrevs = {}
+
+        latest = None
+        try:
+            # Support file://path@rev syntax. Useful e.g. to convert
+            # deleted branches.
+            at = url.rfind('@')
+            if at >= 0:
+                latest = int(url[at+1:])
+                url = url[:at]
+        except ValueError, e:
+            pass
+        self.url = geturl(url)
+        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
+        try:
+            self.transport = transport.SvnRaTransport(url=self.url)
+            self.ra = self.transport.ra
+            self.ctx = self.transport.client
+            self.base = svn.ra.get_repos_root(self.ra)
+            self.module = self.url[len(self.base):]
+            self.modulemap = {} # revision, module
+            self.commits = {}
+            self.paths = {}
+            self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
+        except SubversionException, e:
+            raise NoRepo("couldn't open SVN repo %s" % self.url)
+
+        if rev:
+            try:
+                latest = int(rev)
+            except ValueError:
+                raise util.Abort('svn: revision %s is not an integer' % rev)
+
+        try:
+            self.get_blacklist()
+        except IOError, e:
+            pass
+
+        self.last_changed = self.latest(self.module, latest)
+
+        self.head = self.revid(self.last_changed)
+
+    def setrevmap(self, revmap):
+        lastrevs = {}
+        for revid in revmap.keys():
+            uuid, module, revnum = self.revsplit(revid)
+            lastrevnum = lastrevs.setdefault(module, revnum)
+            if revnum > lastrevnum:
+                lastrevs[module] = revnum
+        self.lastrevs = lastrevs
+
+    def exists(self, path, optrev):
+        try:
+            return svn.client.ls(self.url.rstrip('/') + '/' + path,
+                                 optrev, False, self.ctx)
+        except SubversionException, err:
+            return []
+
+    def getheads(self):
+        # detect standard /branches, /tags, /trunk layout
+        rev = optrev(self.last_changed)
+        rpath = self.url.strip('/')
+        cfgtrunk = self.ui.config('convert', 'svn.trunk')
+        cfgbranches = self.ui.config('convert', 'svn.branches')
+        trunk = (cfgtrunk or 'trunk').strip('/')
+        branches = (cfgbranches or 'branches').strip('/')
+        if self.exists(trunk, rev) and self.exists(branches, rev):
+            self.ui.note('found trunk at %r and branches at %r\n' %
+                         (trunk, branches))
+            oldmodule = self.module
+            self.module += '/' + trunk
+            lt = self.latest(self.module, self.last_changed)
+            self.head = self.revid(lt)
+            self.heads = [self.head]
+            branchnames = svn.client.ls(rpath + '/' + branches, rev, False,
+                                        self.ctx)
+            for branch in branchnames.keys():
+                if oldmodule:
+                    module = '/' + oldmodule + '/' + branches + '/' + branch
+                else:
+                    module = '/' + branches + '/' + branch
+                brevnum = self.latest(module, self.last_changed)
+                brev = self.revid(brevnum, module)
+                self.ui.note('found branch %s at %d\n' % (branch, brevnum))
+                self.heads.append(brev)
+        elif cfgtrunk or cfgbranches:
+            raise util.Abort('trunk/branch layout expected, but not found')
+        else:
+            self.ui.note('working with one branch\n')
+            self.heads = [self.head]
+        return self.heads
+
+    def getfile(self, file, rev):
+        data, mode = self._getfile(file, rev)
+        self.modecache[(file, rev)] = mode
+        return data
+
+    def getmode(self, file, rev):
+        return self.modecache[(file, rev)]
+
+    def getchanges(self, rev):
+        self.modecache = {}
+        (paths, parents) = self.paths[rev]
+        files, copies = self.expandpaths(rev, paths, parents)
+        files.sort()
+        files = zip(files, [rev] * len(files))
+
+        # caller caches the result, so free it here to release memory
+        del self.paths[rev]
+        return (files, copies)
+
+    def getcommit(self, rev):
+        if rev not in self.commits:
+            uuid, module, revnum = self.revsplit(rev)
+            self.module = module
+            self.reparent(module)
+            stop = self.lastrevs.get(module, 0)
+            self._fetch_revisions(from_revnum=revnum, to_revnum=stop)
+        commit = self.commits[rev]
+        # caller caches the result, so free it here to release memory
+        del self.commits[rev]
+        return commit
+
+    def get_log(self, paths, start, end, limit=0, discover_changed_paths=True,
+                strict_node_history=False):
+
+        def parent(fp):
+            while True:
+                entry = pickle.load(fp)
+                try:
+                    orig_paths, revnum, author, date, message = entry
+                except:
+                    if entry is None:
+                        break
+                    raise SubversionException("child raised exception", entry)
+                yield entry
+
+        args = [self.url, paths, start, end, limit, discover_changed_paths,
+                strict_node_history]
+        arg = encodeargs(args)
+        hgexe = util.hgexecutable()
+        cmd = '%s debugsvnlog' % util.shellquote(hgexe)
+        stdin, stdout = os.popen2(cmd, 'b')
+
+        stdin.write(arg)
+        stdin.close()
+
+        for p in parent(stdout):
+            yield p
+
+    def gettags(self):
+        tags = {}
+        start = self.revnum(self.head)
+        try:
+            for entry in self.get_log(['/tags'], 0, start):
+                orig_paths, revnum, author, date, message = entry
+                for path in orig_paths:
+                    if not path.startswith('/tags/'):
+                        continue
+                    ent = orig_paths[path]
+                    source = ent.copyfrom_path
+                    rev = ent.copyfrom_rev
+                    tag = path.split('/', 2)[2]
+                    tags[tag] = self.revid(rev, module=source)
+        except SubversionException, (inst, num):
+            self.ui.note('no tags found at revision %d\n' % start)
+        return tags
+
+    # -- helper functions --
+
+    def revid(self, revnum, module=None):
+        if not module:
+            module = self.module
+        return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
+                                 revnum)
+
+    def revnum(self, rev):
+        return int(rev.split('@')[-1])
+
+    def revsplit(self, rev):
+        url, revnum = rev.encode(self.encoding).split('@', 1)
+        revnum = int(revnum)
+        parts = url.split('/', 1)
+        uuid = parts.pop(0)[4:]
+        mod = ''
+        if parts:
+            mod = '/' + parts[0]
+        return uuid, mod, revnum
+
+    def latest(self, path, stop=0):
+        'find the latest revision affecting path, up to stop'
+        if not stop:
+            stop = svn.ra.get_latest_revnum(self.ra)
+        try:
+            self.reparent('')
+            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
+            self.reparent(self.module)
+        except SubversionException:
+            dirent = None
+        if not dirent:
+            raise util.Abort('%s not found up to revision %d' % (path, stop))
+
+        return dirent.created_rev
+
+    def get_blacklist(self):
+        """Avoid certain revision numbers.
+        It is not uncommon for two nearby revisions to cancel each other
+        out, e.g. 'I copied trunk into a subdirectory of itself instead
+        of making a branch'. The converted repository is significantly
+        smaller if we ignore such revisions."""
+        self.blacklist = util.set()
+        blacklist = self.blacklist
+        for line in file("blacklist.txt", "r"):
+            if not line.startswith("#"):
+                try:
+                    svn_rev = int(line.strip())
+                    blacklist.add(svn_rev)
+                except ValueError, e:
+                    pass # not an integer or a comment
+
+    def is_blacklisted(self, svn_rev):
+        return svn_rev in self.blacklist
+
+    def reparent(self, module):
+        svn_url = self.base + module
+        self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
+        svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
+
+    def expandpaths(self, rev, paths, parents):
+        def get_entry_from_path(path, module=self.module):
+            # Given the repository url of this wc, say
+            #   "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
+            # extract the "entry" portion (a relative path) from what
+            # svn log --xml says, ie
+            #   "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
+            # that is to say "tests/PloneTestCase.py"
+            if path.startswith(module):
+                relative = path[len(module):]
+                if relative.startswith('/'):
+                    return relative[1:]
+                else:
+                    return relative
+
+            # The path is outside our tracked tree...
+            self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
+            return None
+
+        entries = []
+        copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
+        copies = {}
+        revnum = self.revnum(rev)
+
+        if revnum in self.modulemap:
+            new_module = self.modulemap[revnum]
+            if new_module != self.module:
+                self.module = new_module
+                self.reparent(self.module)
+
+        for path, ent in paths:
+            entrypath = get_entry_from_path(path, module=self.module)
+            entry = entrypath.decode(self.encoding)
+
+            kind = svn.ra.check_path(self.ra, entrypath, revnum)
+            if kind == svn.core.svn_node_file:
+                if ent.copyfrom_path:
+                    copyfrom_path = get_entry_from_path(ent.copyfrom_path)
+                    if copyfrom_path:
+                        self.ui.debug("Copied to %s from %s@%s\n" % (entry, copyfrom_path, ent.copyfrom_rev))
+                        # It's probably important for hg that the source
+                        # exists in the revision's parent, not just the
+                        # ent.copyfrom_rev
+                        fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
+                        if fromkind != 0:
+                            copies[self.recode(entry)] = self.recode(copyfrom_path)
+                entries.append(self.recode(entry))
+            elif kind == 0: # gone, but had better be a deleted *file*
+                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
+
+                # if a branch is created but entries are removed in the same
+                # changeset, get the right fromrev
+                if parents:
+                    uuid, old_module, fromrev = self.revsplit(parents[0])
+                else:
+                    fromrev = revnum - 1
+                    # might always need to be revnum - 1 in these 3 lines?
+                    old_module = self.modulemap.get(fromrev, self.module)
+
+                basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
+                entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
+
+                def lookup_parts(p):
+                    rc = None
+                    parts = p.split("/")
+                    for i in range(len(parts)):
+                        part = "/".join(parts[:i])
+                        info = part, copyfrom.get(part, None)
+                        if info[1] is not None:
+                            self.ui.debug("Found parent directory %s\n" % info[1])
+                            rc = info
+                    return rc
+
+                self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
+
+                frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
+
+                # need to remove fragment from lookup_parts and replace with copyfrom_path
+                if frompath is not None:
+                    self.ui.debug("munge-o-matic\n")
+                    self.ui.debug(entrypath + '\n')
+                    self.ui.debug(entrypath[len(frompath):] + '\n')
+                    entrypath = froment.copyfrom_path + entrypath[len(frompath):]
+                    fromrev = froment.copyfrom_rev
+                    self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
+
+                fromkind = svn.ra.check_path(self.ra, entrypath, fromrev)
+                if fromkind == svn.core.svn_node_file:   # a deleted file
+                    entries.append(self.recode(entry))
+                elif fromkind == svn.core.svn_node_dir:
+                    # print "Deleted/moved non-file:", revnum, path, ent
+                    # children = self._find_children(path, revnum - 1)
+                    # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
+                    # Sometimes this is tricky. For example, in revision
+                    # 6940 of the Subversion project's own repository, a dir
+                    # was copied and one of its files was deleted
+                    # from the new location in the same commit. This
+                    # code can't deal with that yet.
+                    if ent.action == 'C':
+                        children = self._find_children(path, fromrev)
+                    else:
+                        oroot = entrypath.strip('/')
+                        nroot = path.strip('/')
+                        children = self._find_children(oroot, fromrev)
+                        children = [s.replace(oroot,nroot) for s in children]
+                    # Mark all [files, not directories] as deleted.
+                    for child in children:
+                        # Can we move a child directory and its
+                        # parent in the same commit? (probably can). Could
+                        # cause problems if instead of revnum -1,
+                        # we have to look in (copyfrom_path, revnum - 1)
+                        entrypath = get_entry_from_path("/" + child, module=old_module)
+                        if entrypath:
+                            entry = self.recode(entrypath.decode(self.encoding))
+                            if entry in copies:
+                                # deleted file within a copy
+                                del copies[entry]
+                            else:
+                                entries.append(entry)
+                else:
+                    self.ui.debug('unknown path in revision %d: %s\n' % \
+                                  (revnum, path))
+            elif kind == svn.core.svn_node_dir:
+                # Should probably synthesize normal file entries
+                # and handle as above to clean up copy/rename handling.
+
+                # If the directory just had a prop change,
+                # then we shouldn't need to look for its children.
+                # Also this could create duplicate entries. Not sure
+                # whether this will matter. Maybe should make entries a set.
+                # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
+                # This will fail if a directory was copied
+                # from another branch and then some of its files
+                # were deleted in the same transaction.
+                children = self._find_children(path, revnum)
+                children.sort()
+                for child in children:
+                    # Can we move a child directory and its
+                    # parent in the same commit? (probably can). Could
+                    # cause problems if instead of revnum -1,
+                    # we have to look in (copyfrom_path, revnum - 1)
+                    entrypath = get_entry_from_path("/" + child, module=self.module)
+                    # print child, self.module, entrypath
+                    if entrypath:
+                        # Need to filter out directories here...
+                        kind = svn.ra.check_path(self.ra, entrypath, revnum)
+                        if kind != svn.core.svn_node_dir:
+                            entries.append(self.recode(entrypath))
+
+                # Copies here (must copy all from source)
+                # Probably not a real problem for us if
+                # source does not exist
+
+                # Can do this with the copy command "hg copy"
+                # if ent.copyfrom_path:
+                #     copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
+                #             module=self.module)
+                #     copyto_entry = entrypath
+                #
+                #     print "copy directory", copyfrom_entry, 'to', copyto_entry
+                #
+                #     copies.append((copyfrom_entry, copyto_entry))
+
+                if ent.copyfrom_path:
+                    copyfrom_path = ent.copyfrom_path.decode(self.encoding)
+                    copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
+                    if copyfrom_entry:
+                        copyfrom[path] = ent
+                        self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
+
+                        # Good, /probably/ a regular copy. Really should check
+                        # to see whether the parent revision actually contains
+                        # the directory in question.
+                        children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
+                        children.sort()
+                        for child in children:
+                            entrypath = get_entry_from_path("/" + child, module=self.module)
+                            if entrypath:
+                                entry = entrypath.decode(self.encoding)
+                                # print "COPY COPY From", copyfrom_entry, entry
+                                copyto_path = path + entry[len(copyfrom_entry):]
+                                copyto_entry =  get_entry_from_path(copyto_path, module=self.module)
+                                # print "COPY", entry, "COPY To", copyto_entry
+                                copies[self.recode(copyto_entry)] = self.recode(entry)
+                                # copy from quux splort/quuxfile
+
+        return (entries, copies)
+
+    def _fetch_revisions(self, from_revnum = 0, to_revnum = 347):
+        self.child_cset = None
+        def parselogentry(orig_paths, revnum, author, date, message):
+            self.ui.debug("parsing revision %d (%d changes)\n" %
+                          (revnum, len(orig_paths)))
+
+            if revnum in self.modulemap:
+                new_module = self.modulemap[revnum]
+                if new_module != self.module:
+                    self.module = new_module
+                    self.reparent(self.module)
+
+            rev = self.revid(revnum)
+            # branch log might return entries for a parent we already have
+            if (rev in self.commits or
+                (revnum < self.lastrevs.get(self.module, 0))):
+                return
+
+            parents = []
+            # check whether this revision is the start of a branch
+            if self.module in orig_paths:
+                ent = orig_paths[self.module]
+                if ent.copyfrom_path:
+                    # ent.copyfrom_rev may not be the actual last revision
+                    prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
+                    self.modulemap[prev] = ent.copyfrom_path
+                    parents = [self.revid(prev, ent.copyfrom_path)]
+                    self.ui.note('found parent of branch %s at %d: %s\n' % \
+                                     (self.module, prev, ent.copyfrom_path))
+                else:
+                    self.ui.debug("No copyfrom path, don't know what to do.\n")
+
+            self.modulemap[revnum] = self.module # track backwards in time
+
+            orig_paths = orig_paths.items()
+            orig_paths.sort()
+            paths = []
+            # filter out unrelated paths
+            for path, ent in orig_paths:
+                if not path.startswith(self.module):
+                    self.ui.debug("boring@%s: %s\n" % (revnum, path))
+                    continue
+                paths.append((path, ent))
+
+            self.paths[rev] = (paths, parents)
+
+            # Example SVN datetime. Includes microseconds.
+            # ISO-8601 conformant
+            # '2007-01-04T17:35:00.902377Z'
+            date = util.parsedate(date[:18] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
+
+            log = message and self.recode(message)
+            author = author and self.recode(author) or ''
+            try:
+                branch = self.module.split("/")[-1]
+                if branch == 'trunk':
+                    branch = ''
+            except IndexError:
+                branch = None
+
+            cset = commit(author=author,
+                          date=util.datestr(date),
+                          desc=log,
+                          parents=parents,
+                          branch=branch,
+                          rev=rev.encode('utf-8'))
+
+            self.commits[rev] = cset
+            if self.child_cset and not self.child_cset.parents:
+                self.child_cset.parents = [rev]
+            self.child_cset = cset
+
+        self.ui.note('fetching revision log for "%s" from %d to %d\n' %
+                     (self.module, from_revnum, to_revnum))
+
+        try:
+            for entry in self.get_log([self.module], from_revnum, to_revnum):
+                orig_paths, revnum, author, date, message = entry
+                if self.is_blacklisted(revnum):
+                    self.ui.note('skipping blacklisted revision %d\n' % revnum)
+                    continue
+                if orig_paths is None:
+                    self.ui.debug('revision %d has no entries\n' % revnum)
+                    continue
+                parselogentry(orig_paths, revnum, author, date, message)
+        except SubversionException, (inst, num):
+            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
+                raise NoSuchRevision(branch=self,
+                    revision="Revision number %d" % to_revnum)
+            raise
+
+    def _getfile(self, file, rev):
+        io = StringIO()
+        # TODO: ra.get_file transmits the whole file instead of diffs.
+        mode = ''
+        try:
+            revnum = self.revnum(rev)
+            if self.module != self.modulemap[revnum]:
+                self.module = self.modulemap[revnum]
+                self.reparent(self.module)
+            info = svn.ra.get_file(self.ra, file, revnum, io)
+            if isinstance(info, list):
+                info = info[-1]
+            mode = ("svn:executable" in info) and 'x' or ''
+            mode = ("svn:special" in info) and 'l' or mode
+        except SubversionException, e:
+            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
+                svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
+            if e.apr_err in notfound: # File not found
+                raise IOError()
+            raise
+        data = io.getvalue()
+        if mode == 'l':
+            link_prefix = "link "
+            if data.startswith(link_prefix):
+                data = data[len(link_prefix):]
+        return data, mode
+
+    def _find_children(self, path, revnum):
+        path = path.strip('/')
+        pool = Pool()
+        rpath = '/'.join([self.base, path]).strip('/')
+        return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
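As an aside, the revision identifiers that revid() and revsplit() above agree on have the shape "svn:<uuid><module>@<revnum>". A minimal sketch of that round trip in plain Python, mirroring revsplit(); the UUID and module here are made-up example values, not taken from any real repository:

# Hypothetical revid, in the format produced by convert_svn.revid() above.
rev = u"svn:4276a42f-0000-0000-0000-000000000000/branches/Plone-2_0-branch@6940"

url, revnum = rev.split('@', 1)      # split off the revision number ("6940")
uuid = url.split('/', 1)[0][4:]      # drop the leading "svn:" to get the UUID
module = '/' + url.split('/', 1)[1]  # "/branches/Plone-2_0-branch"
assert (uuid, module, int(revnum)) == \
       (u"4276a42f-0000-0000-0000-000000000000", u"/branches/Plone-2_0-branch", 6940)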
new file mode 100644
--- /dev/null
+++ b/hgext/convert/transport.py
@@ -0,0 +1,129 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
+# This is a stripped-down version of the original bzr-svn transport.py,
+# Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+from cStringIO import StringIO
+import os
+from tempfile import mktemp
+
+from svn.core import SubversionException, Pool
+import svn.ra
+import svn.client
+import svn.core
+
+# Some older versions of the Python bindings need to be
+# explicitly initialized. But what we want to do probably
+# won't work worth a darn against those libraries anyway!
+svn.ra.initialize()
+
+svn_config = svn.core.svn_config_get_config(None)
+
+
+def _create_auth_baton(pool):
+    """Create a Subversion authentication baton. """
+    import svn.client
+    # Give the client context baton a suite of authentication
+    # providers.
+    providers = [
+        svn.client.get_simple_provider(pool),
+        svn.client.get_username_provider(pool),
+        svn.client.get_ssl_client_cert_file_provider(pool),
+        svn.client.get_ssl_client_cert_pw_file_provider(pool),
+        svn.client.get_ssl_server_trust_file_provider(pool),
+        ]
+    # Platform-dependent authentication methods
+    if hasattr(svn.client, 'get_windows_simple_provider'):
+        providers.append(svn.client.get_windows_simple_provider(pool))
+
+    return svn.core.svn_auth_open(providers, pool)
+
+class NotBranchError(SubversionException):
+    pass
+
+class SvnRaTransport(object):
+    """
+    Open an ra connection to a Subversion repository.
+    """
+    def __init__(self, url="", ra=None):
+        self.pool = Pool()
+        self.svn_url = url
+        self.username = ''
+        self.password = ''
+
+        # Only Subversion 1.4 and newer have reparent()
+        if ra is None or not hasattr(svn.ra, 'reparent'):
+            self.client = svn.client.create_context(self.pool)
+            ab = _create_auth_baton(self.pool)
+            if False:
+                svn.core.svn_auth_set_parameter(
+                    ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
+                svn.core.svn_auth_set_parameter(
+                    ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
+            self.client.auth_baton = ab
+            self.client.config = svn_config
+            try:
+                self.ra = svn.client.open_ra_session(
+                    self.svn_url.encode('utf8'),
+                    self.client, self.pool)
+            except SubversionException, (inst, num):
+                if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
+                           svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
+                           svn.core.SVN_ERR_BAD_URL):
+                    raise NotBranchError(url)
+                raise
+        else:
+            self.ra = ra
+            svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
+
+    class Reporter:
+        def __init__(self, (reporter, report_baton)):
+            self._reporter = reporter
+            self._baton = report_baton
+
+        def set_path(self, path, revnum, start_empty, lock_token, pool=None):
+            svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
+                        path, revnum, start_empty, lock_token, pool)
+
+        def delete_path(self, path, pool=None):
+            svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
+                    path, pool)
+
+        def link_path(self, path, url, revision, start_empty, lock_token,
+                      pool=None):
+            svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
+                    path, url, revision, start_empty, lock_token,
+                    pool)
+
+        def finish_report(self, pool=None):
+            svn.ra.reporter2_invoke_finish_report(self._reporter,
+                    self._baton, pool)
+
+        def abort_report(self, pool=None):
+            svn.ra.reporter2_invoke_abort_report(self._reporter,
+                    self._baton, pool)
+
+    def do_update(self, revnum, path, *args, **kwargs):
+        return self.Reporter(svn.ra.do_update(self.ra, revnum, path, *args, **kwargs))
+
+    def clone(self, offset=None):
+        """See Transport.clone()."""
+        if offset is None:
+            return self.__class__(self.base)
+
+        return SvnRaTransport(urlutils.join(self.base, offset), ra=self.ra)
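For orientation, here is a minimal usage sketch of SvnRaTransport as the converter above employs it (compare get_log_child() and convert_svn.__init__() in subversion.py). The file:// URL is a made-up example, and the module is assumed to be importable the same way subversion.py imports it:

import svn.ra
import transport   # hgext/convert/transport.py, imported as in subversion.py

# Open an ra session against a hypothetical local repository.
t = transport.SvnRaTransport(url='file:///var/svn/example-repo')

# The raw ra session is exposed as t.ra and the client context as t.client,
# which is how subversion.py drives the bindings directly.
print svn.ra.get_latest_revnum(t.ra)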
--- a/hgext/extdiff.py
+++ b/hgext/extdiff.py
@@ -4,106 +4,103 @@
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
-#
-# The `extdiff' Mercurial extension allows you to use external programs
-# to compare revisions, or revision with working dir.  The external diff
-# programs are called with a configurable set of options and two
-# non-option arguments: paths to directories containing snapshots of
-# files to compare.
-#
-# To enable this extension:
-#
-#   [extensions]
-#   hgext.extdiff =
-#
-# The `extdiff' extension also allows to configure new diff commands, so
-# you do not need to type "hg extdiff -p kdiff3" always.
-#
-#   [extdiff]
-#   # add new command that runs GNU diff(1) in 'context diff' mode
-#   cmd.cdiff = gdiff
-#   opts.cdiff = -Nprc5
+
+'''
+The `extdiff' Mercurial extension allows you to use external programs
+to compare revisions, or revisions with the working directory.  The external diff
+programs are called with a configurable set of options and two
+non-option arguments: paths to directories containing snapshots of
+files to compare.
+
+To enable this extension:
+
+  [extensions]
+  hgext.extdiff =
+
+The `extdiff' extension also allows you to configure new diff commands, so
+you do not need to type "hg extdiff -p kdiff3" every time.
 
-#   # add new command called vdiff, runs kdiff3
-#   cmd.vdiff = kdiff3
+  [extdiff]
+  # add new command that runs GNU diff(1) in 'context diff' mode
+  cdiff = gdiff -Nprc5
+  ## or the old way:
+  #cmd.cdiff = gdiff
+  #opts.cdiff = -Nprc5
 
-#   # add new command called meld, runs meld (no need to name twice)
-#   cmd.meld =
+  # add new command called vdiff, runs kdiff3
+  vdiff = kdiff3
 
-#   # add new command called vimdiff, runs gvimdiff with DirDiff plugin
-#   #(see http://www.vim.org/scripts/script.php?script_id=102)
-#   # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
-#   # your .vimrc
-#   cmd.vimdiff = gvim
-#   opts.vimdiff = -f '+next' '+execute "DirDiff" argv(0) argv(1)'
-#
-# Each custom diff commands can have two parts: a `cmd' and an `opts'
-# part.  The cmd.xxx option defines the name of an executable program
-# that will be run, and opts.xxx defines a set of command-line options
-# which will be inserted to the command between the program name and
-# the files/directories to diff (i.e. the cdiff example above).
-#
-# You can use -I/-X and list of file or directory names like normal
-# "hg diff" command.  The `extdiff' extension makes snapshots of only
-# needed files, so running the external diff program will actually be
-# pretty fast (at least faster than having to compare the entire tree).
+  # add new command called meld, runs meld (no need to name twice)
+  meld =
+
+  # add new command called vimdiff, runs gvimdiff with DirDiff plugin
+  # (see http://www.vim.org/scripts/script.php?script_id=102)
+  # Non-English users: be sure to put "let g:DirDiffDynamicDiffText = 1" in
+  # your .vimrc
+  vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
+
+You can use -I/-X and a list of file or directory names as with the
+normal "hg diff" command.  The `extdiff' extension makes snapshots of only
+needed files, so running the external diff program will actually be
+pretty fast (at least faster than having to compare the entire tree).
+'''
 
 from mercurial.i18n import _
 from mercurial.node import *
-from mercurial import cmdutil, util
-import os, shutil, tempfile
+from mercurial import cmdutil, util, commands
+import os, shlex, shutil, tempfile
+
+def snapshot_node(ui, repo, files, node, tmproot):
+    '''snapshot files as of some revision'''
+    mf = repo.changectx(node).manifest()
+    dirname = os.path.basename(repo.root)
+    if dirname == "":
+        dirname = "root"
+    dirname = '%s.%s' % (dirname, short(node))
+    base = os.path.join(tmproot, dirname)
+    os.mkdir(base)
+    ui.note(_('making snapshot of %d files from rev %s\n') %
+            (len(files), short(node)))
+    for fn in files:
+        if not fn in mf:
+            # skipping new file after a merge ?
+            continue
+        wfn = util.pconvert(fn)
+        ui.note('  %s\n' % wfn)
+        dest = os.path.join(base, wfn)
+        destdir = os.path.dirname(dest)
+        if not os.path.isdir(destdir):
+            os.makedirs(destdir)
+        data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
+        open(dest, 'wb').write(data)
+    return dirname
+
+
+def snapshot_wdir(ui, repo, files, tmproot):
+    '''snapshot files from working directory.
+    if we do not snapshot, -I/-X does not work and a recursive diff
+    in tools like kdiff3 and meld displays too many files.'''
+    dirname = os.path.basename(repo.root)
+    if dirname == "":
+        dirname = "root"
+    base = os.path.join(tmproot, dirname)
+    os.mkdir(base)
+    ui.note(_('making snapshot of %d files from working dir\n') %
+            (len(files)))
+    for fn in files:
+        wfn = util.pconvert(fn)
+        ui.note('  %s\n' % wfn)
+        dest = os.path.join(base, wfn)
+        destdir = os.path.dirname(dest)
+        if not os.path.isdir(destdir):
+            os.makedirs(destdir)
+        fp = open(dest, 'wb')
+        for chunk in util.filechunkiter(repo.wopener(wfn)):
+            fp.write(chunk)
+    return dirname
+
 
 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
-    def snapshot_node(files, node):
-        '''snapshot files as of some revision'''
-        mf = repo.changectx(node).manifest()
-        dirname = os.path.basename(repo.root)
-        if dirname == "":
-            dirname = "root"
-        dirname = '%s.%s' % (dirname, short(node))
-        base = os.path.join(tmproot, dirname)
-        os.mkdir(base)
-        if not ui.quiet:
-            ui.write_err(_('making snapshot of %d files from rev %s\n') %
-                         (len(files), short(node)))
-        for fn in files:
-            if not fn in mf:
-                # skipping new file after a merge ?
-                continue
-            wfn = util.pconvert(fn)
-            ui.note('  %s\n' % wfn)
-            dest = os.path.join(base, wfn)
-            destdir = os.path.dirname(dest)
-            if not os.path.isdir(destdir):
-                os.makedirs(destdir)
-            data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
-            open(dest, 'wb').write(data)
-        return dirname
-
-    def snapshot_wdir(files):
-        '''snapshot files from working directory.
-        if not using snapshot, -I/-X does not work and recursive diff
-        in tools like kdiff3 and meld displays too many files.'''
-        dirname = os.path.basename(repo.root)
-        if dirname == "":
-            dirname = "root"
-        base = os.path.join(tmproot, dirname)
-        os.mkdir(base)
-        if not ui.quiet:
-            ui.write_err(_('making snapshot of %d files from working dir\n') %
-                         (len(files)))
-        for fn in files:
-            wfn = util.pconvert(fn)
-            ui.note('  %s\n' % wfn)
-            dest = os.path.join(base, wfn)
-            destdir = os.path.dirname(dest)
-            if not os.path.isdir(destdir):
-                os.makedirs(destdir)
-            fp = open(dest, 'wb')
-            for chunk in util.filechunkiter(repo.wopener(wfn)):
-                fp.write(chunk)
-        return dirname
-
     node1, node2 = cmdutil.revpair(repo, opts['rev'])
     files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
     modified, added, removed, deleted, unknown = repo.status(
@@ -112,12 +109,34 @@ def dodiff(ui, repo, diffcmd, diffopts, 
         return 0
 
     tmproot = tempfile.mkdtemp(prefix='extdiff.')
+    dir2root = ''
     try:
-        dir1 = snapshot_node(modified + removed, node1)
+        # Always make a copy of node1
+        dir1 = snapshot_node(ui, repo, modified + removed, node1, tmproot)
+        changes = len(modified) + len(removed) + len(added)
+
+        # If node2 is not the working copy or there is >1 change, copy it
         if node2:
-            dir2 = snapshot_node(modified + added, node2)
+            dir2 = snapshot_node(ui, repo, modified + added, node2, tmproot)
+        elif changes > 1:
+            dir2 = snapshot_wdir(ui, repo, modified + added, tmproot)
         else:
-            dir2 = snapshot_wdir(modified + added)
+            # This lets the diff tool open the changed file directly
+            dir2 = ''
+            dir2root = repo.root
+
+        # If only one change, diff the files instead of the directories
+        if changes == 1:
+            if len(modified):
+                dir1 = os.path.join(dir1, util.localpath(modified[0]))
+                dir2 = os.path.join(dir2root, dir2, util.localpath(modified[0]))
+            elif len(removed):
+                dir1 = os.path.join(dir1, util.localpath(removed[0]))
+                dir2 = os.devnull
+            else:
+                dir1 = os.devnull
+                dir2 = os.path.join(dir2root, dir2, util.localpath(added[0]))
+
         cmdline = ('%s %s %s %s' %
                    (util.shellquote(diffcmd), ' '.join(diffopts),
                     util.shellquote(dir1), util.shellquote(dir2)))
@@ -158,18 +177,26 @@ cmdtable = {
      [('p', 'program', '', _('comparison program to run')),
       ('o', 'option', [], _('pass option to comparison program')),
       ('r', 'rev', [], _('revision')),
-      ('I', 'include', [], _('include names matching the given patterns')),
-      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+     ] + commands.walkopts,
      _('hg extdiff [OPT]... [FILE]...')),
     }
 
 def uisetup(ui):
     for cmd, path in ui.configitems('extdiff'):
-        if not cmd.startswith('cmd.'): continue
-        cmd = cmd[4:]
-        if not path: path = cmd
-        diffopts = ui.config('extdiff', 'opts.' + cmd, '')
-        diffopts = diffopts and [diffopts] or []
+        if cmd.startswith('cmd.'):
+            cmd = cmd[4:]
+            if not path: path = cmd
+            diffopts = ui.config('extdiff', 'opts.' + cmd, '')
+            diffopts = diffopts and [diffopts] or []
+        elif cmd.startswith('opts.'):
+            continue
+        else:
+            # command = path opts
+            if path:
+                diffopts = shlex.split(path)
+                path = diffopts.pop(0)
+            else:
+                path, diffopts = cmd, []
         def save(cmd, path, diffopts):
             '''use closure to save diff command to use'''
             def mydiff(ui, repo, *pats, **opts):
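A note on the new single-key [extdiff] syntax handled in uisetup() above: the value is split with shlex, so shell-style quoting carries through into the diff options. A quick illustration using the vimdiff line from the docstring (only the variable names here are invented):

import shlex

# The vimdiff value from the docstring above, as it would appear in hgrc.
value = '''gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)' '''
parts = shlex.split(value)
path, diffopts = parts[0], parts[1:]
# path     == 'gvim'
# diffopts == ['-f', '+next', '+execute "DirDiff" argv(0) argv(1)']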
--- a/hgext/fetch.py
+++ b/hgext/fetch.py
@@ -23,29 +23,29 @@ def fetch(ui, repo, source='default', **
         if modheads == 0:
             return 0
         if modheads == 1:
-            return hg.clean(repo, repo.changelog.tip(), wlock=wlock)
+            return hg.clean(repo, repo.changelog.tip())
         newheads = repo.heads(parent)
         newchildren = [n for n in repo.heads(parent) if n != parent]
         newparent = parent
         if newchildren:
             newparent = newchildren[0]
-            hg.clean(repo, newparent, wlock=wlock)
+            hg.clean(repo, newparent)
         newheads = [n for n in repo.heads() if n != newparent]
         err = False
         if newheads:
             ui.status(_('merging with new head %d:%s\n') %
                       (repo.changelog.rev(newheads[0]), short(newheads[0])))
-            err = hg.merge(repo, newheads[0], remind=False, wlock=wlock)
+            err = hg.merge(repo, newheads[0], remind=False)
         if not err and len(newheads) > 1:
             ui.status(_('not merging with %d other new heads '
                         '(use "hg heads" and "hg merge" to merge them)') %
                       (len(newheads) - 1))
         if not err:
-            mod, add, rem = repo.status(wlock=wlock)[:3]
+            mod, add, rem = repo.status()[:3]
             message = (cmdutil.logmessage(opts) or
                        (_('Automated merge with %s') % other.url()))
             n = repo.commit(mod + add + rem, message,
-                            opts['user'], opts['date'], lock=lock, wlock=wlock,
+                            opts['user'], opts['date'],
                             force_editor=opts.get('force_editor'))
             ui.status(_('new changeset %d:%s merges remote changes '
                         'with local\n') % (repo.changelog.rev(n),
@@ -60,7 +60,7 @@ def fetch(ui, repo, source='default', **
             raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
         elif opts['rev']:
             revs = [other.lookup(rev) for rev in opts['rev']]
-        modheads = repo.pull(other, heads=revs, lock=lock)
+        modheads = repo.pull(other, heads=revs)
         return postincoming(other, modheads)
 
     parent, p2 = repo.dirstate.parents()
@@ -69,10 +69,11 @@ def fetch(ui, repo, source='default', **
                            '(use "hg update" to check out tip)'))
     if p2 != nullid:
         raise util.Abort(_('outstanding uncommitted merge'))
-    wlock = repo.wlock()
-    lock = repo.lock()
+    wlock = lock = None
     try:
-        mod, add, rem = repo.status(wlock=wlock)[:3]
+        wlock = repo.wlock()
+        lock = repo.lock()
+        mod, add, rem = repo.status()[:3]
         if mod or add or rem:
             raise util.Abort(_('outstanding uncommitted changes'))
         if len(repo.heads()) > 1:
@@ -80,19 +81,13 @@ def fetch(ui, repo, source='default', **
                                '(use "hg heads" and "hg merge" to merge)'))
         return pull()
     finally:
-        lock.release()
-        wlock.release()
+        del lock, wlock
 
 cmdtable = {
     'fetch':
         (fetch,
-        [('e', 'ssh', '', _('specify ssh command to use')),
-         ('m', 'message', '', _('use <text> as commit message')),
-         ('l', 'logfile', '', _('read the commit message from <file>')),
-         ('d', 'date', '', _('record datecode as commit date')),
-         ('u', 'user', '', _('record user as commiter')),
-         ('r', 'rev', [], _('a specific revision you would like to pull')),
+        [('r', 'rev', [], _('a specific revision you would like to pull')),
          ('f', 'force-editor', None, _('edit commit message')),
-         ('', 'remotecmd', '', _('hg command to run on the remote side'))],
+        ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
         _('hg fetch [SOURCE]')),
 }
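The fetch() hunks above also illustrate the locking style this changeset moves to: locks are no longer threaded through as keyword arguments; callers acquire them up front and release them by dropping the references. A minimal sketch of the idiom in isolation, assuming repo is an already-open localrepository:

wlock = lock = None
try:
    wlock = repo.wlock()    # working-directory lock first
    lock = repo.lock()      # then the store lock
    # ... work that touches both the working dir and the store ...
finally:
    del lock, wlock         # the lock objects release themselves when
                            # the references are dropped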
--- a/hgext/gpg.py
+++ b/hgext/gpg.py
@@ -6,7 +6,7 @@
 # of the GNU General Public License, incorporated herein by reference.
 
 import os, tempfile, binascii
-from mercurial import util
+from mercurial import util, commands
 from mercurial import node as hgnode
 from mercurial.i18n import _
 
@@ -240,7 +240,7 @@ def sign(ui, repo, *revs, **opts):
 
     repo.wfile(".hgsigs", "ab").write(sigmessage)
 
-    if repo.dirstate.state(".hgsigs") == '?':
+    if '.hgsigs' not in repo.dirstate:
         repo.add([".hgsigs"])
 
     if opts["no_commit"]:
@@ -269,10 +269,9 @@ cmdtable = {
          [('l', 'local', None, _('make the signature local')),
           ('f', 'force', None, _('sign even if the sigfile is modified')),
           ('', 'no-commit', None, _('do not commit the sigfile after signing')),
+          ('k', 'key', '', _('the key id to sign with')),
           ('m', 'message', '', _('commit message')),
-          ('d', 'date', '', _('date code')),
-          ('u', 'user', '', _('user')),
-          ('k', 'key', '', _('the key id to sign with'))],
+         ] + commands.commitopts2,
          _('hg sign [OPTION]... [REVISION]...')),
     "sigcheck": (check, [], _('hg sigcheck REVISION')),
     "sigs": (sigs, [], _('hg sigs')),
--- a/hgext/hbisect.py
+++ b/hgext/hbisect.py
@@ -37,10 +37,9 @@ class bisect(object):
         self.ui = ui
         self.goodrevs = []
         self.badrev = None
-        self.good_dirty = 0
-        self.bad_dirty = 0
         self.good_path = "good"
         self.bad_path = "bad"
+        self.is_reset = False
 
         if os.path.exists(os.path.join(self.path, self.good_path)):
             self.goodrevs = self.opener(self.good_path).read().splitlines()
@@ -51,8 +50,10 @@ class bisect(object):
                 self.badrev = hg.bin(r.pop(0))
 
     def write(self):
+        if self.is_reset:
+            return
         if not os.path.isdir(self.path):
-            return
+            os.mkdir(self.path)
         f = self.opener(self.good_path, "w")
         f.write("\n".join([hg.hex(r) for r in  self.goodrevs]))
         if len(self.goodrevs) > 0:
@@ -81,6 +82,7 @@ class bisect(object):
         # Not sure about this
         #self.ui.write("Going back to tip\n")
         #self.repo.update(self.repo.changelog.tip())
+        self.is_reset = True
         return 0
 
     def num_ancestors(self, head=None, stop=None):
@@ -301,10 +303,9 @@ For subcommands see "hg bisect help\"
     if len(args) > bisectcmdtable[cmd][1]:
         ui.warn(_("bisect: Too many arguments\n"))
         return help_()
-    try:
-        return bisectcmdtable[cmd][0](*args)
-    finally:
-        b.write()
+    ret = bisectcmdtable[cmd][0](*args)
+    b.write()
+    return ret
 
 cmdtable = {
     "bisect": (bisect_run, [], _("hg bisect [help|init|reset|next|good|bad]")),
new file mode 100644
--- /dev/null
+++ b/hgext/imerge.py
@@ -0,0 +1,405 @@
+# Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
+# Published under the GNU GPL
+
+'''
+imerge - interactive merge
+'''
+
+from mercurial.i18n import _
+from mercurial.node import *
+from mercurial import commands, cmdutil, dispatch, fancyopts, hg, merge, util
+import os, tarfile
+
+class InvalidStateFileException(Exception): pass
+
+class ImergeStateFile(object):
+    def __init__(self, im):
+        self.im = im
+
+    def save(self, dest):
+        tf = tarfile.open(dest, 'w:gz')
+
+        st = os.path.join(self.im.path, 'status')
+        tf.add(st, os.path.join('.hg', 'imerge', 'status'))
+
+        for f in self.im.resolved:
+            (fd, fo) = self.im.conflicts[f]
+            abssrc = self.im.repo.wjoin(fd)
+            tf.add(abssrc, fd)
+
+        tf.close()
+
+    def load(self, source):
+        wlock = self.im.repo.wlock()
+        lock = self.im.repo.lock()
+
+        tf = tarfile.open(source, 'r')
+        contents = tf.getnames()
+        # tarfile normalizes path separators to '/'
+        statusfile = '.hg/imerge/status'
+        if statusfile not in contents:
+            raise InvalidStateFileException('no status file')
+
+        tf.extract(statusfile, self.im.repo.root)
+        p1, p2 = self.im.load()
+        if self.im.repo.dirstate.parents()[0] != p1.node():
+            hg.clean(self.im.repo, p1.node())
+        self.im.start(p2.node())
+        for tarinfo in tf:
+            tf.extract(tarinfo, self.im.repo.root)
+        self.im.load()
+
+class Imerge(object):
+    def __init__(self, ui, repo):
+        self.ui = ui
+        self.repo = repo
+
+        self.path = repo.join('imerge')
+        self.opener = util.opener(self.path)
+
+        self.wctx = self.repo.workingctx()
+        self.conflicts = {}
+        self.resolved = []
+
+    def merging(self):
+        return len(self.wctx.parents()) > 1
+
+    def load(self):
+        # status format. \0-delimited file, fields are
+        # p1, p2, conflict count, conflict filenames, resolved filenames
+        # conflict filenames are tuples of localname, remoteorig, remotenew
+
+        statusfile = self.opener('status')
+
+        status = statusfile.read().split('\0')
+        if len(status) < 3:
+            raise util.Abort('invalid imerge status file')
+
+        parents = []
+        for p in status[:2]:
+            try:
+                parents.append(self.repo.changectx(p))
+            except LookupError:
+                raise util.Abort('merge parent %s not in repository' % p)
+
+        status = status[2:]
+        conflicts = int(status.pop(0)) * 3
+        self.resolved = status[conflicts:]
+        for i in xrange(0, conflicts, 3):
+            self.conflicts[status[i]] = (status[i+1], status[i+2])
+
+        return parents
+
+    def save(self):
+        lock = self.repo.lock()
+
+        if not os.path.isdir(self.path):
+            os.mkdir(self.path)
+        statusfile = self.opener('status', 'wb')
+
+        out = [hex(n.node()) for n in self.wctx.parents()]
+        out.append(str(len(self.conflicts)))
+        conflicts = self.conflicts.items()
+        conflicts.sort()
+        for fw, fd_fo in conflicts:
+            out.append(fw)
+            out.extend(fd_fo)
+        out.extend(self.resolved)
+
+        statusfile.write('\0'.join(out))
+
+    def remaining(self):
+        return [f for f in self.conflicts if f not in self.resolved]
+
+    def filemerge(self, fn, interactive=True):
+        wlock = self.repo.wlock()
+
+        (fd, fo) = self.conflicts[fn]
+        p1, p2 = self.wctx.parents()
+
+        # this could be greatly improved
+        realmerge = os.environ.get('HGMERGE')
+        if not interactive:
+            os.environ['HGMERGE'] = 'merge'
+
+        # The filemerge ancestor algorithm does not work if self.wctx
+        # already has two parents (in normal merge it doesn't yet). But
+        # this is very dirty.
+        self.wctx._parents.pop()
+        try:
+            # TODO: we should probably revert the file if merge fails
+            return merge.filemerge(self.repo, fn, fd, fo, self.wctx, p2)
+        finally:
+            self.wctx._parents.append(p2)
+            if realmerge:
+                os.environ['HGMERGE'] = realmerge
+            elif not interactive:
+                del os.environ['HGMERGE']
+
+    def start(self, rev=None):
+        _filemerge = merge.filemerge
+        def filemerge(repo, fw, fd, fo, wctx, mctx):
+            self.conflicts[fw] = (fd, fo)
+
+        merge.filemerge = filemerge
+        commands.merge(self.ui, self.repo, rev=rev)
+        merge.filemerge = _filemerge
+
+        self.wctx = self.repo.workingctx()
+        self.save()
+
+    def resume(self):
+        self.load()
+
+        dp = self.repo.dirstate.parents()
+        p1, p2 = self.wctx.parents()
+        if p1.node() != dp[0] or p2.node() != dp[1]:
+            raise util.Abort('imerge state does not match working directory')
+
+    def next(self):
+        remaining = self.remaining()
+        return remaining and remaining[0]
+
+    def resolve(self, files):
+        resolved = dict.fromkeys(self.resolved)
+        for fn in files:
+            if fn not in self.conflicts:
+                raise util.Abort('%s is not in the merge set' % fn)
+            resolved[fn] = True
+        self.resolved = resolved.keys()
+        self.resolved.sort()
+        self.save()
+        return 0
+
+    def unresolve(self, files):
+        resolved = dict.fromkeys(self.resolved)
+        for fn in files:
+            if fn not in resolved:
+                raise util.Abort('%s is not resolved' % fn)
+            del resolved[fn]
+        self.resolved = resolved.keys()
+        self.resolved.sort()
+        self.save()
+        return 0
+
+    def pickle(self, dest):
+        '''write current merge state to file to be resumed elsewhere'''
+        state = ImergeStateFile(self)
+        return state.save(dest)
+
+    def unpickle(self, source):
+        '''read merge state from file'''
+        state = ImergeStateFile(self)
+        return state.load(source)
+
+def load(im, source):
+    if im.merging():
+        raise util.Abort('there is already a merge in progress '
+                         '(update -C <rev> to abort it)' )
+    m, a, r, d =  im.repo.status()[:4]
+    if m or a or r or d:
+        raise util.Abort('working directory has uncommitted changes')
+
+    rc = im.unpickle(source)
+    if not rc:
+        status(im)
+    return rc
+
+def merge_(im, filename=None, auto=False):
+    success = True
+    if auto and not filename:
+        for fn in im.remaining():
+            rc = im.filemerge(fn, interactive=False)
+            if rc:
+                success = False
+            else:
+                im.resolve([fn])
+        if success:
+            im.ui.write('all conflicts resolved\n')
+        else:
+            status(im)
+        return 0
+
+    if not filename:
+        filename = im.next()
+        if not filename:
+            im.ui.write('all conflicts resolved\n')
+            return 0
+
+    rc = im.filemerge(filename, interactive=not auto)
+    if not rc:
+        im.resolve([filename])
+        if not im.next():
+            im.ui.write('all conflicts resolved\n')
+    return rc
+
+def next(im):
+    n = im.next()
+    if n:
+        im.ui.write('%s\n' % n)
+    else:
+        im.ui.write('all conflicts resolved\n')
+    return 0
+
+def resolve(im, *files):
+    if not files:
+        raise util.Abort('resolve requires at least one filename')
+    return im.resolve(files)
+
+def save(im, dest):
+    return im.pickle(dest)
+
+def status(im, **opts):
+    if not opts.get('resolved') and not opts.get('unresolved'):
+        opts['resolved'] = True
+        opts['unresolved'] = True
+
+    if im.ui.verbose:
+        p1, p2 = [short(p.node()) for p in im.wctx.parents()]
+        im.ui.note(_('merging %s and %s\n') % (p1, p2))
+
+    conflicts = im.conflicts.keys()
+    conflicts.sort()
+    remaining = dict.fromkeys(im.remaining())
+    st = []
+    for fn in conflicts:
+        if opts.get('no_status'):
+            mode = ''
+        elif fn in remaining:
+            mode = 'U '
+        else:
+            mode = 'R '
+        if ((opts.get('resolved') and fn not in remaining)
+            or (opts.get('unresolved') and fn in remaining)):
+            st.append((mode, fn))
+    st.sort()
+    for (mode, fn) in st:
+        if im.ui.verbose:
+            fd, fo = im.conflicts[fn]
+            if fo != fn:
+                fn = '%s (%s)' % (fn, fo)
+        im.ui.write('%s%s\n' % (mode, fn))
+    if opts.get('unresolved') and not remaining:
+        im.ui.write(_('all conflicts resolved\n'))
+
+    return 0
+
+def unresolve(im, *files):
+    if not files:
+        raise util.Abort('unresolve requires at least one filename')
+    return im.unresolve(files)
+
+subcmdtable = {
+    'load': (load, []),
+    'merge':
+        (merge_,
+         [('a', 'auto', None, _('automatically resolve if possible'))]),
+    'next': (next, []),
+    'resolve': (resolve, []),
+    'save': (save, []),
+    'status':
+        (status,
+         [('n', 'no-status', None, _('hide status prefix')),
+          ('', 'resolved', None, _('only show resolved conflicts')),
+          ('', 'unresolved', None, _('only show unresolved conflicts'))]),
+    'unresolve': (unresolve, [])
+}
+
+def dispatch_(im, args, opts):
+    def complete(s, choices):
+        candidates = []
+        for choice in choices:
+            if choice.startswith(s):
+                candidates.append(choice)
+        return candidates
+
+    c, args = args[0], list(args[1:])
+    cmd = complete(c, subcmdtable.keys())
+    if not cmd:
+        raise cmdutil.UnknownCommand('imerge ' + c)
+    if len(cmd) > 1:
+        cmd.sort()
+        raise cmdutil.AmbiguousCommand('imerge ' + c, cmd)
+    cmd = cmd[0]
+
+    func, optlist = subcmdtable[cmd]
+    opts = {}
+    try:
+        args = fancyopts.fancyopts(args, optlist, opts)
+        return func(im, *args, **opts)
+    except fancyopts.getopt.GetoptError, inst:
+        raise dispatch.ParseError('imerge', '%s: %s' % (cmd, inst))
+    except TypeError:
+        raise dispatch.ParseError('imerge', _('%s: invalid arguments') % cmd)
+
+def imerge(ui, repo, *args, **opts):
+    '''interactive merge
+
+    imerge lets you split a merge into pieces. When you start a merge
+    with imerge, the names of all files with conflicts are recorded.
+    You can then merge any of these files, and if the merge is
+    successful, they will be marked as resolved. When all files are
+    resolved, the merge is complete.
+
+    If no merge is in progress, hg imerge [rev] will merge the working
+    directory with rev (defaulting to the other head if the repository
+    only has two heads). You may also resume a saved merge with
+    hg imerge load <file>.
+
+    If a merge is in progress, hg imerge will default to merging the
+    next unresolved file.
+
+    The following subcommands are available:
+
+    status:
+      show the current state of the merge
+      options:
+        -n --no-status:  do not print the status prefix
+           --resolved:   only print resolved conflicts
+           --unresolved: only print unresolved conflicts
+    next:
+      show the next unresolved file merge
+    merge [<file>]:
+      merge <file>. If the file merge is successful, the file will be
+      recorded as resolved. If no file is given, the next unresolved
+      file will be merged.
+    resolve <file>...:
+      mark files as successfully merged
+    unresolve <file>...:
+      mark files as requiring merging.
+    save <file>:
+      save the state of the merge to a file to be resumed elsewhere
+    load <file>:
+      load the state of the merge from a file created by save
+    '''
+
+    im = Imerge(ui, repo)
+
+    if im.merging():
+        im.resume()
+    else:
+        rev = opts.get('rev')
+        if rev and args:
+            raise util.Abort('please specify just one revision')
+
+        if len(args) == 2 and args[0] == 'load':
+            pass
+        else:
+            if args:
+                rev = args[0]
+            im.start(rev=rev)
+            if opts.get('auto'):
+                args = ['merge', '--auto']
+            else:
+                args = ['status']
+
+    if not args:
+        args = ['merge']
+
+    return dispatch_(im, args, opts)
+
+cmdtable = {
+    '^imerge':
+    (imerge,
+     [('r', 'rev', '', _('revision to merge')),
+      ('a', 'auto', None, _('automatically merge where possible'))],
+      'hg imerge [command]')
+}
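
Imerge.load() above documents the on-disk status format: one NUL-delimited file holding the two parent identifiers, a conflict count, three fields per conflict, and then the resolved file names. As a rough illustration of that layout, here is a round-trip sketch; the helper names are hypothetical, and the per-conflict triple follows what save() actually stores (working name mapped to a destination/other pair):

    def encode_status(p1, p2, conflicts, resolved):
        # conflicts maps working-dir name -> (destination name, other-parent name)
        out = [p1, p2, str(len(conflicts))]
        for fw in sorted(conflicts):
            fd, fo = conflicts[fw]
            out.extend([fw, fd, fo])
        out.extend(resolved)
        return '\0'.join(out)

    def decode_status(data):
        fields = data.split('\0')
        if len(fields) < 3:
            raise ValueError('invalid status data')
        p1, p2 = fields[0], fields[1]
        nfields = int(fields[2]) * 3
        body = fields[3:3 + nfields]
        resolved = fields[3 + nfields:]
        conflicts = {}
        for i in range(0, nfields, 3):
            conflicts[body[i]] = (body[i + 1], body[i + 2])
        return p1, p2, conflicts, resolved

    blob = encode_status('p1hex', 'p2hex', {'a.txt': ('a.txt', 'a.txt')}, [])
    print(decode_status(blob) ==
          ('p1hex', 'p2hex', {'a.txt': ('a.txt', 'a.txt')}, []))
    # -> True
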
new file mode 100644
--- /dev/null
+++ b/hgext/interhg.py
@@ -0,0 +1,83 @@
+# interhg.py - interhg
+#
+# Copyright 2007 OHASHI Hideya <ohachige@gmail.com>
+#
+# Contributor(s):
+#   Edward Lee <edward.lee@engineering.uiuc.edu>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+#
+# The `interhg' Mercurial extension allows you to rewrite changelog and
+# summary text, InterWiki style, using regular-expression substitutions.
+#
+# To enable this extension:
+#
+#   [extensions]
+#   interhg =
+#
+# Here are some example patterns (linking to a bug tracker, making text bold, etc.):
+#
+#   [interhg]
+#   issues = s!issue(\d+)!<a href="http://bts/issue\1">issue\1<\/a>!
+#   bugzilla = s!((?:bug|b=|(?=#?\d{4,}))(?:\s*#?)(\d+))!<a..=\2">\1</a>!i
+#   boldify = s/(^|\s)#(\d+)\b/ <b>#\2<\/b>/
+#
+# Add any number of names and patterns to match
+
+import re
+from mercurial.hgweb import hgweb_mod
+from mercurial import templater
+
+orig_escape = templater.common_filters["escape"]
+
+interhg_table = []
+
+def interhg_escape(x):
+    escstr = orig_escape(x)
+    for regexp, format in interhg_table:
+        escstr = regexp.sub(format, escstr)
+    return escstr
+
+templater.common_filters["escape"] = interhg_escape
+
+orig_refresh = hgweb_mod.hgweb.refresh
+
+def interhg_refresh(self):
+    interhg_table[:] = []
+    for key, pattern in self.repo.ui.configitems('interhg'):
+        # grab the delimiter from the character after the "s"
+        unesc = pattern[1]
+        delim = re.escape(unesc)
+
+        # identify portions of the pattern, taking care to avoid escaped
+        # delimiters. the replace format and flags are optional, but delimiters
+        # are required.
+        match = re.match(r'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
+                         % (delim, delim, delim), pattern)
+        if not match:
+            self.repo.ui.warn("interhg: invalid pattern for %s: %s\n"
+                              % (key, pattern))
+            continue
+
+        # we need to unescape the delimiter for regexp and format
+        delim_re = re.compile(r'(?<!\\)\\%s' % delim)
+        regexp = delim_re.sub(unesc, match.group(1))
+        format = delim_re.sub(unesc, match.group(2))
+
+        # the pattern allows for 6 regexp flags, so set them if necessary
+        flagin = match.group(3)
+        flags = 0
+        if flagin:
+            for flag in flagin.upper():
+                flags |= re.__dict__[flag]
+
+        try:
+            regexp = re.compile(regexp, flags)
+            interhg_table.append((regexp, format))
+        except re.error:
+            self.repo.ui.warn("interhg: invalid regexp for %s: %s\n"
+                              % (key, regexp))
+    return orig_refresh(self)
+
+hgweb_mod.hgweb.refresh = interhg_refresh
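
interhg_refresh() above parses each [interhg] value as a sed-style s<delim>pattern<delim>replacement<delim>flags rule and applies the compiled substitutions to the already HTML-escaped text. A simplified, self-contained sketch of that parse-and-apply cycle; parse_rule and apply_rules are hypothetical names, and unlike the extension this version does not handle escaped delimiters inside the pattern:

    import re

    def parse_rule(rule):
        # parse a sed-style s<delim>regexp<delim>replacement<delim>flags rule
        delim = re.escape(rule[1])
        m = re.match(r'^s%s(.+)%s(.*)%s([ilmsux]*)$' % (delim, delim, delim),
                     rule)
        if not m:
            raise ValueError('invalid rule: %s' % rule)
        pattern, replacement, flagtext = m.groups()
        flags = 0
        for flag in flagtext.upper():
            flags |= getattr(re, flag)
        return re.compile(pattern, flags), replacement

    def apply_rules(rules, escaped_text):
        # apply each (regexp, replacement) pair in turn, as the escape filter does
        for regexp, replacement in rules:
            escaped_text = regexp.sub(replacement, escaped_text)
        return escaped_text

    rules = [parse_rule(r's!issue(\d+)!<a href="http://bts/issue\1">issue\1</a>!')]
    print(apply_rules(rules, 'fixes issue42'))
    # -> fixes <a href="http://bts/issue42">issue42</a>
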
--- a/hgext/mq.py
+++ b/hgext/mq.py
@@ -323,10 +323,10 @@ class queue:
         patch.diff(repo, node1, node2, fns, match=matchfn,
                    fp=fp, changes=changes, opts=self.diffopts())
 
-    def mergeone(self, repo, mergeq, head, patch, rev, wlock):
+    def mergeone(self, repo, mergeq, head, patch, rev):
         # first try just applying the patch
         (err, n) = self.apply(repo, [ patch ], update_status=False,
-                              strict=True, merge=rev, wlock=wlock)
+                              strict=True, merge=rev)
 
         if err == 0:
             return (err, n)
@@ -337,15 +337,14 @@ class queue:
         self.ui.warn("patch didn't work out, merging %s\n" % patch)
 
         # apply failed, strip away that rev and merge.
-        hg.clean(repo, head, wlock=wlock)
-        self.strip(repo, n, update=False, backup='strip', wlock=wlock)
+        hg.clean(repo, head)
+        self.strip(repo, n, update=False, backup='strip')
 
         ctx = repo.changectx(rev)
-        ret = hg.merge(repo, rev, wlock=wlock)
+        ret = hg.merge(repo, rev)
         if ret:
             raise util.Abort(_("update returned %d") % ret)
-        n = repo.commit(None, ctx.description(), ctx.user(),
-                        force=1, wlock=wlock)
+        n = repo.commit(None, ctx.description(), ctx.user(), force=1)
         if n == None:
             raise util.Abort(_("repo commit failed"))
         try:
@@ -381,7 +380,7 @@ class queue:
                 return pp[1]
         return pp[0]
 
-    def mergepatch(self, repo, mergeq, series, wlock):
+    def mergepatch(self, repo, mergeq, series):
         if len(self.applied) == 0:
             # each of the patches merged in will have two parents.  This
             # can confuse the qrefresh, qdiff, and strip code because it
@@ -390,8 +389,7 @@ class queue:
             # the first patch in the queue is never a merge patch
             #
             pname = ".hg.patches.merge.marker"
-            n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
-                            wlock=wlock)
+            n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
             self.removeundo(repo)
             self.applied.append(statusentry(revlog.hex(n), pname))
             self.applied_dirty = 1
@@ -412,7 +410,7 @@ class queue:
                 self.ui.warn("patch %s is not applied\n" % patch)
                 return (1, None)
             rev = revlog.bin(info[1])
-            (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
+            (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
             if head:
                 self.applied.append(statusentry(revlog.hex(head), patch))
                 self.applied_dirty = 1
@@ -437,30 +435,30 @@ class queue:
         return (True, files, fuzz)
 
     def apply(self, repo, series, list=False, update_status=True,
-              strict=False, patchdir=None, merge=None, wlock=None,
-              all_files={}):
-        if not wlock:
-            wlock = repo.wlock()
-        lock = repo.lock()
-        tr = repo.transaction()
+              strict=False, patchdir=None, merge=None, all_files={}):
+        wlock = lock = tr = None
         try:
-            ret = self._apply(tr, repo, series, list, update_status,
-                              strict, patchdir, merge, wlock,
-                              lock=lock, all_files=all_files)
-            tr.close()
-            self.save_dirty()
-            return ret
-        except:
+            wlock = repo.wlock()
+            lock = repo.lock()
+            tr = repo.transaction()
             try:
-                tr.abort()
-            finally:
-                repo.invalidate()
-                repo.dirstate.invalidate()
-            raise
+                ret = self._apply(repo, series, list, update_status,
+                                  strict, patchdir, merge, all_files=all_files)
+                tr.close()
+                self.save_dirty()
+                return ret
+            except:
+                try:
+                    tr.abort()
+                finally:
+                    repo.invalidate()
+                    repo.dirstate.invalidate()
+                raise
+        finally:
+            del tr, lock, wlock
 
-    def _apply(self, tr, repo, series, list=False, update_status=True,
-               strict=False, patchdir=None, merge=None, wlock=None,
-               lock=None, all_files={}):
+    def _apply(self, repo, series, list=False, update_status=True,
+               strict=False, patchdir=None, merge=None, all_files={}):
         # TODO unify with commands.py
         if not patchdir:
             patchdir = self.path
@@ -497,17 +495,18 @@ class queue:
                 removed = []
                 merged = []
                 for f in files:
-                    if os.path.exists(repo.dirstate.wjoin(f)):
+                    if os.path.exists(repo.wjoin(f)):
                         merged.append(f)
                     else:
                         removed.append(f)
-                repo.dirstate.update(repo.dirstate.filterfiles(removed), 'r')
-                repo.dirstate.update(repo.dirstate.filterfiles(merged), 'm')
+                for f in removed:
+                    repo.dirstate.remove(f)
+                for f in merged:
+                    repo.dirstate.merge(f)
                 p1, p2 = repo.dirstate.parents()
                 repo.dirstate.setparents(p1, merge)
-            files = patch.updatedir(self.ui, repo, files, wlock=wlock)
-            n = repo.commit(files, message, user, date, force=1, lock=lock,
-                            wlock=wlock)
+            files = patch.updatedir(self.ui, repo, files)
+            n = repo.commit(files, message, user, date, force=1)
 
             if n == None:
                 raise util.Abort(_("repo commit failed"))
@@ -614,44 +613,49 @@ class queue:
         commitfiles = m + a + r
         self.check_toppatch(repo)
         wlock = repo.wlock()
-        insert = self.full_series_end()
-        if msg:
-            n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
-        else:
-            n = repo.commit(commitfiles,
-                            "[mq]: %s" % patch, force=True, wlock=wlock)
-        if n == None:
-            raise util.Abort(_("repo commit failed"))
-        self.full_series[insert:insert] = [patch]
-        self.applied.append(statusentry(revlog.hex(n), patch))
-        self.parse_series()
-        self.series_dirty = 1
-        self.applied_dirty = 1
-        p = self.opener(patch, "w")
-        if msg:
-            msg = msg + "\n"
-            p.write(msg)
-        p.close()
-        wlock = None
-        r = self.qrepo()
-        if r: r.add([patch])
-        if commitfiles:
-            self.refresh(repo, short=True)
-        self.removeundo(repo)
+        try:
+            insert = self.full_series_end()
+            if msg:
+                n = repo.commit(commitfiles, msg, force=True)
+            else:
+                n = repo.commit(commitfiles, "[mq]: %s" % patch, force=True)
+            if n == None:
+                raise util.Abort(_("repo commit failed"))
+            self.full_series[insert:insert] = [patch]
+            self.applied.append(statusentry(revlog.hex(n), patch))
+            self.parse_series()
+            self.series_dirty = 1
+            self.applied_dirty = 1
+            p = self.opener(patch, "w")
+            if msg:
+                msg = msg + "\n"
+                p.write(msg)
+            p.close()
+            wlock = None
+            r = self.qrepo()
+            if r: r.add([patch])
+            if commitfiles:
+                self.refresh(repo, short=True, git=opts.get('git'))
+            self.removeundo(repo)
+        finally:
+            del wlock
 
-    def strip(self, repo, rev, update=True, backup="all", wlock=None):
-        if not wlock:
+    def strip(self, repo, rev, update=True, backup="all"):
+        wlock = lock = None
+        try:
             wlock = repo.wlock()
-        lock = repo.lock()
+            lock = repo.lock()
 
-        if update:
-            self.check_localchanges(repo, refresh=False)
-            urev = self.qparents(repo, rev)
-            hg.clean(repo, urev, wlock=wlock)
-            repo.dirstate.write()
+            if update:
+                self.check_localchanges(repo, refresh=False)
+                urev = self.qparents(repo, rev)
+                hg.clean(repo, urev)
+                repo.dirstate.write()
 
-        self.removeundo(repo)
-        repair.strip(self.ui, repo, rev, backup)
+            self.removeundo(repo)
+            repair.strip(self.ui, repo, rev, backup)
+        finally:
+            del lock, wlock
 
     def isapplied(self, patch):
         """returns (index, rev, patch)"""
@@ -735,157 +739,160 @@ class queue:
         raise util.Abort(_("patch %s not in series") % patch)
 
     def push(self, repo, patch=None, force=False, list=False,
-             mergeq=None, wlock=None):
-        if not wlock:
-            wlock = repo.wlock()
-        patch = self.lookup(patch)
-        # Suppose our series file is: A B C and the current 'top' patch is B.
-        # qpush C should be performed (moving forward)
-        # qpush B is a NOP (no change)
-        # qpush A is an error (can't go backwards with qpush)
-        if patch:
-            info = self.isapplied(patch)
-            if info:
-                if info[0] < len(self.applied) - 1:
-                    raise util.Abort(_("cannot push to a previous patch: %s") %
-                                     patch)
-                if info[0] < len(self.series) - 1:
-                    self.ui.warn(_('qpush: %s is already at the top\n') % patch)
-                else:
-                    self.ui.warn(_('all patches are currently applied\n'))
-                return
+             mergeq=None):
+        wlock = repo.wlock()
+        try:
+            patch = self.lookup(patch)
+            # Suppose our series file is: A B C and the current 'top'
+            # patch is B. qpush C should be performed (moving forward)
+            # qpush B is a NOP (no change) qpush A is an error (can't
+            # go backwards with qpush)
+            if patch:
+                info = self.isapplied(patch)
+                if info:
+                    if info[0] < len(self.applied) - 1:
+                        raise util.Abort(
+                            _("cannot push to a previous patch: %s") % patch)
+                    if info[0] < len(self.series) - 1:
+                        self.ui.warn(
+                            _('qpush: %s is already at the top\n') % patch)
+                    else:
+                        self.ui.warn(_('all patches are currently applied\n'))
+                    return
 
-        # Following the above example, starting at 'top' of B:
-        #  qpush should be performed (pushes C), but a subsequent qpush without
-        #  an argument is an error (nothing to apply). This allows a loop
-        #  of "...while hg qpush..." to work as it detects an error when done
-        if self.series_end() == len(self.series):
-            self.ui.warn(_('patch series already fully applied\n'))
-            return 1
-        if not force:
-            self.check_localchanges(repo)
+            # Following the above example, starting at 'top' of B:
+            # qpush should be performed (pushes C), but a subsequent
+            # qpush without an argument is an error (nothing to
+            # apply). This allows a loop of "...while hg qpush..." to
+            # work as it detects an error when done
+            if self.series_end() == len(self.series):
+                self.ui.warn(_('patch series already fully applied\n'))
+                return 1
+            if not force:
+                self.check_localchanges(repo)
 
-        self.applied_dirty = 1;
-        start = self.series_end()
-        if start > 0:
-            self.check_toppatch(repo)
-        if not patch:
-            patch = self.series[start]
-            end = start + 1
-        else:
-            end = self.series.index(patch, start) + 1
-        s = self.series[start:end]
-        all_files = {}
-        try:
-            if mergeq:
-                ret = self.mergepatch(repo, mergeq, s, wlock)
+            self.applied_dirty = 1;
+            start = self.series_end()
+            if start > 0:
+                self.check_toppatch(repo)
+            if not patch:
+                patch = self.series[start]
+                end = start + 1
             else:
-                ret = self.apply(repo, s, list, wlock=wlock,
-                                 all_files=all_files)
-        except:
-            self.ui.warn(_('cleaning up working directory...'))
-            node = repo.dirstate.parents()[0]
-            hg.revert(repo, node, None, wlock)
-            unknown = repo.status(wlock=wlock)[4]
-            # only remove unknown files that we know we touched or
-            # created while patching
-            for f in unknown:
-                if f in all_files:
-                    util.unlink(repo.wjoin(f))
-            self.ui.warn(_('done\n'))
-            raise
-        top = self.applied[-1].name
-        if ret[0]:
-            self.ui.write("Errors during apply, please fix and refresh %s\n" %
-                          top)
-        else:
-            self.ui.write("Now at: %s\n" % top)
-        return ret[0]
+                end = self.series.index(patch, start) + 1
+            s = self.series[start:end]
+            all_files = {}
+            try:
+                if mergeq:
+                    ret = self.mergepatch(repo, mergeq, s)
+                else:
+                    ret = self.apply(repo, s, list, all_files=all_files)
+            except:
+                self.ui.warn(_('cleaning up working directory...'))
+                node = repo.dirstate.parents()[0]
+                hg.revert(repo, node, None)
+                unknown = repo.status()[4]
+                # only remove unknown files that we know we touched or
+                # created while patching
+                for f in unknown:
+                    if f in all_files:
+                        util.unlink(repo.wjoin(f))
+                self.ui.warn(_('done\n'))
+                raise
+            top = self.applied[-1].name
+            if ret[0]:
+                self.ui.write(
+                    "Errors during apply, please fix and refresh %s\n" % top)
+            else:
+                self.ui.write("Now at: %s\n" % top)
+            return ret[0]
+        finally:
+            del wlock
 
-    def pop(self, repo, patch=None, force=False, update=True, all=False,
-            wlock=None):
-        def getfile(f, rev):
+    def pop(self, repo, patch=None, force=False, update=True, all=False):
+        def getfile(f, rev, flags):
             t = repo.file(f).read(rev)
-            repo.wfile(f, "w").write(t)
+            repo.wwrite(f, t, flags)
 
-        if not wlock:
-            wlock = repo.wlock()
-        if patch:
-            # index, rev, patch
-            info = self.isapplied(patch)
-            if not info:
-                patch = self.lookup(patch)
-            info = self.isapplied(patch)
-            if not info:
-                raise util.Abort(_("patch %s is not applied") % patch)
+        wlock = repo.wlock()
+        try:
+            if patch:
+                # index, rev, patch
+                info = self.isapplied(patch)
+                if not info:
+                    patch = self.lookup(patch)
+                info = self.isapplied(patch)
+                if not info:
+                    raise util.Abort(_("patch %s is not applied") % patch)
 
-        if len(self.applied) == 0:
-            # Allow qpop -a to work repeatedly,
-            # but not qpop without an argument
-            self.ui.warn(_("no patches applied\n"))
-            return not all
+            if len(self.applied) == 0:
+                # Allow qpop -a to work repeatedly,
+                # but not qpop without an argument
+                self.ui.warn(_("no patches applied\n"))
+                return not all
 
-        if not update:
-            parents = repo.dirstate.parents()
-            rr = [ revlog.bin(x.rev) for x in self.applied ]
-            for p in parents:
-                if p in rr:
-                    self.ui.warn("qpop: forcing dirstate update\n")
-                    update = True
+            if not update:
+                parents = repo.dirstate.parents()
+                rr = [ revlog.bin(x.rev) for x in self.applied ]
+                for p in parents:
+                    if p in rr:
+                        self.ui.warn("qpop: forcing dirstate update\n")
+                        update = True
 
-        if not force and update:
-            self.check_localchanges(repo)
+            if not force and update:
+                self.check_localchanges(repo)
 
-        self.applied_dirty = 1;
-        end = len(self.applied)
-        if not patch:
-            if all:
-                popi = 0
+            self.applied_dirty = 1;
+            end = len(self.applied)
+            if not patch:
+                if all:
+                    popi = 0
+                else:
+                    popi = len(self.applied) - 1
             else:
-                popi = len(self.applied) - 1
-        else:
-            popi = info[0] + 1
-            if popi >= end:
-                self.ui.warn("qpop: %s is already at the top\n" % patch)
-                return
-        info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
+                popi = info[0] + 1
+                if popi >= end:
+                    self.ui.warn("qpop: %s is already at the top\n" % patch)
+                    return
+            info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
 
-        start = info[0]
-        rev = revlog.bin(info[1])
+            start = info[0]
+            rev = revlog.bin(info[1])
 
-        # we know there are no local changes, so we can make a simplified
-        # form of hg.update.
-        if update:
-            top = self.check_toppatch(repo)
-            qp = self.qparents(repo, rev)
-            changes = repo.changelog.read(qp)
-            mmap = repo.manifest.read(changes[0])
-            m, a, r, d, u = repo.status(qp, top)[:5]
-            if d:
-                raise util.Abort("deletions found between repo revs")
-            for f in m:
-                getfile(f, mmap[f])
-            for f in r:
-                getfile(f, mmap[f])
-                util.set_exec(repo.wjoin(f), mmap.execf(f))
-            repo.dirstate.update(m + r, 'n')
-            for f in a:
-                try:
-                    os.unlink(repo.wjoin(f))
-                except OSError, e:
-                    if e.errno != errno.ENOENT:
-                        raise
-                try: os.removedirs(os.path.dirname(repo.wjoin(f)))
-                except: pass
-            if a:
-                repo.dirstate.forget(a)
-            repo.dirstate.setparents(qp, revlog.nullid)
-        self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
-        del self.applied[start:end]
-        if len(self.applied):
-            self.ui.write("Now at: %s\n" % self.applied[-1].name)
-        else:
-            self.ui.write("Patch queue now empty\n")
+            # we know there are no local changes, so we can make a simplified
+            # form of hg.update.
+            if update:
+                top = self.check_toppatch(repo)
+                qp = self.qparents(repo, rev)
+                changes = repo.changelog.read(qp)
+                mmap = repo.manifest.read(changes[0])
+                m, a, r, d, u = repo.status(qp, top)[:5]
+                if d:
+                    raise util.Abort("deletions found between repo revs")
+                for f in m:
+                    getfile(f, mmap[f], mmap.flags(f))
+                for f in r:
+                    getfile(f, mmap[f], mmap.flags(f))
+                for f in m + r:
+                    repo.dirstate.normal(f)
+                for f in a:
+                    try:
+                        os.unlink(repo.wjoin(f))
+                    except OSError, e:
+                        if e.errno != errno.ENOENT:
+                            raise
+                    try: os.removedirs(os.path.dirname(repo.wjoin(f)))
+                    except: pass
+                    repo.dirstate.forget(f)
+                repo.dirstate.setparents(qp, revlog.nullid)
+            self.strip(repo, rev, update=False, backup='strip')
+            del self.applied[start:end]
+            if len(self.applied):
+                self.ui.write("Now at: %s\n" % self.applied[-1].name)
+            else:
+                self.ui.write("Patch queue now empty\n")
+        finally:
+            del wlock
 
     def diff(self, repo, pats, opts):
         top = self.check_toppatch(repo)
@@ -902,177 +909,192 @@ class queue:
             self.ui.write("No patches applied\n")
             return 1
         wlock = repo.wlock()
-        self.check_toppatch(repo)
-        (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
-        top = revlog.bin(top)
-        cparents = repo.changelog.parents(top)
-        patchparent = self.qparents(repo, top)
-        message, comments, user, date, patchfound = self.readheaders(patchfn)
+        try:
+            self.check_toppatch(repo)
+            (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
+            top = revlog.bin(top)
+            cparents = repo.changelog.parents(top)
+            patchparent = self.qparents(repo, top)
+            message, comments, user, date, patchfound = self.readheaders(patchfn)
 
-        patchf = self.opener(patchfn, 'r+')
+            patchf = self.opener(patchfn, 'r+')
+
+            # if the patch was a git patch, refresh it as a git patch
+            for line in patchf:
+                if line.startswith('diff --git'):
+                    self.diffopts().git = True
+                    break
 
-        # if the patch was a git patch, refresh it as a git patch
-        for line in patchf:
-            if line.startswith('diff --git'):
-                self.diffopts().git = True
-                break
-
-        msg = opts.get('msg', '').rstrip()
-        if msg:
-            if comments:
-                # Remove existing message.
+            msg = opts.get('msg', '').rstrip()
+            if msg and comments:
+                # Remove existing message, keeping the rest of the comments
+                # fields.
+                # If comments contains 'subject: ', message will prepend
+                # the field and a blank line.
+                if message:
+                    subj = 'subject: ' + message[0].lower()
+                    for i in xrange(len(comments)):
+                        if subj == comments[i].lower():
+                            del comments[i]
+                            message = message[2:]
+                            break
                 ci = 0
-                subj = None
                 for mi in xrange(len(message)):
-                    if comments[ci].lower().startswith('subject: '):
-                        subj = comments[ci][9:]
-                    while message[mi] != comments[ci] and message[mi] != subj:
+                    while message[mi] != comments[ci]:
                         ci += 1
                     del comments[ci]
-            comments.append(msg)
+            if msg:
+                comments.append(msg)
 
-        patchf.seek(0)
-        patchf.truncate()
+            patchf.seek(0)
+            patchf.truncate()
 
-        if comments:
-            comments = "\n".join(comments) + '\n\n'
-            patchf.write(comments)
+            if comments:
+                comments = "\n".join(comments) + '\n\n'
+                patchf.write(comments)
 
-        if opts.get('git'):
-            self.diffopts().git = True
-        fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
-        tip = repo.changelog.tip()
-        if top == tip:
-            # if the top of our patch queue is also the tip, there is an
-            # optimization here.  We update the dirstate in place and strip
-            # off the tip commit.  Then just commit the current directory
-            # tree.  We can also send repo.commit the list of files
-            # changed to speed up the diff
-            #
-            # in short mode, we only diff the files included in the
-            # patch already
-            #
-            # this should really read:
-            #   mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
-            # but we do it backwards to take advantage of manifest/chlog
-            # caching against the next repo.status call
-            #
-            mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
-            changes = repo.changelog.read(tip)
-            man = repo.manifest.read(changes[0])
-            aaa = aa[:]
-            if opts.get('short'):
-                filelist = mm + aa + dd
-                match = dict.fromkeys(filelist).__contains__
-            else:
-                filelist = None
-                match = util.always
-            m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
+            if opts.get('git'):
+                self.diffopts().git = True
+            fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
+            tip = repo.changelog.tip()
+            if top == tip:
+                # if the top of our patch queue is also the tip, there is an
+                # optimization here.  We update the dirstate in place and strip
+                # off the tip commit.  Then just commit the current directory
+                # tree.  We can also send repo.commit the list of files
+                # changed to speed up the diff
+                #
+                # in short mode, we only diff the files included in the
+                # patch already
+                #
+                # this should really read:
+                #   mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
+                # but we do it backwards to take advantage of manifest/chlog
+                # caching against the next repo.status call
+                #
+                mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
+                changes = repo.changelog.read(tip)
+                man = repo.manifest.read(changes[0])
+                aaa = aa[:]
+                if opts.get('short'):
+                    filelist = mm + aa + dd
+                    match = dict.fromkeys(filelist).__contains__
+                else:
+                    filelist = None
+                    match = util.always
+                m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
 
-            # we might end up with files that were added between tip and
-            # the dirstate parent, but then changed in the local dirstate.
-            # in this case, we want them to only show up in the added section
-            for x in m:
-                if x not in aa:
-                    mm.append(x)
-            # we might end up with files added by the local dirstate that
-            # were deleted by the patch.  In this case, they should only
-            # show up in the changed section.
-            for x in a:
-                if x in dd:
-                    del dd[dd.index(x)]
-                    mm.append(x)
-                else:
-                    aa.append(x)
-            # make sure any files deleted in the local dirstate
-            # are not in the add or change column of the patch
-            forget = []
-            for x in d + r:
-                if x in aa:
-                    del aa[aa.index(x)]
-                    forget.append(x)
-                    continue
-                elif x in mm:
-                    del mm[mm.index(x)]
-                dd.append(x)
+                # we might end up with files that were added between
+                # tip and the dirstate parent, but then changed in the
+                # local dirstate. in this case, we want them to only
+                # show up in the added section
+                for x in m:
+                    if x not in aa:
+                        mm.append(x)
+                # we might end up with files added by the local dirstate that
+                # were deleted by the patch.  In this case, they should only
+                # show up in the changed section.
+                for x in a:
+                    if x in dd:
+                        del dd[dd.index(x)]
+                        mm.append(x)
+                    else:
+                        aa.append(x)
+                # make sure any files deleted in the local dirstate
+                # are not in the add or change column of the patch
+                forget = []
+                for x in d + r:
+                    if x in aa:
+                        del aa[aa.index(x)]
+                        forget.append(x)
+                        continue
+                    elif x in mm:
+                        del mm[mm.index(x)]
+                    dd.append(x)
 
-            m = util.unique(mm)
-            r = util.unique(dd)
-            a = util.unique(aa)
-            c = [filter(matchfn, l) for l in (m, a, r, [], u)]
-            filelist = util.unique(c[0] + c[1] + c[2])
-            patch.diff(repo, patchparent, files=filelist, match=matchfn,
-                       fp=patchf, changes=c, opts=self.diffopts())
-            patchf.close()
+                m = util.unique(mm)
+                r = util.unique(dd)
+                a = util.unique(aa)
+                c = [filter(matchfn, l) for l in (m, a, r, [], u)]
+                filelist = util.unique(c[0] + c[1] + c[2])
+                patch.diff(repo, patchparent, files=filelist, match=matchfn,
+                           fp=patchf, changes=c, opts=self.diffopts())
+                patchf.close()
 
-            repo.dirstate.setparents(*cparents)
-            copies = {}
-            for dst in a:
-                src = repo.dirstate.copied(dst)
-                if src is None:
-                    continue
-                copies.setdefault(src, []).append(dst)
-            repo.dirstate.update(a, 'a')
-            # remember the copies between patchparent and tip
-            # this may be slow, so don't do it if we're not tracking copies
-            if self.diffopts().git:
-                for dst in aaa:
-                    f = repo.file(dst)
-                    src = f.renamed(man[dst])
-                    if src:
-                        copies[src[0]] = copies.get(dst, [])
-                        if dst in a:
-                            copies[src[0]].append(dst)
-                    # we can't copy a file created by the patch itself
-                    if dst in copies:
-                        del copies[dst]
-            for src, dsts in copies.iteritems():
-                for dst in dsts:
-                    repo.dirstate.copy(src, dst)
-            repo.dirstate.update(r, 'r')
-            # if the patch excludes a modified file, mark that file with mtime=0
-            # so status can see it.
-            mm = []
-            for i in xrange(len(m)-1, -1, -1):
-                if not matchfn(m[i]):
-                    mm.append(m[i])
-                    del m[i]
-            repo.dirstate.update(m, 'n')
-            repo.dirstate.update(mm, 'n', st_mtime=-1, st_size=-1)
-            repo.dirstate.forget(forget)
+                repo.dirstate.setparents(*cparents)
+                copies = {}
+                for dst in a:
+                    src = repo.dirstate.copied(dst)
+                    if src is not None:
+                        copies.setdefault(src, []).append(dst)
+                    repo.dirstate.add(dst)
+                # remember the copies between patchparent and tip
+                # this may be slow, so don't do it if we're not tracking copies
+                if self.diffopts().git:
+                    for dst in aaa:
+                        f = repo.file(dst)
+                        src = f.renamed(man[dst])
+                        if src:
+                            copies[src[0]] = copies.get(dst, [])
+                            if dst in a:
+                                copies[src[0]].append(dst)
+                        # we can't copy a file created by the patch itself
+                        if dst in copies:
+                            del copies[dst]
+                for src, dsts in copies.iteritems():
+                    for dst in dsts:
+                        repo.dirstate.copy(src, dst)
+                for f in r:
+                    repo.dirstate.remove(f)
+                # if the patch excludes a modified file, mark that
+                # file with mtime=0 so status can see it.
+                mm = []
+                for i in xrange(len(m)-1, -1, -1):
+                    if not matchfn(m[i]):
+                        mm.append(m[i])
+                        del m[i]
+                for f in m:
+                    repo.dirstate.normal(f)
+                for f in mm:
+                    repo.dirstate.normallookup(f)
+                for f in forget:
+                    repo.dirstate.forget(f)
 
-            if not msg:
-                if not message:
-                    message = "[mq]: %s\n" % patchfn
+                if not msg:
+                    if not message:
+                        message = "[mq]: %s\n" % patchfn
+                    else:
+                        message = "\n".join(message)
                 else:
-                    message = "\n".join(message)
-            else:
-                message = msg
+                    message = msg
 
-            self.strip(repo, top, update=False, backup='strip', wlock=wlock)
-            n = repo.commit(filelist, message, changes[1], match=matchfn,
-                            force=1, wlock=wlock)
-            self.applied[-1] = statusentry(revlog.hex(n), patchfn)
-            self.applied_dirty = 1
-            self.removeundo(repo)
-        else:
-            self.printdiff(repo, patchparent, fp=patchf)
-            patchf.close()
-            added = repo.status()[1]
-            for a in added:
-                f = repo.wjoin(a)
-                try:
-                    os.unlink(f)
-                except OSError, e:
-                    if e.errno != errno.ENOENT:
-                        raise
-                try: os.removedirs(os.path.dirname(f))
-                except: pass
-            # forget the file copies in the dirstate
-            # push should readd the files later on
-            repo.dirstate.forget(added)
-            self.pop(repo, force=True, wlock=wlock)
-            self.push(repo, force=True, wlock=wlock)
+                self.strip(repo, top, update=False,
+                           backup='strip')
+                n = repo.commit(filelist, message, changes[1], match=matchfn,
+                                force=1)
+                self.applied[-1] = statusentry(revlog.hex(n), patchfn)
+                self.applied_dirty = 1
+                self.removeundo(repo)
+            else:
+                self.printdiff(repo, patchparent, fp=patchf)
+                patchf.close()
+                added = repo.status()[1]
+                for a in added:
+                    f = repo.wjoin(a)
+                    try:
+                        os.unlink(f)
+                    except OSError, e:
+                        if e.errno != errno.ENOENT:
+                            raise
+                    try: os.removedirs(os.path.dirname(f))
+                    except: pass
+                    # forget the file copies in the dirstate
+                    # push should readd the files later on
+                    repo.dirstate.forget(a)
+                self.pop(repo, force=True)
+                self.push(repo, force=True)
+        finally:
+            del wlock
 
     def init(self, repo, create=False):
         if not create and os.path.isdir(self.path):
@@ -1489,6 +1511,9 @@ def clone(ui, source, dest=None, **opts)
 
     Source patch repository is looked for in <src>/.hg/patches by
     default.  Use -p <url> to change.
+
+    The patch directory must be a nested mercurial repository, as
+    would be created by qinit -c.
     '''
     def patchdir(repo):
         url = repo.url()
@@ -1499,6 +1524,12 @@ def clone(ui, source, dest=None, **opts)
     if dest is None:
         dest = hg.defaultdest(source)
     sr = hg.repository(ui, ui.expandpath(source))
+    patchespath = opts['patches'] or patchdir(sr)
+    try:
+        pr = hg.repository(ui, patchespath)
+    except hg.RepoError:
+        raise util.Abort(_('versioned patch repository not found'
+                           ' (see qinit -c)'))
     qbase, destrev = None, None
     if sr.local():
         if sr.mq.applied:
@@ -1865,10 +1896,13 @@ def rename(ui, repo, patch, name=None, *
     r = q.qrepo()
     if r:
         wlock = r.wlock()
-        if r.dirstate.state(name) == 'r':
-            r.undelete([name], wlock)
-        r.copy(patch, name, wlock)
-        r.remove([patch], False, wlock)
+        try:
+            if r.dirstate[name] == 'r':
+                r.undelete([name])
+            r.copy(patch, name)
+            r.remove([patch], False)
+        finally:
+            del wlock
 
     q.save_dirty()
 
@@ -2110,10 +2144,8 @@ cmdtable = {
           ('U', 'noupdate', None, _('do not update the new working directories')),
           ('', 'uncompressed', None,
            _('use uncompressed transfer (fast over LAN)')),
-          ('e', 'ssh', '', _('specify ssh command to use')),
           ('p', 'patches', '', _('location of source patch repo')),
-          ('', 'remotecmd', '',
-           _('specify hg command to run on the remote side'))],
+         ] + commands.remoteopts,
          _('hg qclone [OPTION]... SOURCE [DEST]')),
     "qcommit|qci":
         (commit,
@@ -2122,8 +2154,7 @@ cmdtable = {
     "^qdiff":
         (diff,
          [('g', 'git', None, _('use git extended diff format')),
-          ('I', 'include', [], _('include names matching the given patterns')),
-          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+         ] + commands.walkopts,
          _('hg qdiff [-I] [-X] [-g] [FILE]...')),
     "qdelete|qremove|qrm":
         (delete,
@@ -2162,9 +2193,8 @@ cmdtable = {
         (new,
          [('e', 'edit', None, _('edit commit message')),
           ('f', 'force', None, _('import uncommitted changes into patch')),
-          ('I', 'include', [], _('include names matching the given patterns')),
-          ('X', 'exclude', [], _('exclude names matching the given patterns')),
-          ] + commands.commitopts,
+          ('g', 'git', None, _('use git extended diff format')),
+          ] + commands.walkopts + commands.commitopts,
          _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
     "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
     "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
@@ -2187,9 +2217,7 @@ cmdtable = {
          [('e', 'edit', None, _('edit commit message')),
           ('g', 'git', None, _('use git extended diff format')),
           ('s', 'short', None, _('refresh only files already in the patch')),
-          ('I', 'include', [], _('include names matching the given patterns')),
-          ('X', 'exclude', [], _('exclude names matching the given patterns')),
-          ] + commands.commitopts,
+          ] + commands.walkopts + commands.commitopts,
          _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
     'qrename|qmv':
         (rename, [], _('hg qrename PATCH1 [PATCH2]')),
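
Most of the mq changes above follow one pattern: instead of threading wlock/lock arguments through every call, each entry point now acquires the locks itself and releases them with `del` in a finally block, relying on the lock objects releasing themselves when the last reference disappears. A toy sketch of that idiom with a stand-in lock class (FakeLock and guarded_operation are hypothetical, not mq code):

    class FakeLock(object):
        # stand-in for a lock that, like Mercurial's at the time, releases
        # itself when garbage-collected; that is what makes "del lock, wlock"
        # in the finally blocks above work
        def __init__(self, name):
            self.name = name
            print('acquired %s' % self.name)

        def __del__(self):
            print('released %s' % self.name)

    def guarded_operation():
        wlock = lock = None
        try:
            wlock = FakeLock('wlock')   # working-directory lock first
            lock = FakeLock('lock')     # then the store lock
            # ... the actual work (apply, strip, refresh, ...) goes here ...
            return 0
        finally:
            # drop the references; on CPython the locks are released at once
            del lock, wlock

    guarded_operation()
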
new file mode 100644
--- /dev/null
+++ b/hgext/parentrevspec.py
@@ -0,0 +1,96 @@
+# Mercurial extension to make it easy to refer to the parent of a revision
+#
+# Copyright (C) 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+'''\
+use suffixes to refer to ancestor revisions
+
+This extension allows you to use git-style suffixes to refer to
+the ancestors of a specific revision.
+
+For example, if you can refer to a revision as "foo", then:
+
+- foo^N = Nth parent of foo:
+  foo^0 = foo
+  foo^1 = first parent of foo
+  foo^2 = second parent of foo
+  foo^  = foo^1
+
+- foo~N = Nth first grandparent of foo
+  foo~0 = foo
+  foo~1 = foo^1 = foo^ = first parent of foo
+  foo~2 = foo^1^1 = foo^^ = first parent of first parent of foo
+'''
+import mercurial.repo
+
+def reposetup(ui, repo):
+    if not repo.local():
+        return
+
+    class parentrevspecrepo(repo.__class__):
+        def lookup(self, key):
+            try:
+                _super = super(parentrevspecrepo, self)
+                return _super.lookup(key)
+            except mercurial.repo.RepoError:
+                pass
+
+            circ = key.find('^')
+            tilde = key.find('~')
+            if circ < 0 and tilde < 0:
+                raise
+            elif circ >= 0 and tilde >= 0:
+                end = min(circ, tilde)
+            else:
+                end = max(circ, tilde)
+
+            cl = self.changelog
+            base = key[:end]
+            try:
+                node = _super.lookup(base)
+            except mercurial.repo.RepoError:
+                # eek - reraise the first error
+                return _super.lookup(key)
+
+            rev = cl.rev(node)
+            suffix = key[end:]
+            i = 0
+            while i < len(suffix):
+                # foo^N => Nth parent of foo
+                # foo^0 == foo
+                # foo^1 == foo^ == 1st parent of foo
+                # foo^2 == 2nd parent of foo
+                if suffix[i] == '^':
+                    j = i + 1
+                    p = cl.parentrevs(rev)
+                    if j < len(suffix) and suffix[j].isdigit():
+                        j += 1
+                        n = int(suffix[i+1:j])
+                        if n > 2 or n == 2 and p[1] == -1:
+                            raise
+                    else:
+                        n = 1
+                    if n:
+                        rev = p[n - 1]
+                    i = j
+                # foo~N => Nth first grandparent of foo
+                # foo~0 = foo
+                # foo~1 = foo^1 == foo^ == 1st parent of foo
+                # foo~2 = foo^1^1 == foo^^ == 1st parent of 1st parent of foo
+                elif suffix[i] == '~':
+                    j = i + 1
+                    while j < len(suffix) and suffix[j].isdigit():
+                        j += 1
+                    if j == i + 1:
+                        raise
+                    n = int(suffix[i+1:j])
+                    for k in xrange(n):
+                        rev = cl.parentrevs(rev)[0]
+                    i = j
+                else:
+                    raise
+            return cl.node(rev)
+
+    repo.__class__ = parentrevspecrepo
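
The '~' branch of lookup() above only ever follows first parents: rev = cl.parentrevs(rev)[0], repeated N times. A minimal standalone sketch of that walk, using a toy parentrevs table instead of a real changelog (the names and data here are illustrative, not Mercurial API):

    # toy parentrevs table: rev -> (first parent rev, second parent rev)
    parentrevs = {0: (-1, -1), 1: (0, -1), 2: (1, 0), 3: (2, -1)}

    def first_parent_ancestor(rev, n):
        # mirrors the '~N' branch: take the first parent, n times
        for k in range(n):
            rev = parentrevs[rev][0]
        return rev

    print(first_parent_ancestor(3, 2))  # "3~2" -> 1
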
--- a/hgext/patchbomb.py
+++ b/hgext/patchbomb.py
@@ -306,8 +306,12 @@ def patchbomb(ui, repo, *revs, **opts):
                 d = cdiffstat(_('Final summary:\n'), jumbo)
                 if d: body = '\n' + d
 
-            ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
-            body = ui.edit(body, sender)
+            if opts['desc']:
+                body = open(opts['desc']).read()
+            else:
+                ui.write(_('\nWrite the introductory message for the '
+                           'patch series.\n\n'))
+                body = ui.edit(body, sender)
 
             msg = email.MIMEText.MIMEText(body)
             msg['Subject'] = subj
@@ -417,6 +421,7 @@ cmdtable = {
           ('c', 'cc', [], _('email addresses of copy recipients')),
           ('d', 'diffstat', None, _('add diffstat output to messages')),
           ('', 'date', '', _('use the given date as the sending date')),
+          ('', 'desc', '', _('use the given file as the series description')),
           ('g', 'git', None, _('use git extended diff format')),
           ('f', 'from', '', _('email address of sender')),
           ('', 'plain', None, _('omit hg patch header')),
--- a/hgext/purge.py
+++ b/hgext/purge.py
@@ -27,7 +27,7 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 
-from mercurial import hg, util
+from mercurial import hg, util, commands
 from mercurial.i18n import _
 import os
 
@@ -162,7 +162,6 @@ cmdtable = {
           ('p', 'print', None, _('print the file names instead of deleting them')),
           ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
                                   ' (implies -p)')),
-          ('I', 'include', [], _('include names matching the given patterns')),
-          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+         ] + commands.walkopts,
          _('hg purge [OPTION]... [DIR]...'))
 }
new file mode 100644
--- /dev/null
+++ b/hgext/record.py
@@ -0,0 +1,415 @@
+# record.py
+#
+# Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
+#
+# This software may be used and distributed according to the terms of
+# the GNU General Public License, incorporated herein by reference.
+
+'''interactive change selection during commit'''
+
+from mercurial.i18n import _
+from mercurial import cmdutil, commands, hg, mdiff, patch, revlog
+from mercurial import util
+import copy, cStringIO, errno, operator, os, re, shutil, tempfile
+
+lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
+
+def scanpatch(fp):
+    lr = patch.linereader(fp)
+
+    def scanwhile(first, p):
+        lines = [first]
+        while True:
+            line = lr.readline()
+            if not line:
+                break
+            if p(line):
+                lines.append(line)
+            else:
+                lr.push(line)
+                break
+        return lines
+
+    while True:
+        line = lr.readline()
+        if not line:
+            break
+        if line.startswith('diff --git a/'):
+            def notheader(line):
+                s = line.split(None, 1)
+                return not s or s[0] not in ('---', 'diff')
+            header = scanwhile(line, notheader)
+            fromfile = lr.readline()
+            if fromfile.startswith('---'):
+                tofile = lr.readline()
+                header += [fromfile, tofile]
+            else:
+                lr.push(fromfile)
+            yield 'file', header
+        elif line[0] == ' ':
+            yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
+        elif line[0] in '-+':
+            yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
+        else:
+            m = lines_re.match(line)
+            if m:
+                yield 'range', m.groups()
+            else:
+                raise patch.PatchError('unknown patch content: %r' % line)
+
+class header(object):
+    diff_re = re.compile('diff --git a/(.*) b/(.*)$')
+    allhunks_re = re.compile('(?:index|new file|deleted file) ')
+    pretty_re = re.compile('(?:new file|deleted file) ')
+    special_re = re.compile('(?:index|new|deleted|copy|rename) ')
+
+    def __init__(self, header):
+        self.header = header
+        self.hunks = []
+
+    def binary(self):
+        for h in self.header:
+            if h.startswith('index '):
+                return True
+
+    def pretty(self, fp):
+        for h in self.header:
+            if h.startswith('index '):
+                fp.write(_('this modifies a binary file (all or nothing)\n'))
+                break
+            if self.pretty_re.match(h):
+                fp.write(h)
+                if self.binary():
+                    fp.write(_('this is a binary file\n'))
+                break
+            if h.startswith('---'):
+                fp.write(_('%d hunks, %d lines changed\n') %
+                         (len(self.hunks),
+                          sum([h.added + h.removed for h in self.hunks])))
+                break
+            fp.write(h)
+
+    def write(self, fp):
+        fp.write(''.join(self.header))
+
+    def allhunks(self):
+        for h in self.header:
+            if self.allhunks_re.match(h):
+                return True
+
+    def files(self):
+        fromfile, tofile = self.diff_re.match(self.header[0]).groups()
+        if fromfile == tofile:
+            return [fromfile]
+        return [fromfile, tofile]
+
+    def filename(self):
+        return self.files()[-1]
+
+    def __repr__(self):
+        return '<header %s>' % (' '.join(map(repr, self.files())))
+
+    def special(self):
+        for h in self.header:
+            if self.special_re.match(h):
+                return True
+
+def countchanges(hunk):
+    add = len([h for h in hunk if h[0] == '+'])
+    rem = len([h for h in hunk if h[0] == '-'])
+    return add, rem
+
+class hunk(object):
+    maxcontext = 3
+
+    def __init__(self, header, fromline, toline, proc, before, hunk, after):
+        def trimcontext(number, lines):
+            delta = len(lines) - self.maxcontext
+            if False and delta > 0:
+                return number + delta, lines[:self.maxcontext]
+            return number, lines
+
+        self.header = header
+        self.fromline, self.before = trimcontext(fromline, before)
+        self.toline, self.after = trimcontext(toline, after)
+        self.proc = proc
+        self.hunk = hunk
+        self.added, self.removed = countchanges(self.hunk)
+
+    def write(self, fp):
+        delta = len(self.before) + len(self.after)
+        fromlen = delta + self.removed
+        tolen = delta + self.added
+        fp.write('@@ -%d,%d +%d,%d @@%s\n' %
+                 (self.fromline, fromlen, self.toline, tolen,
+                  self.proc and (' ' + self.proc)))
+        fp.write(''.join(self.before + self.hunk + self.after))
+
+    pretty = write
+
+    def filename(self):
+        return self.header.filename()
+
+    def __repr__(self):
+        return '<hunk %r@%d>' % (self.filename(), self.fromline)
+
+def parsepatch(fp):
+    class parser(object):
+        def __init__(self):
+            self.fromline = 0
+            self.toline = 0
+            self.proc = ''
+            self.header = None
+            self.context = []
+            self.before = []
+            self.hunk = []
+            self.stream = []
+
+        def addrange(self, (fromstart, fromend, tostart, toend, proc)):
+            self.fromline = int(fromstart)
+            self.toline = int(tostart)
+            self.proc = proc
+
+        def addcontext(self, context):
+            if self.hunk:
+                h = hunk(self.header, self.fromline, self.toline, self.proc,
+                         self.before, self.hunk, context)
+                self.header.hunks.append(h)
+                self.stream.append(h)
+                self.fromline += len(self.before) + h.removed
+                self.toline += len(self.before) + h.added
+                self.before = []
+                self.hunk = []
+                self.proc = ''
+            self.context = context
+
+        def addhunk(self, hunk):
+            if self.context:
+                self.before = self.context
+                self.context = []
+            self.hunk = hunk
+
+        def newfile(self, hdr):
+            self.addcontext([])
+            h = header(hdr)
+            self.stream.append(h)
+            self.header = h
+
+        def finished(self):
+            self.addcontext([])
+            return self.stream
+
+        transitions = {
+            'file': {'context': addcontext,
+                     'file': newfile,
+                     'hunk': addhunk,
+                     'range': addrange},
+            'context': {'file': newfile,
+                        'hunk': addhunk,
+                        'range': addrange},
+            'hunk': {'context': addcontext,
+                     'file': newfile,
+                     'range': addrange},
+            'range': {'context': addcontext,
+                      'hunk': addhunk},
+            }
+
+    p = parser()
+
+    state = 'context'
+    for newstate, data in scanpatch(fp):
+        try:
+            p.transitions[state][newstate](p, data)
+        except KeyError:
+            raise patch.PatchError('unhandled transition: %s -> %s' %
+                                   (state, newstate))
+        state = newstate
+    return p.finished()
+
+def filterpatch(ui, chunks):
+    chunks = list(chunks)
+    chunks.reverse()
+    seen = {}
+    def consumefile():
+        consumed = []
+        while chunks:
+            if isinstance(chunks[-1], header):
+                break
+            else:
+                consumed.append(chunks.pop())
+        return consumed
+    resp_all = [None]
+    resp_file = [None]
+    applied = {}
+    def prompt(query):
+        if resp_all[0] is not None:
+            return resp_all[0]
+        if resp_file[0] is not None:
+            return resp_file[0]
+        while True:
+            r = (ui.prompt(query + _(' [Ynsfdaq?] '), '[Ynsfdaq?]?$',
+                           matchflags=re.I) or 'y').lower()
+            if r == '?':
+                c = record.__doc__.find('y - record this change')
+                for l in record.__doc__[c:].splitlines():
+                    if l: ui.write(_(l.strip()), '\n')
+                continue
+            elif r == 's':
+                r = resp_file[0] = 'n'
+            elif r == 'f':
+                r = resp_file[0] = 'y'
+            elif r == 'd':
+                r = resp_all[0] = 'n'
+            elif r == 'a':
+                r = resp_all[0] = 'y'
+            elif r == 'q':
+                raise util.Abort(_('user quit'))
+            return r
+    while chunks:
+        chunk = chunks.pop()
+        if isinstance(chunk, header):
+            resp_file = [None]
+            fixoffset = 0
+            hdr = ''.join(chunk.header)
+            if hdr in seen:
+                consumefile()
+                continue
+            seen[hdr] = True
+            if resp_all[0] is None:
+                chunk.pretty(ui)
+            r = prompt(_('examine changes to %s?') %
+                       _(' and ').join(map(repr, chunk.files())))
+            if r == 'y':
+                applied[chunk.filename()] = [chunk]
+                if chunk.allhunks():
+                    applied[chunk.filename()] += consumefile()
+            else:
+                consumefile()
+        else:
+            if resp_file[0] is None and resp_all[0] is None:
+                chunk.pretty(ui)
+            r = prompt(_('record this change to %r?') %
+                       chunk.filename())
+            if r == 'y':
+                if fixoffset:
+                    chunk = copy.copy(chunk)
+                    chunk.toline += fixoffset
+                applied[chunk.filename()].append(chunk)
+            else:
+                fixoffset += chunk.removed - chunk.added
+    return reduce(operator.add, [h for h in applied.itervalues()
+                                 if h[0].special() or len(h) > 1], [])
+
+def record(ui, repo, *pats, **opts):
+    '''interactively select changes to commit
+
+    If a list of files is omitted, all changes reported by "hg status"
+    will be candidates for recording.
+
+    You will be prompted for whether to record changes to each
+    modified file, and for files with multiple changes, for each
+    change to use.  For each query, the following responses are
+    possible:
+
+    y - record this change
+    n - skip this change
+
+    s - skip remaining changes to this file
+    f - record remaining changes to this file
+
+    d - done, skip remaining changes and files
+    a - record all changes to all remaining files
+    q - quit, recording no changes
+
+    ? - display help'''
+
+    if not ui.interactive:
+        raise util.Abort(_('running non-interactively, use commit instead'))
+
+    def recordfunc(ui, repo, files, message, match, opts):
+        if files:
+            changes = None
+        else:
+            changes = repo.status(files=files, match=match)[:5]
+            modified, added, removed = changes[:3]
+            files = modified + added + removed
+        diffopts = mdiff.diffopts(git=True, nodates=True)
+        fp = cStringIO.StringIO()
+        patch.diff(repo, repo.dirstate.parents()[0], files=files,
+                   match=match, changes=changes, opts=diffopts, fp=fp)
+        fp.seek(0)
+
+        chunks = filterpatch(ui, parsepatch(fp))
+        del fp
+
+        contenders = {}
+        for h in chunks:
+            try: contenders.update(dict.fromkeys(h.files()))
+            except AttributeError: pass
+
+        newfiles = [f for f in files if f in contenders]
+
+        if not newfiles:
+            ui.status(_('no changes to record\n'))
+            return 0
+
+        if changes is None:
+            changes = repo.status(files=newfiles, match=match)[:5]
+        modified = dict.fromkeys(changes[0])
+
+        backups = {}
+        backupdir = repo.join('record-backups')
+        try:
+            os.mkdir(backupdir)
+        except OSError, err:
+            if err.errno != errno.EEXIST:
+                raise
+        try:
+            for f in newfiles:
+                if f not in modified:
+                    continue
+                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
+                                               dir=backupdir)
+                os.close(fd)
+                ui.debug('backup %r as %r\n' % (f, tmpname))
+                util.copyfile(repo.wjoin(f), tmpname)
+                backups[f] = tmpname
+
+            fp = cStringIO.StringIO()
+            for c in chunks:
+                if c.filename() in backups:
+                    c.write(fp)
+            dopatch = fp.tell()
+            fp.seek(0)
+
+            if backups:
+                hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)
+
+            if dopatch:
+                ui.debug('applying patch\n')
+                ui.debug(fp.getvalue())
+                patch.internalpatch(fp, ui, 1, repo.root)
+            del fp
+
+            repo.commit(newfiles, message, opts['user'], opts['date'], match,
+                        force_editor=opts.get('force_editor'))
+            return 0
+        finally:
+            try:
+                for realname, tmpname in backups.iteritems():
+                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
+                    util.copyfile(tmpname, repo.wjoin(realname))
+                    os.unlink(tmpname)
+                os.rmdir(backupdir)
+            except OSError:
+                pass
+    return cmdutil.commit(ui, repo, recordfunc, pats, opts)
+
+cmdtable = {
+    "record":
+        (record,
+         [('A', 'addremove', None,
+           _('mark new/missing files as added/removed before committing')),
+         ] + commands.walkopts + commands.commitopts + commands.commitopts2,
+         _('hg record [OPTION]... [FILE]...')),
+}
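
The fixoffset bookkeeping in filterpatch() above is what keeps the remaining hunks applicable after some are skipped: every skipped hunk shifts the target line of later hunks in the same file by (removed - added). A standalone sketch of that arithmetic on toy data, with no Mercurial objects involved:

    # (toline, added, removed, keep) -- toy hunks in one file
    hunks = [(10, 2, 1, False),   # skipped; would have grown the file by 1 line
             (40, 3, 3, True)]

    fixoffset = 0
    kept_tolines = []
    for toline, added, removed, keep in hunks:
        if keep:
            kept_tolines.append(toline + fixoffset)
        else:
            # undo the skipped hunk's net growth for later target lines
            fixoffset += removed - added
    print(kept_tolines)  # [39]
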
--- a/hgext/transplant.py
+++ b/hgext/transplant.py
@@ -96,9 +96,10 @@ class transplanter:
         diffopts = patch.diffopts(self.ui, opts)
         diffopts.git = True
 
-        wlock = repo.wlock()
-        lock = repo.lock()
+        lock = wlock = None
         try:
+            wlock = repo.wlock()
+            lock = repo.lock()
             for rev in revs:
                 node = revmap[rev]
                 revstr = '%s:%s' % (rev, revlog.short(node))
@@ -118,9 +119,8 @@ class transplanter:
                         continue
                     if pulls:
                         if source != repo:
-                            repo.pull(source, heads=pulls, lock=lock)
-                        merge.update(repo, pulls[-1], False, False, None,
-                                     wlock=wlock)
+                            repo.pull(source, heads=pulls)
+                        merge.update(repo, pulls[-1], False, False, None)
                         p1, p2 = repo.dirstate.parents()
                         pulls = []
 
@@ -131,7 +131,7 @@ class transplanter:
                     # fail.
                     domerge = True
                     if not hasnode(repo, node):
-                        repo.pull(source, heads=[node], lock=lock)
+                        repo.pull(source, heads=[node])
 
                 if parents[1] != revlog.nullid:
                     self.ui.note(_('skipping merge changeset %s:%s\n')
@@ -146,11 +146,11 @@ class transplanter:
                 del revmap[rev]
                 if patchfile or domerge:
                     try:
-                        n = self.applyone(repo, node, source.changelog.read(node),
+                        n = self.applyone(repo, node,
+                                          source.changelog.read(node),
                                           patchfile, merge=domerge,
                                           log=opts.get('log'),
-                                          filter=opts.get('filter'),
-                                          lock=lock, wlock=wlock)
+                                          filter=opts.get('filter'))
                         if n and domerge:
                             self.ui.status(_('%s merged at %s\n') % (revstr,
                                       revlog.short(n)))
@@ -161,11 +161,12 @@ class transplanter:
                         if patchfile:
                             os.unlink(patchfile)
             if pulls:
-                repo.pull(source, heads=pulls, lock=lock)
-                merge.update(repo, pulls[-1], False, False, None, wlock=wlock)
+                repo.pull(source, heads=pulls)
+                merge.update(repo, pulls[-1], False, False, None)
         finally:
             self.saveseries(revmap, merges)
             self.transplants.write()
+            del lock, wlock
 
     def filter(self, filter, changelog, patchfile):
         '''arbitrarily rewrite changeset before applying it'''
@@ -193,7 +194,7 @@ class transplanter:
         return (user, date, msg)
 
     def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
-                 filter=None, lock=None, wlock=None):
+                 filter=None):
         '''apply the patch in patchfile to the repository as a transplant'''
         (manifest, user, (time, timezone), files, message) = cl[:5]
         date = "%d %d" % (time, timezone)
@@ -219,7 +220,7 @@ class transplanter:
                         self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
                         return None
                 finally:
-                    files = patch.updatedir(self.ui, repo, files, wlock=wlock)
+                    files = patch.updatedir(self.ui, repo, files)
             except Exception, inst:
                 if filter:
                     os.unlink(patchfile)
@@ -237,8 +238,7 @@ class transplanter:
             p1, p2 = repo.dirstate.parents()
             repo.dirstate.setparents(p1, node)
 
-        n = repo.commit(files, message, user, date, lock=lock, wlock=wlock,
-                        extra=extra)
+        n = repo.commit(files, message, user, date, extra=extra)
         if not merge:
             self.transplants.set(n, node)
 
@@ -272,20 +272,24 @@ class transplanter:
 
         extra = {'transplant_source': node}
         wlock = repo.wlock()
-        p1, p2 = repo.dirstate.parents()
-        if p1 != parents[0]:
-            raise util.Abort(_('working dir not at transplant parent %s') %
-                             revlog.hex(parents[0]))
-        if merge:
-            repo.dirstate.setparents(p1, parents[1])
-        n = repo.commit(None, message, user, date, wlock=wlock, extra=extra)
-        if not n:
-            raise util.Abort(_('commit failed'))
-        if not merge:
-            self.transplants.set(n, node)
-        self.unlog()
+        try:
+            p1, p2 = repo.dirstate.parents()
+            if p1 != parents[0]:
+                raise util.Abort(
+                    _('working dir not at transplant parent %s') %
+                                 revlog.hex(parents[0]))
+            if merge:
+                repo.dirstate.setparents(p1, parents[1])
+            n = repo.commit(None, message, user, date, extra=extra)
+            if not n:
+                raise util.Abort(_('commit failed'))
+            if not merge:
+                self.transplants.set(n, node)
+            self.unlog()
 
-        return n, node
+            return n, node
+        finally:
+            del wlock
 
     def readseries(self):
         nodes = []
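
The locking change above follows one pattern throughout: bind the lock names to None first, acquire inside the try block, and let the finally clause drop the references so the locks are released even on error. A minimal sketch of that shape with a stand-in lock class (CPython refcounting assumed; Mercurial's real locks likewise release themselves when the last reference goes away):

    class fakelock(object):          # stand-in for repo.lock()/repo.wlock()
        def __del__(self):
            print('lock released')

    lock = wlock = None
    try:
        wlock = fakelock()
        lock = fakelock()
        # ... work that needs both locks ...
    finally:
        del lock, wlock              # dropping the references frees the locks
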
--- a/hgext/win32text.py
+++ b/hgext/win32text.py
@@ -1,7 +1,24 @@
-import mercurial.util
+from mercurial import util, ui
+from mercurial.i18n import gettext as _
+import re
+
+# regexp for single LF without CR preceding.
+re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
 
 def dumbdecode(s, cmd):
-    return s.replace('\n', '\r\n')
+    # warn if the file already has CRLF in the repository,
+    # as that may cause unexpected eol conversion.
+    # see issue 302:
+    #   http://www.selenic.com/mercurial/bts/issue302
+    if '\r\n' in s:
+        u = ui.ui()
+        u.warn(_('WARNING: file in repository already has CRLF line endings\n'
+                 ' which do not need eol conversion by the win32text plugin.\n'
+                 ' Please reconsider your encode/decode settings in'
+                 ' mercurial.ini or .hg/hgrc\n'
+                 ' before the next commit.\n'))
+    # replace single LF to CRLF
+    return re_single_lf.sub('\\1\r\n', s)
 
 def dumbencode(s, cmd):
     return s.replace('\r\n', '\n')
@@ -20,7 +37,7 @@ def cleverencode(s, cmd):
         return dumbencode(s, cmd)
     return s
 
-mercurial.util.filtertable.update({
+util.filtertable.update({
     'dumbdecode:': dumbdecode,
     'dumbencode:': dumbencode,
     'cleverdecode:': cleverdecode,
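
A quick standalone check of the re_single_lf substitution above: bare LFs are promoted to CRLF while lines that already end in CRLF are left alone, which is exactly why the new warning fires when CRLF is already present in the repository.

    import re

    re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
    sample = 'already crlf\r\nplain lf\nanother\n'
    print(repr(re_single_lf.sub('\\1\r\n', sample)))
    # 'already crlf\r\nplain lf\r\nanother\r\n'
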
--- a/hgmerge
+++ b/hgmerge
@@ -96,6 +96,20 @@ ask_if_merged() {
     done
 }
 
+# Check if conflict markers are present and ask if the merge was successful
+conflicts_or_success() {
+    while egrep '^(<<<<<<< .*|=======|>>>>>>> .*)$' "$LOCAL" >/dev/null; do
+        echo "$LOCAL contains conflict markers."
+        echo "Keep this version? [y/n]"
+        read answer
+        case "$answer" in
+            y*|Y*) success;;
+            n*|N*) failure;;
+        esac
+    done
+    success
+}
+
 # Clean up when interrupted
 trap "failure" 1 2 3 6 15 # HUP INT QUIT ABRT TERM
 
@@ -123,20 +137,20 @@ if [ -n "$FILEMERGE" ]; then
     # filemerge prefers the right by default
     $FILEMERGE -left "$OTHER" -right "$LOCAL" -ancestor "$BASE" -merge "$LOCAL"
     [ $? -ne 0 ] && echo "FileMerge failed to launch" && failure
-    $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
+    $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
 fi
 
 if [ -n "$DISPLAY" ]; then
     # try using kdiff3, which is fairly nice
     if [ -n "$KDIFF3" ]; then
         $KDIFF3 --auto "$BASE" "$BACKUP" "$OTHER" -o "$LOCAL" || failure
-        success
+        conflicts_or_success
     fi
 
     # try using tkdiff, which is a bit less sophisticated
     if [ -n "$TKDIFF" ]; then
         $TKDIFF "$BACKUP" "$OTHER" -a "$BASE" -o "$LOCAL" || failure
-        success
+        conflicts_or_success
     fi
 
     if [ -n "$MELD" ]; then
@@ -147,7 +161,7 @@ if [ -n "$DISPLAY" ]; then
         # use the file with conflicts
         $MELD "$LOCAL.tmp.$RAND" "$LOCAL" "$OTHER" || failure
         # Also it doesn't return good error code
-        $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
+        $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
     fi
 fi
 
@@ -155,10 +169,17 @@ fi
 if [ -n "$MERGE" -o -n "$DIFF3" ]; then
     echo "conflicts detected in $LOCAL"
     cp "$BACKUP" "$CHGTEST"
-    $EDITOR "$LOCAL" || failure
+    case "$EDITOR" in
+        "emacs")
+            $EDITOR "$LOCAL" --eval '(condition-case nil (smerge-mode 1) (error nil))' || failure
+            ;;
+        *)
+            $EDITOR "$LOCAL" || failure
+            ;;
+    esac
     # Some editors do not return meaningful error codes
     # Do not take any chances
-    $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
+    $TEST "$LOCAL" -nt "$CHGTEST" && conflicts_or_success || ask_if_merged
 fi
 
 # attempt to manually merge with diff and patch
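
conflicts_or_success() above keys off the three standard conflict-marker forms. The same test expressed as a standalone regular expression over a merged file's contents (Python is used here only for consistency with the other examples; hgmerge itself relies on egrep):

    import re

    marker_re = re.compile(r'^(<<<<<<< .*|=======|>>>>>>> .*)$', re.MULTILINE)
    merged = 'line 1\n<<<<<<< local\nours\n=======\ntheirs\n>>>>>>> other\n'
    print(bool(marker_re.search(merged)))  # True -> keep asking the user
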
--- a/hgweb.cgi
+++ b/hgweb.cgi
@@ -2,14 +2,17 @@
 #
 # An example CGI script to use hgweb, edit as necessary
 
+# adjust python path if not a system-wide install:
+#import sys
+#sys.path.insert(0, "/path/to/python/lib")
+
+# enable importing on demand to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
 # send python tracebacks to the browser if an error occurs:
 import cgitb
 cgitb.enable()
 
-# adjust python path if not a system-wide install:
-#import sys
-#sys.path.insert(0, "/path/to/python/lib")
-
 # If you'd like to serve pages with UTF-8 instead of your default
 # locale charset, you can do so by uncommenting the following lines.
 # Note that this will cause your .hgrc files to be interpreted in
--- a/hgwebdir.cgi
+++ b/hgwebdir.cgi
@@ -2,14 +2,17 @@
 #
 # An example CGI script to export multiple hgweb repos, edit as necessary
 
+# adjust python path if not a system-wide install:
+#import sys
+#sys.path.insert(0, "/path/to/python/lib")
+
+# enable importing on demand to reduce startup time
+from mercurial import demandimport; demandimport.enable()
+
 # send python tracebacks to the browser if an error occurs:
 import cgitb
 cgitb.enable()
 
-# adjust python path if not a system-wide install:
-#import sys
-#sys.path.insert(0, "/path/to/python/lib")
-
 # If you'd like to serve pages with UTF-8 instead of your default
 # locale charset, you can do so by uncommenting the following lines.
 # Note that this will cause your .hgrc files to be interpreted in
--- a/mercurial/bundlerepo.py
+++ b/mercurial/bundlerepo.py
@@ -12,8 +12,7 @@ of the GNU General Public License, incor
 
 from node import *
 from i18n import _
-import changegroup, util, os, struct, bz2, tempfile
-
+import changegroup, util, os, struct, bz2, tempfile, mdiff
 import localrepo, changelog, manifest, filelog, revlog
 
 class bundlerevlog(revlog.revlog):
@@ -57,14 +56,11 @@ class bundlerevlog(revlog.revlog):
 
             if not prev:
                 prev = p1
-            # start, size, base is not used, link, p1, p2, delta ref
-            if self.version == revlog.REVLOGV0:
-                e = (start, size, None, link, p1, p2, node)
-            else:
-                e = (self.offset_type(start, 0), size, -1, None, link,
-                     self.rev(p1), self.rev(p2), node)
+            # start, size, full unc. size, base (unused), link, p1, p2, node
+            e = (revlog.offset_type(start, 0), size, -1, -1, link,
+                 self.rev(p1), self.rev(p2), node)
             self.basemap[n] = prev
-            self.index.append(e)
+            self.index.insert(-1, e)
             self.nodemap[node] = n
             prev = node
             n += 1
@@ -80,7 +76,7 @@ class bundlerevlog(revlog.revlog):
         # not against rev - 1
         # XXX: could use some caching
         if not self.bundle(rev):
-            return revlog.revlog.chunk(self, rev, df, cachelen)
+            return revlog.revlog.chunk(self, rev, df)
         self.bundlefile.seek(self.start(rev))
         return self.bundlefile.read(self.length(rev))
 
@@ -94,7 +90,7 @@ class bundlerevlog(revlog.revlog):
         elif not self.bundle(rev1) and not self.bundle(rev2):
             return revlog.revlog.revdiff(self, rev1, rev2)
 
-        return self.diff(self.revision(self.node(rev1)),
+        return mdiff.textdiff(self.revision(self.node(rev1)),
                          self.revision(self.node(rev2)))
 
     def revision(self, node):
@@ -107,8 +103,8 @@ class bundlerevlog(revlog.revlog):
         rev = self.rev(iter_node)
         # reconstruct the revision if it is from a changegroup
         while self.bundle(rev):
-            if self.cache and self.cache[0] == iter_node:
-                text = self.cache[2]
+            if self._cache and self._cache[0] == iter_node:
+                text = self._cache[2]
                 break
             chain.append(rev)
             iter_node = self.bundlebase(rev)
@@ -118,14 +114,14 @@ class bundlerevlog(revlog.revlog):
 
         while chain:
             delta = self.chunk(chain.pop())
-            text = self.patches(text, [delta])
+            text = mdiff.patches(text, [delta])
 
         p1, p2 = self.parents(node)
         if node != revlog.hash(text, p1, p2):
             raise revlog.RevlogError(_("integrity check failed on %s:%d")
                                      % (self.datafile, self.rev(node)))
 
-        self.cache = (node, self.rev(node), text)
+        self._cache = (node, self.rev(node), text)
         return text
 
     def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
@@ -197,18 +193,27 @@ class bundlerepository(localrepo.localre
         else:
             raise util.Abort(_("%s: unknown bundle compression type")
                              % bundlename)
-        self.changelog = bundlechangelog(self.sopener, self.bundlefile)
-        self.manifest = bundlemanifest(self.sopener, self.bundlefile,
-                                       self.changelog.rev)
         # dict with the mapping 'filename' -> position in the bundle
         self.bundlefilespos = {}
-        while 1:
-            f = changegroup.getchunk(self.bundlefile)
-            if not f:
-                break
-            self.bundlefilespos[f] = self.bundlefile.tell()
-            for c in changegroup.chunkiter(self.bundlefile):
-                pass
+
+    def __getattr__(self, name):
+        if name == 'changelog':
+            self.changelog = bundlechangelog(self.sopener, self.bundlefile)
+            self.manstart = self.bundlefile.tell()
+            return self.changelog
+        if name == 'manifest':
+            self.bundlefile.seek(self.manstart)
+            self.manifest = bundlemanifest(self.sopener, self.bundlefile,
+                                           self.changelog.rev)
+            self.filestart = self.bundlefile.tell()
+            return self.manifest
+        if name == 'manstart':
+            self.changelog
+            return self.manstart
+        if name == 'filestart':
+            self.manifest
+            return self.filestart
+        return localrepo.localrepository.__getattr__(self, name)
 
     def url(self):
         return self._url
@@ -217,6 +222,16 @@ class bundlerepository(localrepo.localre
         return -1
 
     def file(self, f):
+        if not self.bundlefilespos:
+            self.bundlefile.seek(self.filestart)
+            while 1:
+                chunk = changegroup.getchunk(self.bundlefile)
+                if not chunk:
+                    break
+                self.bundlefilespos[chunk] = self.bundlefile.tell()
+                for c in changegroup.chunkiter(self.bundlefile):
+                    pass
+
         if f[0] == '/':
             f = f[1:]
         if f in self.bundlefilespos:
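
The new __getattr__ in bundlerepository above relies on a standard lazy-attribute idiom: __getattr__ only runs while the attribute is missing, so the first access builds and caches it (recording the bundle offsets along the way) and later accesses never reach the hook. A minimal standalone sketch of the idiom:

    class lazy(object):
        def __getattr__(self, name):
            if name == 'expensive':
                print('building...')
                self.expensive = 42   # cached; __getattr__ will not fire again
                return self.expensive
            raise AttributeError(name)

    obj = lazy()
    print(obj.expensive)   # building... then 42
    print(obj.expensive)   # 42, no rebuild
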
--- a/mercurial/changelog.py
+++ b/mercurial/changelog.py
@@ -58,7 +58,6 @@ class appender:
     def read(self, count=-1):
         '''only trick here is reads that span real file and data'''
         ret = ""
-        old_offset = self.offset
         if self.offset < self.size:
             s = self.fp.read(count)
             ret = s
@@ -131,7 +130,10 @@ class changelog(revlog):
         return extra
 
     def encode_extra(self, d):
-        items = [_string_escape(":".join(t)) for t in d.iteritems()]
+        # keys must be sorted to produce a deterministic changelog entry
+        keys = d.keys()
+        keys.sort()
+        items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
         return "\0".join(items)
 
     def extract(self, text):
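
Sorting the keys in encode_extra() above matters because dict iteration order is not guaranteed, yet equal extra dicts must serialize to identical bytes and therefore identical changeset hashes. A standalone illustration of the ordering, with the repository's _string_escape step left out:

    d = {'transplant_source': 'abc123', 'branch': 'default'}
    items = ['%s:%s' % (k, d[k]) for k in sorted(d)]
    print('\0'.join(items).replace('\0', ' | '))
    # branch:default | transplant_source:abc123
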
--- a/mercurial/cmdutil.py
+++ b/mercurial/cmdutil.py
@@ -7,9 +7,8 @@
 
 from node import *
 from i18n import _
-import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
-import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
-import fancyopts, revlog, version, extensions, hook
+import os, sys, bisect, stat
+import mdiff, bdiff, util, templater, patch
 
 revrangesep = ':'
 
@@ -17,130 +16,8 @@ class UnknownCommand(Exception):
     """Exception raised if command is not in the command table."""
 class AmbiguousCommand(Exception):
     """Exception raised if command shortcut matches more than one command."""
-class ParseError(Exception):
-    """Exception raised on errors in parsing the command line."""
 
-def runcatch(ui, args):
-    def catchterm(*args):
-        raise util.SignalInterrupt
-
-    for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
-        num = getattr(signal, name, None)
-        if num: signal.signal(num, catchterm)
-
-    try:
-        try:
-            # enter the debugger before command execution
-            if '--debugger' in args:
-                pdb.set_trace()
-            try:
-                return dispatch(ui, args)
-            finally:
-                ui.flush()
-        except:
-            # enter the debugger when we hit an exception
-            if '--debugger' in args:
-                pdb.post_mortem(sys.exc_info()[2])
-            ui.print_exc()
-            raise
-
-    except ParseError, inst:
-        if inst.args[0]:
-            ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
-            commands.help_(ui, inst.args[0])
-        else:
-            ui.warn(_("hg: %s\n") % inst.args[1])
-            commands.help_(ui, 'shortlist')
-    except AmbiguousCommand, inst:
-        ui.warn(_("hg: command '%s' is ambiguous:\n    %s\n") %
-                (inst.args[0], " ".join(inst.args[1])))
-    except UnknownCommand, inst:
-        ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
-        commands.help_(ui, 'shortlist')
-    except hg.RepoError, inst:
-        ui.warn(_("abort: %s!\n") % inst)
-    except lock.LockHeld, inst:
-        if inst.errno == errno.ETIMEDOUT:
-            reason = _('timed out waiting for lock held by %s') % inst.locker
-        else:
-            reason = _('lock held by %s') % inst.locker
-        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
-    except lock.LockUnavailable, inst:
-        ui.warn(_("abort: could not lock %s: %s\n") %
-               (inst.desc or inst.filename, inst.strerror))
-    except revlog.RevlogError, inst:
-        ui.warn(_("abort: %s!\n") % inst)
-    except util.SignalInterrupt:
-        ui.warn(_("killed!\n"))
-    except KeyboardInterrupt:
-        try:
-            ui.warn(_("interrupted!\n"))
-        except IOError, inst:
-            if inst.errno == errno.EPIPE:
-                if ui.debugflag:
-                    ui.warn(_("\nbroken pipe\n"))
-            else:
-                raise
-    except socket.error, inst:
-        ui.warn(_("abort: %s\n") % inst[1])
-    except IOError, inst:
-        if hasattr(inst, "code"):
-            ui.warn(_("abort: %s\n") % inst)
-        elif hasattr(inst, "reason"):
-            try: # usually it is in the form (errno, strerror)
-                reason = inst.reason.args[1]
-            except: # it might be anything, for example a string
-                reason = inst.reason
-            ui.warn(_("abort: error: %s\n") % reason)
-        elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
-            if ui.debugflag:
-                ui.warn(_("broken pipe\n"))
-        elif getattr(inst, "strerror", None):
-            if getattr(inst, "filename", None):
-                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
-            else:
-                ui.warn(_("abort: %s\n") % inst.strerror)
-        else:
-            raise
-    except OSError, inst:
-        if getattr(inst, "filename", None):
-            ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
-        else:
-            ui.warn(_("abort: %s\n") % inst.strerror)
-    except util.UnexpectedOutput, inst:
-        ui.warn(_("abort: %s") % inst[0])
-        if not isinstance(inst[1], basestring):
-            ui.warn(" %r\n" % (inst[1],))
-        elif not inst[1]:
-            ui.warn(_(" empty string\n"))
-        else:
-            ui.warn("\n%r\n" % util.ellipsis(inst[1]))
-    except ImportError, inst:
-        m = str(inst).split()[-1]
-        ui.warn(_("abort: could not import module %s!\n" % m))
-        if m in "mpatch bdiff".split():
-            ui.warn(_("(did you forget to compile extensions?)\n"))
-        elif m in "zlib".split():
-            ui.warn(_("(is your Python install correct?)\n"))
-
-    except util.Abort, inst:
-        ui.warn(_("abort: %s\n") % inst)
-    except SystemExit, inst:
-        # Commands shouldn't sys.exit directly, but give a return code.
-        # Just in case catch this and and pass exit code to caller.
-        return inst.code
-    except:
-        ui.warn(_("** unknown exception encountered, details follow\n"))
-        ui.warn(_("** report bug details to "
-                 "http://www.selenic.com/mercurial/bts\n"))
-        ui.warn(_("** or mercurial@selenic.com\n"))
-        ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
-               % version.get_version())
-        raise
-
-    return -1
-
-def findpossible(ui, cmd):
+def findpossible(ui, cmd, table):
     """
     Return cmd -> (aliases, command table entry)
     for each matching command.
@@ -148,7 +25,7 @@ def findpossible(ui, cmd):
     """
     choice = {}
     debugchoice = {}
-    for e in commands.table.keys():
+    for e in table.keys():
         aliases = e.lstrip("^").split("|")
         found = None
         if cmd in aliases:
@@ -160,18 +37,18 @@ def findpossible(ui, cmd):
                     break
         if found is not None:
             if aliases[0].startswith("debug") or found.startswith("debug"):
-                debugchoice[found] = (aliases, commands.table[e])
+                debugchoice[found] = (aliases, table[e])
             else:
-                choice[found] = (aliases, commands.table[e])
+                choice[found] = (aliases, table[e])
 
     if not choice and debugchoice:
         choice = debugchoice
 
     return choice
 
-def findcmd(ui, cmd):
+def findcmd(ui, cmd, table):
     """Return (aliases, command table entry) for command string."""
-    choice = findpossible(ui, cmd)
+    choice = findpossible(ui, cmd, table)
 
     if choice.has_key(cmd):
         return choice[cmd]
@@ -186,247 +63,6 @@ def findcmd(ui, cmd):
 
     raise UnknownCommand(cmd)
 
-def findrepo():
-    p = os.getcwd()
-    while not os.path.isdir(os.path.join(p, ".hg")):
-        oldp, p = p, os.path.dirname(p)
-        if p == oldp:
-            return None
-
-    return p
-
-def parse(ui, args):
-    options = {}
-    cmdoptions = {}
-
-    try:
-        args = fancyopts.fancyopts(args, commands.globalopts, options)
-    except fancyopts.getopt.GetoptError, inst:
-        raise ParseError(None, inst)
-
-    if args:
-        cmd, args = args[0], args[1:]
-        aliases, i = findcmd(ui, cmd)
-        cmd = aliases[0]
-        defaults = ui.config("defaults", cmd)
-        if defaults:
-            args = shlex.split(defaults) + args
-        c = list(i[1])
-    else:
-        cmd = None
-        c = []
-
-    # combine global options into local
-    for o in commands.globalopts:
-        c.append((o[0], o[1], options[o[1]], o[3]))
-
-    try:
-        args = fancyopts.fancyopts(args, c, cmdoptions)
-    except fancyopts.getopt.GetoptError, inst:
-        raise ParseError(cmd, inst)
-
-    # separate global options back out
-    for o in commands.globalopts:
-        n = o[1]
-        options[n] = cmdoptions[n]
-        del cmdoptions[n]
-
-    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
-
-def parseconfig(config):
-    """parse the --config options from the command line"""
-    parsed = []
-    for cfg in config:
-        try:
-            name, value = cfg.split('=', 1)
-            section, name = name.split('.', 1)
-            if not section or not name:
-                raise IndexError
-            parsed.append((section, name, value))
-        except (IndexError, ValueError):
-            raise util.Abort(_('malformed --config option: %s') % cfg)
-    return parsed
-
-def earlygetopt(aliases, args):
-    """Return list of values for an option (or aliases).
-
-    The values are listed in the order they appear in args.
-    The options and values are removed from args.
-    """
-    try:
-        argcount = args.index("--")
-    except ValueError:
-        argcount = len(args)
-    shortopts = [opt for opt in aliases if len(opt) == 2]
-    values = []
-    pos = 0
-    while pos < argcount:
-        if args[pos] in aliases:
-            if pos + 1 >= argcount:
-                # ignore and let getopt report an error if there is no value
-                break
-            del args[pos]
-            values.append(args.pop(pos))
-            argcount -= 2
-        elif args[pos][:2] in shortopts:
-            # short option can have no following space, e.g. hg log -Rfoo
-            values.append(args.pop(pos)[2:])
-            argcount -= 1
-        else:
-            pos += 1
-    return values
-
-def dispatch(ui, args):
-    # read --config before doing anything else
-    # (e.g. to change trust settings for reading .hg/hgrc)
-    config = earlygetopt(['--config'], args)
-    if config:
-        ui.updateopts(config=parseconfig(config))
-
-    # check for cwd
-    cwd = earlygetopt(['--cwd'], args)
-    if cwd:
-        os.chdir(cwd[-1])
-
-    # read the local repository .hgrc into a local ui object
-    path = findrepo() or ""
-    if not path:
-        lui = ui
-    if path:
-        try:
-            lui = commands.ui.ui(parentui=ui)
-            lui.readconfig(os.path.join(path, ".hg", "hgrc"))
-        except IOError:
-            pass
-
-    # now we can expand paths, even ones in .hg/hgrc
-    rpath = earlygetopt(["-R", "--repository", "--repo"], args)
-    if rpath:
-        path = lui.expandpath(rpath[-1])
-        lui = commands.ui.ui(parentui=ui)
-        lui.readconfig(os.path.join(path, ".hg", "hgrc"))
-
-    extensions.loadall(lui)
-    # check for fallback encoding
-    fallback = lui.config('ui', 'fallbackencoding')
-    if fallback:
-        util._fallbackencoding = fallback
-
-    fullargs = args
-    cmd, func, args, options, cmdoptions = parse(lui, args)
-
-    if options["config"]:
-        raise util.Abort(_("Option --config may not be abbreviated!"))
-    if options["cwd"]:
-        raise util.Abort(_("Option --cwd may not be abbreviated!"))
-    if options["repository"]:
-        raise util.Abort(_(
-            "Option -R has to be separated from other options (i.e. not -qR) "
-            "and --repository may only be abbreviated as --repo!"))
-
-    if options["encoding"]:
-        util._encoding = options["encoding"]
-    if options["encodingmode"]:
-        util._encodingmode = options["encodingmode"]
-    if options["time"]:
-        def get_times():
-            t = os.times()
-            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
-                t = (t[0], t[1], t[2], t[3], time.clock())
-            return t
-        s = get_times()
-        def print_time():
-            t = get_times()
-            ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
-                (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
-        atexit.register(print_time)
-
-    ui.updateopts(options["verbose"], options["debug"], options["quiet"],
-                 not options["noninteractive"], options["traceback"])
-
-    if options['help']:
-        return commands.help_(ui, cmd, options['version'])
-    elif options['version']:
-        return commands.version_(ui)
-    elif not cmd:
-        return commands.help_(ui, 'shortlist')
-
-    repo = None
-    if cmd not in commands.norepo.split():
-        try:
-            repo = hg.repository(ui, path=path)
-            ui = repo.ui
-            if not repo.local():
-                raise util.Abort(_("repository '%s' is not local") % path)
-        except hg.RepoError:
-            if cmd not in commands.optionalrepo.split():
-                if not path:
-                    raise hg.RepoError(_("There is no Mercurial repository here"
-                                         " (.hg not found)"))
-                raise
-        d = lambda: func(ui, repo, *args, **cmdoptions)
-    else:
-        d = lambda: func(ui, *args, **cmdoptions)
-
-    # run pre-hook, and abort if it fails
-    ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
-    if ret:
-        return ret
-    ret = runcommand(ui, options, cmd, d)
-    # run post-hook, passing command result
-    hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
-              result = ret)
-    return ret
-
-def runcommand(ui, options, cmd, cmdfunc):
-    def checkargs():
-        try:
-            return cmdfunc()
-        except TypeError, inst:
-            # was this an argument error?
-            tb = traceback.extract_tb(sys.exc_info()[2])
-            if len(tb) != 2: # no
-                raise
-            raise ParseError(cmd, _("invalid arguments"))
-
-    if options['profile']:
-        import hotshot, hotshot.stats
-        prof = hotshot.Profile("hg.prof")
-        try:
-            try:
-                return prof.runcall(checkargs)
-            except:
-                try:
-                    ui.warn(_('exception raised - generating '
-                             'profile anyway\n'))
-                except:
-                    pass
-                raise
-        finally:
-            prof.close()
-            stats = hotshot.stats.load("hg.prof")
-            stats.strip_dirs()
-            stats.sort_stats('time', 'calls')
-            stats.print_stats(40)
-    elif options['lsprof']:
-        try:
-            from mercurial import lsprof
-        except ImportError:
-            raise util.Abort(_(
-                'lsprof not available - install from '
-                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
-        p = lsprof.Profiler()
-        p.enable(subcalls=True)
-        try:
-            return checkargs()
-        finally:
-            p.disable()
-            stats = lsprof.Stats(p.getstats())
-            stats.sort()
-            stats.pprint(top=10, file=sys.stderr, climit=5)
-    else:
-        return checkargs()
-
 def bail_if_changed(repo):
     modified, added, removed, deleted = repo.status()[:4]
     if modified or added or removed or deleted:
@@ -458,15 +94,6 @@ def setremoteconfig(ui, opts):
     if opts.get('remotecmd'):
         ui.setconfig("ui", "remotecmd", opts['remotecmd'])
 
-def parseurl(url, revs):
-    '''parse url#branch, returning url, branch + revs'''
-
-    if '#' not in url:
-        return url, (revs or None), None
-
-    url, rev = url.split('#', 1)
-    return url, revs + [rev], rev
-
 def revpair(repo, revs):
     '''return pair of nodes, given list of revisions. second item can
     be None, meaning use working dir.'''
@@ -625,8 +252,7 @@ def findrenames(repo, added=None, remove
         if bestname:
             yield bestname, a, bestscore
 
-def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
-              similarity=None):
+def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
     if dry_run is None:
         dry_run = opts.get('dry_run')
     if similarity is None:
@@ -635,19 +261,19 @@ def addremove(repo, pats=[], opts={}, wl
     mapping = {}
     for src, abs, rel, exact in walk(repo, pats, opts):
         target = repo.wjoin(abs)
-        if src == 'f' and repo.dirstate.state(abs) == '?':
+        if src == 'f' and abs not in repo.dirstate:
             add.append(abs)
             mapping[abs] = rel, exact
             if repo.ui.verbose or not exact:
                 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
-        if repo.dirstate.state(abs) != 'r' and not util.lexists(target):
+        if repo.dirstate[abs] != 'r' and not util.lexists(target):
             remove.append(abs)
             mapping[abs] = rel, exact
             if repo.ui.verbose or not exact:
                 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
     if not dry_run:
-        repo.add(add, wlock=wlock)
-        repo.remove(remove, wlock=wlock)
+        repo.add(add)
+        repo.remove(remove)
     if similarity > 0:
         for old, new, score in findrenames(repo, add, remove, similarity):
             oldrel, oldexact = mapping[old]
@@ -657,7 +283,7 @@ def addremove(repo, pats=[], opts={}, wl
                                  '(%d%% similar)\n') %
                                (oldrel, newrel, score * 100))
             if not dry_run:
-                repo.copy(old, new, wlock=wlock)
+                repo.copy(old, new)
 
 def service(opts, parentfn=None, initfn=None, runfn=None):
     '''Run a command as a service.'''
@@ -1273,3 +899,45 @@ def walkchangerevs(ui, repo, pats, chang
             for rev in nrevs:
                 yield 'iter', rev, None
     return iterate(), matchfn
+
+def commit(ui, repo, commitfunc, pats, opts):
+    '''commit the specified files or all outstanding changes'''
+    message = logmessage(opts)
+
+    if opts['addremove']:
+        addremove(repo, pats, opts)
+    fns, match, anypats = matchpats(repo, pats, opts)
+    if pats:
+        status = repo.status(files=fns, match=match)
+        modified, added, removed, deleted, unknown = status[:5]
+        files = modified + added + removed
+        slist = None
+        for f in fns:
+            if f == '.':
+                continue
+            if f not in files:
+                rf = repo.wjoin(f)
+                try:
+                    mode = os.lstat(rf)[stat.ST_MODE]
+                except OSError:
+                    raise util.Abort(_("file %s not found!") % rf)
+                if stat.S_ISDIR(mode):
+                    name = f + '/'
+                    if slist is None:
+                        slist = list(files)
+                        slist.sort()
+                    i = bisect.bisect(slist, name)
+                    if i >= len(slist) or not slist[i].startswith(name):
+                        raise util.Abort(_("no match under directory %s!")
+                                         % rf)
+                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
+                    raise util.Abort(_("can't commit %s: "
+                                       "unsupported file type!") % rf)
+                elif f not in repo.dirstate:
+                    raise util.Abort(_("file %s not tracked!") % rf)
+    else:
+        files = []
+    try:
+        return commitfunc(ui, repo, files, message, match, opts)
+    except ValueError, inst:
+        raise util.Abort(str(inst))
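
The directory check in the new cmdutil.commit() above leans on the fact that, in a sorted file list, every path under directory f sorts immediately after the probe string f + '/'. A standalone sketch of that bisect trick on toy paths:

    import bisect

    slist = sorted(['docs/readme.txt', 'hgext/record.py', 'setup.py'])

    def has_match_under(directory):
        name = directory + '/'
        i = bisect.bisect(slist, name)
        return i < len(slist) and slist[i].startswith(name)

    print(has_match_under('hgext'))  # True
    print(has_match_under('tests'))  # False
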
--- a/mercurial/commands.py
+++ b/mercurial/commands.py
@@ -5,11 +5,10 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-import demandimport; demandimport.enable()
 from node import *
 from i18n import _
-import bisect, os, re, sys, urllib, shlex, stat
-import ui, hg, util, revlog, bundlerepo, extensions
+import os, re, sys, urllib
+import hg, util, revlog, bundlerepo, extensions
 import difflib, patch, time, help, mdiff, tempfile
 import errno, version, socket
 import archival, changegroup, cmdutil, hgweb.server, sshserver
@@ -33,7 +32,7 @@ def add(ui, repo, *pats, **opts):
             if ui.verbose:
                 ui.status(_('adding %s\n') % rel)
             names.append(abs)
-        elif repo.dirstate.state(abs) == '?':
+        elif abs not in repo.dirstate:
             ui.status(_('adding %s\n') % rel)
             names.append(abs)
     if not opts.get('dry_run'):
@@ -73,19 +72,31 @@ def annotate(ui, repo, *pats, **opts):
     detects as binary. With -a, annotate will generate an annotation
     anyway, probably with undesirable results.
     """
-    getdate = util.cachefunc(lambda x: util.datestr(x.date()))
+    getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
 
     if not pats:
         raise util.Abort(_('at least one file name or pattern required'))
 
-    opmap = [['user', lambda x: ui.shortuser(x.user())],
-             ['number', lambda x: str(x.rev())],
-             ['changeset', lambda x: short(x.node())],
-             ['date', getdate], ['follow', lambda x: x.path()]]
+    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
+             ('number', lambda x: str(x[0].rev())),
+             ('changeset', lambda x: short(x[0].node())),
+             ('date', getdate),
+             ('follow', lambda x: x[0].path()),
+            ]
+
     if (not opts['user'] and not opts['changeset'] and not opts['date']
         and not opts['follow']):
         opts['number'] = 1
 
+    linenumber = opts.get('line_number') is not None
+    if (linenumber and (not opts['changeset']) and (not opts['number'])):
+        raise util.Abort(_('at least one of -n/-c is required for -l'))
+
+    funcmap = [func for op, func in opmap if opts.get(op)]
+    if linenumber:
+        lastfunc = funcmap[-1]
+        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
+
     ctx = repo.changectx(opts['rev'])
 
     for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
@@ -95,15 +106,15 @@ def annotate(ui, repo, *pats, **opts):
             ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
             continue
 
-        lines = fctx.annotate(follow=opts.get('follow'))
+        lines = fctx.annotate(follow=opts.get('follow'),
+                              linenumber=linenumber)
         pieces = []
 
-        for o, f in opmap:
-            if opts[o]:
-                l = [f(n) for n, dummy in lines]
-                if l:
-                    m = max(map(len, l))
-                    pieces.append(["%*s" % (m, x) for x in l])
+        for f in funcmap:
+            l = [f(n) for n, dummy in lines]
+            if l:
+                m = max(map(len, l))
+                pieces.append(["%*s" % (m, x) for x in l])
 
         if pieces:
             for p, l in zip(zip(*pieces), lines):
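
The line-number support in annotate() above is added by wrapping only the last selected column: lastfunc is captured and the final entry of funcmap is replaced by a closure that appends ':<lineno>'. A standalone sketch with dummy column functions, where the dict-based sample merely stands in for a filectx annotation entry:

    funcmap = [lambda x: x[0]['user'], lambda x: str(x[0]['rev'])]
    lastfunc = funcmap[-1]
    funcmap[-1] = lambda x: '%s:%s' % (lastfunc(x), x[1])

    sample = ({'user': 'alice', 'rev': 7}, 3)   # (annotation data, line number)
    print([f(sample) for f in funcmap])         # ['alice', '7:3']
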
@@ -324,7 +335,7 @@ def bundle(ui, repo, fname, dest=None, *
                         visit.append(p)
     else:
         cmdutil.setremoteconfig(ui, opts)
-        dest, revs, checkout = cmdutil.parseurl(
+        dest, revs, checkout = hg.parseurl(
             ui.expandpath(dest or 'default-push', dest or 'default'), revs)
         other = hg.repository(ui, dest)
         o = repo.findoutgoing(other, force=opts['force'])
@@ -416,48 +427,12 @@ def commit(ui, repo, *pats, **opts):
     If no commit message is specified, the editor configured in your hgrc
     or in the EDITOR environment variable is started to enter a message.
     """
-    message = cmdutil.logmessage(opts)
-
-    if opts['addremove']:
-        cmdutil.addremove(repo, pats, opts)
-    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
-    if pats:
-        status = repo.status(files=fns, match=match)
-        modified, added, removed, deleted, unknown = status[:5]
-        files = modified + added + removed
-        slist = None
-        for f in fns:
-            if f == '.':
-                continue
-            if f not in files:
-                rf = repo.wjoin(f)
-                try:
-                    mode = os.lstat(rf)[stat.ST_MODE]
-                except OSError:
-                    raise util.Abort(_("file %s not found!") % rf)
-                if stat.S_ISDIR(mode):
-                    name = f + '/'
-                    if slist is None:
-                        slist = list(files)
-                        slist.sort()
-                    i = bisect.bisect(slist, name)
-                    if i >= len(slist) or not slist[i].startswith(name):
-                        raise util.Abort(_("no match under directory %s!")
-                                         % rf)
-                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
-                    raise util.Abort(_("can't commit %s: "
-                                       "unsupported file type!") % rf)
-                elif repo.dirstate.state(f) == '?':
-                    raise util.Abort(_("file %s not tracked!") % rf)
-    else:
-        files = []
-    try:
-        repo.commit(files, message, opts['user'], opts['date'], match,
-                    force_editor=opts.get('force_editor'))
-    except ValueError, inst:
-        raise util.Abort(str(inst))
-
-def docopy(ui, repo, pats, opts, wlock):
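+    # commit now delegates to cmdutil.commit; commitfunc supplies only the
+    # actual repository commit call (user, date, force_editor)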
+    def commitfunc(ui, repo, files, message, match, opts):
+        return repo.commit(files, message, opts['user'], opts['date'], match,
+                           force_editor=opts.get('force_editor'))
+    cmdutil.commit(ui, repo, commitfunc, pats, opts)
+
+def docopy(ui, repo, pats, opts):
     # called with the repo lock held
     #
     # hgsep => pathname that uses "/" to separate directories
@@ -473,7 +448,7 @@ def docopy(ui, repo, pats, opts, wlock):
     def okaytocopy(abs, rel, exact):
         reasons = {'?': _('is not managed'),
                    'r': _('has been marked for remove')}
-        state = repo.dirstate.state(abs)
+        state = repo.dirstate[abs]
         reason = reasons.get(state)
         if reason:
             if exact:
@@ -501,7 +476,7 @@ def docopy(ui, repo, pats, opts, wlock):
                      repo.pathto(prevsrc, cwd)))
             return
         if (not opts['after'] and os.path.exists(target) or
-            opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
+            opts['after'] and repo.dirstate[abstarget] in 'mn'):
             if not opts['force']:
                 ui.warn(_('%s: not overwriting - file exists\n') %
                         reltarget)
@@ -516,16 +491,16 @@ def docopy(ui, repo, pats, opts, wlock):
             if not os.path.isdir(targetdir) and not opts.get('dry_run'):
                 os.makedirs(targetdir)
             try:
-                restore = repo.dirstate.state(abstarget) == 'r'
+                restore = repo.dirstate[abstarget] == 'r'
                 if restore and not opts.get('dry_run'):
-                    repo.undelete([abstarget], wlock)
+                    repo.undelete([abstarget])
                 try:
                     if not opts.get('dry_run'):
                         util.copyfile(src, target)
                     restore = False
                 finally:
                     if restore:
-                        repo.remove([abstarget], wlock=wlock)
+                        repo.remove([abstarget])
             except IOError, inst:
                 if inst.errno == errno.ENOENT:
                     ui.warn(_('%s: deleted in working copy\n') % relsrc)
@@ -538,15 +513,15 @@ def docopy(ui, repo, pats, opts, wlock):
             ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
         targets[abstarget] = abssrc
         if abstarget != origsrc:
-            if repo.dirstate.state(origsrc) == 'a':
+            if repo.dirstate[origsrc] == 'a':
                 if not ui.quiet:
                     ui.warn(_("%s has not been committed yet, so no copy "
                               "data will be stored for %s.\n")
                             % (repo.pathto(origsrc, cwd), reltarget))
                 if abstarget not in repo.dirstate and not opts.get('dry_run'):
-                    repo.add([abstarget], wlock)
+                    repo.add([abstarget])
             elif not opts.get('dry_run'):
-                repo.copy(origsrc, abstarget, wlock)
+                repo.copy(origsrc, abstarget)
         copied.append((abssrc, relsrc, exact))
 
     # pat: ossep
@@ -669,8 +644,11 @@ def copy(ui, repo, *pats, **opts):
     This command takes effect in the next commit. To undo a copy
     before that, see hg revert.
     """
-    wlock = repo.wlock(0)
-    errs, copied = docopy(ui, repo, pats, opts, wlock)
+    wlock = repo.wlock(False)
+    try:
+        errs, copied = docopy(ui, repo, pats, opts)
+    finally:
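+        # dropping the last reference releases the working directory lock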
+        del wlock
     return errs
 
 def debugancestor(ui, index, rev1, rev2):
@@ -686,7 +664,7 @@ def debugcomplete(ui, cmd='', **opts):
         options = []
         otables = [globalopts]
         if cmd:
-            aliases, entry = cmdutil.findcmd(ui, cmd)
+            aliases, entry = cmdutil.findcmd(ui, cmd, table)
             otables.append(entry[1])
         for t in otables:
             for o in t:
@@ -696,7 +674,7 @@ def debugcomplete(ui, cmd='', **opts):
         ui.write("%s\n" % "\n".join(options))
         return
 
-    clist = cmdutil.findpossible(ui, cmd).keys()
+    clist = cmdutil.findpossible(ui, cmd, table).keys()
     clist.sort()
     ui.write("%s\n" % "\n".join(clist))
 
@@ -707,17 +685,19 @@ def debugrebuildstate(ui, repo, rev=""):
     ctx = repo.changectx(rev)
     files = ctx.manifest()
     wlock = repo.wlock()
-    repo.dirstate.rebuild(rev, files)
+    try:
+        repo.dirstate.rebuild(rev, files)
+    finally:
+        del wlock
 
 def debugcheckstate(ui, repo):
     """validate the correctness of the current dirstate"""
     parent1, parent2 = repo.dirstate.parents()
-    dc = repo.dirstate
     m1 = repo.changectx(parent1).manifest()
     m2 = repo.changectx(parent2).manifest()
     errors = 0
-    for f in dc:
-        state = repo.dirstate.state(f)
+    for f in repo.dirstate:
+        state = repo.dirstate[f]
         if state in "nr" and f not in m1:
             ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
             errors += 1
@@ -729,7 +709,7 @@ def debugcheckstate(ui, repo):
                     (f, state))
             errors += 1
     for f in m1:
-        state = repo.dirstate.state(f)
+        state = repo.dirstate[f]
         if state not in "nrm":
             ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
             errors += 1
@@ -777,22 +757,25 @@ def debugsetparents(ui, repo, rev1, rev2
     try:
         repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
     finally:
-        wlock.release()
+        del wlock
 
 def debugstate(ui, repo):
     """show the contents of the current dirstate"""
-    dc = repo.dirstate
-    for file_ in dc:
-        if dc[file_][3] == -1:
+    k = repo.dirstate._map.items()
+    k.sort()
+    for file_, ent in k:
+        if ent[3] == -1:
             # Pad or slice to locale representation
             locale_len = len(time.strftime("%x %X", time.localtime(0)))
             timestr = 'unset'
             timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
         else:
-            timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
-        ui.write("%c %3o %10d %s %s\n"
-                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
-                    timestr, file_))
+            timestr = time.strftime("%x %X", time.localtime(ent[3]))
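+        # the 020000 bit distinguishes symlink modes from regular files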
+        if ent[1] & 020000:
+            mode = 'lnk'
+        else:
+            mode = '%3o' % (ent[1] & 0777)
+        ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
     for f in repo.dirstate.copies():
         ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
 
@@ -823,7 +806,10 @@ def debugindex(ui, file_):
              " nodeid       p1           p2\n")
     for i in xrange(r.count()):
         node = r.node(i)
-        pp = r.parents(node)
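+        # tolerate a damaged entry so the rest of the index can still be shown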
+        try:
+            pp = r.parents(node)
+        except:
+            pp = [nullid, nullid]
         ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
             short(node), short(pp[0]), short(pp[1])))
@@ -844,7 +830,7 @@ def debuginstall(ui):
     '''test Mercurial installation'''
 
     def writetemp(contents):
-        (fd, name) = tempfile.mkstemp()
+        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
         f = os.fdopen(fd, "wb")
         f.write(contents)
         f.close()
@@ -883,42 +869,40 @@ def debuginstall(ui):
 
     # patch
     ui.status(_("Checking patch...\n"))
-    patcher = ui.config('ui', 'patch')
-    patcher = ((patcher and util.find_exe(patcher)) or
-               util.find_exe('gpatch') or
-               util.find_exe('patch'))
-    if not patcher:
-        ui.write(_(" Can't find patch or gpatch in PATH\n"))
-        ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
-        problems += 1
+    patchproblems = 0
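+    # run a trivial one-line-insert diff through the patch machinery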
+    a = "1\n2\n3\n4\n"
+    b = "1\n2\n3\ninsert\n4\n"
+    fa = writetemp(a)
+    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
+    fd = writetemp(d)
+
+    files = {}
+    try:
+        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
+    except util.Abort, e:
+        ui.write(_(" patch call failed:\n"))
+        ui.write(" " + str(e) + "\n")
+        patchproblems += 1
     else:
-        # actually attempt a patch here
-        a = "1\n2\n3\n4\n"
-        b = "1\n2\n3\ninsert\n4\n"
-        fa = writetemp(a)
-        d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
-        fd = writetemp(d)
-
-        files = {}
-        try:
-            patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
-        except util.Abort, e:
-            ui.write(_(" patch call failed:\n"))
-            ui.write(" " + str(e) + "\n")
-            problems += 1
+        if list(files) != [os.path.basename(fa)]:
+            ui.write(_(" unexpected patch output!\n"))
+            patchproblems += 1
+        a = file(fa).read()
+        if a != b:
+            ui.write(_(" patch test failed!\n"))
+            patchproblems += 1
+
+    if patchproblems:
+        if ui.config('ui', 'patch'):
+            ui.write(_(" (Current patch tool may be incompatible with patch,"
+                       " or misconfigured. Please check your .hgrc file)\n"))
         else:
-            if list(files) != [os.path.basename(fa)]:
-                ui.write(_(" unexpected patch output!"))
-                ui.write(_(" (you may have an incompatible version of patch)\n"))
-                problems += 1
-            a = file(fa).read()
-            if a != b:
-                ui.write(_(" patch test failed!"))
-                ui.write(_(" (you may have an incompatible version of patch)\n"))
-                problems += 1
-
-        os.unlink(fa)
-        os.unlink(fd)
+            ui.write(_(" Internal patcher failure, please report this error" 
+                       " to http://www.selenic.com/mercurial/bts\n"))
+    problems += patchproblems
+
+    os.unlink(fa)
+    os.unlink(fd)
 
     # merge helper
     ui.status(_("Checking merge helper...\n"))
@@ -1326,7 +1310,7 @@ def help_(ui, name=None, with_version=Fa
         if with_version:
             version_(ui)
             ui.write('\n')
-        aliases, i = cmdutil.findcmd(ui, name)
+        aliases, i = cmdutil.findcmd(ui, name, table)
         # synopsis
         ui.write("%s\n\n" % i[2])
 
@@ -1498,7 +1482,7 @@ def identify(ui, repo, source=None,
     output = []
 
     if source:
-        source, revs, checkout = cmdutil.parseurl(ui.expandpath(source), [])
+        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
         srepo = hg.repository(ui, source)
         if not rev and revs:
             rev = revs[0]
@@ -1579,70 +1563,77 @@ def import_(ui, repo, patch1, *patches, 
 
     d = opts["base"]
     strip = opts["strip"]
-
-    wlock = repo.wlock()
-    lock = repo.lock()
-
-    for p in patches:
-        pf = os.path.join(d, p)
-
-        if pf == '-':
-            ui.status(_("applying patch from stdin\n"))
-            tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
-        else:
-            ui.status(_("applying %s\n") % p)
-            tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf, 'rb'))
-
-        if tmpname is None:
-            raise util.Abort(_('no diffs found'))
-
-        try:
-            cmdline_message = cmdutil.logmessage(opts)
-            if cmdline_message:
-                # pickup the cmdline msg
-                message = cmdline_message
-            elif message:
-                # pickup the patch msg
-                message = message.strip()
+    wlock = lock = None
+    try:
+        wlock = repo.wlock()
+        lock = repo.lock()
+        for p in patches:
+            pf = os.path.join(d, p)
+
+            if pf == '-':
+                ui.status(_("applying patch from stdin\n"))
+                data = patch.extract(ui, sys.stdin)
             else:
-                # launch the editor
-                message = None
-            ui.debug(_('message:\n%s\n') % message)
-
-            wp = repo.workingctx().parents()
-            if opts.get('exact'):
-                if not nodeid or not p1:
-                    raise util.Abort(_('not a mercurial patch'))
-                p1 = repo.lookup(p1)
-                p2 = repo.lookup(p2 or hex(nullid))
-
-                if p1 != wp[0].node():
-                    hg.clean(repo, p1, wlock=wlock)
-                repo.dirstate.setparents(p1, p2)
-            elif p2:
-                try:
+                ui.status(_("applying %s\n") % p)
+                if os.path.exists(pf):
+                    data = patch.extract(ui, file(pf, 'rb'))
+                else:
+                    data = patch.extract(ui, urllib.urlopen(pf))
+            tmpname, message, user, date, branch, nodeid, p1, p2 = data
+
+            if tmpname is None:
+                raise util.Abort(_('no diffs found'))
+
+            try:
+                cmdline_message = cmdutil.logmessage(opts)
+                if cmdline_message:
+                    # pickup the cmdline msg
+                    message = cmdline_message
+                elif message:
+                    # pickup the patch msg
+                    message = message.strip()
+                else:
+                    # launch the editor
+                    message = None
+                ui.debug(_('message:\n%s\n') % message)
+
+                wp = repo.workingctx().parents()
+                if opts.get('exact'):
+                    if not nodeid or not p1:
+                        raise util.Abort(_('not a mercurial patch'))
                     p1 = repo.lookup(p1)
-                    p2 = repo.lookup(p2)
-                    if p1 == wp[0].node():
-                        repo.dirstate.setparents(p1, p2)
-                except hg.RepoError:
-                    pass
-            if opts.get('exact') or opts.get('import_branch'):
-                repo.dirstate.setbranch(branch or 'default')
-
-            files = {}
-            try:
-                fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
-                                   files=files)
+                    p2 = repo.lookup(p2 or hex(nullid))
+
+                    if p1 != wp[0].node():
+                        hg.clean(repo, p1)
+                    repo.dirstate.setparents(p1, p2)
+                elif p2:
+                    try:
+                        p1 = repo.lookup(p1)
+                        p2 = repo.lookup(p2)
+                        if p1 == wp[0].node():
+                            repo.dirstate.setparents(p1, p2)
+                    except hg.RepoError:
+                        pass
+                if opts.get('exact') or opts.get('import_branch'):
+                    repo.dirstate.setbranch(branch or 'default')
+
+                files = {}
+                try:
+                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
+                                       files=files)
+                finally:
+                    files = patch.updatedir(ui, repo, files)
+                n = repo.commit(files, message, user, date)
+                if opts.get('exact'):
+                    if hex(n) != nodeid:
+                        repo.rollback()
+                        raise util.Abort(
+                            _('patch is damaged or loses information'))
             finally:
-                files = patch.updatedir(ui, repo, files, wlock=wlock)
-            n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
-            if opts.get('exact'):
-                if hex(n) != nodeid:
-                    repo.rollback(wlock=wlock, lock=lock)
-                    raise util.Abort(_('patch is damaged or loses information'))
-        finally:
-            os.unlink(tmpname)
+                os.unlink(tmpname)
+    finally:
+        del lock, wlock
 
 def incoming(ui, repo, source="default", **opts):
     """show new changesets found in source
@@ -1656,18 +1647,13 @@ def incoming(ui, repo, source="default",
 
     See pull for valid source format details.
     """
-    source, revs, checkout = cmdutil.parseurl(ui.expandpath(source),
-                                              opts['rev'])
+    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
     cmdutil.setremoteconfig(ui, opts)
 
     other = hg.repository(ui, source)
     ui.status(_('comparing with %s\n') % source)
     if revs:
-        if 'lookup' in other.capabilities:
-            revs = [other.lookup(rev) for rev in revs]
-        else:
-            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
-            raise util.Abort(error)
+        revs = [other.lookup(rev) for rev in revs]
     incoming = repo.findincoming(other, heads=revs, force=opts["force"])
     if not incoming:
         try:
@@ -1685,8 +1671,6 @@ def incoming(ui, repo, source="default",
             if revs is None:
                 cg = other.changegroup(incoming, "incoming")
             else:
-                if 'changegroupsubset' not in other.capabilities:
-                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                 cg = other.changegroupsubset(incoming, revs, 'incoming')
             bundletype = other.local() and "HG10BZ" or "HG10UN"
             fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
@@ -1758,7 +1742,7 @@ def locate(ui, repo, *pats, **opts):
                                              default='relglob'):
         if src == 'b':
             continue
-        if not node and repo.dirstate.state(abs) == '?':
+        if not node and abs not in repo.dirstate:
             continue
         if opts['fullpath']:
             ui.write(os.path.join(repo.root, abs), end)
@@ -1890,7 +1874,7 @@ def log(ui, repo, *pats, **opts):
             if displayer.flush(rev):
                 count += 1
 
-def manifest(ui, repo, rev=None):
+def manifest(ui, repo, node=None, rev=None):
     """output the current or given revision of the project manifest
 
     Print a list of version controlled files for the given revision.
@@ -1904,7 +1888,13 @@ def manifest(ui, repo, rev=None):
     file revision hashes.
     """
 
-    m = repo.changectx(rev).manifest()
+    if rev and node:
+        raise util.Abort(_("please specify just one revision"))
+
+    if not node:
+        node = rev
+
+    m = repo.changectx(node).manifest()
     files = m.keys()
     files.sort()
 
@@ -1931,7 +1921,6 @@ def merge(ui, repo, node=None, force=Non
 
     if rev and node:
         raise util.Abort(_("please specify just one revision"))
-
     if not node:
         node = rev
 
@@ -1941,10 +1930,13 @@ def merge(ui, repo, node=None, force=Non
             raise util.Abort(_('repo has %d heads - '
                                'please merge with an explicit rev') %
                              len(heads))
+        parent = repo.dirstate.parents()[0]
         if len(heads) == 1:
-            raise util.Abort(_('there is nothing to merge - '
-                               'use "hg update" instead'))
-        parent = repo.dirstate.parents()[0]
+            msg = _('there is nothing to merge')
+            if parent != repo.lookup(repo.workingctx().branch()):
+                msg = _('%s - use "hg update" instead') % msg
+            raise util.Abort(msg)
+
         if parent not in heads:
             raise util.Abort(_('working dir not at a head rev - '
                                'use "hg update" or merge with an explicit rev'))
@@ -1960,7 +1952,7 @@ def outgoing(ui, repo, dest=None, **opts
 
     See pull for valid destination format details.
     """
-    dest, revs, checkout = cmdutil.parseurl(
+    dest, revs, checkout = hg.parseurl(
         ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
     cmdutil.setremoteconfig(ui, opts)
     if revs:
@@ -2096,17 +2088,17 @@ def pull(ui, repo, source="default", **o
       Alternatively specify "ssh -C" as your ssh command in your hgrc or
       with the --ssh command line option.
     """
-    source, revs, checkout = cmdutil.parseurl(ui.expandpath(source),
-                                              opts['rev'])
+    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
     cmdutil.setremoteconfig(ui, opts)
 
     other = hg.repository(ui, source)
     ui.status(_('pulling from %s\n') % (source))
     if revs:
-        if 'lookup' in other.capabilities:
+        try:
             revs = [other.lookup(rev) for rev in revs]
-        else:
-            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
+        except repo.NoCapability:
+            error = _("Other repository doesn't support revision lookup, "
+                      "so a rev cannot be specified.")
             raise util.Abort(error)
 
     modheads = repo.pull(other, heads=revs, force=opts['force'])
@@ -2142,7 +2134,7 @@ def push(ui, repo, dest=None, **opts):
     Pushing to http:// and https:// URLs is only possible, if this
     feature is explicitly enabled on the remote Mercurial server.
     """
-    dest, revs, checkout = cmdutil.parseurl(
+    dest, revs, checkout = hg.parseurl(
         ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
     cmdutil.setremoteconfig(ui, opts)
 
@@ -2211,7 +2203,6 @@ def remove(ui, repo, *pats, **opts):
     Modified files and added files are not removed by default.  To
     remove them, use the -f/--force option.
     """
-    names = []
     if not opts['after'] and not pats:
         raise util.Abort(_('no files specified'))
     files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
@@ -2228,7 +2219,7 @@ def remove(ui, repo, *pats, **opts):
                 forget.append(abs)
                 continue
             reason = _('has been marked for add (use -f to force removal)')
-        elif repo.dirstate.state(abs) == '?':
+        elif abs not in repo.dirstate:
             reason = _('is not managed')
         elif opts['after'] and not exact and abs not in deleted:
             continue
@@ -2258,16 +2249,19 @@ def rename(ui, repo, *pats, **opts):
     This command takes effect in the next commit. To undo a rename
     before that, see hg revert.
     """
-    wlock = repo.wlock(0)
-    errs, copied = docopy(ui, repo, pats, opts, wlock)
-    names = []
-    for abs, rel, exact in copied:
-        if ui.verbose or not exact:
-            ui.status(_('removing %s\n') % rel)
-        names.append(abs)
-    if not opts.get('dry_run'):
-        repo.remove(names, True, wlock=wlock)
-    return errs
+    wlock = repo.wlock(False)
+    try:
+        errs, copied = docopy(ui, repo, pats, opts)
+        names = []
+        for abs, rel, exact in copied:
+            if ui.verbose or not exact:
+                ui.status(_('removing %s\n') % rel)
+            names.append(abs)
+        if not opts.get('dry_run'):
+            repo.remove(names, True)
+        return errs
+    finally:
+        del wlock
 
 def revert(ui, repo, *pats, **opts):
     """revert files or dirs to their states as of some revision
@@ -2321,8 +2315,6 @@ def revert(ui, repo, *pats, **opts):
     else:
         pmf = None
 
-    wlock = repo.wlock()
-
     # need all matching names in dirstate and manifest of target rev,
     # so have to walk both. do not print errors if files exist in one
     # but not other.
@@ -2330,117 +2322,124 @@ def revert(ui, repo, *pats, **opts):
     names = {}
     target_only = {}
 
-    # walk dirstate.
-
-    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
-                                             badmatch=mf.has_key):
-        names[abs] = (rel, exact)
-        if src == 'b':
-            target_only[abs] = True
-
-    # walk target manifest.
-
-    def badmatch(path):
-        if path in names:
-            return True
-        path_ = path + '/'
-        for f in names:
-            if f.startswith(path_):
+    wlock = repo.wlock()
+    try:
+        # walk dirstate.
+        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
+                                                 badmatch=mf.has_key):
+            names[abs] = (rel, exact)
+            if src == 'b':
+                target_only[abs] = True
+
+        # walk target manifest.
+
+        def badmatch(path):
+            if path in names:
                 return True
-        return False
-
-    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
-                                             badmatch=badmatch):
-        if abs in names or src == 'b':
-            continue
-        names[abs] = (rel, exact)
-        target_only[abs] = True
-
-    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
-    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
-
-    # if f is a rename, also revert the source
-    cwd = repo.getcwd()
-    for f in added:
-        src = repo.dirstate.copied(f)
-        if src and src not in names and repo.dirstate[src][0] == 'r':
-            removed[src] = None
-            names[src] = (repo.pathto(src, cwd), True)
-
-    revert = ([], _('reverting %s\n'))
-    add = ([], _('adding %s\n'))
-    remove = ([], _('removing %s\n'))
-    forget = ([], _('forgetting %s\n'))
-    undelete = ([], _('undeleting %s\n'))
-    update = {}
-
-    disptable = (
-        # dispatch table:
-        #   file state
-        #   action if in target manifest
-        #   action if not in target manifest
-        #   make backup if in target manifest
-        #   make backup if not in target manifest
-        (modified, revert, remove, True, True),
-        (added, revert, forget, True, False),
-        (removed, undelete, None, False, False),
-        (deleted, revert, remove, False, False),
-        (unknown, add, None, True, False),
-        (target_only, add, None, False, False),
-        )
-
-    entries = names.items()
-    entries.sort()
-
-    for abs, (rel, exact) in entries:
-        mfentry = mf.get(abs)
-        target = repo.wjoin(abs)
-        def handle(xlist, dobackup):
-            xlist[0].append(abs)
-            update[abs] = 1
-            if dobackup and not opts['no_backup'] and util.lexists(target):
-                bakname = "%s.orig" % rel
-                ui.note(_('saving current version of %s as %s\n') %
-                        (rel, bakname))
-                if not opts.get('dry_run'):
-                    util.copyfile(target, bakname)
-            if ui.verbose or not exact:
-                ui.status(xlist[1] % rel)
-        for table, hitlist, misslist, backuphit, backupmiss in disptable:
-            if abs not in table: continue
-            # file has changed in dirstate
-            if mfentry:
-                handle(hitlist, backuphit)
-            elif misslist is not None:
-                handle(misslist, backupmiss)
+            path_ = path + '/'
+            for f in names:
+                if f.startswith(path_):
+                    return True
+            return False
+
+        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
+                                                 badmatch=badmatch):
+            if abs in names or src == 'b':
+                continue
+            names[abs] = (rel, exact)
+            target_only[abs] = True
+
+        changes = repo.status(match=names.has_key)[:5]
+        modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
+
+        # if f is a rename, also revert the source
+        cwd = repo.getcwd()
+        for f in added:
+            src = repo.dirstate.copied(f)
+            if src and src not in names and repo.dirstate[src] == 'r':
+                removed[src] = None
+                names[src] = (repo.pathto(src, cwd), True)
+
+        revert = ([], _('reverting %s\n'))
+        add = ([], _('adding %s\n'))
+        remove = ([], _('removing %s\n'))
+        forget = ([], _('forgetting %s\n'))
+        undelete = ([], _('undeleting %s\n'))
+        update = {}
+
+        disptable = (
+            # dispatch table:
+            #   file state
+            #   action if in target manifest
+            #   action if not in target manifest
+            #   make backup if in target manifest
+            #   make backup if not in target manifest
+            (modified, revert, remove, True, True),
+            (added, revert, forget, True, False),
+            (removed, undelete, None, False, False),
+            (deleted, revert, remove, False, False),
+            (unknown, add, None, True, False),
+            (target_only, add, None, False, False),
+            )
+
+        entries = names.items()
+        entries.sort()
+
+        for abs, (rel, exact) in entries:
+            mfentry = mf.get(abs)
+            target = repo.wjoin(abs)
+            def handle(xlist, dobackup):
+                xlist[0].append(abs)
+                update[abs] = 1
+                if dobackup and not opts['no_backup'] and util.lexists(target):
+                    bakname = "%s.orig" % rel
+                    ui.note(_('saving current version of %s as %s\n') %
+                            (rel, bakname))
+                    if not opts.get('dry_run'):
+                        util.copyfile(target, bakname)
+                if ui.verbose or not exact:
+                    ui.status(xlist[1] % rel)
+            for table, hitlist, misslist, backuphit, backupmiss in disptable:
+                if abs not in table: continue
+                # file has changed in dirstate
+                if mfentry:
+                    handle(hitlist, backuphit)
+                elif misslist is not None:
+                    handle(misslist, backupmiss)
+                else:
+                    if exact: ui.warn(_('file not managed: %s\n') % rel)
+                break
             else:
-                if exact: ui.warn(_('file not managed: %s\n') % rel)
-            break
-        else:
-            # file has not changed in dirstate
-            if node == parent:
-                if exact: ui.warn(_('no changes needed to %s\n') % rel)
-                continue
-            if pmf is None:
-                # only need parent manifest in this unlikely case,
-                # so do not read by default
-                pmf = repo.changectx(parent).manifest()
-            if abs in pmf:
-                if mfentry:
-                    # if version of file is same in parent and target
-                    # manifests, do nothing
-                    if pmf[abs] != mfentry:
-                        handle(revert, False)
-                else:
-                    handle(remove, False)
-
-    if not opts.get('dry_run'):
-        repo.dirstate.forget(forget[0])
-        r = hg.revert(repo, node, update.has_key, wlock)
-        repo.dirstate.update(add[0], 'a')
-        repo.dirstate.update(undelete[0], 'n')
-        repo.dirstate.update(remove[0], 'r')
-        return r
+                # file has not changed in dirstate
+                if node == parent:
+                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
+                    continue
+                if pmf is None:
+                    # only need parent manifest in this unlikely case,
+                    # so do not read by default
+                    pmf = repo.changectx(parent).manifest()
+                if abs in pmf:
+                    if mfentry:
+                        # if version of file is same in parent and target
+                        # manifests, do nothing
+                        if pmf[abs] != mfentry:
+                            handle(revert, False)
+                    else:
+                        handle(remove, False)
+
+        if not opts.get('dry_run'):
+            for f in forget[0]:
+                repo.dirstate.forget(f)
+            r = hg.revert(repo, node, update.has_key)
+            for f in add[0]:
+                repo.dirstate.add(f)
+            for f in undelete[0]:
+                repo.dirstate.normal(f)
+            for f in remove[0]:
+                repo.dirstate.remove(f)
+            return r
+    finally:
+        del wlock
 
 def rollback(ui, repo):
     """roll back the last transaction in this repository
@@ -2498,7 +2497,7 @@ def serve(ui, repo, **opts):
 
     parentui = ui.parentui or ui
     optlist = ("name templates style address port ipv6"
-               " accesslog errorlog webdir_conf")
+               " accesslog errorlog webdir_conf certificate")
     for o in optlist.split():
         if opts[o]:
             parentui.setconfig("web", o, str(opts[o]))
@@ -2681,7 +2680,6 @@ def unbundle(ui, repo, fname1, *fnames, 
     bundle command.
     """
     fnames = (fname1,) + fnames
-    result = None
     for fname in fnames:
         if os.path.exists(fname):
             f = open(fname, "rb")
@@ -2788,6 +2786,11 @@ commitopts = [
     ('l', 'logfile', '', _('read commit message from <file>')),
 ]
 
+commitopts2 = [
+    ('d', 'date', '', _('record datecode as commit date')),
+    ('u', 'user', '', _('record user as committer')),
+]
+
 table = {
     "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
     "addremove":
@@ -2805,8 +2808,10 @@ table = {
           ('d', 'date', None, _('list the date')),
           ('n', 'number', None, _('list the revision number (default)')),
           ('c', 'changeset', None, _('list the changeset')),
+          ('l', 'line-number', None,
+           _('show line number at the first appearance'))
          ] + walkopts,
-         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
+         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
     "archive":
         (archive,
          [('', 'no-decode', None, _('do not pass files through decoders')),
@@ -2819,11 +2824,9 @@ table = {
         (backout,
          [('', 'merge', None,
            _('merge with old dirstate parent after backout')),
-          ('d', 'date', '', _('record datecode as commit date')),
           ('', 'parent', '', _('parent to choose when backing out merge')),
-          ('u', 'user', '', _('record user as committer')),
           ('r', 'rev', '', _('revision to backout')),
-         ] + walkopts + commitopts,
+         ] + walkopts + commitopts + commitopts2,
          _('hg backout [OPTION]... [-r] REV')),
     "branch":
         (branch,
@@ -2865,9 +2868,7 @@ table = {
         (commit,
          [('A', 'addremove', None,
            _('mark new/missing files as added/removed before committing')),
-          ('d', 'date', '', _('record datecode as commit date')),
-          ('u', 'user', '', _('record user as commiter')),
-         ] + walkopts + commitopts,
+         ] + walkopts + commitopts + commitopts2,
          _('hg commit [OPTION]... [FILE]...')),
     "copy|cp":
         (copy,
@@ -3013,7 +3014,8 @@ table = {
           ('', 'template', '', _('display with template')),
          ] + walkopts,
          _('hg log [OPTION]... [FILE]')),
-    "manifest": (manifest, [], _('hg manifest [REV]')),
+    "manifest": (manifest, [('r', 'rev', '', _('revision to display'))],
+                 _('hg manifest [-r REV]')),
     "^merge":
         (merge,
          [('f', 'force', None, _('force a merge with outstanding changes')),
@@ -3057,10 +3059,8 @@ table = {
     "debugrawcommit|rawcommit":
         (rawcommit,
          [('p', 'parent', [], _('parent')),
-          ('d', 'date', '', _('date code')),
-          ('u', 'user', '', _('user')),
           ('F', 'files', '', _('file list'))
-          ] + commitopts,
+          ] + commitopts + commitopts2,
          _('hg debugrawcommit [OPTION]... [FILE]...')),
     "recover": (recover, [], _('hg recover')),
     "^remove|rm":
@@ -3106,7 +3106,8 @@ table = {
           ('', 'stdio', None, _('for remote clients')),
           ('t', 'templates', '', _('web templates to use')),
           ('', 'style', '', _('template style to use')),
-          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
+          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
+          ('', 'certificate', '', _('SSL certificate file'))],
          _('hg serve [OPTION]...')),
     "^status|st":
         (status,
@@ -3129,11 +3130,11 @@ table = {
         (tag,
          [('f', 'force', None, _('replace existing tag')),
           ('l', 'local', None, _('make the tag local')),
-          ('m', 'message', '', _('message for tag commit log entry')),
-          ('d', 'date', '', _('record datecode as commit date')),
-          ('u', 'user', '', _('record user as commiter')),
           ('r', 'rev', '', _('revision to tag')),
-          ('', 'remove', None, _('remove a tag'))],
+          ('', 'remove', None, _('remove a tag')),
+          # -l/--local is already there, commitopts cannot be used
+          ('m', 'message', '', _('use <text> as commit message')),
+         ] + commitopts2,
          _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
     "tags": (tags, [], _('hg tags')),
     "tip":
@@ -3160,14 +3161,3 @@ table = {
 norepo = ("clone init version help debugancestor debugcomplete debugdata"
           " debugindex debugindexdot debugdate debuginstall")
 optionalrepo = ("identify paths serve showconfig")
-
-def dispatch(args):
-    try:
-        u = ui.ui(traceback='--traceback' in args)
-    except util.Abort, inst:
-        sys.stderr.write(_("abort: %s\n") % inst)
-        return -1
-    return cmdutil.runcatch(u, args)
-
-def run():
-    sys.exit(dispatch(sys.argv[1:]))
--- a/mercurial/context.py
+++ b/mercurial/context.py
@@ -60,6 +60,18 @@ class changectx(object):
         else:
             raise AttributeError, name
 
+    def __contains__(self, key):
+        return key in self._manifest
+
+    def __getitem__(self, key):
+        return self.filectx(key)
+
+    def __iter__(self):
+        a = self._manifest.keys()
+        a.sort()
+        for f in a:
+            yield f
+
     def changeset(self): return self._changeset
     def manifest(self): return self._manifest
 
@@ -184,7 +196,7 @@ class filectx(object):
     def __eq__(self, other):
         try:
             return (self._path == other._path
-                    and self._changeid == other._changeid)
+                    and self._fileid == other._fileid)
         except AttributeError:
             return False
 
@@ -240,14 +252,32 @@ class filectx(object):
         return [filectx(self._repo, self._path, fileid=x,
                         filelog=self._filelog) for x in c]
 
-    def annotate(self, follow=False):
+    def annotate(self, follow=False, linenumber=None):
         '''returns a list of tuples of (ctx, line) for each line
         in the file, where ctx is the filectx of the node where
-        that line was last changed'''
+        that line was last changed.
+        If the "linenumber" parameter is not None, tuples of
+        ((ctx, linenumber), line) are returned instead, where linenumber
+        is the position of the line when it first appeared in the managed
+        file.
+        To reduce annotation cost, a fixed value (False) is used as the
+        linenumber when the "linenumber" parameter is False.'''
 
-        def decorate(text, rev):
+        def decorate_compat(text, rev):
             return ([rev] * len(text.splitlines()), text)
 
+        def without_linenumber(text, rev):
+            return ([(rev, False)] * len(text.splitlines()), text)
+
+        def with_linenumber(text, rev):
+            size = len(text.splitlines())
+            return ([(rev, i) for i in xrange(1, size + 1)], text)
+
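+        # choose the decorator: bare revs when linenumber is None (old API),
+        # (rev, lineno) pairs when True, (rev, False) pairs when False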
+        decorate = (((linenumber is None) and decorate_compat) or
+                    (linenumber and with_linenumber) or
+                    without_linenumber)
+
         def pair(parent, child):
             for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
                 child[0][b1:b2] = parent[0][a1:a2]
new file mode 100644
--- /dev/null
+++ b/mercurial/diffhelpers.c
@@ -0,0 +1,150 @@
+/*
+ * diffhelpers.c - helper routines for mpatch
+ *
+ * Copyright 2007 Chris Mason <chris.mason@oracle.com>
+ *
+ * This software may be used and distributed according to the terms
+ * of the GNU General Public License v2, incorporated herein by reference.
+ */
+
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+
+static char diffhelpers_doc[] = "Efficient diff parsing";
+static PyObject *diffhelpers_Error;
+
+
+/* fixup the last lines of a and b when the patch has no newline at eof */
+static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b)
+{
+	int hunksz = PyList_Size(hunk);
+	PyObject *s = PyList_GET_ITEM(hunk, hunksz-1);
+	char *l = PyString_AS_STRING(s);
+	int sz = PyString_GET_SIZE(s);
+	int alen = PyList_Size(a);
+	int blen = PyList_Size(b);
+	char c = l[0];
+
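+	/* copy of the last hunk line with its trailing newline stripped */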
+	PyObject *hline = PyString_FromStringAndSize(l, sz-1);
+	if (c == ' ' || c == '+') {
+		PyObject *rline = PyString_FromStringAndSize(l+1, sz-2);
+		PyList_SetItem(b, blen-1, rline);
+	}
+	if (c == ' ' || c == '-') {
+		Py_INCREF(hline);
+		PyList_SetItem(a, alen-1, hline);
+	}
+	PyList_SetItem(hunk, hunksz-1, hline);
+}
+
+/* python callable form of _fix_newline */
+static PyObject *
+fix_newline(PyObject *self, PyObject *args)
+{
+	PyObject *hunk, *a, *b;
+	if (!PyArg_ParseTuple(args, "OOO", &hunk, &a, &b))
+		return NULL;
+	_fix_newline(hunk, a, b);
+	return Py_BuildValue("l", 0);
+}
+
+/*
+ * read lines from fp into the hunk.  The hunk is parsed into two arrays
+ * a and b.  a gets the old state of the text, b gets the new state
+ * The control char from the hunk is saved when inserting into a, but not b
+ * (for performance while deleting files)
+ */
+static PyObject *
+addlines(PyObject *self, PyObject *args)
+{
+
+	PyObject *fp, *hunk, *a, *b, *x;
+	int i;
+	int lena, lenb;
+	int num;
+	int todoa, todob;
+	char *s, c;
+	PyObject *l;
+	if (!PyArg_ParseTuple(args, "OOiiOO", &fp, &hunk, &lena, &lenb, &a, &b))
+		return NULL;
+
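+	/* keep reading until a and b reach the lengths given in the hunk header */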
+	while(1) {
+		todoa = lena - PyList_Size(a);
+		todob = lenb - PyList_Size(b);
+		num = todoa > todob ? todoa : todob;
+		if (num == 0)
+		    break;
+		for (i = 0 ; i < num ; i++) {
+			x = PyFile_GetLine(fp, 0);
+			s = PyString_AS_STRING(x);
+			c = *s;
+			if (strcmp(s, "\\ No newline at end of file\n") == 0) {
+				_fix_newline(hunk, a, b);
+				continue;
+			}
+			PyList_Append(hunk, x);
+			if (c == '+') {
+				l = PyString_FromString(s + 1);
+				PyList_Append(b, l);
+				Py_DECREF(l);
+			} else if (c == '-') {
+				PyList_Append(a, x);
+			} else {
+				l = PyString_FromString(s + 1);
+				PyList_Append(b, l);
+				Py_DECREF(l);
+				PyList_Append(a, x);
+			}
+			Py_DECREF(x);
+		}
+	}
+	return Py_BuildValue("l", 0);
+}
+
+/*
+ * compare the lines in a with the lines in b.  a is assumed to have
+ * a control char at the start of each line, this char is ignored in the
+ * compare
+ */
+static PyObject *
+testhunk(PyObject *self, PyObject *args)
+{
+
+	PyObject *a, *b;
+	long bstart;
+	int alen, blen;
+	int i;
+	char *sa, *sb;
+
+	if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart))
+		return NULL;
+	alen = PyList_Size(a);
+	blen = PyList_Size(b);
+	if (alen > blen - bstart) {
+		return Py_BuildValue("l", -1);
+	}
+	for (i = 0 ; i < alen ; i++) {
+		sa = PyString_AS_STRING(PyList_GET_ITEM(a, i));
+		sb = PyString_AS_STRING(PyList_GET_ITEM(b, i + bstart));
+		if (strcmp(sa+1, sb) != 0)
+			return Py_BuildValue("l", -1);
+	}
+	return Py_BuildValue("l", 0);
+}
+
+static PyMethodDef methods[] = {
+	{"addlines", addlines, METH_VARARGS, "add lines to a hunk\n"},
+	{"fix_newline", fix_newline, METH_VARARGS, "fixup newline counters\n"},
+	{"testhunk", testhunk, METH_VARARGS, "test lines in a hunk\n"},
+	{NULL, NULL}
+};
+
+PyMODINIT_FUNC
+initdiffhelpers(void)
+{
+	Py_InitModule3("diffhelpers", methods, diffhelpers_doc);
+	diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
+	                                        NULL, NULL);
+}
+
--- a/mercurial/dirstate.py
+++ b/mercurial/dirstate.py
@@ -20,8 +20,8 @@ class dirstate(object):
     def __init__(self, opener, ui, root):
         self._opener = opener
         self._root = root
-        self._dirty = 0
-        self._dirtypl = 0
+        self._dirty = False
+        self._dirtypl = False
         self._ui = ui
 
     def __getattr__(self, name):
@@ -53,7 +53,7 @@ class dirstate(object):
                 self._incpath(f)
             return self._dirs
         elif name == '_ignore':
-            files = [self.wjoin('.hgignore')]
+            files = [self._join('.hgignore')]
             for name, path in self._ui.configitems("ui"):
                 if name == 'ignore' or name.startswith('ignore.'):
                     files.append(os.path.expanduser(path))
@@ -65,7 +65,7 @@ class dirstate(object):
         else:
             raise AttributeError, name
 
-    def wjoin(self, f):
+    def _join(self, f):
         return os.path.join(self._root, f)
 
     def getcwd(self):
@@ -89,11 +89,14 @@ class dirstate(object):
             return path.replace(os.sep, '/')
         return path
 
-    def __del__(self):
-        self.write()
-
     def __getitem__(self, key):
-        return self._map[key]
+        ''' current states:
+        n  normal
+        m  needs merging
+        r  marked for removal
+        a  marked for addition
+        ?  not tracked'''
+        return self._map.get(key, ("?",))[0]
 
     def __contains__(self, key):
         return key in self._map
@@ -110,21 +113,14 @@ class dirstate(object):
     def branch(self):
         return self._branch
 
-    def markdirty(self):
-        self._dirty = 1
-
     def setparents(self, p1, p2=nullid):
-        self.markdirty()
-        self._dirtypl = 1
+        self._dirty = self._dirtypl = True
         self._pl = p1, p2
 
     def setbranch(self, branch):
         self._branch = branch
         self._opener("branch", "w").write(branch + '\n')
 
-    def state(self, key):
-        return self._map.get(key, ("?",))[0]
-
     def _read(self):
         self._map = {}
         self._copymap = {}
@@ -145,31 +141,29 @@ class dirstate(object):
         dmap = self._map
         copymap = self._copymap
         unpack = struct.unpack
-
-        pos = 40
         e_size = struct.calcsize(_format)
+        pos1 = 40
+        l = len(st)
 
-        while pos < len(st):
-            newpos = pos + e_size
-            e = unpack(_format, st[pos:newpos])
-            l = e[4]
-            pos = newpos
-            newpos = pos + l
-            f = st[pos:newpos]
+        # the inner loop
+        while pos1 < l:
+            pos2 = pos1 + e_size
+            e = unpack(">cllll", st[pos1:pos2]) # a literal here is faster
+            pos1 = pos2 + e[4]
+            f = st[pos2:pos1]
             if '\0' in f:
                 f, c = f.split('\0')
                 copymap[f] = c
-            dmap[f] = e[:4]
-            pos = newpos
+            dmap[f] = e # we hold onto e[4] because making a subtuple is slow
 
     def invalidate(self):
         for a in "_map _copymap _branch _pl _dirs _ignore".split():
             if a in self.__dict__:
                 delattr(self, a)
-        self._dirty = 0
+        self._dirty = False
 
     def copy(self, source, dest):
-        self.markdirty()
+        self._dirty = True
         self._copymap[dest] = source
 
     def copied(self, file):
@@ -179,102 +173,134 @@ class dirstate(object):
         return self._copymap
 
     def _incpath(self, path):
-        for c in strutil.findall(path, '/'):
-            pc = path[:c]
-            self._dirs.setdefault(pc, 0)
-            self._dirs[pc] += 1
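+        # bump the count for the parent directory, creating ancestors on demand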
+        c = path.rfind('/')
+        if c >= 0:
+            dirs = self._dirs
+            base = path[:c]
+            if base not in dirs:
+                self._incpath(base)
+                dirs[base] = 1
+            else:
+                dirs[base] += 1
 
     def _decpath(self, path):
-        for c in strutil.findall(path, '/'):
-            pc = path[:c]
-            self._dirs.setdefault(pc, 0)
-            self._dirs[pc] -= 1
+        if "_dirs" in self.__dict__:
+            c = path.rfind('/')
+            if c >= 0:
+                base = path[:c]
+                dirs = self._dirs
+                if dirs[base] == 1:
+                    del dirs[base]
+                    self._decpath(base)
+                else:
+                    dirs[base] -= 1
 
     def _incpathcheck(self, f):
         if '\r' in f or '\n' in f:
             raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
         # shadows
         if f in self._dirs:
-            raise util.Abort(_('directory named %r already in dirstate') % f)
+            raise util.Abort(_('directory %r already in dirstate') % f)
         for c in strutil.rfindall(f, '/'):
             d = f[:c]
             if d in self._dirs:
                 break
             if d in self._map:
-                raise util.Abort(_('file named %r already in dirstate') % d)
+                raise util.Abort(_('file %r in dirstate clashes with %r') %
+                                 (d, f))
         self._incpath(f)
 
-    def update(self, files, state, **kw):
-        ''' current states:
-        n  normal
-        m  needs merging
-        r  marked for removal
-        a  marked for addition'''
+    def normal(self, f):
+        'mark a file normal and clean'
+        self._dirty = True
+        s = os.lstat(self._join(f))
+        self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
+        if self._copymap.has_key(f):
+            del self._copymap[f]
 
-        if not files: return
-        self.markdirty()
-        for f in files:
-            if self._copymap.has_key(f):
-                del self._copymap[f]
+    def normallookup(self, f):
+        'mark a file normal, but possibly dirty'
+        self._dirty = True
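+        # size/mtime -1 mean "unknown": the next status must recheck this file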
+        self._map[f] = ('n', 0, -1, -1, 0)
+        if f in self._copymap:
+            del self._copymap[f]
+
+    def normaldirty(self, f):
+        'mark a file normal, but dirty'
+        self._dirty = True
+        self._map[f] = ('n', 0, -2, -1, 0)
+        if f in self._copymap:
+            del self._copymap[f]
 
-            if state == "r":
-                self._map[f] = ('r', 0, 0, 0)
-                self._decpath(f)
-                continue
-            else:
-                if state == "a":
-                    self._incpathcheck(f)
-                s = os.lstat(self.wjoin(f))
-                st_size = kw.get('st_size', s.st_size)
-                st_mtime = kw.get('st_mtime', s.st_mtime)
-                self._map[f] = (state, s.st_mode, st_size, st_mtime)
+    def add(self, f):
+        'mark a file added'
+        self._dirty = True
+        self._incpathcheck(f)
+        self._map[f] = ('a', 0, -1, -1, 0)
+        if f in self._copymap:
+            del self._copymap[f]
+
+    def remove(self, f):
+        'mark a file removed'
+        self._dirty = True
+        self._map[f] = ('r', 0, 0, 0, 0)
+        self._decpath(f)
+        if f in self._copymap:
+            del self._copymap[f]
 
-    def forget(self, files):
-        if not files: return
-        self.markdirty()
-        for f in files:
-            try:
-                del self._map[f]
-                self._decpath(f)
-            except KeyError:
-                self._ui.warn(_("not in dirstate: %s!\n") % f)
-                pass
+    def merge(self, f):
+        'mark a file merged'
+        self._dirty = True
+        s = os.lstat(self._join(f))
+        self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0)
+        if f in self._copymap:
+            del self._copymap[f]
+
+    def forget(self, f):
+        'forget a file'
+        self._dirty = True
+        try:
+            del self._map[f]
+            self._decpath(f)
+        except KeyError:
+            self._ui.warn(_("not in dirstate: %s!\n") % f)
 
     def clear(self):
         self._map = {}
         self._copymap = {}
         self._pl = [nullid, nullid]
-        self.markdirty()
+        self._dirty = True
 
     def rebuild(self, parent, files):
         self.clear()
         for f in files:
             if files.execf(f):
-                self._map[f] = ('n', 0777, -1, 0)
+                self._map[f] = ('n', 0777, -1, 0, 0)
             else:
-                self._map[f] = ('n', 0666, -1, 0)
+                self._map[f] = ('n', 0666, -1, 0, 0)
         self._pl = (parent, nullid)
-        self.markdirty()
+        self._dirty = True
 
     def write(self):
         if not self._dirty:
             return
         cs = cStringIO.StringIO()
-        cs.write("".join(self._pl))
+        copymap = self._copymap
+        pack = struct.pack
+        write = cs.write
+        write("".join(self._pl))
         for f, e in self._map.iteritems():
-            c = self.copied(f)
-            if c:
-                f = f + "\0" + c
-            e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
-            cs.write(e)
-            cs.write(f)
+            if f in copymap:
+                f = "%s\0%s" % (f, copymap[f])
+            e = pack(_format, e[0], e[1], e[2], e[3], len(f))
+            write(e)
+            write(f)
         st = self._opener("dirstate", "w", atomictemp=True)
         st.write(cs.getvalue())
         st.rename()
-        self._dirty = 0
-        self._dirtypl = 0
+        self._dirty = self._dirtypl = False
 
-    def filterfiles(self, files):
+    def _filter(self, files):
         ret = {}
         unknown = []
 
@@ -304,16 +330,16 @@ class dirstate(object):
                 bs += 1
         return ret
 
-    def _supported(self, f, st, verbose=False):
-        if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
+    def _supported(self, f, mode, verbose=False):
+        if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
             return True
         if verbose:
             kind = 'unknown'
-            if stat.S_ISCHR(st.st_mode): kind = _('character device')
-            elif stat.S_ISBLK(st.st_mode): kind = _('block device')
-            elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
-            elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
-            elif stat.S_ISDIR(st.st_mode): kind = _('directory')
+            if stat.S_ISCHR(mode): kind = _('character device')
+            elif stat.S_ISBLK(mode): kind = _('block device')
+            elif stat.S_ISFIFO(mode): kind = _('fifo')
+            elif stat.S_ISSOCK(mode): kind = _('socket')
+            elif stat.S_ISDIR(mode): kind = _('directory')
             self._ui.warn(_('%s: unsupported file type (type is %s)\n')
                           % (self.pathto(f), kind))
         return False
@@ -345,7 +371,7 @@ class dirstate(object):
             dc = self._map.copy()
         else:
             files = util.unique(files)
-            dc = self.filterfiles(files)
+            dc = self._filter(files)
 
         def imatch(file_):
             if file_ not in dc and self._ignore(file_):
@@ -361,59 +387,73 @@ class dirstate(object):
         common_prefix_len = len(self._root)
         if not self._root.endswith(os.sep):
             common_prefix_len += 1
+
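+        # bind frequently used globals and methods to locals for the hot
+        # directory walk below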
+        normpath = util.normpath
+        listdir = os.listdir
+        lstat = os.lstat
+        bisect_left = bisect.bisect_left
+        isdir = os.path.isdir
+        pconvert = util.pconvert
+        join = os.path.join
+        s_isdir = stat.S_ISDIR
+        supported = self._supported
+        _join = self._join
+        known = {'.hg': 1}
+
         # recursion free walker, faster than os.walk.
         def findfiles(s):
             work = [s]
+            wadd = work.append
+            found = []
+            add = found.append
             if directories:
-                yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
+                add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
             while work:
                 top = work.pop()
-                names = os.listdir(top)
+                names = listdir(top)
                 names.sort()
                 # nd is the top of the repository dir tree
-                nd = util.normpath(top[common_prefix_len:])
+                nd = normpath(top[common_prefix_len:])
                 if nd == '.':
                     nd = ''
                 else:
                     # do not recurse into a repo contained in this
                     # one. use bisect to find .hg directory so speed
                     # is good on big directory.
-                    hg = bisect.bisect_left(names, '.hg')
+                    hg = bisect_left(names, '.hg')
                     if hg < len(names) and names[hg] == '.hg':
-                        if os.path.isdir(os.path.join(top, '.hg')):
+                        if isdir(join(top, '.hg')):
                             continue
                 for f in names:
-                    np = util.pconvert(os.path.join(nd, f))
-                    if seen(np):
+                    np = pconvert(join(nd, f))
+                    if np in known:
                         continue
-                    p = os.path.join(top, f)
+                    known[np] = 1
+                    p = join(top, f)
                     # don't trip over symlinks
-                    st = os.lstat(p)
-                    if stat.S_ISDIR(st.st_mode):
+                    st = lstat(p)
+                    if s_isdir(st.st_mode):
                         if not ignore(np):
-                            work.append(p)
+                            wadd(p)
                             if directories:
-                                yield 'd', np, st
-                        if imatch(np) and np in dc:
-                            yield 'm', np, st
+                                add((np, 'd', st))
+                        if np in dc and match(np):
+                            add((np, 'm', st))
                     elif imatch(np):
-                        if self._supported(np, st):
-                            yield 'f', np, st
+                        if supported(np, st.st_mode):
+                            add((np, 'f', st))
                         elif np in dc:
-                            yield 'm', np, st
-
-        known = {'.hg': 1}
-        def seen(fn):
-            if fn in known: return True
-            known[fn] = 1
+                            add((np, 'm', st))
+            found.sort()
+            return found
 
         # step one, find all files that match our criteria
         files.sort()
         for ff in files:
-            nf = util.normpath(ff)
-            f = self.wjoin(ff)
+            nf = normpath(ff)
+            f = _join(ff)
             try:
-                st = os.lstat(f)
+                st = lstat(f)
             except OSError, inst:
                 found = False
                 for fn in dc:
@@ -427,15 +467,15 @@ class dirstate(object):
                     elif badmatch and badmatch(ff) and imatch(nf):
                         yield 'b', ff, None
                 continue
-            if stat.S_ISDIR(st.st_mode):
-                cmp1 = (lambda x, y: cmp(x[1], y[1]))
-                sorted_ = [ x for x in findfiles(f) ]
-                sorted_.sort(cmp1)
-                for e in sorted_:
-                    yield e
+            if s_isdir(st.st_mode):
+                for f, src, st in findfiles(f):
+                    yield src, f, st
             else:
-                if not seen(nf) and match(nf):
-                    if self._supported(ff, st, verbose=True):
+                if nf in known:
+                    continue
+                known[nf] = 1
+                if match(nf):
+                    if supported(ff, st.st_mode, verbose=True):
                         yield 'f', nf, st
                     elif ff in dc:
                         yield 'm', nf, st
@@ -445,58 +485,74 @@ class dirstate(object):
         ks = dc.keys()
         ks.sort()
         for k in ks:
-            if not seen(k) and imatch(k):
+            if k in known:
+                continue
+            known[k] = 1
+            if imatch(k):
                 yield 'm', k, None
 
-    def status(self, files=None, match=util.always, list_ignored=False,
-               list_clean=False):
+    def status(self, files, match, list_ignored, list_clean):
         lookup, modified, added, unknown, ignored = [], [], [], [], []
         removed, deleted, clean = [], [], []
 
+        _join = self._join
+        lstat = os.lstat
+        cmap = self._copymap
+        dmap = self._map
+        ladd = lookup.append
+        madd = modified.append
+        aadd = added.append
+        uadd = unknown.append
+        iadd = ignored.append
+        radd = removed.append
+        dadd = deleted.append
+        cadd = clean.append
+
         for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
-            try:
-                type_, mode, size, time = self[fn]
-            except KeyError:
+            if fn in dmap:
+                type_, mode, size, time, foo = dmap[fn]
+            else:
                 if list_ignored and self._ignore(fn):
-                    ignored.append(fn)
+                    iadd(fn)
                 else:
-                    unknown.append(fn)
+                    uadd(fn)
                 continue
             if src == 'm':
                 nonexistent = True
                 if not st:
                     try:
-                        st = os.lstat(self.wjoin(fn))
+                        st = lstat(_join(fn))
                     except OSError, inst:
                         if inst.errno != errno.ENOENT:
                             raise
                         st = None
                     # We need to re-check that it is a valid file
-                    if st and self._supported(fn, st):
+                    if st and self._supported(fn, st.st_mode):
                         nonexistent = False
                 # XXX: what to do with files that are no longer present in
                 # the fs but not marked as removed in the dirstate?
                 if nonexistent and type_ in "nm":
-                    deleted.append(fn)
+                    dadd(fn)
                     continue
             # check the common case first
             if type_ == 'n':
                 if not st:
-                    st = os.lstat(self.wjoin(fn))
+                    st = lstat(_join(fn))
                 if (size >= 0 and (size != st.st_size
                                    or (mode ^ st.st_mode) & 0100)
+                    or size == -2
                     or fn in self._copymap):
-                    modified.append(fn)
+                    madd(fn)
                 elif time != int(st.st_mtime):
-                    lookup.append(fn)
+                    ladd(fn)
                 elif list_clean:
-                    clean.append(fn)
+                    cadd(fn)
             elif type_ == 'm':
-                modified.append(fn)
+                madd(fn)
             elif type_ == 'a':
-                added.append(fn)
+                aadd(fn)
             elif type_ == 'r':
-                removed.append(fn)
+                radd(fn)
 
         return (lookup, modified, added, removed, deleted, unknown, ignored,
                 clean)
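An illustrative sketch, not part of the changeset: status() now takes all four arguments explicitly and returns eight lists in a fixed order. The repository path, the use of util.always as a match-everything predicate, and files=None for a full walk are assumptions carried over from the old defaults.

    from mercurial import hg, ui, util

    repo = hg.repository(ui.ui(), '.')   # assumes the cwd is a repository
    st = repo.dirstate.status(None, util.always, False, True)
    lookup, modified, added, removed, deleted, unknown, ignored, clean = st
    for flag, names in zip('MAR!?', (modified, added, removed, deleted, unknown)):
        for f in names:
            print('%s %s' % (flag, f))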
new file mode 100644
--- /dev/null
+++ b/mercurial/dispatch.py
@@ -0,0 +1,401 @@
+# dispatch.py - command dispatching for mercurial
+#
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from node import *
+from i18n import _
+import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex, time
+import util, commands, hg, lock, fancyopts, revlog, version, extensions, hook
+import cmdutil
+import ui as _ui
+
+class ParseError(Exception):
+    """Exception raised on errors in parsing the command line."""
+
+def run():
+    "run the command in sys.argv"
+    sys.exit(dispatch(sys.argv[1:]))
+
+def dispatch(args):
+    "run the command specified in args"
+    try:
+        u = _ui.ui(traceback='--traceback' in args)
+    except util.Abort, inst:
+        sys.stderr.write(_("abort: %s\n") % inst)
+        return -1
+    return _runcatch(u, args)
+
+def _runcatch(ui, args):
+    def catchterm(*args):
+        raise util.SignalInterrupt
+
+    for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
+        num = getattr(signal, name, None)
+        if num: signal.signal(num, catchterm)
+
+    try:
+        try:
+            # enter the debugger before command execution
+            if '--debugger' in args:
+                pdb.set_trace()
+            try:
+                return _dispatch(ui, args)
+            finally:
+                ui.flush()
+        except:
+            # enter the debugger when we hit an exception
+            if '--debugger' in args:
+                pdb.post_mortem(sys.exc_info()[2])
+            ui.print_exc()
+            raise
+
+    except ParseError, inst:
+        if inst.args[0]:
+            ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
+            commands.help_(ui, inst.args[0])
+        else:
+            ui.warn(_("hg: %s\n") % inst.args[1])
+            commands.help_(ui, 'shortlist')
+    except cmdutil.AmbiguousCommand, inst:
+        ui.warn(_("hg: command '%s' is ambiguous:\n    %s\n") %
+                (inst.args[0], " ".join(inst.args[1])))
+    except cmdutil.UnknownCommand, inst:
+        ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
+        commands.help_(ui, 'shortlist')
+    except hg.RepoError, inst:
+        ui.warn(_("abort: %s!\n") % inst)
+    except lock.LockHeld, inst:
+        if inst.errno == errno.ETIMEDOUT:
+            reason = _('timed out waiting for lock held by %s') % inst.locker
+        else:
+            reason = _('lock held by %s') % inst.locker
+        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
+    except lock.LockUnavailable, inst:
+        ui.warn(_("abort: could not lock %s: %s\n") %
+               (inst.desc or inst.filename, inst.strerror))
+    except revlog.RevlogError, inst:
+        ui.warn(_("abort: %s!\n") % inst)
+    except util.SignalInterrupt:
+        ui.warn(_("killed!\n"))
+    except KeyboardInterrupt:
+        try:
+            ui.warn(_("interrupted!\n"))
+        except IOError, inst:
+            if inst.errno == errno.EPIPE:
+                if ui.debugflag:
+                    ui.warn(_("\nbroken pipe\n"))
+            else:
+                raise
+    except socket.error, inst:
+        ui.warn(_("abort: %s\n") % inst[1])
+    except IOError, inst:
+        if hasattr(inst, "code"):
+            ui.warn(_("abort: %s\n") % inst)
+        elif hasattr(inst, "reason"):
+            try: # usually it is in the form (errno, strerror)
+                reason = inst.reason.args[1]
+            except: # it might be anything, for example a string
+                reason = inst.reason
+            ui.warn(_("abort: error: %s\n") % reason)
+        elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
+            if ui.debugflag:
+                ui.warn(_("broken pipe\n"))
+        elif getattr(inst, "strerror", None):
+            if getattr(inst, "filename", None):
+                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
+            else:
+                ui.warn(_("abort: %s\n") % inst.strerror)
+        else:
+            raise
+    except OSError, inst:
+        if getattr(inst, "filename", None):
+            ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
+        else:
+            ui.warn(_("abort: %s\n") % inst.strerror)
+    except util.UnexpectedOutput, inst:
+        ui.warn(_("abort: %s") % inst[0])
+        if not isinstance(inst[1], basestring):
+            ui.warn(" %r\n" % (inst[1],))
+        elif not inst[1]:
+            ui.warn(_(" empty string\n"))
+        else:
+            ui.warn("\n%r\n" % util.ellipsis(inst[1]))
+    except ImportError, inst:
+        m = str(inst).split()[-1]
+        ui.warn(_("abort: could not import module %s!\n" % m))
+        if m in "mpatch bdiff".split():
+            ui.warn(_("(did you forget to compile extensions?)\n"))
+        elif m in "zlib".split():
+            ui.warn(_("(is your Python install correct?)\n"))
+
+    except util.Abort, inst:
+        ui.warn(_("abort: %s\n") % inst)
+    except SystemExit, inst:
+        # Commands shouldn't sys.exit directly, but give a return code.
+        # Just in case, catch this and pass the exit code to the caller.
+        return inst.code
+    except:
+        ui.warn(_("** unknown exception encountered, details follow\n"))
+        ui.warn(_("** report bug details to "
+                 "http://www.selenic.com/mercurial/bts\n"))
+        ui.warn(_("** or mercurial@selenic.com\n"))
+        ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
+               % version.get_version())
+        raise
+
+    return -1
+
+def _findrepo():
+    p = os.getcwd()
+    while not os.path.isdir(os.path.join(p, ".hg")):
+        oldp, p = p, os.path.dirname(p)
+        if p == oldp:
+            return None
+
+    return p
+
+def _parse(ui, args):
+    options = {}
+    cmdoptions = {}
+
+    try:
+        args = fancyopts.fancyopts(args, commands.globalopts, options)
+    except fancyopts.getopt.GetoptError, inst:
+        raise ParseError(None, inst)
+
+    if args:
+        cmd, args = args[0], args[1:]
+        aliases, i = cmdutil.findcmd(ui, cmd, commands.table)
+        cmd = aliases[0]
+        defaults = ui.config("defaults", cmd)
+        if defaults:
+            args = shlex.split(defaults) + args
+        c = list(i[1])
+    else:
+        cmd = None
+        c = []
+
+    # combine global options into local
+    for o in commands.globalopts:
+        c.append((o[0], o[1], options[o[1]], o[3]))
+
+    try:
+        args = fancyopts.fancyopts(args, c, cmdoptions)
+    except fancyopts.getopt.GetoptError, inst:
+        raise ParseError(cmd, inst)
+
+    # separate global options back out
+    for o in commands.globalopts:
+        n = o[1]
+        options[n] = cmdoptions[n]
+        del cmdoptions[n]
+
+    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
+
+def _parseconfig(config):
+    """parse the --config options from the command line"""
+    parsed = []
+    for cfg in config:
+        try:
+            name, value = cfg.split('=', 1)
+            section, name = name.split('.', 1)
+            if not section or not name:
+                raise IndexError
+            parsed.append((section, name, value))
+        except (IndexError, ValueError):
+            raise util.Abort(_('malformed --config option: %s') % cfg)
+    return parsed
+
+def _earlygetopt(aliases, args):
+    """Return list of values for an option (or aliases).
+
+    The values are listed in the order they appear in args.
+    The options and values are removed from args.
+    """
+    try:
+        argcount = args.index("--")
+    except ValueError:
+        argcount = len(args)
+    shortopts = [opt for opt in aliases if len(opt) == 2]
+    values = []
+    pos = 0
+    while pos < argcount:
+        if args[pos] in aliases:
+            if pos + 1 >= argcount:
+                # ignore and let getopt report an error if there is no value
+                break
+            del args[pos]
+            values.append(args.pop(pos))
+            argcount -= 2
+        elif args[pos][:2] in shortopts:
+            # short option can have no following space, e.g. hg log -Rfoo
+            values.append(args.pop(pos)[2:])
+            argcount -= 1
+        else:
+            pos += 1
+    return values
+
+_loaded = {}
+def _dispatch(ui, args):
+    # read --config before doing anything else
+    # (e.g. to change trust settings for reading .hg/hgrc)
+    config = _earlygetopt(['--config'], args)
+    if config:
+        ui.updateopts(config=_parseconfig(config))
+
+    # check for cwd
+    cwd = _earlygetopt(['--cwd'], args)
+    if cwd:
+        os.chdir(cwd[-1])
+
+    # read the local repository .hgrc into a local ui object
+    path = _findrepo() or ""
+    if not path:
+        lui = ui
+    if path:
+        try:
+            lui = _ui.ui(parentui=ui)
+            lui.readconfig(os.path.join(path, ".hg", "hgrc"))
+        except IOError:
+            pass
+
+    # now we can expand paths, even ones in .hg/hgrc
+    rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
+    if rpath:
+        path = lui.expandpath(rpath[-1])
+        lui = _ui.ui(parentui=ui)
+        lui.readconfig(os.path.join(path, ".hg", "hgrc"))
+
+    extensions.loadall(lui)
+    for name, module in extensions.extensions():
+        if name in _loaded:
+            continue
+        cmdtable = getattr(module, 'cmdtable', {})
+        overrides = [cmd for cmd in cmdtable if cmd in commands.table]
+        if overrides:
+            ui.warn(_("extension '%s' overrides commands: %s\n")
+                    % (name, " ".join(overrides)))
+        commands.table.update(cmdtable)
+        _loaded[name] = 1
+    # check for fallback encoding
+    fallback = lui.config('ui', 'fallbackencoding')
+    if fallback:
+        util._fallbackencoding = fallback
+
+    fullargs = args
+    cmd, func, args, options, cmdoptions = _parse(lui, args)
+
+    if options["config"]:
+        raise util.Abort(_("Option --config may not be abbreviated!"))
+    if options["cwd"]:
+        raise util.Abort(_("Option --cwd may not be abbreviated!"))
+    if options["repository"]:
+        raise util.Abort(_(
+            "Option -R has to be separated from other options (i.e. not -qR) "
+            "and --repository may only be abbreviated as --repo!"))
+
+    if options["encoding"]:
+        util._encoding = options["encoding"]
+    if options["encodingmode"]:
+        util._encodingmode = options["encodingmode"]
+    if options["time"]:
+        def get_times():
+            t = os.times()
+            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
+                t = (t[0], t[1], t[2], t[3], time.clock())
+            return t
+        s = get_times()
+        def print_time():
+            t = get_times()
+            ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
+                (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
+        atexit.register(print_time)
+
+    ui.updateopts(options["verbose"], options["debug"], options["quiet"],
+                 not options["noninteractive"], options["traceback"])
+
+    if options['help']:
+        return commands.help_(ui, cmd, options['version'])
+    elif options['version']:
+        return commands.version_(ui)
+    elif not cmd:
+        return commands.help_(ui, 'shortlist')
+
+    repo = None
+    if cmd not in commands.norepo.split():
+        try:
+            repo = hg.repository(ui, path=path)
+            ui = repo.ui
+            if not repo.local():
+                raise util.Abort(_("repository '%s' is not local") % path)
+        except hg.RepoError:
+            if cmd not in commands.optionalrepo.split():
+                if not path:
+                    raise hg.RepoError(_("There is no Mercurial repository here"
+                                         " (.hg not found)"))
+                raise
+        d = lambda: func(ui, repo, *args, **cmdoptions)
+    else:
+        d = lambda: func(ui, *args, **cmdoptions)
+
+    # run pre-hook, and abort if it fails
+    ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
+    if ret:
+        return ret
+    ret = _runcommand(ui, options, cmd, d)
+    # run post-hook, passing command result
+    hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
+              result = ret)
+    return ret
+
+def _runcommand(ui, options, cmd, cmdfunc):
+    def checkargs():
+        try:
+            return cmdfunc()
+        except TypeError, inst:
+            # was this an argument error?
+            tb = traceback.extract_tb(sys.exc_info()[2])
+            if len(tb) != 2: # no
+                raise
+            raise ParseError(cmd, _("invalid arguments"))
+
+    if options['profile']:
+        import hotshot, hotshot.stats
+        prof = hotshot.Profile("hg.prof")
+        try:
+            try:
+                return prof.runcall(checkargs)
+            except:
+                try:
+                    ui.warn(_('exception raised - generating '
+                             'profile anyway\n'))
+                except:
+                    pass
+                raise
+        finally:
+            prof.close()
+            stats = hotshot.stats.load("hg.prof")
+            stats.strip_dirs()
+            stats.sort_stats('time', 'calls')
+            stats.print_stats(40)
+    elif options['lsprof']:
+        try:
+            from mercurial import lsprof
+        except ImportError:
+            raise util.Abort(_(
+                'lsprof not available - install from '
+                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
+        p = lsprof.Profiler()
+        p.enable(subcalls=True)
+        try:
+            return checkargs()
+        finally:
+            p.disable()
+            stats = lsprof.Stats(p.getstats())
+            stats.sort()
+            stats.pprint(top=10, file=sys.stderr, climit=5)
+    else:
+        return checkargs()
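For illustration only (the argument values are invented): the early-option scanner defined above returns the values it finds in order and removes the matched options from the argument list in place, leaving anything after '--' alone.

    from mercurial import dispatch

    args = ['-Rfoo', 'log', '--repository', 'bar', '--', '-R', 'baz']
    values = dispatch._earlygetopt(['-R', '--repository', '--repo'], args)
    assert values == ['foo', 'bar']
    assert args == ['log', '--', '-R', 'baz']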
--- a/mercurial/extensions.py
+++ b/mercurial/extensions.py
@@ -6,10 +6,17 @@
 # of the GNU General Public License, incorporated herein by reference.
 
 import imp, os
-import commands, hg, util, sys
+import util, sys
 from i18n import _
 
 _extensions = {}
+_order = []
+
+def extensions():
+    for name in _order:
+        module = _extensions[name]
+        if module:
+            yield name, module
 
 def find(name):
     '''return module with given extension name'''
@@ -22,9 +29,13 @@ def find(name):
         raise KeyError(name)
 
 def load(ui, name, path):
-    if name in _extensions:
+    if name.startswith('hgext.'):
+        shortname = name[6:]
+    else:
+        shortname = name
+    if shortname in _extensions:
         return
-    _extensions[name] = None
+    _extensions[shortname] = None
     if path:
         # the module will be loaded in sys.modules
         # choose an unique name so that it doesn't
@@ -48,20 +59,12 @@ def load(ui, name, path):
             mod = importh("hgext.%s" % name)
         except ImportError:
             mod = importh(name)
-    _extensions[name] = mod
+    _extensions[shortname] = mod
+    _order.append(shortname)
 
     uisetup = getattr(mod, 'uisetup', None)
     if uisetup:
         uisetup(ui)
-    reposetup = getattr(mod, 'reposetup', None)
-    if reposetup:
-        hg.repo_setup_hooks.append(reposetup)
-    cmdtable = getattr(mod, 'cmdtable', {})
-    overrides = [cmd for cmd in cmdtable if cmd in commands.table]
-    if overrides:
-        ui.warn(_("extension '%s' overrides commands: %s\n")
-                % (name, " ".join(overrides)))
-    commands.table.update(cmdtable)
 
 def loadall(ui):
     result = ui.configitems("extensions")
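A hypothetical extension skeleton, showing the attribute names that the reworked loader and its callers look up: load() calls uisetup(), hg.repository() calls reposetup() for each repository, and dispatch._dispatch() merges cmdtable into commands.table. The module itself and its messages are invented.

    # myext.py -- hypothetical extension module
    def uisetup(ui):
        # run once by extensions.load() right after import
        ui.note("myext enabled\n")

    def reposetup(ui, repo):
        # run by hg.repository() for every repository object it creates
        ui.note("myext sees %s\n" % repo.root)

    # merged into commands.table by dispatch._dispatch()
    cmdtable = {}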
--- a/mercurial/hg.py
+++ b/mercurial/hg.py
@@ -10,7 +10,7 @@ from node import *
 from repo import *
 from i18n import _
 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
-import errno, lock, os, shutil, util, cmdutil
+import errno, lock, os, shutil, util, extensions
 import merge as _merge
 import verify as _verify
 
@@ -18,16 +18,23 @@ def _local(path):
     return (os.path.isfile(util.drop_scheme('file', path)) and
             bundlerepo or localrepo)
 
+def parseurl(url, revs):
+    '''parse url#branch, returning url, branch + revs'''
+
+    if '#' not in url:
+        return url, (revs or None), None
+
+    url, rev = url.split('#', 1)
+    return url, revs + [rev], rev
+
 schemes = {
     'bundle': bundlerepo,
     'file': _local,
-    'hg': httprepo,
     'http': httprepo,
     'https': httprepo,
-    'old-http': statichttprepo,
     'ssh': sshrepo,
     'static-http': statichttprepo,
-    }
+}
 
 def _lookup(path):
     scheme = 'file'
@@ -50,14 +57,14 @@ def islocal(repo):
             return False
     return repo.local()
 
-repo_setup_hooks = []
-
 def repository(ui, path='', create=False):
     """return a repository object for the specified path"""
     repo = _lookup(path).instance(ui, path, create)
     ui = getattr(repo, "ui", ui)
-    for hook in repo_setup_hooks:
-        hook(ui, repo)
+    for name, module in extensions.extensions():
+        hook = getattr(module, 'reposetup', None)
+        if hook:
+            hook(ui, repo)
     return repo
 
 def defaultdest(source):
@@ -99,7 +106,7 @@ def clone(ui, source, dest=None, pull=Fa
     """
 
     origsource = source
-    source, rev, checkout = cmdutil.parseurl(ui.expandpath(source), rev)
+    source, rev, checkout = parseurl(ui.expandpath(source), rev)
 
     if isinstance(source, str):
         src_repo = repository(ui, source)
@@ -134,109 +141,105 @@ def clone(ui, source, dest=None, pull=Fa
             if self.dir_:
                 self.rmtree(self.dir_, True)
 
-    dir_cleanup = None
-    if islocal(dest):
-        dir_cleanup = DirCleanup(dest)
+    src_lock = dest_lock = dir_cleanup = None
+    try:
+        if islocal(dest):
+            dir_cleanup = DirCleanup(dest)
 
-    abspath = origsource
-    copy = False
-    if src_repo.local() and islocal(dest):
-        abspath = os.path.abspath(util.drop_scheme('file', origsource))
-        copy = not pull and not rev
+        abspath = origsource
+        copy = False
+        if src_repo.local() and islocal(dest):
+            abspath = os.path.abspath(util.drop_scheme('file', origsource))
+            copy = not pull and not rev
 
-    src_lock, dest_lock = None, None
-    if copy:
-        try:
-            # we use a lock here because if we race with commit, we
-            # can end up with extra data in the cloned revlogs that's
-            # not pointed to by changesets, thus causing verify to
-            # fail
-            src_lock = src_repo.lock()
-        except lock.LockException:
-            copy = False
+        if copy:
+            try:
+                # we use a lock here because if we race with commit, we
+                # can end up with extra data in the cloned revlogs that's
+                # not pointed to by changesets, thus causing verify to
+                # fail
+                src_lock = src_repo.lock()
+            except lock.LockException:
+                copy = False
 
-    if copy:
-        def force_copy(src, dst):
-            try:
-                util.copyfiles(src, dst)
-            except OSError, inst:
-                if inst.errno != errno.ENOENT:
-                    raise
+        if copy:
+            def force_copy(src, dst):
+                try:
+                    util.copyfiles(src, dst)
+                except OSError, inst:
+                    if inst.errno != errno.ENOENT:
+                        raise
 
-        src_store = os.path.realpath(src_repo.spath)
-        if not os.path.exists(dest):
-            os.mkdir(dest)
-        dest_path = os.path.realpath(os.path.join(dest, ".hg"))
-        os.mkdir(dest_path)
-        if src_repo.spath != src_repo.path:
-            # XXX racy
-            dummy_changelog = os.path.join(dest_path, "00changelog.i")
-            # copy the dummy changelog
-            force_copy(src_repo.join("00changelog.i"), dummy_changelog)
-            dest_store = os.path.join(dest_path, "store")
-            os.mkdir(dest_store)
-        else:
-            dest_store = dest_path
-        # copy the requires file
-        force_copy(src_repo.join("requires"),
-                   os.path.join(dest_path, "requires"))
-        # we lock here to avoid premature writing to the target
-        dest_lock = lock.lock(os.path.join(dest_store, "lock"))
+            src_store = os.path.realpath(src_repo.spath)
+            if not os.path.exists(dest):
+                os.mkdir(dest)
+            dest_path = os.path.realpath(os.path.join(dest, ".hg"))
+            os.mkdir(dest_path)
+            if src_repo.spath != src_repo.path:
+                # XXX racy
+                dummy_changelog = os.path.join(dest_path, "00changelog.i")
+                # copy the dummy changelog
+                force_copy(src_repo.join("00changelog.i"), dummy_changelog)
+                dest_store = os.path.join(dest_path, "store")
+                os.mkdir(dest_store)
+            else:
+                dest_store = dest_path
+            # copy the requires file
+            force_copy(src_repo.join("requires"),
+                       os.path.join(dest_path, "requires"))
+            # we lock here to avoid premature writing to the target
+            dest_lock = lock.lock(os.path.join(dest_store, "lock"))
 
-        files = ("data",
-                 "00manifest.d", "00manifest.i",
-                 "00changelog.d", "00changelog.i")
-        for f in files:
-            src = os.path.join(src_store, f)
-            dst = os.path.join(dest_store, f)
-            force_copy(src, dst)
+            files = ("data",
+                     "00manifest.d", "00manifest.i",
+                     "00changelog.d", "00changelog.i")
+            for f in files:
+                src = os.path.join(src_store, f)
+                dst = os.path.join(dest_store, f)
+                force_copy(src, dst)
+
+            # we need to re-init the repo after manually copying the data
+            # into it
+            dest_repo = repository(ui, dest)
+
+        else:
+            dest_repo = repository(ui, dest, create=True)
 
-        # we need to re-init the repo after manually copying the data
-        # into it
-        dest_repo = repository(ui, dest)
-
-    else:
-        dest_repo = repository(ui, dest, create=True)
+            revs = None
+            if rev:
+                if 'lookup' not in src_repo.capabilities:
+                    raise util.Abort(_("src repository does not support revision "
+                                       "lookup and so doesn't support clone by "
+                                       "revision"))
+                revs = [src_repo.lookup(r) for r in rev]
 
-        revs = None
-        if rev:
-            if 'lookup' not in src_repo.capabilities:
-                raise util.Abort(_("src repository does not support revision "
-                                   "lookup and so doesn't support clone by "
-                                   "revision"))
-            revs = [src_repo.lookup(r) for r in rev]
+            if dest_repo.local():
+                dest_repo.clone(src_repo, heads=revs, stream=stream)
+            elif src_repo.local():
+                src_repo.push(dest_repo, revs=revs)
+            else:
+                raise util.Abort(_("clone from remote to remote not supported"))
+
+        if dir_cleanup:
+            dir_cleanup.close()
 
         if dest_repo.local():
-            dest_repo.clone(src_repo, heads=revs, stream=stream)
-        elif src_repo.local():
-            src_repo.push(dest_repo, revs=revs)
-        else:
-            raise util.Abort(_("clone from remote to remote not supported"))
-
-    if src_lock:
-        src_lock.release()
-
-    if dir_cleanup:
-        dir_cleanup.close()
+            fp = dest_repo.opener("hgrc", "w", text=True)
+            fp.write("[paths]\n")
+            fp.write("default = %s\n" % abspath)
+            fp.close()
 
-    if dest_repo.local():
-        fp = dest_repo.opener("hgrc", "w", text=True)
-        fp.write("[paths]\n")
-        fp.write("default = %s\n" % abspath)
-        fp.close()
-
-        if dest_lock:
-            dest_lock.release()
+            if update:
+                if not checkout:
+                    try:
+                        checkout = dest_repo.lookup("default")
+                    except:
+                        checkout = dest_repo.changelog.tip()
+                _update(dest_repo, checkout)
 
-        if update:
-            if not checkout:
-                try:
-                    checkout = dest_repo.lookup("default")
-                except:
-                    checkout = dest_repo.changelog.tip()
-            _update(dest_repo, checkout)
-
-    return src_repo, dest_repo
+        return src_repo, dest_repo
+    finally:
+        del src_lock, dest_lock, dir_cleanup
 
 def _showstats(repo, stats):
     stats = ((stats[0], _("updated")),
@@ -251,7 +254,7 @@ def _update(repo, node): return update(r
 def update(repo, node):
     """update the working directory to node, merging linear changes"""
     pl = repo.parents()
-    stats = _merge.update(repo, node, False, False, None, None)
+    stats = _merge.update(repo, node, False, False, None)
     _showstats(repo, stats)
     if stats[3]:
         repo.ui.status(_("There are unresolved merges with"
@@ -265,15 +268,15 @@ def update(repo, node):
                        % (pl[0].rev(), repo.changectx(node).rev()))
     return stats[3]
 
-def clean(repo, node, wlock=None, show_stats=True):
+def clean(repo, node, show_stats=True):
     """forcibly switch the working directory to node, clobbering changes"""
-    stats = _merge.update(repo, node, False, True, None, wlock)
+    stats = _merge.update(repo, node, False, True, None)
     if show_stats: _showstats(repo, stats)
     return stats[3]
 
-def merge(repo, node, force=None, remind=True, wlock=None):
+def merge(repo, node, force=None, remind=True):
     """branch merge with node, resolving changes"""
-    stats = _merge.update(repo, node, True, force, False, wlock)
+    stats = _merge.update(repo, node, True, force, False)
     _showstats(repo, stats)
     if stats[3]:
         pl = repo.parents()
@@ -286,9 +289,9 @@ def merge(repo, node, force=None, remind
         repo.ui.status(_("(branch merge, don't forget to commit)\n"))
     return stats[3]
 
-def revert(repo, node, choose, wlock):
+def revert(repo, node, choose):
     """revert changes to revision in node without updating dirstate"""
-    return _merge.update(repo, node, False, True, choose, wlock)[3]
+    return _merge.update(repo, node, False, True, choose)[3]
 
 def verify(repo):
     """verify the consistency of a repository"""
--- a/mercurial/hgweb/hgweb_mod.py
+++ b/mercurial/hgweb/hgweb_mod.py
@@ -64,7 +64,7 @@ def revnavgen(pos, pagelen, limit, nodef
 
 class hgweb(object):
     def __init__(self, repo, name=None):
-        if type(repo) == type(""):
+        if isinstance(repo, str):
             parentui = ui.ui(report_untrusted=False, interactive=False)
             self.repo = hg.repository(parentui, repo)
         else:
@@ -210,7 +210,7 @@ class hgweb(object):
                                           opts=diffopts), f, tn)
 
     def changelog(self, ctx, shortlog=False):
-        def changelist(**map):
+        def changelist(limit=0,**map):
             cl = self.repo.changelog
             l = [] # build a list in forward order for efficiency
             for i in xrange(start, end):
@@ -230,6 +230,9 @@ class hgweb(object):
                              "tags": self.nodetagsdict(n),
                              "branches": self.nodebranchdict(ctx)})
 
+            if limit > 0:
+                l = l[:limit]
+
             for e in l:
                 yield e
 
@@ -247,7 +250,9 @@ class hgweb(object):
         yield self.t(shortlog and 'shortlog' or 'changelog',
                      changenav=changenav,
                      node=hex(cl.tip()),
-                     rev=pos, changesets=count, entries=changelist,
+                     rev=pos, changesets=count,
+                     entries=lambda **x: changelist(limit=0,**x),
+                     latestentry=lambda **x: changelist(limit=1,**x),
                      archives=self.archivelist("tip"))
 
     def search(self, query):
@@ -348,7 +353,7 @@ class hgweb(object):
         pos = end - 1
         parity = paritygen(self.stripecount, offset=start-end)
 
-        def entries(**map):
+        def entries(limit=0, **map):
             l = []
 
             for i in xrange(start, end):
@@ -366,13 +371,17 @@ class hgweb(object):
                              "child": self.siblings(fctx.children()),
                              "desc": ctx.description()})
 
+            if limit > 0:
+                l = l[:limit]
+
             for e in l:
                 yield e
 
         nodefunc = lambda x: fctx.filectx(fileid=x)
         nav = revnavgen(pos, pagelen, count, nodefunc)
         yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
-                     entries=entries)
+                     entries=lambda **x: entries(limit=0, **x),
+                     latestentry=lambda **x: entries(limit=1, **x))
 
     def filerevision(self, fctx):
         f = fctx.path()
@@ -477,10 +486,12 @@ class hgweb(object):
                 if not fnode:
                     continue
 
+                fctx = ctx.filectx(full)
                 yield {"file": full,
                        "parity": parity.next(),
                        "basename": f,
-                       "size": ctx.filectx(full).size(),
+                       "date": fctx.changectx().date(),
+                       "size": fctx.size(),
                        "permissions": mf.flags(full)}
 
         def dirlist(**map):
@@ -512,10 +523,14 @@ class hgweb(object):
         i.reverse()
         parity = paritygen(self.stripecount)
 
-        def entries(notip=False, **map):
+        def entries(notip=False,limit=0, **map):
+            count = 0
             for k, n in i:
                 if notip and k == "tip":
                     continue
+                if limit > 0 and count >= limit:
+                    continue
+                count = count + 1
                 yield {"parity": parity.next(),
                        "tag": k,
                        "date": self.repo.changectx(n).date(),
@@ -523,8 +538,9 @@ class hgweb(object):
 
         yield self.t("tags",
                      node=hex(self.repo.changelog.tip()),
-                     entries=lambda **x: entries(False, **x),
-                     entriesnotip=lambda **x: entries(True, **x))
+                     entries=lambda **x: entries(False,0, **x),
+                     entriesnotip=lambda **x: entries(True,0, **x),
+                     latestentry=lambda **x: entries(True,1, **x))
 
     def summary(self):
         i = self.repo.tagslist()
@@ -791,9 +807,17 @@ class hgweb(object):
             style = req.form['style'][0]
         mapfile = style_map(self.templatepath, style)
 
+        proto = req.env.get('wsgi.url_scheme')
+        if proto == 'https':
+            proto = 'https'
+            default_port = "443"
+        else:
+            proto = 'http'
+            default_port = "80"
+
         port = req.env["SERVER_PORT"]
-        port = port != "80" and (":" + port) or ""
-        urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
+        port = port != default_port and (":" + port) or ""
+        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
         staticurl = self.config("web", "staticurl") or req.url + 'static/'
         if not staticurl.endswith('/'):
             staticurl += '/'
@@ -1067,7 +1091,7 @@ class hgweb(object):
         # replayed
         ssl_req = self.configbool('web', 'push_ssl', True)
         if ssl_req:
-            if not req.env.get('HTTPS'):
+            if req.env.get('wsgi.url_scheme') != 'https':
                 bail(_('ssl required\n'))
                 return
             proto = 'https'
@@ -1164,7 +1188,7 @@ class hgweb(object):
                     req.write('%d\n' % ret)
                     req.write(val)
                 finally:
-                    lock.release()
+                    del lock
             except (OSError, IOError), inst:
                 req.write('0\n')
                 filename = getattr(inst, 'filename', '')
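The urlbase hunk above reduces to the following; the WSGI environment here is invented for clarity:

    env = {'wsgi.url_scheme': 'https',
           'SERVER_NAME': 'hg.example.com',
           'SERVER_PORT': '443'}
    proto = env.get('wsgi.url_scheme') == 'https' and 'https' or 'http'
    default_port = proto == 'https' and '443' or '80'
    port = env['SERVER_PORT']
    port = port != default_port and (':' + port) or ''
    urlbase = '%s://%s%s' % (proto, env['SERVER_NAME'], port)
    # urlbase == 'https://hg.example.com' -- the port is omitted when it is the default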
--- a/mercurial/hgweb/hgwebdir_mod.py
+++ b/mercurial/hgweb/hgwebdir_mod.py
@@ -6,7 +6,6 @@
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
-from mercurial import demandimport; demandimport.enable()
 import os, mimetools, cStringIO
 from mercurial.i18n import gettext as _
 from mercurial import ui, hg, util, templater
@@ -92,8 +91,12 @@ class hgwebdir(object):
         url = req.env['REQUEST_URI'].split('?')[0]
         if not url.endswith('/'):
             url += '/'
+        pathinfo = req.env.get('PATH_INFO', '').strip('/') + '/'
+        base = url[:len(url) - len(pathinfo)]
+        if not base.endswith('/'):
+            base += '/'
 
-        staticurl = config('web', 'staticurl') or url + 'static/'
+        staticurl = config('web', 'staticurl') or base + 'static/'
         if not staticurl.endswith('/'):
             staticurl += '/'
 
@@ -120,7 +123,7 @@ class hgwebdir(object):
                     yield {"type" : i[0], "extension": i[1],
                            "node": nodeid, "url": url}
 
-        def entries(sortcolumn="", descending=False, **map):
+        def entries(sortcolumn="", descending=False, subdir="", **map):
             def sessionvars(**map):
                 fields = []
                 if req.form.has_key('style'):
@@ -136,6 +139,10 @@ class hgwebdir(object):
             rows = []
             parity = paritygen(self.stripecount)
             for name, path in self.repos:
+                if not name.startswith(subdir):
+                    continue
+                name = name[len(subdir):]
+
                 u = ui.ui(parentui=parentui)
                 try:
                     u.readconfig(os.path.join(path, '.hg', 'hgrc'))
@@ -188,6 +195,25 @@ class hgwebdir(object):
                     row['parity'] = parity.next()
                     yield row
 
+        def makeindex(req, subdir=""):
+            sortable = ["name", "description", "contact", "lastchange"]
+            sortcolumn, descending = self.repos_sorted
+            if req.form.has_key('sort'):
+                sortcolumn = req.form['sort'][0]
+                descending = sortcolumn.startswith('-')
+                if descending:
+                    sortcolumn = sortcolumn[1:]
+                if sortcolumn not in sortable:
+                    sortcolumn = ""
+
+            sort = [("sort_%s" % column,
+                     "%s%s" % ((not descending and column == sortcolumn)
+                               and "-" or "", column))
+                    for column in sortable]
+            req.write(tmpl("index", entries=entries, subdir=subdir,
+                           sortcolumn=sortcolumn, descending=descending,
+                           **dict(sort)))
+
         try:
             virtual = req.env.get("PATH_INFO", "").strip('/')
             if virtual.startswith('static/'):
@@ -196,25 +222,32 @@ class hgwebdir(object):
                 req.write(staticfile(static, fname, req) or
                           tmpl('error', error='%r not found' % fname))
             elif virtual:
+                repos = dict(self.repos)
                 while virtual:
-                    real = dict(self.repos).get(virtual)
+                    real = repos.get(virtual)
                     if real:
-                        break
+                        req.env['REPO_NAME'] = virtual
+                        try:
+                            repo = hg.repository(parentui, real)
+                            hgweb(repo).run_wsgi(req)
+                        except IOError, inst:
+                            req.write(tmpl("error", error=inst.strerror))
+                        except hg.RepoError, inst:
+                            req.write(tmpl("error", error=str(inst)))
+                        return
+
+                    # browse subdirectories
+                    subdir = virtual + '/'
+                    if [r for r in repos if r.startswith(subdir)]:
+                        makeindex(req, subdir)
+                        return
+
                     up = virtual.rfind('/')
                     if up < 0:
                         break
                     virtual = virtual[:up]
-                if real:
-                    req.env['REPO_NAME'] = virtual
-                    try:
-                        repo = hg.repository(parentui, real)
-                        hgweb(repo).run_wsgi(req)
-                    except IOError, inst:
-                        req.write(tmpl("error", error=inst.strerror))
-                    except hg.RepoError, inst:
-                        req.write(tmpl("error", error=str(inst)))
-                else:
-                    req.write(tmpl("notfound", repo=virtual))
+
+                req.write(tmpl("notfound", repo=virtual))
             else:
                 if req.form.has_key('static'):
                     static = os.path.join(templater.templatepath(), "static")
@@ -222,22 +255,6 @@ class hgwebdir(object):
                     req.write(staticfile(static, fname, req)
                               or tmpl("error", error="%r not found" % fname))
                 else:
-                    sortable = ["name", "description", "contact", "lastchange"]
-                    sortcolumn, descending = self.repos_sorted
-                    if req.form.has_key('sort'):
-                        sortcolumn = req.form['sort'][0]
-                        descending = sortcolumn.startswith('-')
-                        if descending:
-                            sortcolumn = sortcolumn[1:]
-                        if sortcolumn not in sortable:
-                            sortcolumn = ""
-
-                    sort = [("sort_%s" % column,
-                             "%s%s" % ((not descending and column == sortcolumn)
-                                       and "-" or "", column))
-                            for column in sortable]
-                    req.write(tmpl("index", entries=entries,
-                                   sortcolumn=sortcolumn, descending=descending,
-                                   **dict(sort)))
+                    makeindex(req)
         finally:
             tmpl = None
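The subdirectory support added to entries() and makeindex() comes down to a prefix match with the prefix stripped from the displayed name; a standalone illustration with made-up repository names:

    repos = [('teams/alpha', '/srv/hg/alpha'),
             ('teams/beta',  '/srv/hg/beta'),
             ('misc',        '/srv/hg/misc')]
    subdir = 'teams/'
    shown = [(name[len(subdir):], path) for name, path in repos
             if name.startswith(subdir)]
    # shown == [('alpha', '/srv/hg/alpha'), ('beta', '/srv/hg/beta')]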
--- a/mercurial/hgweb/server.py
+++ b/mercurial/hgweb/server.py
@@ -37,6 +37,9 @@ class _error_logger(object):
             self.handler.log_error("HG error:  %s", msg)
 
 class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
+
+    url_scheme = 'http'
+
     def __init__(self, *args, **kargs):
         self.protocol_version = 'HTTP/1.1'
         BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
@@ -53,13 +56,16 @@ class _hgwebhandler(object, BaseHTTPServ
                                               self.log_date_time_string(),
                                               format % args))
 
+    def do_write(self):
+        try:
+            self.do_hgweb()
+        except socket.error, inst:
+            if inst[0] != errno.EPIPE:
+                raise
+
     def do_POST(self):
         try:
-            try:
-                self.do_hgweb()
-            except socket.error, inst:
-                if inst[0] != errno.EPIPE:
-                    raise
+            self.do_write()
         except StandardError, inst:
             self._start_response("500 Internal Server Error", [])
             self._write("Internal Server Error")
@@ -101,7 +107,7 @@ class _hgwebhandler(object, BaseHTTPServ
                 env[hkey] = hval
         env['SERVER_PROTOCOL'] = self.request_version
         env['wsgi.version'] = (1, 0)
-        env['wsgi.url_scheme'] = 'http'
+        env['wsgi.url_scheme'] = self.url_scheme
         env['wsgi.input'] = self.rfile
         env['wsgi.errors'] = _error_logger(self)
         env['wsgi.multithread'] = isinstance(self.server,
@@ -164,6 +170,31 @@ class _hgwebhandler(object, BaseHTTPServ
         self.wfile.write(data)
         self.wfile.flush()
 
+class _shgwebhandler(_hgwebhandler):
+
+    url_scheme = 'https'
+
+    def setup(self):
+        self.connection = self.request
+        self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
+        self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
+
+    def do_write(self):
+        from OpenSSL.SSL import SysCallError
+        try:
+            super(_shgwebhandler, self).do_write()
+        except SysCallError, inst:
+            if inst.args[0] != errno.EPIPE:
+                raise
+
+    def handle_one_request(self):
+        from OpenSSL.SSL import SysCallError, ZeroReturnError
+        try:
+            super(_shgwebhandler, self).handle_one_request()
+        except (SysCallError, ZeroReturnError):
+            self.close_connection = True
+            pass
+
 def create_server(ui, repo):
     use_threads = True
 
@@ -180,6 +211,7 @@ def create_server(ui, repo):
     port = int(myui.config("web", "port", 8000))
     use_ipv6 = myui.configbool("web", "ipv6")
     webdir_conf = myui.config("web", "webdir_conf")
+    ssl_cert = myui.config("web", "certificate")
     accesslog = openlog(myui.config("web", "accesslog", "-"), sys.stdout)
     errorlog = openlog(myui.config("web", "errorlog", "-"), sys.stderr)
 
@@ -226,6 +258,19 @@ def create_server(ui, repo):
 
             self.addr, self.port = addr, port
 
+            if ssl_cert:
+                try:
+                    from OpenSSL import SSL
+                    ctx = SSL.Context(SSL.SSLv23_METHOD)
+                except ImportError:
+                    raise util.Abort("SSL support is unavailable")
+                ctx.use_privatekey_file(ssl_cert)
+                ctx.use_certificate_file(ssl_cert)
+                sock = socket.socket(self.address_family, self.socket_type)
+                self.socket = SSL.Connection(ctx, sock)
+                self.server_bind()
+                self.server_activate()
+
     class IPv6HTTPServer(MercurialHTTPServer):
         address_family = getattr(socket, 'AF_INET6', None)
 
@@ -234,10 +279,15 @@ def create_server(ui, repo):
                 raise hg.RepoError(_('IPv6 not available on this system'))
             super(IPv6HTTPServer, self).__init__(*args, **kwargs)
 
+    if ssl_cert:
+        handler = _shgwebhandler
+    else:
+        handler = _hgwebhandler
+
     try:
         if use_ipv6:
-            return IPv6HTTPServer((address, port), _hgwebhandler)
+            return IPv6HTTPServer((address, port), handler)
         else:
-            return MercurialHTTPServer((address, port), _hgwebhandler)
+            return MercurialHTTPServer((address, port), handler)
     except socket.error, inst:
         raise util.Abort(_('cannot start server: %s') % inst.args[1])
--- a/mercurial/hgweb/wsgicgi.py
+++ b/mercurial/hgweb/wsgicgi.py
@@ -23,7 +23,7 @@ def launch(application):
     environ['wsgi.multiprocess'] = True
     environ['wsgi.run_once'] = True
 
-    if environ.get('HTTPS','off') in ('on','1'):
+    if environ.get('HTTPS','off').lower() in ('on','1','yes'):
         environ['wsgi.url_scheme'] = 'https'
     else:
         environ['wsgi.url_scheme'] = 'http'
--- a/mercurial/httprepo.py
+++ b/mercurial/httprepo.py
@@ -9,7 +9,7 @@
 from node import *
 from remoterepo import *
 from i18n import _
-import hg, os, urllib, urllib2, urlparse, zlib, util, httplib
+import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
 import errno, keepalive, tempfile, socket, changegroup
 
 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
@@ -276,9 +276,9 @@ class httprepository(remoterepository):
     def get_caps(self):
         if self.caps is None:
             try:
-                self.caps = self.do_read('capabilities').split()
-            except hg.RepoError:
-                self.caps = ()
+                self.caps = util.set(self.do_read('capabilities').split())
+            except repo.RepoError:
+                self.caps = util.set()
             self.ui.debug(_('capabilities: %s\n') %
                           (' '.join(self.caps or ['none'])))
         return self.caps
@@ -328,7 +328,7 @@ class httprepository(remoterepository):
                 proto.startswith('text/plain') or
                 proto.startswith('application/hg-changegroup')):
             self.ui.debug(_("Requested URL: '%s'\n") % cu)
-            raise hg.RepoError(_("'%s' does not appear to be an hg repository")
+            raise repo.RepoError(_("'%s' does not appear to be an hg repository")
                                % self._url)
 
         if proto.startswith('application/mercurial-'):
@@ -336,10 +336,10 @@ class httprepository(remoterepository):
                 version = proto.split('-', 1)[1]
                 version_info = tuple([int(n) for n in version.split('.')])
             except ValueError:
-                raise hg.RepoError(_("'%s' sent a broken Content-type "
+                raise repo.RepoError(_("'%s' sent a broken Content-type "
                                      "header (%s)") % (self._url, proto))
             if version_info > (0, 1):
-                raise hg.RepoError(_("'%s' uses newer protocol %s") %
+                raise repo.RepoError(_("'%s' uses newer protocol %s") %
                                    (self._url, version))
 
         return resp
@@ -353,11 +353,12 @@ class httprepository(remoterepository):
             fp.close()
 
     def lookup(self, key):
+        self.requirecap('lookup', _('look up remote revision'))
         d = self.do_cmd("lookup", key = key).read()
         success, data = d[:-1].split(' ', 1)
         if int(success):
             return bin(data)
-        raise hg.RepoError(data)
+        raise repo.RepoError(data)
 
     def heads(self):
         d = self.do_read("heads")
@@ -390,6 +391,7 @@ class httprepository(remoterepository):
         return util.chunkbuffer(zgenerator(f))
 
     def changegroupsubset(self, bases, heads, source):
+        self.requirecap('changegroupsubset', _('look up remote changes'))
         baselst = " ".join([hex(n) for n in bases])
         headlst = " ".join([hex(n) for n in heads])
         f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
@@ -448,9 +450,6 @@ class httpsrepository(httprepository):
 def instance(ui, path, create):
     if create:
         raise util.Abort(_('cannot create new http repository'))
-    if path.startswith('hg:'):
-        ui.warn(_("hg:// syntax is deprecated, please use http:// instead\n"))
-        path = 'http:' + path[3:]
     if path.startswith('https:'):
         return httpsrepository(ui, path)
     return httprepository(ui, path)
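Capabilities are now kept in a util.set rather than a tuple, which is what makes the new requirecap() guards cheap membership tests (requirecap itself is defined outside this hunk). A rough sketch of the idea, with the capability list and error text invented:

    from mercurial import util

    caps = util.set('lookup changegroupsubset'.split())
    if 'lookup' not in caps:
        raise util.Abort('cannot look up remote revision: '
                         "remote lacks the 'lookup' capability")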
--- a/mercurial/ignore.py
+++ b/mercurial/ignore.py
@@ -85,9 +85,3 @@ def ignore(root, files, warn):
                 util.matcher(root, inc=patlist, src=f))
 
     return ignorefunc
-
-
-    '''default match function used by dirstate and
-    localrepository.  this honours the repository .hgignore file
-    and any other files specified in the [ui] section of .hgrc.'''
-
--- a/mercurial/localrepo.py
+++ b/mercurial/localrepo.py
@@ -8,19 +8,16 @@
 from node import *
 from i18n import _
 import repo, changegroup
-import changelog, dirstate, filelog, manifest, context
-import re, lock, transaction, tempfile, stat, mdiff, errno, ui
+import changelog, dirstate, filelog, manifest, context, weakref
+import re, lock, transaction, tempfile, stat, errno, ui
 import os, revlog, time, util, extensions, hook
 
 class localrepository(repo.repository):
-    capabilities = ('lookup', 'changegroupsubset')
+    capabilities = util.set(('lookup', 'changegroupsubset'))
     supported = ('revlogv1', 'store')
 
-    def __del__(self):
-        self.transhandle = None
     def __init__(self, parentui, path=None, create=0):
         repo.repository.__init__(self)
-        self.path = path
         self.root = os.path.realpath(path)
         self.path = os.path.join(self.root, ".hg")
         self.origroot = path
@@ -71,7 +68,8 @@ class localrepository(repo.repository):
             self.encodefn = lambda x: x
             self.decodefn = lambda x: x
             self.spath = self.path
-        self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
+        self.sopener = util.encodedopener(util.opener(self.spath),
+                                          self.encodefn)
 
         self.ui = ui.ui(parentui=parentui)
         try:
@@ -84,7 +82,7 @@ class localrepository(repo.repository):
         self.branchcache = None
         self.nodetagscache = None
         self.filterpats = {}
-        self.transhandle = None
+        self._transref = self._lockref = self._wlockref = None
 
     def __getattr__(self, name):
         if name == 'changelog':
@@ -109,7 +107,8 @@ class localrepository(repo.repository):
 
     tag_disallowed = ':\r\n'
 
-    def _tag(self, name, node, message, local, user, date, parent=None):
+    def _tag(self, name, node, message, local, user, date, parent=None,
+             extra={}):
         use_dirstate = parent is None
 
         for c in self.tag_disallowed:
@@ -157,10 +156,11 @@ class localrepository(repo.repository):
         # committed tags are stored in UTF-8
         writetag(fp, name, util.fromlocal, prevtags)
 
-        if use_dirstate and self.dirstate.state('.hgtags') == '?':
+        if use_dirstate and '.hgtags' not in self.dirstate:
             self.add(['.hgtags'])
 
-        tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
+        tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
+                              extra=extra)
 
         self.hook('tag', node=hex(node), tag=name, local=local)
 
@@ -396,6 +396,11 @@ class localrepository(repo.repository):
         n = self.changelog._partialmatch(key)
         if n:
             return n
+        try:
+            if len(key) == 20:
+                key = hex(key)
+        except:
+            pass
         raise repo.RepoError(_("unknown revision '%s'") % key)
 
     def dev(self):
@@ -495,9 +500,8 @@ class localrepository(repo.repository):
         return self._filter("decode", filename, data)
 
     def transaction(self):
-        tr = self.transhandle
-        if tr != None and tr.running():
-            return tr.nest()
+        if self._transref and self._transref():
+            return self._transref().nest()
 
         # save dirstate for rollback
         try:
@@ -511,33 +515,38 @@ class localrepository(repo.repository):
         tr = transaction.transaction(self.ui.warn, self.sopener,
                                        self.sjoin("journal"),
                                        aftertrans(renames))
-        self.transhandle = tr
+        self._transref = weakref.ref(tr)
         return tr
 
     def recover(self):
         l = self.lock()
-        if os.path.exists(self.sjoin("journal")):
-            self.ui.status(_("rolling back interrupted transaction\n"))
-            transaction.rollback(self.sopener, self.sjoin("journal"))
-            self.invalidate()
-            return True
-        else:
-            self.ui.warn(_("no interrupted transaction available\n"))
-            return False
+        try:
+            if os.path.exists(self.sjoin("journal")):
+                self.ui.status(_("rolling back interrupted transaction\n"))
+                transaction.rollback(self.sopener, self.sjoin("journal"))
+                self.invalidate()
+                return True
+            else:
+                self.ui.warn(_("no interrupted transaction available\n"))
+                return False
+        finally:
+            del l
 
-    def rollback(self, wlock=None, lock=None):
-        if not wlock:
+    def rollback(self):
+        wlock = lock = None
+        try:
             wlock = self.wlock()
-        if not lock:
             lock = self.lock()
-        if os.path.exists(self.sjoin("undo")):
-            self.ui.status(_("rolling back last transaction\n"))
-            transaction.rollback(self.sopener, self.sjoin("undo"))
-            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
-            self.invalidate()
-            self.dirstate.invalidate()
-        else:
-            self.ui.warn(_("no rollback information available\n"))
+            if os.path.exists(self.sjoin("undo")):
+                self.ui.status(_("rolling back last transaction\n"))
+                transaction.rollback(self.sopener, self.sjoin("undo"))
+                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
+                self.invalidate()
+                self.dirstate.invalidate()
+            else:
+                self.ui.warn(_("no rollback information available\n"))
+        finally:
+            del lock, wlock
 
     def invalidate(self):
         for a in "changelog manifest".split():
@@ -546,8 +555,7 @@ class localrepository(repo.repository):
         self.tagscache = None
         self.nodetagscache = None
 
-    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
-                desc=None):
+    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
         try:
             l = lock.lock(lockname, 0, releasefn, desc=desc)
         except lock.LockHeld, inst:
@@ -562,17 +570,26 @@ class localrepository(repo.repository):
             acquirefn()
         return l
 
-    def lock(self, wait=1):
-        return self.do_lock(self.sjoin("lock"), wait,
-                            acquirefn=self.invalidate,
-                            desc=_('repository %s') % self.origroot)
+    def lock(self, wait=True):
+        if self._lockref and self._lockref():
+            return self._lockref()
+
+        l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
+                       _('repository %s') % self.origroot)
+        self._lockref = weakref.ref(l)
+        return l
 
-    def wlock(self, wait=1):
-        return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
-                            self.dirstate.invalidate,
-                            desc=_('working directory of %s') % self.origroot)
+    def wlock(self, wait=True):
+        if self._wlockref and self._wlockref():
+            return self._wlockref()
 
-    def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
+        l = self._lock(self.join("wlock"), wait, self.dirstate.write,
+                       self.dirstate.invalidate, _('working directory of %s') %
+                       self.origroot)
+        self._wlockref = weakref.ref(l)
+        return l
+
+    def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
         """
         commit an individual file as part of a larger transaction
         """
@@ -632,173 +649,183 @@ class localrepository(repo.repository):
             return fp1
 
         changelist.append(fn)
-        return fl.add(t, meta, transaction, linkrev, fp1, fp2)
+        return fl.add(t, meta, tr, linkrev, fp1, fp2)
 
-    def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
+    def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
         if p1 is None:
             p1, p2 = self.dirstate.parents()
         return self.commit(files=files, text=text, user=user, date=date,
-                           p1=p1, p2=p2, wlock=wlock, extra=extra)
+                           p1=p1, p2=p2, extra=extra, empty_ok=True)
 
     def commit(self, files=None, text="", user=None, date=None,
-               match=util.always, force=False, lock=None, wlock=None,
-               force_editor=False, p1=None, p2=None, extra={}):
-
-        commit = []
-        remove = []
-        changed = []
-        use_dirstate = (p1 is None) # not rawcommit
-        extra = extra.copy()
+               match=util.always, force=False, force_editor=False,
+               p1=None, p2=None, extra={}, empty_ok=False):
+        wlock = lock = tr = None
+        try:
+            commit = []
+            remove = []
+            changed = []
+            use_dirstate = (p1 is None) # not rawcommit
+            extra = extra.copy()
 
-        if use_dirstate:
-            if files:
-                for f in files:
-                    s = self.dirstate.state(f)
-                    if s in 'nmai':
-                        commit.append(f)
-                    elif s == 'r':
-                        remove.append(f)
-                    else:
-                        self.ui.warn(_("%s not tracked!\n") % f)
+            if use_dirstate:
+                if files:
+                    for f in files:
+                        s = self.dirstate[f]
+                        if s in 'nma':
+                            commit.append(f)
+                        elif s == 'r':
+                            remove.append(f)
+                        else:
+                            self.ui.warn(_("%s not tracked!\n") % f)
+                else:
+                    changes = self.status(match=match)[:5]
+                    modified, added, removed, deleted, unknown = changes
+                    commit = modified + added
+                    remove = removed
             else:
-                changes = self.status(match=match)[:5]
-                modified, added, removed, deleted, unknown = changes
-                commit = modified + added
-                remove = removed
-        else:
-            commit = files
+                commit = files
 
-        if use_dirstate:
-            p1, p2 = self.dirstate.parents()
-            update_dirstate = True
-        else:
-            p1, p2 = p1, p2 or nullid
-            update_dirstate = (self.dirstate.parents()[0] == p1)
+            if use_dirstate:
+                p1, p2 = self.dirstate.parents()
+                update_dirstate = True
+            else:
+                p1, p2 = p1, p2 or nullid
+                update_dirstate = (self.dirstate.parents()[0] == p1)
 
-        c1 = self.changelog.read(p1)
-        c2 = self.changelog.read(p2)
-        m1 = self.manifest.read(c1[0]).copy()
-        m2 = self.manifest.read(c2[0])
+            c1 = self.changelog.read(p1)
+            c2 = self.changelog.read(p2)
+            m1 = self.manifest.read(c1[0]).copy()
+            m2 = self.manifest.read(c2[0])
 
-        if use_dirstate:
-            branchname = self.workingctx().branch()
-            try:
-                branchname = branchname.decode('UTF-8').encode('UTF-8')
-            except UnicodeDecodeError:
-                raise util.Abort(_('branch name not in UTF-8!'))
-        else:
-            branchname = ""
+            if use_dirstate:
+                branchname = self.workingctx().branch()
+                try:
+                    branchname = branchname.decode('UTF-8').encode('UTF-8')
+                except UnicodeDecodeError:
+                    raise util.Abort(_('branch name not in UTF-8!'))
+            else:
+                branchname = ""
 
-        if use_dirstate:
-            oldname = c1[5].get("branch") # stored in UTF-8
-            if (not commit and not remove and not force and p2 == nullid
-                and branchname == oldname):
-                self.ui.status(_("nothing changed\n"))
-                return None
+            if use_dirstate:
+                oldname = c1[5].get("branch") # stored in UTF-8
+                if (not commit and not remove and not force and p2 == nullid
+                    and branchname == oldname):
+                    self.ui.status(_("nothing changed\n"))
+                    return None
 
-        xp1 = hex(p1)
-        if p2 == nullid: xp2 = ''
-        else: xp2 = hex(p2)
-
-        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
+            xp1 = hex(p1)
+            if p2 == nullid: xp2 = ''
+            else: xp2 = hex(p2)
 
-        if not wlock:
+            self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
+
             wlock = self.wlock()
-        if not lock:
             lock = self.lock()
-        tr = self.transaction()
+            tr = self.transaction()
+            trp = weakref.proxy(tr)
 
-        # check in files
-        new = {}
-        linkrev = self.changelog.count()
-        commit.sort()
-        is_exec = util.execfunc(self.root, m1.execf)
-        is_link = util.linkfunc(self.root, m1.linkf)
-        for f in commit:
-            self.ui.note(f + "\n")
-            try:
-                new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
-                new_exec = is_exec(f)
-                new_link = is_link(f)
-                if (not changed or changed[-1] != f) and m2.get(f) != new[f]:
-                    # mention the file in the changelog if some flag changed,
-                    # even if there was no content change.
-                    old_exec = m1.execf(f)
-                    old_link = m1.linkf(f)
-                    if old_exec != new_exec or old_link != new_link:
-                        changed.append(f)
-                m1.set(f, new_exec, new_link)
-            except (OSError, IOError):
-                if use_dirstate:
-                    self.ui.warn(_("trouble committing %s!\n") % f)
-                    raise
-                else:
-                    remove.append(f)
+            # check in files
+            new = {}
+            linkrev = self.changelog.count()
+            commit.sort()
+            is_exec = util.execfunc(self.root, m1.execf)
+            is_link = util.linkfunc(self.root, m1.linkf)
+            for f in commit:
+                self.ui.note(f + "\n")
+                try:
+                    new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
+                    new_exec = is_exec(f)
+                    new_link = is_link(f)
+                    if ((not changed or changed[-1] != f) and
+                        m2.get(f) != new[f]):
+                        # mention the file in the changelog if some
+                        # flag changed, even if there was no content
+                        # change.
+                        old_exec = m1.execf(f)
+                        old_link = m1.linkf(f)
+                        if old_exec != new_exec or old_link != new_link:
+                            changed.append(f)
+                    m1.set(f, new_exec, new_link)
+                except (OSError, IOError):
+                    if use_dirstate:
+                        self.ui.warn(_("trouble committing %s!\n") % f)
+                        raise
+                    else:
+                        remove.append(f)
 
-        # update manifest
-        m1.update(new)
-        remove.sort()
-        removed = []
+            # update manifest
+            m1.update(new)
+            remove.sort()
+            removed = []
 
-        for f in remove:
-            if f in m1:
-                del m1[f]
-                removed.append(f)
-            elif f in m2:
-                removed.append(f)
-        mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
+            for f in remove:
+                if f in m1:
+                    del m1[f]
+                    removed.append(f)
+                elif f in m2:
+                    removed.append(f)
+            mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
+                                   (new, removed))
 
-        # add changeset
-        new = new.keys()
-        new.sort()
+            # add changeset
+            new = new.keys()
+            new.sort()
 
-        user = user or self.ui.username()
-        if not text or force_editor:
-            edittext = []
-            if text:
-                edittext.append(text)
-            edittext.append("")
-            edittext.append("HG: user: %s" % user)
-            if p2 != nullid:
-                edittext.append("HG: branch merge")
+            user = user or self.ui.username()
+            if (not empty_ok and not text) or force_editor:
+                edittext = []
+                if text:
+                    edittext.append(text)
+                edittext.append("")
+                edittext.append("HG: user: %s" % user)
+                if p2 != nullid:
+                    edittext.append("HG: branch merge")
+                if branchname:
+                    edittext.append("HG: branch %s" % util.tolocal(branchname))
+                edittext.extend(["HG: changed %s" % f for f in changed])
+                edittext.extend(["HG: removed %s" % f for f in removed])
+                if not changed and not remove:
+                    edittext.append("HG: no files changed")
+                edittext.append("")
+                # run editor in the repository root
+                olddir = os.getcwd()
+                os.chdir(self.root)
+                text = self.ui.edit("\n".join(edittext), user)
+                os.chdir(olddir)
+
             if branchname:
-                edittext.append("HG: branch %s" % util.tolocal(branchname))
-            edittext.extend(["HG: changed %s" % f for f in changed])
-            edittext.extend(["HG: removed %s" % f for f in removed])
-            if not changed and not remove:
-                edittext.append("HG: no files changed")
-            edittext.append("")
-            # run editor in the repository root
-            olddir = os.getcwd()
-            os.chdir(self.root)
-            text = self.ui.edit("\n".join(edittext), user)
-            os.chdir(olddir)
+                extra["branch"] = branchname
 
-        lines = [line.rstrip() for line in text.rstrip().splitlines()]
-        while lines and not lines[0]:
-            del lines[0]
-        if not lines:
-            return None
-        text = '\n'.join(lines)
-        if branchname:
-            extra["branch"] = branchname
-        n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
-                               user, date, extra)
-        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
-                  parent2=xp2)
-        tr.close()
+            if use_dirstate:
+                lines = [line.rstrip() for line in text.rstrip().splitlines()]
+                while lines and not lines[0]:
+                    del lines[0]
+                if not lines:
+                    return None
+                text = '\n'.join(lines)
+
+            n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
+                                   user, date, extra)
+            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
+                      parent2=xp2)
+            tr.close()
 
-        if self.branchcache and "branch" in extra:
-            self.branchcache[util.tolocal(extra["branch"])] = n
+            if self.branchcache and "branch" in extra:
+                self.branchcache[util.tolocal(extra["branch"])] = n
 
-        if use_dirstate or update_dirstate:
-            self.dirstate.setparents(n)
-            if use_dirstate:
-                self.dirstate.update(new, "n")
-                self.dirstate.forget(removed)
+            if use_dirstate or update_dirstate:
+                self.dirstate.setparents(n)
+                if use_dirstate:
+                    for f in new:
+                        self.dirstate.normal(f)
+                    for f in removed:
+                        self.dirstate.forget(f)
 
-        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
-        return n
+            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
+            return n
+        finally:
+            del tr, lock, wlock
 
     def walk(self, node=None, files=[], match=util.always, badmatch=None):
         '''
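
Within the rewritten commit(), the object handed down to filecommit(), manifest.add() and changelog.add() is `trp = weakref.proxy(tr)` rather than the transaction itself. Whatever those callees keep hold of is then only a proxy, so the `del tr, lock, wlock` in the finally clause really does drop the last strong reference, and a transaction that was never closed can clean itself up promptly instead of lingering. A toy illustration of the difference (assuming, as these finally clauses do, that an unclosed transaction aborts when it is garbage-collected; the timing again depends on CPython's reference counting):

    import weakref

    class toytransaction:
        def __init__(self):
            self.closed = False
        def close(self):
            self.closed = True
        def __del__(self):
            if not self.closed:
                print("aborting unfinished transaction")

    class toyrevlog:
        def addgroup(self, tr):
            self.lasttr = tr              # a consumer that keeps its argument

    rl = toyrevlog()

    tr = toytransaction()
    rl.addgroup(weakref.proxy(tr))        # callee sees only a proxy
    del tr                                # -> "aborting unfinished transaction"

    tr = toytransaction()
    rl.addgroup(tr)                       # a strong reference escapes
    del tr                                # nothing yet; rl.lasttr pins it
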
@@ -843,7 +870,7 @@ class localrepository(repo.repository):
                 yield src, fn
 
     def status(self, node1=None, node2=None, files=[], match=util.always,
-                wlock=None, list_ignored=False, list_clean=False):
+               list_ignored=False, list_clean=False):
         """return status of files between two nodes or node and working directory
 
         If node1 is None, use the first dirstate parent instead.
@@ -875,8 +902,6 @@ class localrepository(repo.repository):
             # all the revisions in parent->child order.
             mf1 = mfmatches(node1)
 
-        mywlock = False
-
         # are we comparing the working directory?
         if not node2:
             (lookup, modified, added, removed, deleted, unknown,
@@ -886,24 +911,30 @@ class localrepository(repo.repository):
             # are we comparing working dir against its parent?
             if compareworking:
                 if lookup:
+                    fixup = []
                     # do a full compare of any files that might have changed
-                    mnode = self.changelog.read(self.dirstate.parents()[0])[0]
-                    getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
-                                          nullid)
+                    ctx = self.changectx()
                     for f in lookup:
-                        if fcmp(f, getnode):
+                        if f not in ctx or ctx[f].cmp(self.wread(f)):
                             modified.append(f)
                         else:
+                            fixup.append(f)
                             if list_clean:
                                 clean.append(f)
-                            if not wlock and not mywlock:
-                                mywlock = True
-                                try:
-                                    wlock = self.wlock(wait=0)
-                                except lock.LockException:
-                                    pass
+
+                    # update dirstate for files that are actually clean
+                    if fixup:
+                        wlock = None
+                        try:
+                            try:
+                                wlock = self.wlock(False)
+                            except lock.LockException:
+                                pass
                             if wlock:
-                                self.dirstate.update([f], "n")
+                                for f in fixup:
+                                    self.dirstate.normal(f)
+                        finally:
+                            del wlock
             else:
                 # we are comparing working dir against non-parent
                 # generate a pseudo-manifest for the working dir
@@ -918,8 +949,6 @@ class localrepository(repo.repository):
                     if f in mf2:
                         del mf2[f]
 
-            if mywlock and wlock:
-                wlock.release()
         else:
             # we are comparing two revisions
             mf2 = mfmatches(node2)
@@ -952,85 +981,100 @@ class localrepository(repo.repository):
             l.sort()
         return (modified, added, removed, deleted, unknown, ignored, clean)
 
-    def add(self, list, wlock=None):
-        if not wlock:
-            wlock = self.wlock()
-        for f in list:
-            p = self.wjoin(f)
-            try:
-                st = os.lstat(p)
-            except:
-                self.ui.warn(_("%s does not exist!\n") % f)
-                continue
-            if st.st_size > 10000000:
-                self.ui.warn(_("%s: files over 10MB may cause memory and"
-                               " performance problems\n"
-                               "(use 'hg revert %s' to unadd the file)\n")
-                               % (f, f))
-            if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
-                self.ui.warn(_("%s not added: only files and symlinks "
-                               "supported currently\n") % f)
-            elif self.dirstate.state(f) in 'an':
-                self.ui.warn(_("%s already tracked!\n") % f)
-            else:
-                self.dirstate.update([f], "a")
-
-    def forget(self, list, wlock=None):
-        if not wlock:
-            wlock = self.wlock()
-        for f in list:
-            if self.dirstate.state(f) not in 'ai':
-                self.ui.warn(_("%s not added!\n") % f)
-            else:
-                self.dirstate.forget([f])
-
-    def remove(self, list, unlink=False, wlock=None):
-        if unlink:
+    def add(self, list):
+        wlock = self.wlock()
+        try:
             for f in list:
+                p = self.wjoin(f)
                 try:
-                    util.unlink(self.wjoin(f))
-                except OSError, inst:
-                    if inst.errno != errno.ENOENT:
-                        raise
-        if not wlock:
-            wlock = self.wlock()
-        for f in list:
-            if unlink and os.path.exists(self.wjoin(f)):
-                self.ui.warn(_("%s still exists!\n") % f)
-            elif self.dirstate.state(f) == 'a':
-                self.dirstate.forget([f])
-            elif f not in self.dirstate:
-                self.ui.warn(_("%s not tracked!\n") % f)
-            else:
-                self.dirstate.update([f], "r")
+                    st = os.lstat(p)
+                except:
+                    self.ui.warn(_("%s does not exist!\n") % f)
+                    continue
+                if st.st_size > 10000000:
+                    self.ui.warn(_("%s: files over 10MB may cause memory and"
+                                   " performance problems\n"
+                                   "(use 'hg revert %s' to unadd the file)\n")
+                                   % (f, f))
+                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
+                    self.ui.warn(_("%s not added: only files and symlinks "
+                                   "supported currently\n") % f)
+                elif self.dirstate[f] in 'amn':
+                    self.ui.warn(_("%s already tracked!\n") % f)
+                elif self.dirstate[f] == 'r':
+                    self.dirstate.normallookup(f)
+                else:
+                    self.dirstate.add(f)
+        finally:
+            del wlock
 
-    def undelete(self, list, wlock=None):
-        manifests = [self.manifest.read(self.changelog.read(p)[0])
-                     for p in self.dirstate.parents() if p != nullid]
-        if not wlock:
+    def forget(self, list):
+        wlock = self.wlock()
+        try:
+            for f in list:
+                if self.dirstate[f] != 'a':
+                    self.ui.warn(_("%s not added!\n") % f)
+                else:
+                    self.dirstate.forget(f)
+        finally:
+            del wlock
+
+    def remove(self, list, unlink=False):
+        wlock = None
+        try:
+            if unlink:
+                for f in list:
+                    try:
+                        util.unlink(self.wjoin(f))
+                    except OSError, inst:
+                        if inst.errno != errno.ENOENT:
+                            raise
             wlock = self.wlock()
-        for f in list:
-            if self.dirstate.state(f) not in  "r":
-                self.ui.warn("%s not removed!\n" % f)
-            else:
-                m = f in manifests[0] and manifests[0] or manifests[1]
-                t = self.file(f).read(m[f])
-                self.wwrite(f, t, m.flags(f))
-                self.dirstate.update([f], "n")
+            for f in list:
+                if unlink and os.path.exists(self.wjoin(f)):
+                    self.ui.warn(_("%s still exists!\n") % f)
+                elif self.dirstate[f] == 'a':
+                    self.dirstate.forget(f)
+                elif f not in self.dirstate:
+                    self.ui.warn(_("%s not tracked!\n") % f)
+                else:
+                    self.dirstate.remove(f)
+        finally:
+            del wlock
 
-    def copy(self, source, dest, wlock=None):
-        p = self.wjoin(dest)
-        if not (os.path.exists(p) or os.path.islink(p)):
-            self.ui.warn(_("%s does not exist!\n") % dest)
-        elif not (os.path.isfile(p) or os.path.islink(p)):
-            self.ui.warn(_("copy failed: %s is not a file or a "
-                           "symbolic link\n") % dest)
-        else:
-            if not wlock:
+    def undelete(self, list):
+        wlock = None
+        try:
+            manifests = [self.manifest.read(self.changelog.read(p)[0])
+                         for p in self.dirstate.parents() if p != nullid]
+            wlock = self.wlock()
+            for f in list:
+                if self.dirstate[f] != 'r':
+                    self.ui.warn("%s not removed!\n" % f)
+                else:
+                    m = f in manifests[0] and manifests[0] or manifests[1]
+                    t = self.file(f).read(m[f])
+                    self.wwrite(f, t, m.flags(f))
+                    self.dirstate.normal(f)
+        finally:
+            del wlock
+
+    def copy(self, source, dest):
+        wlock = None
+        try:
+            p = self.wjoin(dest)
+            if not (os.path.exists(p) or os.path.islink(p)):
+                self.ui.warn(_("%s does not exist!\n") % dest)
+            elif not (os.path.isfile(p) or os.path.islink(p)):
+                self.ui.warn(_("copy failed: %s is not a file or a "
+                               "symbolic link\n") % dest)
+            else:
                 wlock = self.wlock()
-            if self.dirstate.state(dest) == '?':
-                self.dirstate.update([dest], "a")
-            self.dirstate.copy(source, dest)
+                if dest not in self.dirstate:
+                    self.dirstate.add(dest)
+                self.dirstate.copy(source, dest)
+        finally:
+            del wlock
 
     def heads(self, start=None):
         heads = self.changelog.heads(start)
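
add(), forget(), remove(), undelete() and copy() above show the dirstate interface change that runs through the whole changeset: the character-based calls (`self.dirstate.state(f)`, `self.dirstate.update([f], "a")`, `self.dirstate.forget([f])`) give way to dict-style access (`f in self.dirstate`, `self.dirstate[f]`) plus one single-file method per transition (add, remove, normal, normallookup, normaldirty, merge, forget). Roughly, the old spellings map onto the new ones as in this hypothetical shim (illustration only; the real dirstate also tracks sizes, timestamps and copies):

    class newstyledirstate:
        def __init__(self):
            self._map = {}                    # filename -> state character
        def __contains__(self, f):
            return f in self._map
        def __getitem__(self, f):
            return self._map.get(f, '?')      # '?' means untracked
        def add(self, f):    self._map[f] = 'a'
        def remove(self, f): self._map[f] = 'r'
        def normal(self, f): self._map[f] = 'n'
        def merge(self, f):  self._map[f] = 'm'
        def forget(self, f): self._map.pop(f, None)

    class oldstyleshim(newstyledirstate):
        def state(self, f):
            return self[f]
        def update(self, files, state):
            # the old API took a list of files plus a state character
            call = {'a': self.add, 'r': self.remove,
                    'n': self.normal, 'm': self.merge}[state]
            for f in files:
                call(f)

    ds = oldstyleshim()
    ds.update(['foo'], 'a')       # old spelling
    assert ds['foo'] == 'a'       # new spelling of ds.state('foo')
    ds.normal('foo')
    assert 'foo' in ds
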
@@ -1307,12 +1351,8 @@ class localrepository(repo.repository):
         else:
             return subset
 
-    def pull(self, remote, heads=None, force=False, lock=None):
-        mylock = False
-        if not lock:
-            lock = self.lock()
-            mylock = True
-
+    def pull(self, remote, heads=None, force=False):
+        lock = self.lock()
         try:
             fetch = self.findincoming(remote, heads=heads, force=force)
             if fetch == [nullid]:
@@ -1330,8 +1370,7 @@ class localrepository(repo.repository):
                 cg = remote.changegroupsubset(fetch, heads, 'pull')
             return self.addchangegroup(cg, 'pull', remote.url())
         finally:
-            if mylock:
-                lock.release()
+            del lock
 
     def push(self, remote, force=False, revs=None):
         # there are two ways to push to remote repo:
@@ -1404,12 +1443,14 @@ class localrepository(repo.repository):
 
     def push_addchangegroup(self, remote, force, revs):
         lock = remote.lock()
-
-        ret = self.prepush(remote, force, revs)
-        if ret[0] is not None:
-            cg, remote_heads = ret
-            return remote.addchangegroup(cg, 'push', self.url())
-        return ret[1]
+        try:
+            ret = self.prepush(remote, force, revs)
+            if ret[0] is not None:
+                cg, remote_heads = ret
+                return remote.addchangegroup(cg, 'push', self.url())
+            return ret[1]
+        finally:
+            del lock
 
     def push_unbundle(self, remote, force, revs):
         # local repo finds heads on server, finds out what revs it
@@ -1580,12 +1621,9 @@ class localrepository(repo.repository):
                 if r == next_rev[0]:
                     # If the last rev we looked at was the one just previous,
                     # we only need to see a diff.
-                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
+                    deltamf = mnfst.readdelta(mnfstnode)
                     # For each line in the delta
-                    for dline in delta.splitlines():
-                        # get the filename and filenode for that line
-                        f, fnode = dline.split('\0')
-                        fnode = bin(fnode[:40])
+                    for f, fnode in deltamf.items():
                         f = changedfiles.get(f, None)
                         # And if the file is in the list of files we care
                         # about.
@@ -1793,65 +1831,68 @@ class localrepository(repo.repository):
 
         changesets = files = revisions = 0
 
-        tr = self.transaction()
-
         # write changelog data to temp files so concurrent readers will not see
         # inconsistent view
         cl = self.changelog
         cl.delayupdate()
         oldheads = len(cl.heads())
 
-        # pull off the changeset group
-        self.ui.status(_("adding changesets\n"))
-        cor = cl.count() - 1
-        chunkiter = changegroup.chunkiter(source)
-        if cl.addgroup(chunkiter, csmap, tr, 1) is None:
-            raise util.Abort(_("received changelog group is empty"))
-        cnr = cl.count() - 1
-        changesets = cnr - cor
+        tr = self.transaction()
+        try:
+            trp = weakref.proxy(tr)
+            # pull off the changeset group
+            self.ui.status(_("adding changesets\n"))
+            cor = cl.count() - 1
+            chunkiter = changegroup.chunkiter(source)
+            if cl.addgroup(chunkiter, csmap, trp, 1) is None:
+                raise util.Abort(_("received changelog group is empty"))
+            cnr = cl.count() - 1
+            changesets = cnr - cor
 
-        # pull off the manifest group
-        self.ui.status(_("adding manifests\n"))
-        chunkiter = changegroup.chunkiter(source)
-        # no need to check for empty manifest group here:
-        # if the result of the merge of 1 and 2 is the same in 3 and 4,
-        # no new manifest will be created and the manifest group will
-        # be empty during the pull
-        self.manifest.addgroup(chunkiter, revmap, tr)
+            # pull off the manifest group
+            self.ui.status(_("adding manifests\n"))
+            chunkiter = changegroup.chunkiter(source)
+            # no need to check for empty manifest group here:
+            # if the result of the merge of 1 and 2 is the same in 3 and 4,
+            # no new manifest will be created and the manifest group will
+            # be empty during the pull
+            self.manifest.addgroup(chunkiter, revmap, trp)
 
-        # process the files
-        self.ui.status(_("adding file changes\n"))
-        while 1:
-            f = changegroup.getchunk(source)
-            if not f:
-                break
-            self.ui.debug(_("adding %s revisions\n") % f)
-            fl = self.file(f)
-            o = fl.count()
-            chunkiter = changegroup.chunkiter(source)
-            if fl.addgroup(chunkiter, revmap, tr) is None:
-                raise util.Abort(_("received file revlog group is empty"))
-            revisions += fl.count() - o
-            files += 1
+            # process the files
+            self.ui.status(_("adding file changes\n"))
+            while 1:
+                f = changegroup.getchunk(source)
+                if not f:
+                    break
+                self.ui.debug(_("adding %s revisions\n") % f)
+                fl = self.file(f)
+                o = fl.count()
+                chunkiter = changegroup.chunkiter(source)
+                if fl.addgroup(chunkiter, revmap, trp) is None:
+                    raise util.Abort(_("received file revlog group is empty"))
+                revisions += fl.count() - o
+                files += 1
+
+            # make changelog see real files again
+            cl.finalize(trp)
 
-        # make changelog see real files again
-        cl.finalize(tr)
+            newheads = len(self.changelog.heads())
+            heads = ""
+            if oldheads and newheads != oldheads:
+                heads = _(" (%+d heads)") % (newheads - oldheads)
 
-        newheads = len(self.changelog.heads())
-        heads = ""
-        if oldheads and newheads != oldheads:
-            heads = _(" (%+d heads)") % (newheads - oldheads)
+            self.ui.status(_("added %d changesets"
+                             " with %d changes to %d files%s\n")
+                             % (changesets, revisions, files, heads))
 
-        self.ui.status(_("added %d changesets"
-                         " with %d changes to %d files%s\n")
-                         % (changesets, revisions, files, heads))
+            if changesets > 0:
+                self.hook('pretxnchangegroup', throw=True,
+                          node=hex(self.changelog.node(cor+1)), source=srctype,
+                          url=url)
 
-        if changesets > 0:
-            self.hook('pretxnchangegroup', throw=True,
-                      node=hex(self.changelog.node(cor+1)), source=srctype,
-                      url=url)
-
-        tr.close()
+            tr.close()
+        finally:
+            del tr
 
         if changesets > 0:
             self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
--- a/mercurial/lock.py
+++ b/mercurial/lock.py
@@ -29,14 +29,13 @@ class lock(object):
     # old-style lock: symlink to pid
     # new-style lock: symlink to hostname:pid
 
+    _host = None
+
     def __init__(self, file, timeout=-1, releasefn=None, desc=None):
         self.f = file
         self.held = 0
         self.timeout = timeout
         self.releasefn = releasefn
-        self.id = None
-        self.host = None
-        self.pid = None
         self.desc = desc
         self.lock()
 
@@ -59,13 +58,12 @@ class lock(object):
                                inst.locker)
 
     def trylock(self):
-        if self.id is None:
-            self.host = socket.gethostname()
-            self.pid = os.getpid()
-            self.id = '%s:%s' % (self.host, self.pid)
+        if lock._host is None:
+            lock._host = socket.gethostname()
+        lockname = '%s:%s' % (lock._host, os.getpid())
         while not self.held:
             try:
-                util.makelock(self.id, self.f)
+                util.makelock(lockname, self.f)
                 self.held = 1
             except (OSError, IOError), why:
                 if why.errno == errno.EEXIST:
@@ -93,7 +91,7 @@ class lock(object):
             host, pid = locker.split(":", 1)
         except ValueError:
             return locker
-        if host != self.host:
+        if host != lock._host:
             return locker
         try:
             pid = int(pid)
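
The lock class now keeps the hostname in a class attribute, lock._host, so socket.gethostname() runs at most once per process instead of once per lock object, and the per-instance id/host/pid fields go away; the lock name is rebuilt from lock._host and os.getpid() on each attempt, and locker comparisons read the shared attribute. The same memoise-on-the-class idiom in isolation (hypothetical class, not mercurial.lock):

    import os, socket

    class lockname:
        _host = None                  # shared by every instance in the process

        def name(self):
            # resolve the hostname lazily, once per process
            if lockname._host is None:
                lockname._host = socket.gethostname()
            return '%s:%s' % (lockname._host, os.getpid())

    a, b = lockname(), lockname()
    assert a.name() == b.name()       # both built from the one cached hostname
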
--- a/mercurial/manifest.py
+++ b/mercurial/manifest.py
@@ -23,10 +23,6 @@ class manifestdict(dict):
     def linkf(self, f):
         "test for symlink in manifest flags"
         return "l" in self.flags(f)
-    def rawset(self, f, entry):
-        self[f] = bin(entry[:40])
-        fl = entry[40:-1]
-        if fl: self._flags[f] = fl
     def set(self, f, execf=False, linkf=False):
         if linkf: self._flags[f] = "l"
         elif execf: self._flags[f] = "x"
@@ -40,16 +36,20 @@ class manifest(revlog):
         self.listcache = None
         revlog.__init__(self, opener, "00manifest.i")
 
-    def parselines(self, lines):
-        for l in lines.splitlines(1):
-            yield l.split('\0')
+    def parse(self, lines):
+        mfdict = manifestdict()
+        fdict = mfdict._flags
+        for l in lines.splitlines():
+            f, n = l.split('\0')
+            if len(n) > 40:
+                fdict[f] = n[40:]
+                mfdict[f] = bin(n[:40])
+            else:
+                mfdict[f] = bin(n)
+        return mfdict
 
     def readdelta(self, node):
-        delta = mdiff.patchtext(self.delta(node))
-        deltamap = manifestdict()
-        for f, n in self.parselines(delta):
-            deltamap.rawset(f, n)
-        return deltamap
+        return self.parse(mdiff.patchtext(self.delta(node)))
 
     def read(self, node):
         if node == nullid: return manifestdict() # don't upset local cache
@@ -57,9 +57,7 @@ class manifest(revlog):
             return self.mapcache[1]
         text = self.revision(node)
         self.listcache = array.array('c', text)
-        mapping = manifestdict()
-        for f, n in self.parselines(text):
-            mapping.rawset(f, n)
+        mapping = self.parse(text)
         self.mapcache = (node, mapping)
         return mapping
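
manifestdict.rawset and manifest.parselines collapse into a single manifest.parse() that builds the whole manifestdict in one pass; each manifest line is "filename\0" followed by 40 hex digits and optional flag characters ("x" for executable, "l" for symlink), and read() and readdelta() now share the parser. A standalone parser for the same line format (a sketch; the real manifestdict subclasses dict and stores the flags in its _flags side table, as the hunk above shows):

    import binascii

    def parse_manifest(text):
        """Parse 'filename\\0<40 hex digits>[flags]' lines into two dicts:
        filename -> 20-byte node, and filename -> flag characters."""
        nodes, flags = {}, {}
        for line in text.splitlines():
            f, n = line.split('\0')
            if len(n) > 40:               # flag characters follow the node
                flags[f] = n[40:]
                n = n[:40]
            nodes[f] = binascii.unhexlify(n)
        return nodes, flags

    sample = "a.txt\0" + "11" * 20 + "\n" + "bin/run\0" + "22" * 20 + "x\n"
    nodes, flags = parse_manifest(sample)
    assert len(nodes["a.txt"]) == 20 and flags["bin/run"] == "x"
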
 
--- a/mercurial/merge.py
+++ b/mercurial/merge.py
@@ -448,13 +448,15 @@ def applyupdates(repo, action, wctx, mct
                 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
                 repo.wwrite(fd, repo.wread(f), flags)
 
+    audit_path = util.path_auditor(repo.root)
+
     for a in action:
         f, m = a[:2]
         if f and f[0] == "/":
             continue
         if m == "r": # remove
             repo.ui.note(_("removing %s\n") % f)
-            util.audit_path(f)
+            audit_path(f)
             try:
                 util.unlink(repo.wjoin(f))
             except OSError, inst:
@@ -512,25 +514,25 @@ def recordupdates(repo, action, branchme
         f, m = a[:2]
         if m == "r": # remove
             if branchmerge:
-                repo.dirstate.update([f], 'r')
+                repo.dirstate.remove(f)
             else:
-                repo.dirstate.forget([f])
+                repo.dirstate.forget(f)
         elif m == "f": # forget
-            repo.dirstate.forget([f])
+            repo.dirstate.forget(f)
         elif m in "ge": # get or exec change
             if branchmerge:
-                repo.dirstate.update([f], 'n', st_mtime=-1)
+                repo.dirstate.normaldirty(f)
             else:
-                repo.dirstate.update([f], 'n')
+                repo.dirstate.normal(f)
         elif m == "m": # merge
             f2, fd, flag, move = a[2:]
             if branchmerge:
                 # We've done a branch merge, mark this file as merged
                 # so that we properly record the merger later
-                repo.dirstate.update([fd], 'm')
+                repo.dirstate.merge(fd)
                 if f != f2: # copy/rename
                     if move:
-                        repo.dirstate.update([f], 'r')
+                        repo.dirstate.remove(f)
                     if f != fd:
                         repo.dirstate.copy(f, fd)
                     else:
@@ -541,95 +543,94 @@ def recordupdates(repo, action, branchme
                 # of that file some time in the past. Thus our
                 # merge will appear as a normal local file
                 # modification.
-                repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
+                repo.dirstate.normallookup(fd)
                 if move:
-                    repo.dirstate.forget([f])
+                    repo.dirstate.forget(f)
         elif m == "d": # directory rename
             f2, fd, flag = a[2:]
             if not f2 and f not in repo.dirstate:
                 # untracked file moved
                 continue
             if branchmerge:
-                repo.dirstate.update([fd], 'a')
+                repo.dirstate.add(fd)
                 if f:
-                    repo.dirstate.update([f], 'r')
+                    repo.dirstate.remove(f)
                     repo.dirstate.copy(f, fd)
                 if f2:
                     repo.dirstate.copy(f2, fd)
             else:
-                repo.dirstate.update([fd], 'n')
+                repo.dirstate.normal(fd)
                 if f:
-                    repo.dirstate.forget([f])
+                    repo.dirstate.forget(f)
 
-def update(repo, node, branchmerge, force, partial, wlock):
+def update(repo, node, branchmerge, force, partial):
     """
     Perform a merge between the working directory and the given node
 
     branchmerge = whether to merge between branches
     force = whether to force branch merging or file overwriting
     partial = a function to filter file lists (dirstate not updated)
-    wlock = working dir lock, if already held
     """
 
-    if not wlock:
-        wlock = repo.wlock()
-
-    wc = repo.workingctx()
-    if node is None:
-        # tip of current branch
-        try:
-            node = repo.branchtags()[wc.branch()]
-        except KeyError:
-            raise util.Abort(_("branch %s not found") % wc.branch())
-    overwrite = force and not branchmerge
-    forcemerge = force and branchmerge
-    pl = wc.parents()
-    p1, p2 = pl[0], repo.changectx(node)
-    pa = p1.ancestor(p2)
-    fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
-    fastforward = False
+    wlock = repo.wlock()
+    try:
+        wc = repo.workingctx()
+        if node is None:
+            # tip of current branch
+            try:
+                node = repo.branchtags()[wc.branch()]
+            except KeyError:
+                raise util.Abort(_("branch %s not found") % wc.branch())
+        overwrite = force and not branchmerge
+        forcemerge = force and branchmerge
+        pl = wc.parents()
+        p1, p2 = pl[0], repo.changectx(node)
+        pa = p1.ancestor(p2)
+        fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
+        fastforward = False
 
-    ### check phase
-    if not overwrite and len(pl) > 1:
-        raise util.Abort(_("outstanding uncommitted merges"))
-    if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
-        if branchmerge:
-            if p1.branch() != p2.branch() and pa != p2:
-                fastforward = True
-            else:
-                raise util.Abort(_("there is nothing to merge, just use "
-                                   "'hg update' or look at 'hg heads'"))
-    elif not (overwrite or branchmerge):
-        raise util.Abort(_("update spans branches, use 'hg merge' "
-                           "or 'hg update -C' to lose changes"))
-    if branchmerge and not forcemerge:
-        if wc.files():
-            raise util.Abort(_("outstanding uncommitted changes"))
+        ### check phase
+        if not overwrite and len(pl) > 1:
+            raise util.Abort(_("outstanding uncommitted merges"))
+        if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
+            if branchmerge:
+                if p1.branch() != p2.branch() and pa != p2:
+                    fastforward = True
+                else:
+                    raise util.Abort(_("there is nothing to merge, just use "
+                                       "'hg update' or look at 'hg heads'"))
+        elif not (overwrite or branchmerge):
+            raise util.Abort(_("update spans branches, use 'hg merge' "
+                               "or 'hg update -C' to lose changes"))
+        if branchmerge and not forcemerge:
+            if wc.files():
+                raise util.Abort(_("outstanding uncommitted changes"))
 
-    ### calculate phase
-    action = []
-    if not force:
-        checkunknown(wc, p2)
-    if not util.checkfolding(repo.path):
-        checkcollision(p2)
-    if not branchmerge:
-        action += forgetremoved(wc, p2)
-    action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
+        ### calculate phase
+        action = []
+        if not force:
+            checkunknown(wc, p2)
+        if not util.checkfolding(repo.path):
+            checkcollision(p2)
+        if not branchmerge:
+            action += forgetremoved(wc, p2)
+        action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
 
-    ### apply phase
-    if not branchmerge: # just jump to the new rev
-        fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
-    if not partial:
-        repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
+        ### apply phase
+        if not branchmerge: # just jump to the new rev
+            fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
+        if not partial:
+            repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
 
-    stats = applyupdates(repo, action, wc, p2)
+        stats = applyupdates(repo, action, wc, p2)
 
-    if not partial:
-        recordupdates(repo, action, branchmerge)
-        repo.dirstate.setparents(fp1, fp2)
-        if not branchmerge and not fastforward:
-            repo.dirstate.setbranch(p2.branch())
-        repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
+        if not partial:
+            recordupdates(repo, action, branchmerge)
+            repo.dirstate.setparents(fp1, fp2)
+            if not branchmerge and not fastforward:
+                repo.dirstate.setbranch(p2.branch())
+            repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
 
-    return stats
-
+        return stats
+    finally:
+        del wlock
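
applyupdates() now builds a util.path_auditor(repo.root) once, before the removal loop, and calls it per file, replacing the module-level util.audit_path(f); tying the check to the repository root as an object also gives it somewhere to keep per-run bookkeeping. recordupdates() and update() pick up the same single-file dirstate methods and the same wlock-in-try/finally shape already seen in localrepo.py. A simplified illustration of the auditor shape only (hypothetical checks, not util.path_auditor itself):

    import os

    class pathauditor:
        """Reject working-copy paths that could escape the repository."""
        def __init__(self, root):
            self.root = root
            self.checked = set()          # directories already vetted

        def __call__(self, path):
            parts = path.split('/')
            if os.path.isabs(path) or '..' in parts or '.hg' in parts:
                raise ValueError('path %r is outside repository %r'
                                 % (path, self.root))
            # remember parent directories so repeated calls stay cheap
            for i in range(1, len(parts)):
                self.checked.add('/'.join(parts[:i]))

    audit = pathauditor('/repo')
    audit('src/main.py')                  # fine
    audit('src/util.py')                  # 'src' already vetted
    try:
        audit('../escape')
    except ValueError:
        print("rejected ../escape")
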
--- a/mercurial/node.py
+++ b/mercurial/node.py
@@ -12,11 +12,9 @@ import binascii
 nullrev = -1
 nullid = "\0" * 20
 
-def hex(node):
-    return binascii.hexlify(node)
-
-def bin(node):
-    return binascii.unhexlify(node)
+# This ugly style has a noticeable effect in manifest parsing
+hex = binascii.hexlify
+bin = binascii.unhexlify
 
 def short(node):
     return hex(node[:6])
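
hex() and bin() stop being one-line wrappers and become direct aliases for binascii.hexlify and binascii.unhexlify; as the comment says, dropping one Python call per manifest entry is noticeable when parsing large manifests. The gap is easy to measure with timeit (a quick sketch; absolute numbers depend on the machine):

    import binascii, timeit

    def hex_wrapper(node):                 # the old shape: one extra call frame
        return binascii.hexlify(node)

    hex_alias = binascii.hexlify           # the new shape: a plain alias

    node = b'\x12\x34' * 10                # a 20-byte node
    print(timeit.timeit(lambda: hex_wrapper(node), number=1000000))
    print(timeit.timeit(lambda: hex_alias(node), number=1000000))
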
--- a/mercurial/patch.py
+++ b/mercurial/patch.py
@@ -1,16 +1,23 @@
 # patch.py - patch file parsing routines
 #
 # Copyright 2006 Brendan Cully <brendan@kublai.com>
+# Copyright 2007 Chris Mason <chris.mason@oracle.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
 from i18n import _
 from node import *
-import base85, cmdutil, mdiff, util, context, revlog
+import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
 import cStringIO, email.Parser, os, popen2, re, sha
 import sys, tempfile, zlib
 
+class PatchError(Exception):
+    pass
+
+class NoHunks(PatchError):
+    pass
+
 # helper functions
 
 def copyfile(src, dst, basedir=None):
@@ -50,7 +57,7 @@ def extract(ui, fileobj):
     try:
         msg = email.Parser.Parser().parse(fileobj)
 
-        message = msg['Subject']
+        subject = msg['Subject']
         user = msg['From']
         # should try to parse msg['Date']
         date = None
@@ -58,18 +65,18 @@ def extract(ui, fileobj):
         branch = None
         parents = []
 
-        if message:
-            if message.startswith('[PATCH'):
-                pend = message.find(']')
+        if subject:
+            if subject.startswith('[PATCH'):
+                pend = subject.find(']')
                 if pend >= 0:
-                    message = message[pend+1:].lstrip()
-            message = message.replace('\n\t', ' ')
-            ui.debug('Subject: %s\n' % message)
+                    subject = subject[pend+1:].lstrip()
+            subject = subject.replace('\n\t', ' ')
+            ui.debug('Subject: %s\n' % subject)
         if user:
             ui.debug('From: %s\n' % user)
         diffs_seen = 0
         ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
-
+        message = ''
         for part in msg.walk():
             content_type = part.get_content_type()
             ui.debug('Content-Type: %s\n' % content_type)
@@ -84,9 +91,6 @@ def extract(ui, fileobj):
                 ui.debug(_('found patch at byte %d\n') % m.start(0))
                 diffs_seen += 1
                 cfp = cStringIO.StringIO()
-                if message:
-                    cfp.write(message)
-                    cfp.write('\n')
                 for line in payload[:m.start(0)].splitlines():
                     if line.startswith('# HG changeset patch'):
                         ui.debug(_('patch generated by hg export\n'))
@@ -94,6 +98,7 @@ def extract(ui, fileobj):
                         # drop earlier commit message content
                         cfp.seek(0)
                         cfp.truncate()
+                        subject = None
                     elif hgpatch:
                         if line.startswith('# User '):
                             user = line[7:]
@@ -123,6 +128,8 @@ def extract(ui, fileobj):
         os.unlink(tmpname)
         raise
 
+    if subject and not message.startswith(subject):
+        message = '%s\n%s' % (subject, message)
     tmpfp.close()
     if not diffs_seen:
         os.unlink(tmpname)
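
extract() now keeps the mail Subject apart from the body it collects: the Subject is stripped of a leading "[PATCH ...]" tag, discarded when an embedded "# HG changeset patch" header turns up (that header brings its own description), and prepended only at the end, and only if the body does not already begin with it, so the first line of the commit message is no longer duplicated. The same idea in isolation (a sketch of just the combination step, not the full mail parsing):

    import re

    def build_message(subject, body):
        """Combine a mail Subject with a patch-mail body, without
        duplicating the subject when the body already starts with it."""
        if subject:
            subject = re.sub(r'^\[PATCH[^\]]*\]\s*', '', subject)  # drop [PATCH n/m]
            subject = subject.replace('\n\t', ' ')
        if subject and not body.startswith(subject):
            return '%s\n%s' % (subject, body)
        return body

    print(build_message('[PATCH 1/2] fix the frobnicator',
                        'fix the frobnicator\n\nmore detail'))
    print(build_message('[PATCH] add docs',
                        'a body that only elaborates, no repeated first line\n'))
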
@@ -135,7 +142,7 @@ GP_PATCH  = 1 << 0  # we have to run pat
 GP_FILTER = 1 << 1  # there's some copy/rename operation
 GP_BINARY = 1 << 2  # there's a binary patch
 
-def readgitpatch(patchname):
+def readgitpatch(fp, firstline=None):
     """extract git-style metadata about patches from <patchname>"""
     class gitpatch:
         "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
@@ -148,16 +155,21 @@ def readgitpatch(patchname):
             self.lineno = 0
             self.binary = False
 
+    def reader(fp, firstline):
+        if firstline is not None:
+            yield firstline
+        for line in fp:
+            yield line
+
     # Filter patch for git information
     gitre = re.compile('diff --git a/(.*) b/(.*)')
-    pf = file(patchname)
     gp = None
     gitpatches = []
     # Can have a git patch with only metadata, causing patch to complain
     dopatch = 0
 
     lineno = 0
-    for line in pf:
+    for line in reader(fp, firstline):
         lineno += 1
         if line.startswith('diff --git'):
             m = gitre.match(line)
@@ -190,9 +202,9 @@ def readgitpatch(patchname):
                 gp.op = 'DELETE'
             elif line.startswith('new file mode '):
                 gp.op = 'ADD'
-                gp.mode = int(line.rstrip()[-3:], 8)
+                gp.mode = int(line.rstrip()[-6:], 8)
             elif line.startswith('new mode '):
-                gp.mode = int(line.rstrip()[-3:], 8)
+                gp.mode = int(line.rstrip()[-6:], 8)
             elif line.startswith('GIT binary patch'):
                 dopatch |= GP_BINARY
                 gp.binary = True
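
The mode parsing in readgitpatch() changes from the last three characters of "new mode ..." / "new file mode ..." lines to the last six, so the whole git mode (file-type bits included, e.g. 100644 for a regular file, 100755 for an executable, 120000 for a symlink) is kept as an octal number rather than just the permission bits. In short:

    line = 'new file mode 100755'

    old_mode = int(line.rstrip()[-3:], 8)   # 0o755    -> permission bits only
    new_mode = int(line.rstrip()[-6:], 8)   # 0o100755 -> full git mode

    assert old_mode == 0o755
    assert new_mode == 0o100755
    assert new_mode & 0o777 == old_mode     # permissions preserved either way
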
@@ -204,157 +216,793 @@ def readgitpatch(patchname):
 
     return (dopatch, gitpatches)
 
-def dogitpatch(patchname, gitpatches, cwd=None):
-    """Preprocess git patch so that vanilla patch can handle it"""
-    def extractbin(fp):
-        i = [0] # yuck
-        def readline():
-            i[0] += 1
-            return fp.readline().rstrip()
-        line = readline()
+def patch(patchname, ui, strip=1, cwd=None, files={}):
+    """apply <patchname> to the working directory.
+    returns whether patch was applied with fuzz factor."""
+    patcher = ui.config('ui', 'patch')
+    args = []
+    try:
+        if patcher:
+            return externalpatch(patcher, args, patchname, ui, strip, cwd,
+                                 files)
+        else:
+            try:
+                return internalpatch(patchname, ui, strip, cwd, files)
+            except NoHunks:
+                patcher = util.find_exe('gpatch') or util.find_exe('patch')
+                ui.debug('no valid hunks found; trying with %r instead\n' %
+                         patcher)
+                if util.needbinarypatch():
+                    args.append('--binary')
+                return externalpatch(patcher, args, patchname, ui, strip, cwd,
+                                     files)
+    except PatchError, err:
+        s = str(err)
+        if s:
+            raise util.Abort(s)
+        else:
+            raise util.Abort(_('patch failed to apply'))
+
+def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
+    """use <patcher> to apply <patchname> to the working directory.
+    returns whether patch was applied with fuzz factor."""
+
+    fuzz = False
+    if cwd:
+        args.append('-d %s' % util.shellquote(cwd))
+    fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
+                                       util.shellquote(patchname)))
+
+    for line in fp:
+        line = line.rstrip()
+        ui.note(line + '\n')
+        if line.startswith('patching file '):
+            pf = util.parse_patch_output(line)
+            printed_file = False
+            files.setdefault(pf, (None, None))
+        elif line.find('with fuzz') >= 0:
+            fuzz = True
+            if not printed_file:
+                ui.warn(pf + '\n')
+                printed_file = True
+            ui.warn(line + '\n')
+        elif line.find('saving rejects to file') >= 0:
+            ui.warn(line + '\n')
+        elif line.find('FAILED') >= 0:
+            if not printed_file:
+                ui.warn(pf + '\n')
+                printed_file = True
+            ui.warn(line + '\n')
+    code = fp.close()
+    if code:
+        raise PatchError(_("patch command failed: %s") %
+                         util.explain_exit(code)[0])
+    return fuzz
+
+def internalpatch(patchobj, ui, strip, cwd, files={}):
+    """use builtin patch to apply <patchobj> to the working directory.
+    returns whether patch was applied with fuzz factor."""
+    try:
+        fp = file(patchobj, 'rb')
+    except TypeError:
+        fp = patchobj
+    if cwd:
+        curdir = os.getcwd()
+        os.chdir(cwd)
+    try:
+        ret = applydiff(ui, fp, files, strip=strip)
+    finally:
+        if cwd:
+            os.chdir(curdir)
+    if ret < 0:
+        raise PatchError
+    return ret > 0
+
+# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
+unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
+contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
+
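
A quick illustration of the hunk-header patterns above (the sample headers are invented):

    m = unidesc.match('@@ -3,7 +3,8 @@\n')
    assert m.group(1, 3, 4, 6) == ('3', '7', '3', '8')
    m = unidesc.match('@@ -1 +1 @@\n')    # one-line hunks omit the lengths
    assert m.group(3) is None and m.group(6) is None
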
+class patchfile:
+    def __init__(self, ui, fname):
+        self.fname = fname
+        self.ui = ui
+        try:
+            fp = file(fname, 'rb')
+            self.lines = fp.readlines()
+            self.exists = True
+        except IOError:
+            dirname = os.path.dirname(fname)
+            if dirname and not os.path.isdir(dirname):
+                dirs = dirname.split(os.path.sep)
+                d = ""
+                for x in dirs:
+                    d = os.path.join(d, x)
+                    if not os.path.isdir(d):
+                        os.mkdir(d)
+            self.lines = []
+            self.exists = False
+
+        self.hash = {}
+        self.dirty = 0
+        self.offset = 0
+        self.rej = []
+        self.fileprinted = False
+        self.printfile(False)
+        self.hunks = 0
+
+    def printfile(self, warn):
+        if self.fileprinted:
+            return
+        if warn or self.ui.verbose:
+            self.fileprinted = True
+        s = _("patching file %s\n") % self.fname
+        if warn:
+            self.ui.warn(s)
+        else:
+            self.ui.note(s)
+
+
+    def findlines(self, l, linenum):
+        # looks through the hash and finds candidate lines.  The
+        # result is a list of line numbers sorted based on distance
+        # from linenum
+        def sorter(a, b):
+            vala = abs(a - linenum)
+            valb = abs(b - linenum)
+            return cmp(vala, valb)
+
+        try:
+            cand = self.hash[l]
+        except KeyError:
+            return []
+
+        if len(cand) > 1:
+            # resort our list of potentials forward then back.
+            cand.sort(cmp=sorter)
+        return cand
+
+    def hashlines(self):
+        self.hash = {}
+        for x in xrange(len(self.lines)):
+            s = self.lines[x]
+            self.hash.setdefault(s, []).append(x)
+
+    def write_rej(self):
+        # our rejects are a little different from patch(1).  This always
+        # creates rejects in the same form as the original patch.  A file
+        # header is inserted so that you can run the reject through patch again
+        # without having to type the filename.
+
+        if not self.rej:
+            return
+        if self.hunks != 1:
+            hunkstr = "s"
+        else:
+            hunkstr = ""
+
+        fname = self.fname + ".rej"
+        self.ui.warn(
+            _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
+            (len(self.rej), self.hunks, hunkstr, fname))
+        try:
+            os.unlink(fname)
+        except OSError:
+            pass
+        fp = file(fname, 'wb')
+        base = os.path.basename(self.fname)
+        fp.write("--- %s\n+++ %s\n" % (base, base))
+        for x in self.rej:
+            for l in x.hunk:
+                fp.write(l)
+                if l[-1] != '\n':
+                    fp.write("\n\ No newline at end of file\n")
+
+    def write(self, dest=None):
+        if self.dirty:
+            if not dest:
+                dest = self.fname
+            st = None
+            try:
+                st = os.lstat(dest)
+                if st.st_nlink > 1:
+                    os.unlink(dest)
+            except: pass
+            fp = file(dest, 'wb')
+            if st:
+                os.chmod(dest, st.st_mode)
+            fp.writelines(self.lines)
+            fp.close()
+
+    def close(self):
+        self.write()
+        self.write_rej()
+
+    def apply(self, h, reverse):
+        if not h.complete():
+            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
+                            (h.number, h.desc, len(h.a), h.lena, len(h.b),
+                            h.lenb))
+
+        self.hunks += 1
+        if reverse:
+            h.reverse()
+
+        if self.exists and h.createfile():
+            self.ui.warn(_("file %s already exists\n") % self.fname)
+            self.rej.append(h)
+            return -1
+
+        if isinstance(h, binhunk):
+            if h.rmfile():
+                os.unlink(self.fname)
+            else:
+                self.lines[:] = h.new()
+                self.offset += len(h.new())
+                self.dirty = 1
+            return 0
+
+        # fast case first, no offsets, no fuzz
+        old = h.old()
+        # patch starts counting at 1 unless we are adding the file
+        if h.starta == 0:
+            start = 0
+        else:
+            start = h.starta + self.offset - 1
+        orig_start = start
+        if diffhelpers.testhunk(old, self.lines, start) == 0:
+            if h.rmfile():
+                os.unlink(self.fname)
+            else:
+                self.lines[start : start + h.lena] = h.new()
+                self.offset += h.lenb - h.lena
+                self.dirty = 1
+            return 0
+
+        # ok, we couldn't match the hunk.  Let's look for offsets and fuzz it
+        self.hashlines()
+        if h.hunk[-1][0] != ' ':
+            # if the hunk tried to put something at the bottom of the file
+            # override the start line and use eof here
+            search_start = len(self.lines)
+        else:
+            search_start = orig_start
+
+        for fuzzlen in xrange(3):
+            for toponly in [ True, False ]:
+                old = h.old(fuzzlen, toponly)
+
+                cand = self.findlines(old[0][1:], search_start)
+                for l in cand:
+                    if diffhelpers.testhunk(old, self.lines, l) == 0:
+                        newlines = h.new(fuzzlen, toponly)
+                        self.lines[l : l + len(old)] = newlines
+                        self.offset += len(newlines) - len(old)
+                        self.dirty = 1
+                        if fuzzlen:
+                            fuzzstr = "with fuzz %d " % fuzzlen
+                            f = self.ui.warn
+                            self.printfile(True)
+                        else:
+                            fuzzstr = ""
+                            f = self.ui.note
+                        offset = l - orig_start - fuzzlen
+                        if offset == 1:
+                            linestr = "line"
+                        else:
+                            linestr = "lines"
+                        f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
+                          (h.number, l+1, fuzzstr, offset, linestr))
+                        return fuzzlen
+        self.printfile(True)
+        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
+        self.rej.append(h)
+        return -1
+
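
The hashlines()/findlines() pair above drives the offset search in apply(): every line of the target file is indexed, and candidate positions for the hunk's first old line are tried nearest-first (the real code also strips the leading '+'/'-'/' ' marker from that line). A much-simplified standalone sketch of the idea, with invented names and data:

    def findoffsets(lines, old, expected_start):
        byline = {}
        for i, l in enumerate(lines):
            byline.setdefault(l, []).append(i)
        cand = byline.get(old[0], [])
        # nearest candidate to the expected position wins
        return sorted(cand, key=lambda i: abs(i - expected_start))

    lines = ['a\n', 'b\n', 'c\n', 'b\n', 'd\n']
    assert findoffsets(lines, ['b\n', 'c\n'], 3) == [3, 1]
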
+class hunk:
+    def __init__(self, desc, num, lr, context):
+        self.number = num
+        self.desc = desc
+        self.hunk = [ desc ]
+        self.a = []
+        self.b = []
+        if context:
+            self.read_context_hunk(lr)
+        else:
+            self.read_unified_hunk(lr)
+
+    def read_unified_hunk(self, lr):
+        m = unidesc.match(self.desc)
+        if not m:
+            raise PatchError(_("bad hunk #%d") % self.number)
+        self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
+        if self.lena == None:
+            self.lena = 1
+        else:
+            self.lena = int(self.lena)
+        if self.lenb == None:
+            self.lenb = 1
+        else:
+            self.lenb = int(self.lenb)
+        self.starta = int(self.starta)
+        self.startb = int(self.startb)
+        diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
+        # if we hit eof before finishing out the hunk, the last line will
+        # be zero length.  Let's try to fix it up.
+        while len(self.hunk[-1]) == 0:
+            del self.hunk[-1]
+            del self.a[-1]
+            del self.b[-1]
+            self.lena -= 1
+            self.lenb -= 1
+
+    def read_context_hunk(self, lr):
+        self.desc = lr.readline()
+        m = contextdesc.match(self.desc)
+        if not m:
+            raise PatchError(_("bad hunk #%d") % self.number)
+        foo, self.starta, foo2, aend, foo3 = m.groups()
+        self.starta = int(self.starta)
+        if aend == None:
+            aend = self.starta
+        self.lena = int(aend) - self.starta
+        if self.starta:
+            self.lena += 1
+        for x in xrange(self.lena):
+            l = lr.readline()
+            if l.startswith('---'):
+                lr.push(l)
+                break
+            s = l[2:]
+            if l.startswith('- ') or l.startswith('! '):
+                u = '-' + s
+            elif l.startswith('  '):
+                u = ' ' + s
+            else:
+                raise PatchError(_("bad hunk #%d old text line %d") %
+                                 (self.number, x))
+            self.a.append(u)
+            self.hunk.append(u)
+
+        l = lr.readline()
+        if l.startswith('\ '):
+            s = self.a[-1][:-1]
+            self.a[-1] = s
+            self.hunk[-1] = s
+            l = lr.readline()
+        m = contextdesc.match(l)
+        if not m:
+            raise PatchError(_("bad hunk #%d") % self.number)
+        foo, self.startb, foo2, bend, foo3 = m.groups()
+        self.startb = int(self.startb)
+        if bend == None:
+            bend = self.startb
+        self.lenb = int(bend) - self.startb
+        if self.startb:
+            self.lenb += 1
+        hunki = 1
+        for x in xrange(self.lenb):
+            l = lr.readline()
+            if l.startswith('\ '):
+                s = self.b[-1][:-1]
+                self.b[-1] = s
+                self.hunk[hunki-1] = s
+                continue
+            if not l:
+                lr.push(l)
+                break
+            s = l[2:]
+            if l.startswith('+ ') or l.startswith('! '):
+                u = '+' + s
+            elif l.startswith('  '):
+                u = ' ' + s
+            elif len(self.b) == 0:
+                # this can happen when the hunk does not add any lines
+                lr.push(l)
+                break
+            else:
+                raise PatchError(_("bad hunk #%d old text line %d") %
+                                 (self.number, x))
+            self.b.append(s)
+            while True:
+                if hunki >= len(self.hunk):
+                    h = ""
+                else:
+                    h = self.hunk[hunki]
+                hunki += 1
+                if h == u:
+                    break
+                elif h.startswith('-'):
+                    continue
+                else:
+                    self.hunk.insert(hunki-1, u)
+                    break
+
+        if not self.a:
+            # this happens when lines were only added to the hunk
+            for x in self.hunk:
+                if x.startswith('-') or x.startswith(' '):
+                    self.a.append(x)
+        if not self.b:
+            # this happens when lines were only deleted from the hunk
+            for x in self.hunk:
+                if x.startswith('+') or x.startswith(' '):
+                    self.b.append(x[1:])
+        # @@ -start,len +start,len @@
+        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
+                                             self.startb, self.lenb)
+        self.hunk[0] = self.desc
+
+    def reverse(self):
+        origlena = self.lena
+        origstarta = self.starta
+        self.lena = self.lenb
+        self.starta = self.startb
+        self.lenb = origlena
+        self.startb = origstarta
+        self.a = []
+        self.b = []
+        # self.hunk[0] is the @@ description
+        for x in xrange(1, len(self.hunk)):
+            o = self.hunk[x]
+            if o.startswith('-'):
+                n = '+' + o[1:]
+                self.b.append(o[1:])
+            elif o.startswith('+'):
+                n = '-' + o[1:]
+                self.a.append(n)
+            else:
+                n = o
+                self.b.append(o[1:])
+                self.a.append(o)
+            self.hunk[x] = n
+
+    def fix_newline(self):
+        diffhelpers.fix_newline(self.hunk, self.a, self.b)
+
+    def complete(self):
+        return len(self.a) == self.lena and len(self.b) == self.lenb
+
+    def createfile(self):
+        return self.starta == 0 and self.lena == 0
+
+    def rmfile(self):
+        return self.startb == 0 and self.lenb == 0
+
+    def fuzzit(self, l, fuzz, toponly):
+        # this removes context lines from the top and bottom of list 'l'.  It
+        # checks the hunk to make sure only context lines are removed, and then
+        # returns a new shortened list of lines.
+        fuzz = min(fuzz, len(l)-1)
+        if fuzz:
+            top = 0
+            bot = 0
+            hlen = len(self.hunk)
+            for x in xrange(hlen-1):
+                # the hunk starts with the @@ line, so use x+1
+                if self.hunk[x+1][0] == ' ':
+                    top += 1
+                else:
+                    break
+            if not toponly:
+                for x in xrange(hlen-1):
+                    if self.hunk[hlen-bot-1][0] == ' ':
+                        bot += 1
+                    else:
+                        break
+
+            # top and bot now count context in the hunk
+            # adjust them if either one is short
+            context = max(top, bot, 3)
+            if bot < context:
+                bot = max(0, fuzz - (context - bot))
+            else:
+                bot = min(fuzz, bot)
+            if top < context:
+                top = max(0, fuzz - (context - top))
+            else:
+                top = min(fuzz, top)
+
+            return l[top:len(l)-bot]
+        return l
+
+    def old(self, fuzz=0, toponly=False):
+        return self.fuzzit(self.a, fuzz, toponly)
+
+    def newctrl(self):
+        res = []
+        for x in self.hunk:
+            c = x[0]
+            if c == ' ' or c == '+':
+                res.append(x)
+        return res
+
+    def new(self, fuzz=0, toponly=False):
+        return self.fuzzit(self.b, fuzz, toponly)
+
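
fuzzit() above is what implements fuzz-style matching: with fuzz N it shaves at most N context lines off the top (and, unless toponly, the bottom) of the old/new line lists before the hunk is retried, never touching '+' or '-' lines. A much-simplified sketch of that rule, not the real adjustment logic (which also balances short context):

    def trim_context(hunklines, fuzz, toponly=False):
        top = 0
        while top < fuzz and top < len(hunklines) and hunklines[top][0] == ' ':
            top += 1
        bot = 0
        if not toponly:
            while (bot < fuzz and top + bot < len(hunklines)
                   and hunklines[-(bot + 1)][0] == ' '):
                bot += 1
        return hunklines[top:len(hunklines) - bot]

    assert trim_context([' a', '-b', '+c', ' d'], 1) == ['-b', '+c']
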
+class binhunk:
+    'A binary patch file. Only understands literals so far.'
+    def __init__(self, gitpatch):
+        self.gitpatch = gitpatch
+        self.text = None
+        self.hunk = ['GIT binary patch\n']
+
+    def createfile(self):
+        return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
+
+    def rmfile(self):
+        return self.gitpatch.op == 'DELETE'
+
+    def complete(self):
+        return self.text is not None
+
+    def new(self):
+        return [self.text]
+
+    def extract(self, fp):
+        line = fp.readline()
+        self.hunk.append(line)
         while line and not line.startswith('literal '):
-            line = readline()
+            line = fp.readline()
+            self.hunk.append(line)
         if not line:
-            return None, i[0]
-        size = int(line[8:])
+            raise PatchError(_('could not extract binary patch'))
+        size = int(line[8:].rstrip())
         dec = []
-        line = readline()
-        while line:
+        line = fp.readline()
+        self.hunk.append(line)
+        while len(line) > 1:
             l = line[0]
             if l <= 'Z' and l >= 'A':
                 l = ord(l) - ord('A') + 1
             else:
                 l = ord(l) - ord('a') + 27
-            dec.append(base85.b85decode(line[1:])[:l])
-            line = readline()
+            dec.append(base85.b85decode(line[1:-1])[:l])
+            line = fp.readline()
+            self.hunk.append(line)
         text = zlib.decompress(''.join(dec))
         if len(text) != size:
-            raise util.Abort(_('binary patch is %d bytes, not %d') %
-                             (len(text), size))
-        return text, i[0]
+            raise PatchError(_('binary patch is %d bytes, not %d') %
+                             (len(text), size))
+        self.text = text
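
The literal format handled by extract() above stores, per line, one length character ('A'-'Z' for 1-26 bytes, 'a'-'z' for 27-52) followed by the base85 text of that many bytes, padded to a multiple of four bytes before encoding. A small round-trip using the same base85 module; the helper name and sample data are invented:

    from mercurial import base85

    def decode_literal_line(line):
        # 'line' is one literal line with its trailing newline stripped
        c = line[0]
        if 'A' <= c <= 'Z':
            n = ord(c) - ord('A') + 1
        else:
            n = ord(c) - ord('a') + 27
        return base85.b85decode(line[1:])[:n]

    data = 'hello!!!'                      # 8 bytes, already a multiple of 4
    assert decode_literal_line('H' + base85.b85encode(data)) == data
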
 
-    pf = file(patchname)
-    pfline = 1
-
-    fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
-    tmpfp = os.fdopen(fd, 'w')
+def parsefilename(str):
+    # --- filename \t|space stuff
+    s = str[4:]
+    i = s.find('\t')
+    if i < 0:
+        i = s.find(' ')
+        if i < 0:
+            return s
+    return s[:i]
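
Expected behaviour of parsefilename() on typical '---'/'+++' headers (sample timestamps invented):

    assert parsefilename('--- a/foo.c\tMon Sep 24 2007') == 'a/foo.c'
    assert parsefilename('+++ b/foo.c\tMon Sep 24 2007') == 'b/foo.c'
    assert parsefilename('--- /dev/null\tMon Sep 24 2007') == '/dev/null'
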
 
-    try:
-        for i in xrange(len(gitpatches)):
-            p = gitpatches[i]
-            if not p.copymod and not p.binary:
-                continue
-
-            # rewrite patch hunk
-            while pfline < p.lineno:
-                tmpfp.write(pf.readline())
-                pfline += 1
+def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
+    def pathstrip(path, count=1):
+        pathlen = len(path)
+        i = 0
+        if count == 0:
+            return path.rstrip()
+        while count > 0:
+            i = path.find('/', i)
+            if i == -1:
+                raise PatchError(_("unable to strip away %d dirs from %s") %
+                                 (count, path))
+            i += 1
+            # consume '//' in the path
+            while i < pathlen - 1 and path[i] == '/':
+                i += 1
+            count -= 1
+        return path[i:].rstrip()
 
-            if p.binary:
-                text, delta = extractbin(pf)
-                if not text:
-                    raise util.Abort(_('binary patch extraction failed'))
-                pfline += delta
-                if not cwd:
-                    cwd = os.getcwd()
-                absdst = os.path.join(cwd, p.path)
-                basedir = os.path.dirname(absdst)
-                if not os.path.isdir(basedir):
-                    os.makedirs(basedir)
-                out = file(absdst, 'wb')
-                out.write(text)
-                out.close()
-            elif p.copymod:
-                copyfile(p.oldpath, p.path, basedir=cwd)
-                tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
-                line = pf.readline()
-                pfline += 1
-                while not line.startswith('--- a/'):
-                    tmpfp.write(line)
-                    line = pf.readline()
-                    pfline += 1
-                tmpfp.write('--- a/%s\n' % p.path)
+    nulla = afile_orig == "/dev/null"
+    nullb = bfile_orig == "/dev/null"
+    afile = pathstrip(afile_orig, strip)
+    gooda = os.path.exists(afile) and not nulla
+    bfile = pathstrip(bfile_orig, strip)
+    if afile == bfile:
+        goodb = gooda
+    else:
+        goodb = os.path.exists(bfile) and not nullb
+    createfunc = hunk.createfile
+    if reverse:
+        createfunc = hunk.rmfile
+    if not goodb and not gooda and not createfunc():
+        raise PatchError(_("unable to find %s or %s for patching") %
+                         (afile, bfile))
+    if gooda and goodb:
+        fname = bfile
+        if afile in bfile:
+            fname = afile
+    elif gooda:
+        fname = afile
+    elif not nullb:
+        fname = bfile
+        if afile in bfile:
+            fname = afile
+    elif not nulla:
+        fname = afile
+    return fname
+
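
pathstrip() inside selectfile() mirrors patch(1)'s -p handling: strip the given number of leading path components, collapsing doubled slashes, and fail if there are not enough components. A rough standalone approximation for intuition only; the real helper also validates the component count and the '//' case:

    def strip_components(path, count):
        # approximation; see pathstrip() above for the real rules
        return '/'.join(path.split('/')[count:])

    assert strip_components('a/b/c.txt', 1) == 'b/c.txt'
    assert strip_components('a/b/c.txt', 0) == 'a/b/c.txt'
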
+class linereader:
+    # simple class to allow pushing lines back into the input stream
+    def __init__(self, fp):
+        self.fp = fp
+        self.buf = []
+
+    def push(self, line):
+        self.buf.append(line)
 
-        line = pf.readline()
-        while line:
-            tmpfp.write(line)
-            line = pf.readline()
-    except:
-        tmpfp.close()
-        os.unlink(patchname)
-        raise
+    def readline(self):
+        if self.buf:
+            l = self.buf[0]
+            del self.buf[0]
+            return l
+        return self.fp.readline()
+
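
Typical use of the linereader above: read ahead one line and push it back when it is not what was expected, which is exactly how applydiff() below sniffs for a '+++' line after a '---' header. The sample diff text is invented:

    import cStringIO

    lr = linereader(cStringIO.StringIO('--- a/x\n+++ b/x\n@@ -1 +1 @@\n'))
    header = lr.readline()
    nxt = lr.readline()
    if not nxt.startswith('+++'):
        lr.push(nxt)            # hand it back; the next readline() re-reads it
    assert lr.readline() == '@@ -1 +1 @@\n'
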
+def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
+              rejmerge=None, updatedir=None):
+    """reads a patch from fp and tries to apply it.  The dict 'changed' is
+       filled in with all of the filenames changed by the patch.  Returns 0
+       for a clean patch, -1 if any rejects were found and 1 if there was
+       any fuzz."""
+
+    def scangitpatch(fp, firstline, cwd=None):
+        '''git patches can modify a file, then copy that file to
+        a new file, but expect the source to be the unmodified form.
+        So we scan the patch looking for that case so we can do
+        the copies ahead of time.'''
 
-    tmpfp.close()
-    return patchname
+        pos = 0
+        try:
+            pos = fp.tell()
+        except IOError:
+            fp = cStringIO.StringIO(fp.read())
+
+        (dopatch, gitpatches) = readgitpatch(fp, firstline)
+        for gp in gitpatches:
+            if gp.copymod:
+                copyfile(gp.oldpath, gp.path, basedir=cwd)
+
+        fp.seek(pos)
 
-def patch(patchname, ui, strip=1, cwd=None, files={}):
-    """apply the patch <patchname> to the working directory.
-    a list of patched files is returned"""
+        return fp, dopatch, gitpatches
+
+    current_hunk = None
+    current_file = None
+    afile = ""
+    bfile = ""
+    state = None
+    hunknum = 0
+    rejects = 0
+
+    git = False
+    gitre = re.compile('diff --git (a/.*) (b/.*)')
 
-    # helper function
-    def __patch(patchname):
-        """patch and updates the files and fuzz variables"""
-        fuzz = False
-
-        args = []
-        patcher = ui.config('ui', 'patch')
-        if not patcher:
-            patcher = util.find_exe('gpatch') or util.find_exe('patch')
-            # Try to be smart only if patch call was not supplied
-            if util.needbinarypatch():
-                args.append('--binary')
-
-        if not patcher:
-            raise util.Abort(_('no patch command found in hgrc or PATH'))
-
-        if cwd:
-            args.append('-d %s' % util.shellquote(cwd))
-        fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
-                                           util.shellquote(patchname)))
+    # our states
+    BFILE = 1
+    err = 0
+    context = None
+    lr = linereader(fp)
+    dopatch = True
+    gitworkdone = False
 
-        for line in fp:
-            line = line.rstrip()
-            ui.note(line + '\n')
-            if line.startswith('patching file '):
-                pf = util.parse_patch_output(line)
-                printed_file = False
-                files.setdefault(pf, (None, None))
-            elif line.find('with fuzz') >= 0:
-                fuzz = True
-                if not printed_file:
-                    ui.warn(pf + '\n')
-                    printed_file = True
-                ui.warn(line + '\n')
-            elif line.find('saving rejects to file') >= 0:
-                ui.warn(line + '\n')
-            elif line.find('FAILED') >= 0:
-                if not printed_file:
-                    ui.warn(pf + '\n')
-                    printed_file = True
-                ui.warn(line + '\n')
-        code = fp.close()
-        if code:
-            raise util.Abort(_("patch command failed: %s") %
-                             util.explain_exit(code)[0])
-        return fuzz
+    while True:
+        newfile = False
+        x = lr.readline()
+        if not x:
+            break
+        if current_hunk:
+            if x.startswith('\ '):
+                current_hunk.fix_newline()
+            ret = current_file.apply(current_hunk, reverse)
+            if ret >= 0:
+                changed.setdefault(current_file.fname, (None, None))
+                if ret > 0:
+                    err = 1
+            current_hunk = None
+            gitworkdone = False
+        if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
+            ((context or context == None) and x.startswith('***************')))):
+            try:
+                if context == None and x.startswith('***************'):
+                    context = True
+                current_hunk = hunk(x, hunknum + 1, lr, context)
+            except PatchError, inst:
+                ui.debug(str(inst) + '\n')
+                current_hunk = None
+                continue
+            hunknum += 1
+            if not current_file:
+                if sourcefile:
+                    current_file = patchfile(ui, sourcefile)
+                else:
+                    current_file = selectfile(afile, bfile, current_hunk,
+                                              strip, reverse)
+                    current_file = patchfile(ui, current_file)
+        elif state == BFILE and x.startswith('GIT binary patch'):
+            current_hunk = binhunk(changed[bfile[2:]][1])
+            if not current_file:
+                if sourcefile:
+                    current_file = patchfile(ui, sourcefile)
+                else:
+                    current_file = selectfile(afile, bfile, current_hunk,
+                                              strip, reverse)
+                    current_file = patchfile(ui, current_file)
+            hunknum += 1
+            current_hunk.extract(fp)
+        elif x.startswith('diff --git'):
+            # check for git diff, scanning the whole patch file if needed
+            m = gitre.match(x)
+            if m:
+                afile, bfile = m.group(1, 2)
+                if not git:
+                    git = True
+                    fp, dopatch, gitpatches = scangitpatch(fp, x)
+                    for gp in gitpatches:
+                        changed[gp.path] = (gp.op, gp)
+                # else error?
+                # copy/rename + modify should modify target, not source
+                if changed.get(bfile[2:], (None, None))[0] in ('COPY',
+                                                               'RENAME'):
+                    afile = bfile
+                    gitworkdone = True
+            newfile = True
+        elif x.startswith('---'):
+            # check for a unified diff
+            l2 = lr.readline()
+            if not l2.startswith('+++'):
+                lr.push(l2)
+                continue
+            newfile = True
+            context = False
+            afile = parsefilename(x)
+            bfile = parsefilename(l2)
+        elif x.startswith('***'):
+            # check for a context diff
+            l2 = lr.readline()
+            if not l2.startswith('---'):
+                lr.push(l2)
+                continue
+            l3 = lr.readline()
+            lr.push(l3)
+            if not l3.startswith("***************"):
+                lr.push(l2)
+                continue
+            newfile = True
+            context = True
+            afile = parsefilename(x)
+            bfile = parsefilename(l2)
 
-    (dopatch, gitpatches) = readgitpatch(patchname)
-    for gp in gitpatches:
-        files[gp.path] = (gp.op, gp)
-
-    fuzz = False
-    if dopatch:
-        filterpatch = dopatch & (GP_FILTER | GP_BINARY)
-        if filterpatch:
-            patchname = dogitpatch(patchname, gitpatches, cwd=cwd)
-        try:
-            if dopatch & GP_PATCH:
-                fuzz = __patch(patchname)
-        finally:
-            if filterpatch:
-                os.unlink(patchname)
-
-    return fuzz
+        if newfile:
+            if current_file:
+                current_file.close()
+                if rejmerge:
+                    rejmerge(current_file)
+                rejects += len(current_file.rej)
+            state = BFILE
+            current_file = None
+            hunknum = 0
+    if current_hunk:
+        if current_hunk.complete():
+            ret = current_file.apply(current_hunk, reverse)
+            if ret >= 0:
+                changed.setdefault(current_file.fname, (None, None))
+                if ret > 0:
+                    err = 1
+        else:
+            fname = current_file and current_file.fname or None
+            raise PatchError(_("malformed patch %s %s") % (fname,
+                             current_hunk.desc))
+    if current_file:
+        current_file.close()
+        if rejmerge:
+            rejmerge(current_file)
+        rejects += len(current_file.rej)
+    if updatedir and git:
+        updatedir(gitpatches)
+    if rejects:
+        return -1
+    if hunknum == 0 and dopatch and not gitworkdone:
+        raise NoHunks
+    return err
 
 def diffopts(ui, opts={}, untrusted=False):
     def get(key, name=None):
@@ -369,7 +1017,7 @@ def diffopts(ui, opts={}, untrusted=Fals
         ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
         ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'))
 
-def updatedir(ui, repo, patches, wlock=None):
+def updatedir(ui, repo, patches):
     '''Update dirstate after patch application according to metadata'''
     if not patches:
         return
@@ -391,29 +1039,32 @@ def updatedir(ui, repo, patches, wlock=N
     for src, dst, after in copies:
         if not after:
             copyfile(src, dst, repo.root)
-        repo.copy(src, dst, wlock=wlock)
+        repo.copy(src, dst)
     removes = removes.keys()
     if removes:
         removes.sort()
-        repo.remove(removes, True, wlock=wlock)
+        repo.remove(removes, True)
     for f in patches:
         ctype, gp = patches[f]
         if gp and gp.mode:
             x = gp.mode & 0100 != 0
+            l = gp.mode & 020000 != 0
             dst = os.path.join(repo.root, gp.path)
             # patch won't create empty files
             if ctype == 'ADD' and not os.path.exists(dst):
                 repo.wwrite(gp.path, '', x and 'x' or '')
             else:
-                util.set_exec(dst, x)
-    cmdutil.addremove(repo, cfiles, wlock=wlock)
+                util.set_link(dst, l)
+                if not l:
+                    util.set_exec(dst, x)
+    cmdutil.addremove(repo, cfiles)
     files = patches.keys()
     files.extend([r for r in removes if r not in files])
     files.sort()
 
     return files
 
-def b85diff(fp, to, tn):
+def b85diff(to, tn):
     '''print base85-encoded binary diff'''
     def gitindex(text):
         if not text:
@@ -497,24 +1148,30 @@ def diff(repo, node1=None, node2=None, f
     if node2:
         ctx2 = context.changectx(repo, node2)
         execf2 = ctx2.manifest().execf
+        linkf2 = ctx2.manifest().linkf
     else:
         ctx2 = context.workingctx(repo)
         execf2 = util.execfunc(repo.root, None)
+        linkf2 = util.linkfunc(repo.root, None)
         if execf2 is None:
-            execf2 = ctx2.parents()[0].manifest().copy().execf
+            mc = ctx2.parents()[0].manifest().copy()
+            execf2 = mc.execf
+            linkf2 = mc.linkf
 
     # returns False if there was no rename between ctx1 and ctx2
     # returns None if the file was created between ctx1 and ctx2
     # returns the (file, node) present in ctx1 that was renamed to f in ctx2
-    def renamed(f):
-        startrev = ctx1.rev()
-        c = ctx2
+    # This will only really work if c1 is the Nth 1st parent of c2.
+    def renamed(c1, c2, man, f):
+        startrev = c1.rev()
+        c = c2
         crev = c.rev()
         if crev is None:
             crev = repo.changelog.count()
         orig = f
+        files = (f,)
         while crev > startrev:
-            if f in c.files():
+            if f in files:
                 try:
                     src = getfilectx(f, c).renamed()
                 except revlog.LookupError:
@@ -524,7 +1181,8 @@ def diff(repo, node1=None, node2=None, f
             crev = c.parents()[0].rev()
             # try to reuse
             c = getctx(crev)
-        if f not in man1:
+            files = c.files()
+        if f not in man:
             return None
         if f == orig:
             return False
@@ -538,11 +1196,27 @@ def diff(repo, node1=None, node2=None, f
 
     if opts.git:
         copied = {}
-        for f in added:
-            src = renamed(f)
+        c1, c2 = ctx1, ctx2
+        files = added
+        man = man1
+        if node2 and ctx1.rev() >= ctx2.rev():
+            # renamed() starts at c2 and walks back in history until c1.
+            # Since ctx1.rev() >= ctx2.rev(), invert ctx2 and ctx1 to
+            # detect (inverted) copies.
+            c1, c2 = ctx2, ctx1
+            files = removed
+            man = ctx2.manifest()
+        for f in files:
+            src = renamed(c1, c2, man, f)
             if src:
                 copied[f] = src
-        srcs = [x[1] for x in copied.items()]
+        if ctx1 == c2:
+            # invert the copied dict
+            copied = dict([(v, k) for (k, v) in copied.iteritems()])
+        # If we've renamed file foo to bar (copied['bar'] = 'foo'),
+        # avoid showing a diff for foo if we're going to show
+        # the rename to bar.
+        srcs = [x[1] for x in copied.iteritems() if x[0] in added]
 
     all = modified + added + removed
     all.sort()
@@ -558,8 +1232,8 @@ def diff(repo, node1=None, node2=None, f
         if f not in removed:
             tn = getfilectx(f, ctx2).data()
         if opts.git:
-            def gitmode(x):
-                return x and '100755' or '100644'
+            def gitmode(x, l):
+                return l and '120000' or (x and '100755' or '100644')
             def addmodehdr(header, omode, nmode):
                 if omode != nmode:
                     header.append('old mode %s\n' % omode)
@@ -567,10 +1241,10 @@ def diff(repo, node1=None, node2=None, f
 
             a, b = f, f
             if f in added:
-                mode = gitmode(execf2(f))
+                mode = gitmode(execf2(f), linkf2(f))
                 if f in copied:
                     a = copied[f]
-                    omode = gitmode(man1.execf(a))
+                    omode = gitmode(man1.execf(a), man1.linkf(a))
                     addmodehdr(header, omode, mode)
                     if a in removed and a not in gone:
                         op = 'rename'
@@ -588,11 +1262,11 @@ def diff(repo, node1=None, node2=None, f
                 if f in srcs:
                     dodiff = False
                 else:
-                    mode = gitmode(man1.execf(f))
+                    mode = gitmode(man1.execf(f), man1.linkf(f))
                     header.append('deleted file mode %s\n' % mode)
             else:
-                omode = gitmode(man1.execf(f))
-                nmode = gitmode(execf2(f))
+                omode = gitmode(man1.execf(f), man1.linkf(f))
+                nmode = gitmode(execf2(f), linkf2(f))
                 addmodehdr(header, omode, nmode)
                 if util.binary(to) or util.binary(tn):
                     dodiff = 'binary'
@@ -600,7 +1274,7 @@ def diff(repo, node1=None, node2=None, f
             header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
         if dodiff:
             if dodiff == 'binary':
-                text = b85diff(fp, to, tn)
+                text = b85diff(to, tn)
             else:
                 text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
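
The gitmode() helper introduced above maps the exec and symlink flags to git's canonical mode strings; a quick restatement as a standalone check (illustrative only):

    def gitmode(x, l):
        return l and '120000' or (x and '100755' or '100644')

    assert gitmode(False, False) == '100644'   # regular file
    assert gitmode(True, False) == '100755'    # executable
    assert gitmode(True, True) == '120000'     # symlink wins over exec
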
--- a/mercurial/repo.py
+++ b/mercurial/repo.py
@@ -9,16 +9,26 @@
 class RepoError(Exception):
     pass
 
+class NoCapability(RepoError):
+    pass
+
 class repository(object):
     def capable(self, name):
         '''tell whether repo supports named capability.
         return False if not supported.
         if boolean capability, return True.
         if string capability, return string.'''
+        if name in self.capabilities:
+            return True
         name_eq = name + '='
         for cap in self.capabilities:
-            if name == cap:
-                return True
             if cap.startswith(name_eq):
                 return cap[len(name_eq):]
         return False
+
+    def requirecap(self, name, purpose):
+        '''raise an exception if the given capability is not present'''
+        if not self.capable(name):
+            raise NoCapability(_('cannot %s; remote repository does not '
+                                 'support the %r capability') %
+                               (purpose, name))
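
Usage sketch for the capability API above; the capability strings are invented examples and the subclass exists only for illustration:

    from mercurial.repo import repository

    class fakerepo(repository):
        capabilities = ('lookup', 'unbundle=HG10GZ,HG10BZ,HG10UN')

    r = fakerepo()
    assert r.capable('lookup') is True                      # boolean capability
    assert r.capable('unbundle') == 'HG10GZ,HG10BZ,HG10UN'  # string capability
    assert r.capable('stream') is False                     # not advertised
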
--- a/mercurial/revlog.py
+++ b/mercurial/revlog.py
@@ -15,17 +15,34 @@ from i18n import _
 import binascii, changegroup, errno, ancestor, mdiff, os
 import sha, struct, util, zlib
 
-# revlog version strings
-REVLOGV0 = 0
-REVLOGNG = 1
+_pack = struct.pack
+_unpack = struct.unpack
+_compress = zlib.compress
+_decompress = zlib.decompress
+_sha = sha.new
 
 # revlog flags
+REVLOGV0 = 0
+REVLOGNG = 1
 REVLOGNGINLINEDATA = (1 << 16)
 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
-
 REVLOG_DEFAULT_FORMAT = REVLOGNG
 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
 
+class RevlogError(Exception):
+    pass
+class LookupError(RevlogError):
+    pass
+
+def getoffset(q):
+    return int(q >> 16)
+
+def gettype(q):
+    return int(q & 0xFFFF)
+
+def offset_type(offset, type):
+    return long(long(offset) << 16 | type)
+
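
The three helpers above pack a 48-bit data-file offset and a 16-bit type/flags field into one integer; a quick round-trip with invented values:

    q = offset_type(4096, 1)
    assert getoffset(q) == 4096
    assert gettype(q) == 1
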
 def hash(text, p1, p2):
     """generate a hash from the given text and its parent hashes
 
@@ -35,48 +52,39 @@ def hash(text, p1, p2):
     """
     l = [p1, p2]
     l.sort()
-    s = sha.new(l[0])
+    s = _sha(l[0])
     s.update(l[1])
     s.update(text)
     return s.digest()
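
Because the parents are sorted before being hashed above, the resulting nodeid does not depend on parent order; for example, with 20-byte dummy parents:

    # 'hash' is the revlog helper defined above, not the builtin
    p1, p2 = 'a' * 20, 'b' * 20
    assert hash('some text', p1, p2) == hash('some text', p2, p1)
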
 
 def compress(text):
     """ generate a possibly-compressed representation of text """
-    if not text: return ("", text)
+    if not text:
+        return ("", text)
     if len(text) < 44:
-        if text[0] == '\0': return ("", text)
+        if text[0] == '\0':
+            return ("", text)
         return ('u', text)
-    bin = zlib.compress(text)
+    bin = _compress(text)
     if len(bin) > len(text):
-        if text[0] == '\0': return ("", text)
+        if text[0] == '\0':
+            return ("", text)
         return ('u', text)
     return ("", bin)
 
 def decompress(bin):
     """ decompress the given input """
-    if not bin: return bin
+    if not bin:
+        return bin
     t = bin[0]
-    if t == '\0': return bin
-    if t == 'x': return zlib.decompress(bin)
-    if t == 'u': return bin[1:]
+    if t == '\0':
+        return bin
+    if t == 'x':
+        return _decompress(bin)
+    if t == 'u':
+        return bin[1:]
     raise RevlogError(_("unknown compression type %r") % t)
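
The pair above uses a one-byte envelope: 'u' marks text stored uncompressed, a leading '\0' is passed through untouched, and anything else is raw zlib output (which starts with 'x'). For instance, with invented sample strings:

    h, d = compress('a' * 200)        # compresses well -> raw zlib, no header
    assert h == '' and decompress(h + d) == 'a' * 200
    h, d = compress('short')          # too short to bother -> stored with 'u'
    assert h + d == 'ushort' and decompress(h + d) == 'short'
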
 
-indexformatv0 = ">4l20s20s20s"
-v0shaoffset = 56
-# index ng:
-# 6 bytes offset
-# 2 bytes flags
-# 4 bytes compressed length
-# 4 bytes uncompressed length
-# 4 bytes: base rev
-# 4 bytes link rev
-# 4 bytes parent 1 rev
-# 4 bytes parent 2 rev
-# 32 bytes: nodeid
-indexformatng = ">Qiiiiii20s12x"
-ngshaoffset = 32
-versionformat = ">I"
-
 class lazyparser(object):
     """
     this class avoids the need to parse the entirety of large indices
@@ -88,11 +96,9 @@ class lazyparser(object):
     safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
                                       hasattr(util, 'win32api'))
 
-    def __init__(self, dataf, size, indexformat, shaoffset):
+    def __init__(self, dataf, size):
         self.dataf = dataf
-        self.format = indexformat
-        self.s = struct.calcsize(indexformat)
-        self.indexformat = indexformat
+        self.s = struct.calcsize(indexformatng)
         self.datasize = size
         self.l = size/self.s
         self.index = [None] * self.l
@@ -100,7 +106,6 @@ class lazyparser(object):
         self.allmap = 0
         self.all = 0
         self.mapfind_count = 0
-        self.shaoffset = shaoffset
 
     def loadmap(self):
         """
@@ -109,7 +114,8 @@ class lazyparser(object):
         which is fairly slow.  loadmap can load up just the node map,
         which takes much less time.
         """
-        if self.allmap: return
+        if self.allmap:
+            return
         end = self.datasize
         self.allmap = 1
         cur = 0
@@ -120,7 +126,7 @@ class lazyparser(object):
             data = self.dataf.read(blocksize)
             off = 0
             for x in xrange(256):
-                n = data[off + self.shaoffset:off + self.shaoffset + 20]
+                n = data[off + ngshaoffset:off + ngshaoffset + 20]
                 self.map[n] = count
                 count += 1
                 if count >= self.l:
@@ -129,7 +135,8 @@ class lazyparser(object):
             cur += blocksize
 
     def loadblock(self, blockstart, blocksize, data=None):
-        if self.all: return
+        if self.all:
+            return
         if data is None:
             self.dataf.seek(blockstart)
             if blockstart + blocksize > self.datasize:
@@ -148,13 +155,14 @@ class lazyparser(object):
             if self.index[i + x] == None:
                 b = data[off : off + self.s]
                 self.index[i + x] = b
-                n = b[self.shaoffset:self.shaoffset + 20]
+                n = b[ngshaoffset:ngshaoffset + 20]
                 self.map[n] = i + x
             off += self.s
 
     def findnode(self, node):
         """search backwards through the index file for a specific node"""
-        if self.allmap: return None
+        if self.allmap:
+            return None
 
         # hg log will cause many many searches for the manifest
         # nodes.  After we get called a few times, just load the whole
@@ -180,14 +188,14 @@ class lazyparser(object):
             data = self.dataf.read(end - start)
             findend = end - start
             while True:
-                # we're searching backwards, so weh have to make sure
+                # we're searching backwards, so we have to make sure
                 # we don't find a changeset where this node is a parent
-                off = data.rfind(node, 0, findend)
+                off = data.find(node, 0, findend)
                 findend = off
                 if off >= 0:
                     i = off / self.s
                     off = i * self.s
-                    n = data[off + self.shaoffset:off + self.shaoffset + 20]
+                    n = data[off + ngshaoffset:off + ngshaoffset + 20]
                     if n == node:
                         self.map[n] = i + start / self.s
                         return node
@@ -197,11 +205,12 @@ class lazyparser(object):
         return None
 
     def loadindex(self, i=None, end=None):
-        if self.all: return
+        if self.all:
+            return
         all = False
         if i == None:
             blockstart = 0
-            blocksize = (512 / self.s) * self.s
+            blocksize = (65536 / self.s) * self.s
             end = self.datasize
             all = True
         else:
@@ -210,13 +219,14 @@ class lazyparser(object):
                 end = end * self.s
                 blocksize = end - blockstart
             else:
-                blockstart = (i & ~63) * self.s
-                blocksize = self.s * 64
+                blockstart = (i & ~1023) * self.s
+                blocksize = self.s * 1024
                 end = blockstart + blocksize
         while blockstart < end:
             self.loadblock(blockstart, blocksize)
             blockstart += blocksize
-        if all: self.all = True
+        if all:
+            self.all = True
 
 class lazyindex(object):
     """a lazy version of the index array"""
@@ -230,16 +240,15 @@ class lazyindex(object):
         self.p.loadindex(pos)
         return self.p.index[pos]
     def __getitem__(self, pos):
-        ret = self.p.index[pos] or self.load(pos)
-        if isinstance(ret, str):
-            ret = struct.unpack(self.p.indexformat, ret)
-        return ret
+        return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
     def __setitem__(self, pos, item):
-        self.p.index[pos] = item
+        self.p.index[pos] = _pack(indexformatng, *item)
     def __delitem__(self, pos):
         del self.p.index[pos]
+    def insert(self, pos, e):
+        self.p.index.insert(pos, _pack(indexformatng, *e))
     def append(self, e):
-        self.p.index.append(e)
+        self.p.index.append(_pack(indexformatng, *e))
 
 class lazymap(object):
     """a lazy version of the node map"""
@@ -262,8 +271,8 @@ class lazymap(object):
                 self.p.loadindex(i)
                 ret = self.p.index[i]
             if isinstance(ret, str):
-                ret = struct.unpack(self.p.indexformat, ret)
-            yield ret[-1]
+                ret = _unpack(indexformatng, ret)
+            yield ret[7]
     def __getitem__(self, key):
         try:
             return self.p.map[key]
@@ -278,8 +287,112 @@ class lazymap(object):
     def __delitem__(self, key):
         del self.p.map[key]
 
-class RevlogError(Exception): pass
-class LookupError(RevlogError): pass
+indexformatv0 = ">4l20s20s20s"
+v0shaoffset = 56
+
+class revlogoldio(object):
+    def __init__(self):
+        self.size = struct.calcsize(indexformatv0)
+
+    def parseindex(self, fp, inline):
+        s = self.size
+        index = []
+        nodemap = {nullid: nullrev}
+        n = off = 0
+        data = fp.read()
+        l = len(data)
+        while off + s <= l:
+            cur = data[off:off + s]
+            off += s
+            e = _unpack(indexformatv0, cur)
+            # transform to revlogv1 format
+            e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
+                  nodemap[e[4]], nodemap[e[5]], e[6])
+            index.append(e2)
+            nodemap[e[6]] = n
+            n += 1
+
+        return index, nodemap, None
+
+    def packentry(self, entry, node, version, rev):
+        e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
+              node(entry[5]), node(entry[6]), entry[7])
+        return _pack(indexformatv0, *e2)
+
+# index ng:
+# 6 bytes offset
+# 2 bytes flags
+# 4 bytes compressed length
+# 4 bytes uncompressed length
+# 4 bytes: base rev
+# 4 bytes link rev
+# 4 bytes parent 1 rev
+# 4 bytes parent 2 rev
+# 32 bytes: nodeid
+indexformatng = ">Qiiiiii20s12x"
+ngshaoffset = 32
+versionformat = ">I"
+
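
The layout described in the comment above packs each v1 ("ng") index entry into a fixed 64 bytes; a quick consistency check:

    import struct
    assert struct.calcsize(indexformatng) == 64     # 8 + 4*6 + 20 + 12
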
+class revlogio(object):
+    def __init__(self):
+        self.size = struct.calcsize(indexformatng)
+
+    def parseindex(self, fp, inline):
+        try:
+            size = util.fstat(fp).st_size
+        except AttributeError:
+            size = 0
+
+        if lazyparser.safe_to_use and not inline and size > 1000000:
+            # big index, let's parse it on demand
+            parser = lazyparser(fp, size)
+            index = lazyindex(parser)
+            nodemap = lazymap(parser)
+            e = list(index[0])
+            type = gettype(e[0])
+            e[0] = offset_type(0, type)
+            index[0] = e
+            return index, nodemap, None
+
+        s = self.size
+        cache = None
+        index = []
+        nodemap = {nullid: nullrev}
+        n = off = 0
+        # if we're not using lazymap, always read the whole index
+        data = fp.read()
+        l = len(data) - s
+        append = index.append
+        if inline:
+            cache = (0, data)
+            while off <= l:
+                e = _unpack(indexformatng, data[off:off + s])
+                nodemap[e[7]] = n
+                append(e)
+                n += 1
+                if e[1] < 0:
+                    break
+                off += e[1] + s
+        else:
+            while off <= l:
+                e = _unpack(indexformatng, data[off:off + s])
+                nodemap[e[7]] = n
+                append(e)
+                n += 1
+                off += s
+
+        e = list(index[0])
+        type = gettype(e[0])
+        e[0] = offset_type(0, type)
+        index[0] = e
+
+        return index, nodemap, cache
+
+    def packentry(self, entry, node, version, rev):
+        p = _pack(indexformatng, *entry)
+        if rev == 0:
+            p = _pack(versionformat, version) + p[4:]
+        return p
 
 class revlog(object):
     """
@@ -316,200 +429,101 @@ class revlog(object):
         self.indexfile = indexfile
         self.datafile = indexfile[:-2] + ".d"
         self.opener = opener
+        self._cache = None
+        self._chunkcache = None
+        self.nodemap = {nullid: nullrev}
+        self.index = []
 
-        self.indexstat = None
-        self.cache = None
-        self.chunkcache = None
-        self.defversion = REVLOG_DEFAULT_VERSION
+        v = REVLOG_DEFAULT_VERSION
         if hasattr(opener, "defversion"):
-            self.defversion = opener.defversion
-            if self.defversion & REVLOGNG:
-                self.defversion |= REVLOGNGINLINEDATA
-        self.load()
+            v = opener.defversion
+            if v & REVLOGNG:
+                v |= REVLOGNGINLINEDATA
 
-    def load(self):
-        v = self.defversion
+        i = ""
         try:
             f = self.opener(self.indexfile)
             i = f.read(4)
             f.seek(0)
+            if len(i) > 0:
+                v = struct.unpack(versionformat, i)[0]
         except IOError, inst:
             if inst.errno != errno.ENOENT:
                 raise
-            i = ""
-        else:
-            try:
-                st = util.fstat(f)
-            except AttributeError, inst:
-                st = None
-            else:
-                oldst = self.indexstat
-                if (oldst and st.st_dev == oldst.st_dev
-                    and st.st_ino == oldst.st_ino
-                    and st.st_mtime == oldst.st_mtime
-                    and st.st_ctime == oldst.st_ctime
-                    and st.st_size == oldst.st_size):
-                    return
-                self.indexstat = st
-            if len(i) > 0:
-                v = struct.unpack(versionformat, i)[0]
+
+        self.version = v
+        self._inline = v & REVLOGNGINLINEDATA
         flags = v & ~0xFFFF
         fmt = v & 0xFFFF
-        if fmt == REVLOGV0:
-            if flags:
-                raise RevlogError(_("index %s unknown flags %#04x for format v0")
-                                  % (self.indexfile, flags >> 16))
-        elif fmt == REVLOGNG:
-            if flags & ~REVLOGNGINLINEDATA:
-                raise RevlogError(_("index %s unknown flags %#04x for revlogng")
-                                  % (self.indexfile, flags >> 16))
-        else:
+        if fmt == REVLOGV0 and flags:
+            raise RevlogError(_("index %s unknown flags %#04x for format v0")
+                              % (self.indexfile, flags >> 16))
+        elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
+            raise RevlogError(_("index %s unknown flags %#04x for revlogng")
+                              % (self.indexfile, flags >> 16))
+        elif fmt > REVLOGNG:
             raise RevlogError(_("index %s unknown format %d")
                               % (self.indexfile, fmt))
-        self.version = v
-        if v == REVLOGV0:
-            self.indexformat = indexformatv0
-            shaoffset = v0shaoffset
-        else:
-            self.indexformat = indexformatng
-            shaoffset = ngshaoffset
-
-        if i:
-            if (lazyparser.safe_to_use and not self.inlinedata() and
-                st and st.st_size > 10000):
-                # big index, let's parse it on demand
-                parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
-                self.index = lazyindex(parser)
-                self.nodemap = lazymap(parser)
-            else:
-                self.parseindex(f, st)
-            if self.version != REVLOGV0:
-                e = list(self.index[0])
-                type = self.ngtype(e[0])
-                e[0] = self.offset_type(0, type)
-                self.index[0] = e
-        else:
-            self.nodemap = {nullid: nullrev}
-            self.index = []
-
 
-    def parseindex(self, fp, st):
-        s = struct.calcsize(self.indexformat)
-        self.index = []
-        self.nodemap =  {nullid: nullrev}
-        inline = self.inlinedata()
-        n = 0
-        leftover = None
-        while True:
-            if st:
-                data = fp.read(65536)
-            else:
-                # hack for httprangereader, it doesn't do partial reads well
-                data = fp.read()
-            if not data:
-                break
-            if n == 0 and self.inlinedata():
-                # cache the first chunk
-                self.chunkcache = (0, data)
-            if leftover:
-                data = leftover + data
-                leftover = None
-            off = 0
-            l = len(data)
-            while off < l:
-                if l - off < s:
-                    leftover = data[off:]
-                    break
-                cur = data[off:off + s]
-                off += s
-                e = struct.unpack(self.indexformat, cur)
-                self.index.append(e)
-                self.nodemap[e[-1]] = n
-                n += 1
-                if inline:
-                    if e[1] < 0:
-                        break
-                    off += e[1]
-                    if off > l:
-                        # some things don't seek well, just read it
-                        fp.read(off - l)
-                        break
-            if not st:
-                break
+        self._io = revlogio()
+        if self.version == REVLOGV0:
+            self._io = revlogoldio()
+        if i:
+            d = self._io.parseindex(f, self._inline)
+            self.index, self.nodemap, self._chunkcache = d
 
-
-    def ngoffset(self, q):
-        if q & 0xFFFF:
-            raise RevlogError(_('%s: incompatible revision flag %x') %
-                              (self.indexfile, q))
-        return long(q >> 16)
+        # add the magic null revision at -1
+        self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
 
-    def ngtype(self, q):
-        return int(q & 0xFFFF)
-
-    def offset_type(self, offset, type):
-        return long(long(offset) << 16 | type)
-
-    def loadindex(self, start, end):
+    def _loadindex(self, start, end):
         """load a block of indexes all at once from the lazy parser"""
         if isinstance(self.index, lazyindex):
             self.index.p.loadindex(start, end)
 
-    def loadindexmap(self):
+    def _loadindexmap(self):
         """loads both the map and the index from the lazy parser"""
         if isinstance(self.index, lazyindex):
             p = self.index.p
             p.loadindex()
             self.nodemap = p.map
 
-    def loadmap(self):
+    def _loadmap(self):
         """loads the map from the lazy parser"""
         if isinstance(self.nodemap, lazymap):
             self.nodemap.p.loadmap()
             self.nodemap = self.nodemap.p.map
 
-    def inlinedata(self): return self.version & REVLOGNGINLINEDATA
-    def tip(self): return self.node(len(self.index) - 1)
-    def count(self): return len(self.index)
-    def node(self, rev):
-        return rev == nullrev and nullid or self.index[rev][-1]
+    def tip(self):
+        return self.node(len(self.index) - 2)
+    def count(self):
+        return len(self.index) - 1
+
     def rev(self, node):
         try:
             return self.nodemap[node]
         except KeyError:
             raise LookupError(_('%s: no node %s') % (self.indexfile, hex(node)))
+    def node(self, rev):
+        return self.index[rev][7]
     def linkrev(self, node):
-        return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
+        return self.index[self.rev(node)][4]
     def parents(self, node):
-        if node == nullid: return (nullid, nullid)
-        r = self.rev(node)
-        d = self.index[r][-3:-1]
-        if self.version == REVLOGV0:
-            return d
+        d = self.index[self.rev(node)][5:7]
         return (self.node(d[0]), self.node(d[1]))
     def parentrevs(self, rev):
-        if rev == nullrev:
-            return (nullrev, nullrev)
-        d = self.index[rev][-3:-1]
-        if self.version == REVLOGV0:
-            return (self.rev(d[0]), self.rev(d[1]))
-        return d
+        return self.index[rev][5:7]
     def start(self, rev):
-        if rev == nullrev:
-            return 0
-        if self.version != REVLOGV0:
-            return self.ngoffset(self.index[rev][0])
-        return self.index[rev][0]
-
-    def end(self, rev): return self.start(rev) + self.length(rev)
+        return int(self.index[rev][0] >> 16)
+    def end(self, rev):
+        return self.start(rev) + self.length(rev)
+    def length(self, rev):
+        return self.index[rev][1]
+    def base(self, rev):
+        return self.index[rev][3]
 
     def size(self, rev):
         """return the length of the uncompressed text for a given revision"""
-        if rev == nullrev:
-            return 0
-        l = -1
-        if self.version != REVLOGV0:
-            l = self.index[rev][2]
+        l = self.index[rev][2]
         if l >= 0:
             return l
 
@@ -536,17 +550,6 @@ class revlog(object):
         return l
         """
 
-    def length(self, rev):
-        if rev == nullrev:
-            return 0
-        else:
-            return self.index[rev][1]
-    def base(self, rev):
-        if (rev == nullrev):
-            return nullrev
-        else:
-            return self.index[rev][-5]
-
     def reachable(self, node, stop=None):
         """return a hash of all nodes ancestral to a given node, including
          the node itself, stopping when stop is matched"""
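
For reference, the accessor methods above all read from the new revlogng index entries, which are plain 8-tuples with the data offset and per-revision flags packed into the first field. A minimal sketch of that layout, with a hand-built entry and descriptive comments inferred from the accessors rather than taken from the revlog source:

    # Sketch only: field meanings are inferred from start(), length(),
    # size(), base(), linkrev(), parentrevs() and node() above.
    def offset_type(offset, type):
        # low 16 bits carry per-revision flags, the rest is the data offset
        return long(offset) << 16 | type

    entry = (offset_type(4096, 0),   # [0] packed data offset / flags
             120,                    # [1] on-disk (compressed) length
             300,                    # [2] uncompressed length, -1 if unknown
             2,                      # [3] base revision of the delta chain
             7,                      # [4] link revision in the changelog
             5, -1,                  # [5], [6] parent revisions
             '\0' * 20)              # [7] nodeid

    assert int(entry[0] >> 16) == 4096   # what start() returns
    assert entry[0] & 0xFFFF == 0        # what revision() checks as flags
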
@@ -730,6 +733,17 @@ class revlog(object):
         if stop is specified, it will consider all the revs from stop
         as if they had no children
         """
+        if start is None and stop is None:
+            count = self.count()
+            if not count:
+                return [nullid]
+            ishead = [1] * (count + 1)
+            index = self.index
+            for r in xrange(count):
+                e = index[r]
+                ishead[e[5]] = ishead[e[6]] = 0
+            return [self.node(r) for r in xrange(count) if ishead[r]]
+
         if start is None:
             start = nullid
         if stop is None:
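
The new fast path above marks every revision that occurs as a parent and keeps the rest as heads, using one extra slot so the -1 (null) parents land harmlessly at the end of the flag list. The same idea on a plain parent table (hypothetical data, not the revlog API):

    # parents[r] = (p1, p2) as revision numbers, -1 meaning "no parent"
    parents = [(-1, -1), (0, -1), (1, -1), (1, -1)]

    count = len(parents)
    ishead = [1] * (count + 1)        # extra slot absorbs the -1 entries
    for r in xrange(count):
        p1, p2 = parents[r]
        ishead[p1] = ishead[p2] = 0   # anything used as a parent is no head
    print [r for r in xrange(count) if ishead[r]]   # [2, 3]
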
@@ -781,9 +795,12 @@ class revlog(object):
         try:
             # str(rev)
             rev = int(id)
-            if str(rev) != id: raise ValueError
-            if rev < 0: rev = self.count() + rev
-            if rev < 0 or rev >= self.count(): raise ValueError
+            if str(rev) != id:
+                raise ValueError
+            if rev < 0:
+                rev = self.count() + rev
+            if rev < 0 or rev >= self.count():
+                raise ValueError
             return self.node(rev)
         except (ValueError, OverflowError):
             pass
@@ -817,7 +834,6 @@ class revlog(object):
             - revision number or str(revision number)
             - nodeid or subset of hex nodeid
         """
-
         n = self._match(id)
         if n is not None:
             return n
@@ -832,56 +848,42 @@ class revlog(object):
         p1, p2 = self.parents(node)
         return hash(text, p1, p2) != node
 
-    def makenode(self, node, text):
-        """calculate a file nodeid for text, descended or possibly
-        unchanged from node"""
-
-        if self.cmp(node, text):
-            return hash(text, node, nullid)
-        return node
-
-    def diff(self, a, b):
-        """return a delta between two revisions"""
-        return mdiff.textdiff(a, b)
-
-    def patches(self, t, pl):
-        """apply a list of patches to a string"""
-        return mdiff.patches(t, pl)
-
-    def chunk(self, rev, df=None, cachelen=4096):
-        start, length = self.start(rev), self.length(rev)
-        inline = self.inlinedata()
-        if inline:
-            start += (rev + 1) * struct.calcsize(self.indexformat)
-        end = start + length
+    def chunk(self, rev, df=None):
         def loadcache(df):
-            cache_length = max(cachelen, length) # 4k
             if not df:
-                if inline:
+                if self._inline:
                     df = self.opener(self.indexfile)
                 else:
                     df = self.opener(self.datafile)
             df.seek(start)
-            self.chunkcache = (start, df.read(cache_length))
+            self._chunkcache = (start, df.read(cache_length))
 
-        if not self.chunkcache:
-            loadcache(df)
+        start, length = self.start(rev), self.length(rev)
+        if self._inline:
+            start += (rev + 1) * self._io.size
+        end = start + length
 
-        cache_start = self.chunkcache[0]
-        cache_end = cache_start + len(self.chunkcache[1])
-        if start >= cache_start and end <= cache_end:
-            # it is cached
-            offset = start - cache_start
-        else:
+        offset = 0
+        if not self._chunkcache:
+            cache_length = max(65536, length)
             loadcache(df)
-            offset = 0
+        else:
+            cache_start = self._chunkcache[0]
+            cache_length = len(self._chunkcache[1])
+            cache_end = cache_start + cache_length
+            if start >= cache_start and end <= cache_end:
+                # it is cached
+                offset = start - cache_start
+            else:
+                cache_length = max(65536, length)
+                loadcache(df)
 
-        #def checkchunk():
-        #    df = self.opener(self.datafile)
-        #    df.seek(start)
-        #    return df.read(length)
-        #assert s == checkchunk()
-        return decompress(self.chunkcache[1][offset:offset + length])
+        # avoid copying large chunks
+        c = self._chunkcache[1]
+        if cache_length != length:
+            c = c[offset:offset + length]
+
+        return decompress(c)
 
     def delta(self, node):
         """return or calculate a delta between a node and its predecessor"""
@@ -890,55 +892,56 @@ class revlog(object):
 
     def revdiff(self, rev1, rev2):
         """return or calculate a delta between two revisions"""
-        b1 = self.base(rev1)
-        b2 = self.base(rev2)
-        if b1 == b2 and rev1 + 1 == rev2:
+        if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
             return self.chunk(rev2)
-        else:
-            return self.diff(self.revision(self.node(rev1)),
-                             self.revision(self.node(rev2)))
+
+        return mdiff.textdiff(self.revision(self.node(rev1)),
+                              self.revision(self.node(rev2)))
 
     def revision(self, node):
         """return an uncompressed revision of a given"""
-        if node == nullid: return ""
-        if self.cache and self.cache[0] == node: return self.cache[2]
+        if node == nullid:
+            return ""
+        if self._cache and self._cache[0] == node:
+            return self._cache[2]
 
         # look up what we need to read
         text = None
         rev = self.rev(node)
         base = self.base(rev)
 
-        if self.inlinedata():
+        # check rev flags
+        if self.index[rev][0] & 0xFFFF:
+            raise RevlogError(_('incompatible revision flag %x') %
+                              (self.index[rev][0] & 0xFFFF))
+
+        if self._inline:
             # we probably have the whole chunk cached
             df = None
         else:
             df = self.opener(self.datafile)
 
         # do we have useful data cached?
-        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
-            base = self.cache[1]
-            text = self.cache[2]
-            self.loadindex(base, rev + 1)
+        if self._cache and self._cache[1] >= base and self._cache[1] < rev:
+            base = self._cache[1]
+            text = self._cache[2]
+            self._loadindex(base, rev + 1)
         else:
-            self.loadindex(base, rev + 1)
+            self._loadindex(base, rev + 1)
             text = self.chunk(base, df=df)
 
-        bins = []
-        for r in xrange(base + 1, rev + 1):
-            bins.append(self.chunk(r, df=df))
-
-        text = self.patches(text, bins)
-
+        bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
+        text = mdiff.patches(text, bins)
         p1, p2 = self.parents(node)
         if node != hash(text, p1, p2):
             raise RevlogError(_("integrity check failed on %s:%d")
                               % (self.datafile, rev))
 
-        self.cache = (node, rev, text)
+        self._cache = (node, rev, text)
         return text
 
     def checkinlinesize(self, tr, fp=None):
-        if not self.inlinedata():
+        if not self._inline:
             return
         if not fp:
             fp = self.opener(self.indexfile, 'r')
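
revision() above rebuilds a text by taking the full chunk at the chain base, applying each later delta in order, and finally checking the result against hash(text, p1, p2). A standalone sketch of that flow, with a trivial stand-in for mdiff.patches and a plain dict in place of decompressed chunks:

    def rebuild(chunks, base, rev, apply_patch):
        text = chunks[base]                      # full text at the chain base
        for r in xrange(base + 1, rev + 1):
            text = apply_patch(text, chunks[r])  # apply one delta per revision
        return text

    # toy "delta" format for the sketch: each delta is simply the new text
    chunks = {0: 'v0', 1: 'v1', 2: 'v2'}
    assert rebuild(chunks, 0, 2, lambda old, new: new) == 'v2'
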
@@ -956,7 +959,7 @@ class revlog(object):
 
         tr.add(self.datafile, dataoff)
         df = self.opener(self.datafile, 'w')
-        calc = struct.calcsize(self.indexformat)
+        calc = self._io.size
         for r in xrange(self.count()):
             start = self.start(r) + (r + 1) * calc
             length = self.length(r)
@@ -967,16 +970,9 @@ class revlog(object):
         df.close()
         fp = self.opener(self.indexfile, 'w', atomictemp=True)
         self.version &= ~(REVLOGNGINLINEDATA)
-        if self.count():
-            x = self.index[0]
-            e = struct.pack(self.indexformat, *x)[4:]
-            l = struct.pack(versionformat, self.version)
-            fp.write(l)
-            fp.write(e)
-
-        for i in xrange(1, self.count()):
-            x = self.index[i]
-            e = struct.pack(self.indexformat, *x)
+        self._inline = False
+        for i in xrange(self.count()):
+            e = self._io.packentry(self.index[i], self.node, self.version, i)
             fp.write(e)
 
         # if we don't call rename, the temp file will never replace the
@@ -984,9 +980,9 @@ class revlog(object):
         fp.rename()
 
         tr.replace(self.indexfile, trindex * calc)
-        self.chunkcache = None
+        self._chunkcache = None
 
-    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
+    def addrevision(self, text, transaction, link, p1, p2, d=None):
         """add a revision to the log
 
         text - the revision data to add
@@ -995,84 +991,60 @@ class revlog(object):
         p1, p2 - the parent nodeids of the revision
         d - an optional precomputed delta
         """
-        if not self.inlinedata():
+        dfh = None
+        if not self._inline:
             dfh = self.opener(self.datafile, "a")
-        else:
-            dfh = None
         ifh = self.opener(self.indexfile, "a+")
         return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
 
     def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
-        if text is None: text = ""
-        if p1 is None: p1 = self.tip()
-        if p2 is None: p2 = nullid
-
         node = hash(text, p1, p2)
-
         if node in self.nodemap:
             return node
 
-        n = self.count()
-        t = n - 1
+        curr = self.count()
+        prev = curr - 1
+        base = self.base(prev)
+        offset = self.end(prev)
 
-        if n:
-            base = self.base(t)
-            start = self.start(base)
-            end = self.end(t)
+        if curr:
             if not d:
-                prev = self.revision(self.tip())
-                d = self.diff(prev, text)
+                ptext = self.revision(self.node(prev))
+                d = mdiff.textdiff(ptext, text)
             data = compress(d)
             l = len(data[1]) + len(data[0])
-            dist = end - start + l
+            dist = l + offset - self.start(base)
 
         # full versions are inserted when the needed deltas
         # become comparable to the uncompressed text
-        if not n or dist > len(text) * 2:
+        if not curr or dist > len(text) * 2:
             data = compress(text)
             l = len(data[1]) + len(data[0])
-            base = n
-        else:
-            base = self.base(t)
-
-        offset = 0
-        if t >= 0:
-            offset = self.end(t)
+            base = curr
 
-        if self.version == REVLOGV0:
-            e = (offset, l, base, link, p1, p2, node)
-        else:
-            e = (self.offset_type(offset, 0), l, len(text),
-                 base, link, self.rev(p1), self.rev(p2), node)
+        e = (offset_type(offset, 0), l, len(text),
+             base, link, self.rev(p1), self.rev(p2), node)
+        self.index.insert(-1, e)
+        self.nodemap[node] = curr
 
-        self.index.append(e)
-        self.nodemap[node] = n
-        entry = struct.pack(self.indexformat, *e)
-
-        if not self.inlinedata():
+        entry = self._io.packentry(e, self.node, self.version, curr)
+        if not self._inline:
             transaction.add(self.datafile, offset)
-            transaction.add(self.indexfile, n * len(entry))
+            transaction.add(self.indexfile, curr * len(entry))
             if data[0]:
                 dfh.write(data[0])
             dfh.write(data[1])
             dfh.flush()
+            ifh.write(entry)
         else:
-            ifh.seek(0, 2)
-            transaction.add(self.indexfile, ifh.tell(), self.count() - 1)
-
-        if len(self.index) == 1 and self.version != REVLOGV0:
-            l = struct.pack(versionformat, self.version)
-            ifh.write(l)
-            entry = entry[4:]
-
-        ifh.write(entry)
-
-        if self.inlinedata():
+            offset += curr * self._io.size
+            transaction.add(self.indexfile, offset, curr)
+            ifh.write(entry)
             ifh.write(data[0])
             ifh.write(data[1])
             self.checkinlinesize(transaction, ifh)
 
-        self.cache = (node, n, text)
+        self._cache = (node, curr, text)
         return node
 
     def ancestor(self, a, b):
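
The snapshot heuristic above writes a new full compressed text whenever the bytes from the current chain base through the new delta would exceed roughly twice the uncompressed size, and otherwise appends the delta. The decision, reduced to a tiny sketch with made-up sizes:

    # dist: bytes from the chain base up to and including the new delta
    def store_full_text(curr, dist, textlen):
        # first revision, or a delta chain grown past 2x the plain text
        return not curr or dist > textlen * 2

    assert store_full_text(0, 0, 100)        # empty revlog: store full text
    assert not store_full_text(5, 150, 100)  # short chain: keep the delta
    assert store_full_text(5, 250, 100)      # long chain: start a new base
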
@@ -1142,11 +1114,12 @@ class revlog(object):
             end = self.end(t)
 
         ifh = self.opener(self.indexfile, "a+")
-        ifh.seek(0, 2)
-        transaction.add(self.indexfile, ifh.tell(), self.count())
-        if self.inlinedata():
+        isize = r * self._io.size
+        if self._inline:
+            transaction.add(self.indexfile, end + isize, r)
             dfh = None
         else:
+            transaction.add(self.indexfile, isize, r)
             transaction.add(self.datafile, end)
             dfh = self.opener(self.datafile, "a")
 
@@ -1190,10 +1163,10 @@ class revlog(object):
                     dfh.flush()
                 ifh.flush()
                 text = self.revision(chain)
-                text = self.patches(text, [delta])
+                text = mdiff.patches(text, [delta])
                 chk = self._addrevision(text, transaction, link, p1, p2, None,
                                         ifh, dfh)
-                if not dfh and not self.inlinedata():
+                if not dfh and not self._inline:
                     # addrevision switched from inline to conventional
                     # reopen the index
                     dfh = self.opener(self.datafile, "a")
@@ -1202,23 +1175,21 @@ class revlog(object):
                     raise RevlogError(_("consistency error adding group"))
                 textlen = len(text)
             else:
-                if self.version == REVLOGV0:
-                    e = (end, len(cdelta), base, link, p1, p2, node)
-                else:
-                    e = (self.offset_type(end, 0), len(cdelta), textlen, base,
-                         link, self.rev(p1), self.rev(p2), node)
-                self.index.append(e)
+                e = (offset_type(end, 0), len(cdelta), textlen, base,
+                     link, self.rev(p1), self.rev(p2), node)
+                self.index.insert(-1, e)
                 self.nodemap[node] = r
-                if self.inlinedata():
-                    ifh.write(struct.pack(self.indexformat, *e))
+                entry = self._io.packentry(e, self.node, self.version, r)
+                if self._inline:
+                    ifh.write(entry)
                     ifh.write(cdelta)
                     self.checkinlinesize(transaction, ifh)
-                    if not self.inlinedata():
+                    if not self._inline:
                         dfh = self.opener(self.datafile, "a")
                         ifh = self.opener(self.indexfile, "a")
                 else:
                     dfh.write(cdelta)
-                    ifh.write(struct.pack(self.indexformat, *e))
+                    ifh.write(entry)
 
             t, r, chain, prev = r, r + 1, node, node
             base = self.base(t)
@@ -1232,41 +1203,41 @@ class revlog(object):
             return
 
         if isinstance(self.index, lazyindex):
-            self.loadindexmap()
+            self._loadindexmap()
 
         # When stripping away a revision, we need to make sure it
         # does not actually belong to an older changeset.
         # The minlink parameter defines the oldest revision
         # we're allowed to strip away.
-        while minlink > self.index[rev][-4]:
+        while minlink > self.index[rev][4]:
             rev += 1
             if rev >= self.count():
                 return
 
         # first truncate the files on disk
         end = self.start(rev)
-        if not self.inlinedata():
+        if not self._inline:
             df = self.opener(self.datafile, "a")
             df.truncate(end)
-            end = rev * struct.calcsize(self.indexformat)
+            end = rev * self._io.size
         else:
-            end += rev * struct.calcsize(self.indexformat)
+            end += rev * self._io.size
 
         indexf = self.opener(self.indexfile, "a")
         indexf.truncate(end)
 
         # then reset internal state in memory to forget those revisions
-        self.cache = None
-        self.chunkcache = None
+        self._cache = None
+        self._chunkcache = None
         for x in xrange(rev, self.count()):
             del self.nodemap[self.node(x)]
 
-        del self.index[rev:]
+        del self.index[rev:-1]
 
     def checksize(self):
         expected = 0
         if self.count():
-            expected = self.end(self.count() - 1)
+            expected = max(0, self.end(self.count() - 1))
 
         try:
             f = self.opener(self.datafile)
@@ -1282,13 +1253,13 @@ class revlog(object):
             f = self.opener(self.indexfile)
             f.seek(0, 2)
             actual = f.tell()
-            s = struct.calcsize(self.indexformat)
-            i = actual / s
+            s = self._io.size
+            i = max(0, actual / s)
             di = actual - (i * s)
-            if self.inlinedata():
+            if self._inline:
                 databytes = 0
                 for r in xrange(self.count()):
-                    databytes += self.length(r)
+                    databytes += max(0, self.length(r))
                 dd = 0
                 di = actual - self.count() * s - databytes
         except IOError, inst:
@@ -1297,5 +1268,3 @@ class revlog(object):
             di = 0
 
         return (dd, di)
-
-
--- a/mercurial/sshrepo.py
+++ b/mercurial/sshrepo.py
@@ -8,7 +8,7 @@
 from node import *
 from remoterepo import *
 from i18n import _
-import hg, os, re, stat, util
+import repo, os, re, stat, util
 
 class sshrepository(remoterepository):
     def __init__(self, ui, path, create=0):
@@ -17,7 +17,7 @@ class sshrepository(remoterepository):
 
         m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
         if not m:
-            self.raise_(hg.RepoError(_("couldn't parse location %s") % path))
+            self.raise_(repo.RepoError(_("couldn't parse location %s") % path))
 
         self.user = m.group(2)
         self.host = m.group(3)
@@ -37,7 +37,7 @@ class sshrepository(remoterepository):
             ui.note('running %s\n' % cmd)
             res = util.system(cmd)
             if res != 0:
-                self.raise_(hg.RepoError(_("could not create remote repo")))
+                self.raise_(repo.RepoError(_("could not create remote repo")))
 
         self.validate_repo(ui, sshcmd, args, remotecmd)
 
@@ -70,13 +70,13 @@ class sshrepository(remoterepository):
             lines.append(l)
             max_noise -= 1
         else:
-            self.raise_(hg.RepoError(_("no suitable response from remote hg")))
+            self.raise_(repo.RepoError(_("no suitable response from remote hg")))
 
-        self.capabilities = ()
+        self.capabilities = util.set()
         lines.reverse()
         for l in lines:
             if l.startswith("capabilities:"):
-                self.capabilities = l[:-1].split(":")[1].split()
+                self.capabilities.update(l[:-1].split(":")[1].split())
                 break
 
     def readerr(self):
@@ -132,12 +132,13 @@ class sshrepository(remoterepository):
         self.call("unlock")
 
     def lookup(self, key):
+        self.requirecap('lookup', _('look up remote revision'))
         d = self.call("lookup", key=key)
         success, data = d[:-1].split(" ", 1)
         if int(success):
             return bin(data)
         else:
-            self.raise_(hg.RepoError(data))
+            self.raise_(repo.RepoError(data))
 
     def heads(self):
         d = self.call("heads")
@@ -169,6 +170,7 @@ class sshrepository(remoterepository):
         return self.do_cmd("changegroup", roots=n)
 
     def changegroupsubset(self, bases, heads, kind):
+        self.requirecap('changegroupsubset', _('look up remote changes'))
         bases = " ".join(map(hex, bases))
         heads = " ".join(map(hex, heads))
         return self.do_cmd("changegroupsubset", bases=bases, heads=heads)
@@ -177,7 +179,7 @@ class sshrepository(remoterepository):
         d = self.call("unbundle", heads=' '.join(map(hex, heads)))
         if d:
             # remote may send "unsynced changes"
-            self.raise_(hg.RepoError(_("push refused: %s") % d))
+            self.raise_(repo.RepoError(_("push refused: %s") % d))
 
         while 1:
             d = cg.read(4096)
@@ -204,7 +206,7 @@ class sshrepository(remoterepository):
     def addchangegroup(self, cg, source, url):
         d = self.call("addchangegroup")
         if d:
-            self.raise_(hg.RepoError(_("push refused: %s") % d))
+            self.raise_(repo.RepoError(_("push refused: %s") % d))
         while 1:
             d = cg.read(4096)
             if not d: break
--- a/mercurial/statichttprepo.py
+++ b/mercurial/statichttprepo.py
@@ -75,10 +75,4 @@ class statichttprepository(localrepo.loc
 def instance(ui, path, create):
     if create:
         raise util.Abort(_('cannot create new static-http repository'))
-    if path.startswith('old-http:'):
-        ui.warn(_("old-http:// syntax is deprecated, "
-                  "please use static-http:// instead\n"))
-        path = path[4:]
-    else:
-        path = path[7:]
-    return statichttprepository(ui, path)
+    return statichttprepository(ui, path[7:])
--- a/mercurial/streamclone.py
+++ b/mercurial/streamclone.py
@@ -66,22 +66,25 @@ def stream_out(repo, fileobj, untrusted=
 
     # get consistent snapshot of repo. lock during scan so lock not
     # needed while we stream, and commits can happen.
+    repolock = None
     try:
-        repolock = repo.lock()
-    except (lock.LockHeld, lock.LockUnavailable), inst:
-        repo.ui.warn('locking the repository failed: %s\n' % (inst,))
-        fileobj.write('2\n')
-        return
+        try:
+            repolock = repo.lock()
+        except (lock.LockHeld, lock.LockUnavailable), inst:
+            repo.ui.warn('locking the repository failed: %s\n' % (inst,))
+            fileobj.write('2\n')
+            return
 
-    fileobj.write('0\n')
-    repo.ui.debug('scanning\n')
-    entries = []
-    total_bytes = 0
-    for name, size in walkrepo(repo.spath):
-        name = repo.decodefn(util.pconvert(name))
-        entries.append((name, size))
-        total_bytes += size
-    repolock.release()
+        fileobj.write('0\n')
+        repo.ui.debug('scanning\n')
+        entries = []
+        total_bytes = 0
+        for name, size in walkrepo(repo.spath):
+            name = repo.decodefn(util.pconvert(name))
+            entries.append((name, size))
+            total_bytes += size
+    finally:
+        del repolock
 
     repo.ui.debug('%d files, %d bytes to transfer\n' %
                   (len(entries), total_bytes))
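
The doubled try above is deliberate: a combined try/except/finally statement only became valid syntax in Python 2.5, and this code still has to run on older interpreters (setup.py only requires 2.3), so the except clause is nested inside the finally. The shape of the pattern, reduced to a self-contained toy:

    def acquire():
        raise EnvironmentError('lock held elsewhere')   # simulate failure

    resource = None        # so the finally clause works on the error path
    try:
        try:
            resource = acquire()
        except EnvironmentError, inst:
            print 'acquire failed: %s' % inst
    finally:
        del resource       # always drop the reference, as with repolock
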
--- a/mercurial/templater.py
+++ b/mercurial/templater.py
@@ -270,6 +270,7 @@ common_filters = {
     "permissions": permissions,
     "person": person,
     "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
+    "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S", True, "%+03d:%02d"),
     "short": lambda x: x[:12],
     "shortdate": shortdate,
     "stringify": stringify,
--- a/mercurial/ui.py
+++ b/mercurial/ui.py
@@ -24,6 +24,8 @@ def updateconfig(source, dest, sections=
             dest.set(section, name, value)
 
 class ui(object):
+    _isatty = None
+
     def __init__(self, verbose=False, debug=False, quiet=False,
                  interactive=True, traceback=False, report_untrusted=True,
                  parentui=None):
@@ -62,6 +64,11 @@ class ui(object):
     def __getattr__(self, key):
         return getattr(self.parentui, key)
 
+    def isatty(self):
+        if ui._isatty is None:
+            ui._isatty = sys.stdin.isatty()
+        return ui._isatty
+
     def updateopts(self, verbose=False, debug=False, quiet=False,
                    interactive=True, traceback=False, config=[]):
         for section, name, value in config:
@@ -204,7 +211,11 @@ class ui(object):
             if name is None or name in ('quiet', 'verbose', 'debug'):
                 self.verbosity_constraints()
             if name is None or name == 'interactive':
-                self.interactive = self.configbool("ui", "interactive", True)
+                interactive = self.configbool("ui", "interactive", None)
+                if interactive is None and self.interactive:
+                    self.interactive = self.isatty()
+                else:
+                    self.interactive = interactive
             if name is None or name == 'report_untrusted':
                 self.report_untrusted = (
                     self.configbool("ui", "report_untrusted", True))
@@ -382,17 +393,29 @@ class ui(object):
         try: sys.stderr.flush()
         except: pass
 
-    def readline(self):
-        return sys.stdin.readline()[:-1]
-    def prompt(self, msg, pat=None, default="y"):
+    def _readline(self, prompt=''):
+        if self.isatty():
+            try:
+                # magically add command line editing support, where
+                # available
+                import readline
+                # force demandimport to really load the module
+                readline.read_history_file
+            except ImportError:
+                pass
+        return raw_input(prompt)
+
+    def prompt(self, msg, pat=None, default="y", matchflags=0):
         if not self.interactive: return default
-        while 1:
-            self.write(msg, " ")
-            r = self.readline()
-            if not pat or re.match(pat, r):
+        try:
+            r = self._readline(msg + ' ')
+            if not pat or re.match(pat, r, matchflags):
                 return r
             else:
                 self.write(_("unrecognized response\n"))
+        except EOFError:
+            raise util.Abort(_('response expected'))
+
     def getpass(self, prompt=None, default=None):
         if not self.interactive: return default
         return getpass.getpass(prompt or _('password: '))
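
The rewritten prompt() reads a single line (with readline editing when stdin is a terminal), returns the default when not interactive, and aborts on EOF; the new matchflags argument is handed straight to re.match. A standalone sketch of that flow with a fake readline, not the ui class:

    import re

    def prompt(readline, msg, pat=None, default='y', matchflags=0,
               interactive=True):
        if not interactive:
            return default
        try:
            r = readline(msg + ' ')
            if not pat or re.match(pat, r, matchflags):
                return r
            # a non-matching answer now falls through instead of looping
        except EOFError:
            raise RuntimeError('response expected')

    assert prompt(lambda m: 'Y', 'apply?', pat=r'[yn]',
                  matchflags=re.IGNORECASE) == 'Y'
    assert prompt(None, 'apply?', interactive=False) == 'y'
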
--- a/mercurial/util.py
+++ b/mercurial/util.py
@@ -13,8 +13,8 @@ platform-specific details from the core.
 """
 
 from i18n import _
-import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
-import os, threading, time, calendar, ConfigParser, locale, glob
+import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile, strutil
+import os, stat, threading, time, calendar, ConfigParser, locale, glob
 
 try:
     set = set
@@ -63,7 +63,7 @@ def fromlocal(s):
     Convert a string from the local character encoding to UTF-8
 
     We attempt to decode strings using the encoding mode set by
-    HG_ENCODINGMODE, which defaults to 'strict'. In this mode, unknown
+    HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
     characters will cause an error message. Other modes include
     'replace', which replaces unknown characters with a special
     Unicode character, and 'ignore', which drops the character.
@@ -366,6 +366,7 @@ def canonpath(root, cwd, myname):
     if not os.path.isabs(name):
         name = os.path.join(root, cwd, name)
     name = os.path.normpath(name)
+    audit_path = path_auditor(root)
     if name != rootsep and name.startswith(rootsep):
         name = name[len(rootsep):]
         audit_path(name)
@@ -628,7 +629,7 @@ def rename(src, dst):
     """forcibly rename a file"""
     try:
         os.rename(src, dst)
-    except OSError, err:
+    except OSError, err: # FIXME: check err (EEXIST ?)
         # on windows, rename to existing file is not allowed, so we
         # must delete destination first. but if file is open, unlink
         # schedules it for delete but does not delete it. rename
@@ -689,12 +690,59 @@ def copyfiles(src, dst, hardlink=None):
         else:
             shutil.copy(src, dst)
 
-def audit_path(path):
-    """Abort if path contains dangerous components"""
-    parts = os.path.normcase(path).split(os.sep)
-    if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
-        or os.pardir in parts):
-        raise Abort(_("path contains illegal component: %s") % path)
+class path_auditor(object):
+    '''ensure that a filesystem path contains no banned components.
+    the following properties of a path are checked:
+
+    - under top-level .hg
+    - starts at the root of a windows drive
+    - contains ".."
+    - traverses a symlink (e.g. a/symlink_here/b)
+    - inside a nested repository'''
+
+    def __init__(self, root):
+        self.audited = set()
+        self.auditeddir = set()
+        self.root = root
+
+    def __call__(self, path):
+        if path in self.audited:
+            return
+        normpath = os.path.normcase(path)
+        parts = normpath.split(os.sep)
+        if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
+            or os.pardir in parts):
+            raise Abort(_("path contains illegal component: %s") % path)
+        def check(prefix):
+            curpath = os.path.join(self.root, prefix)
+            try:
+                st = os.lstat(curpath)
+            except OSError, err:
+                # EINVAL can be raised for invalid path syntax under win32.
+                # Both errors are ignored so that patterns can still be checked.
+                if err.errno not in (errno.ENOENT, errno.EINVAL):
+                    raise
+            else:
+                if stat.S_ISLNK(st.st_mode):
+                    raise Abort(_('path %r traverses symbolic link %r') %
+                                (path, prefix))
+                elif (stat.S_ISDIR(st.st_mode) and
+                      os.path.isdir(os.path.join(curpath, '.hg'))):
+                    raise Abort(_('path %r is inside repo %r') %
+                                (path, prefix))
+
+        prefixes = []
+        for c in strutil.rfindall(normpath, os.sep):
+            prefix = normpath[:c]
+            if prefix in self.auditeddir:
+                break
+            check(prefix)
+            prefixes.append(prefix)
+
+        self.audited.add(path)
+        # only add prefixes to the cache after checking everything: we don't
+        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
+        self.auditeddir.update(prefixes)
 
 def _makelock_file(info, pathname):
     ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
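
A rough illustration of the cheap, purely lexical half of the path_auditor check above; the real class additionally lstat()s each path prefix to refuse symlinks and nested repositories, and caches what it has already approved. The paths below are made up:

    import os

    def lexical_audit(path):
        # the same component test path_auditor applies before any stat calls
        parts = os.path.normcase(path).split(os.sep)
        if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
            or os.pardir in parts):
            raise ValueError('path contains illegal component: %s' % path)

    lexical_audit('src/module.py')          # accepted
    for bad in ('.hg/hgrc', '../escape', '/absolute'):
        try:
            lexical_audit(bad)
        except ValueError, inst:
            print inst
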
@@ -1284,7 +1332,10 @@ class opener(object):
     """
     def __init__(self, base, audit=True):
         self.base = base
-        self.audit = audit
+        if audit:
+            self.audit_path = path_auditor(base)
+        else:
+            self.audit_path = always
 
     def __getattr__(self, name):
         if name == '_can_symlink':
@@ -1293,8 +1344,7 @@ class opener(object):
         raise AttributeError(name)
 
     def __call__(self, path, mode="r", text=False, atomictemp=False):
-        if self.audit:
-            audit_path(path)
+        self.audit_path(path)
         f = os.path.join(self.base, path)
 
         if not text and "b" not in mode:
@@ -1315,8 +1365,7 @@ class opener(object):
         return posixfile(f, mode)
 
     def symlink(self, src, dst):
-        if self.audit:
-            audit_path(dst)
+        self.audit_path(dst)
         linkname = os.path.join(self.base, dst)
         try:
             os.unlink(linkname)
@@ -1328,7 +1377,11 @@ class opener(object):
             os.makedirs(dirname)
 
         if self._can_symlink:
-            os.symlink(src, linkname)
+            try:
+                os.symlink(src, linkname)
+            except OSError, err:
+                raise OSError(err.errno, _('could not symlink to %r: %s') %
+                              (src, err.strerror), linkname)
         else:
             f = self(dst, "w")
             f.write(src)
@@ -1404,7 +1457,7 @@ def makedate():
         tz = time.timezone
     return time.mktime(lt), tz
 
-def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
+def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True, timezone_format=" %+03d%02d"):
     """represent a (unixtime, offset) tuple as a localized time.
     unixtime is seconds since the epoch, and offset is the time zone's
     number of seconds away from UTC. if timezone is false, do not
@@ -1412,7 +1465,7 @@ def datestr(date=None, format='%a %b %d 
     t, tz = date or makedate()
     s = time.strftime(format, time.gmtime(float(t) - tz))
     if timezone:
-        s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
+        s += timezone_format % (-tz / 3600, ((-tz % 3600) / 60))
     return s
 
 def strdate(string, format, defaults):
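
The new timezone_format argument exists so the Atom templates can request RFC 3339 style offsets ("+02:00" via the rfc3339date filter) while every existing caller keeps the historical " +0200" form. The formatting arithmetic on a fixed offset, shown standalone rather than through util.datestr:

    # tz follows time.timezone: seconds west of UTC, so UTC+02:00 is -7200
    tz = -7200

    legacy  = " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
    rfc3339 = "%+03d:%02d" % (-tz / 3600, ((-tz % 3600) / 60))

    print legacy     # ' +0200'  (default, unchanged output)
    print rfc3339    # '+02:00'  (what the atom templates ask for)
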
--- a/mercurial/verify.py
+++ b/mercurial/verify.py
@@ -7,20 +7,36 @@
 
 from node import *
 from i18n import _
-import revlog, mdiff
+import revlog
 
 def verify(repo):
+    lock = repo.lock()
+    try:
+        return _verify(repo)
+    finally:
+        del lock
+
+def _verify(repo):
     filelinkrevs = {}
     filenodes = {}
     changesets = revisions = files = 0
+    firstbad = [None]
     errors = [0]
     warnings = [0]
     neededmanifests = {}
 
-    lock = repo.lock()
-
-    def err(msg):
-        repo.ui.warn(msg + "\n")
+    def err(linkrev, msg, filename=None):
+        if linkrev is not None:
+            if firstbad[0] is not None:
+                firstbad[0] = min(firstbad[0], linkrev)
+            else:
+                firstbad[0] = linkrev
+        else:
+            linkrev = "?"
+        msg = "%s: %s" % (linkrev, msg)
+        if filename:
+            msg = "%s@%s" % (filename, msg)
+        repo.ui.warn(" " + msg + "\n")
         errors[0] += 1
 
     def warn(msg):
@@ -30,9 +46,9 @@ def verify(repo):
     def checksize(obj, name):
         d = obj.checksize()
         if d[0]:
-            err(_("%s data length off by %d bytes") % (name, d[0]))
+            err(None, _("data length off by %d bytes") % d[0], name)
         if d[1]:
-            err(_("%s index contains %d extra bytes") % (name, d[1]))
+            err(None, _("index contains %d extra bytes") % d[1], name)
 
     def checkversion(obj, name):
         if obj.version != revlog.REVLOGV0:
@@ -55,25 +71,25 @@ def verify(repo):
         n = repo.changelog.node(i)
         l = repo.changelog.linkrev(n)
         if l != i:
-            err(_("incorrect link (%d) for changeset revision %d") %(l, i))
+            err(i, _("incorrect link (%d) for changeset") %(l))
         if n in seen:
-            err(_("duplicate changeset at revision %d") % i)
-        seen[n] = 1
+            err(i, _("duplicates changeset at revision %d") % seen[n])
+        seen[n] = i
 
         for p in repo.changelog.parents(n):
             if p not in repo.changelog.nodemap:
-                err(_("changeset %s has unknown parent %s") %
-                             (short(n), short(p)))
+                err(i, _("changeset has unknown parent %s") % short(p))
         try:
             changes = repo.changelog.read(n)
         except KeyboardInterrupt:
             repo.ui.warn(_("interrupted"))
             raise
         except Exception, inst:
-            err(_("unpacking changeset %s: %s") % (short(n), inst))
+            err(i, _("unpacking changeset: %s") % inst)
             continue
 
-        neededmanifests[changes[0]] = n
+        if changes[0] not in neededmanifests:
+            neededmanifests[changes[0]] = i
 
         for f in changes[3]:
             filelinkrevs.setdefault(f, []).append(i)
@@ -88,45 +104,50 @@ def verify(repo):
         l = repo.manifest.linkrev(n)
 
         if l < 0 or l >= repo.changelog.count():
-            err(_("bad manifest link (%d) at revision %d") % (l, i))
+            err(None, _("bad link (%d) at manifest revision %d") % (l, i))
 
         if n in neededmanifests:
             del neededmanifests[n]
 
         if n in seen:
-            err(_("duplicate manifest at revision %d") % i)
+            err(l, _("duplicates manifest from %d") % seen[n])
 
-        seen[n] = 1
+        seen[n] = l
 
         for p in repo.manifest.parents(n):
             if p not in repo.manifest.nodemap:
-                err(_("manifest %s has unknown parent %s") %
-                    (short(n), short(p)))
+                err(l, _("manifest has unknown parent %s") % short(p))
 
         try:
             for f, fn in repo.manifest.readdelta(n).iteritems():
-                filenodes.setdefault(f, {})[fn] = 1
+                fns = filenodes.setdefault(f, {})
+                if fn not in fns:
+                    fns[fn] = n
         except KeyboardInterrupt:
             repo.ui.warn(_("interrupted"))
             raise
         except Exception, inst:
-            err(_("reading delta for manifest %s: %s") % (short(n), inst))
+            err(l, _("reading manifest delta: %s") % inst)
             continue
 
     repo.ui.status(_("crosschecking files in changesets and manifests\n"))
 
-    for m, c in neededmanifests.items():
-        err(_("Changeset %s refers to unknown manifest %s") %
-            (short(m), short(c)))
-    del neededmanifests
+    nm = neededmanifests.items()
+    nm.sort()
+    for m, c in nm:
+        err(m, _("changeset refers to unknown manifest %s") % short(c))
+    del neededmanifests, nm
 
     for f in filenodes:
         if f not in filelinkrevs:
-            err(_("file %s in manifest but not in changesets") % f)
+            lrs = [repo.manifest.linkrev(n) for n in filenodes[f]]
+            lrs.sort()
+            err(lrs[0], _("in manifest but not in changeset"), f)
 
     for f in filelinkrevs:
         if f not in filenodes:
-            err(_("file %s in changeset but not in manifest") % f)
+            lr = filelinkrevs[f][0]
+            err(lr, _("in changeset but not in manifest"), f)
 
     repo.ui.status(_("checking files\n"))
     ff = filenodes.keys()
@@ -136,32 +157,40 @@ def verify(repo):
             continue
         files += 1
         if not f:
-            err(_("file without name in manifest %s") % short(n))
+            lr = repo.manifest.linkrev(filenodes[f][0])
+            err(lr, _("file without name in manifest %s") % short(ff[n]))
             continue
         fl = repo.file(f)
         checkversion(fl, f)
         checksize(fl, f)
 
+        seen = {}
         nodes = {nullid: 1}
-        seen = {}
         for i in xrange(fl.count()):
             revisions += 1
             n = fl.node(i)
+            flr = fl.linkrev(n)
+
+            if flr not in filelinkrevs.get(f, []):
+                if flr < 0 or flr >= repo.changelog.count():
+                    err(None, _("rev %d point to nonexistent changeset %d")
+                        % (i, flr), f)
+                else:
+                    err(None, _("rev %d points to unexpected changeset %d")
+                        % (i, flr), f)
+                if f in filelinkrevs:
+                    warn(_(" (expected %s)") % filelinkrevs[f][0])
+                flr = None # can't be trusted
+            else:
+                filelinkrevs[f].remove(flr)
 
             if n in seen:
-                err(_("%s: duplicate revision %d") % (f, i))
+                err(flr, _("duplicate revision %d") % i, f)
             if n not in filenodes[f]:
-                err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
+                err(flr, _("%s not in manifests") % (short(n)), f)
             else:
                 del filenodes[f][n]
 
-            flr = fl.linkrev(n)
-            if flr not in filelinkrevs.get(f, []):
-                err(_("%s:%s points to unexpected changeset %d")
-                        % (f, short(n), flr))
-            else:
-                filelinkrevs[f].remove(flr)
-
             # verify contents
             try:
                 t = fl.read(n)
@@ -169,16 +198,22 @@ def verify(repo):
                 repo.ui.warn(_("interrupted"))
                 raise
             except Exception, inst:
-                err(_("unpacking file %s %s: %s") % (f, short(n), inst))
+                err(flr, _("unpacking %s: %s") % (short(n), inst), f)
 
             # verify parents
-            (p1, p2) = fl.parents(n)
-            if p1 not in nodes:
-                err(_("file %s:%s unknown parent 1 %s") %
-                    (f, short(n), short(p1)))
-            if p2 not in nodes:
-                err(_("file %s:%s unknown parent 2 %s") %
-                        (f, short(n), short(p1)))
+            try:
+                (p1, p2) = fl.parents(n)
+                if p1 not in nodes:
+                    err(flr, _("unknown parent 1 %s of %s") %
+                        (short(p1), short(n)), f)
+                if p2 not in nodes:
+                    err(flr, _("unknown parent 2 %s of %s") %
+                            (short(p2), short(p1)), f)
+            except KeyboardInterrupt:
+                repo.ui.warn(_("interrupted"))
+                raise
+            except Exception, inst:
+                err(flr, _("checking parents of %s: %s") % (short(n), inst), f)
             nodes[n] = 1
 
             # check renames
@@ -191,11 +226,15 @@ def verify(repo):
                 repo.ui.warn(_("interrupted"))
                 raise
             except Exception, inst:
-                err(_("checking rename on file %s %s: %s") % (f, short(n), inst))
+                err(flr, _("checking rename of %s: %s") %
+                    (short(n), inst), f)
 
         # cross-check
-        for node in filenodes[f]:
-            err(_("node %s in manifests not in %s") % (hex(node), f))
+        fns = [(repo.manifest.linkrev(filenodes[f][n]), n)
+               for n in filenodes[f]]
+        fns.sort()
+        for lr, node in fns:
+            err(lr, _("%s in manifests not found") % short(node), f)
 
     repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
                    (files, changesets, revisions))
@@ -204,5 +243,8 @@ def verify(repo):
         repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
     if errors[0]:
         repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
+        if firstbad[0]:
+            repo.ui.warn(_("(first damaged changeset appears to be %d)\n")
+                         % firstbad[0])
         return 1
 
--- a/setup.py
+++ b/setup.py
@@ -2,8 +2,8 @@
 #
 # This is the mercurial setup script.
 #
-# './setup.py install', or
-# './setup.py --help' for more options
+# 'python setup.py install', or
+# 'python setup.py --help' for more options
 
 import sys
 if not hasattr(sys, 'version_info') or sys.version_info < (2, 3, 0, 'final'):
@@ -14,8 +14,6 @@ from distutils.core import setup, Extens
 from distutils.command.install_data import install_data
 
 import mercurial.version
-import mercurial.demandimport
-mercurial.demandimport.enable = lambda: None
 
 extra = {}
 
@@ -64,7 +62,8 @@ setup(name='mercurial',
       packages=['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert'],
       ext_modules=[Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
                    Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
-                   Extension('mercurial.base85', ['mercurial/base85.c'])],
+                   Extension('mercurial.base85', ['mercurial/base85.c']),
+                   Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c'])],
       data_files=[(os.path.join('mercurial', root),
                    [os.path.join(root, file_) for file_ in files])
                   for root, dirs, files in os.walk('templates')],
new file mode 100644
--- /dev/null
+++ b/templates/atom/changelog.tmpl
@@ -0,0 +1,10 @@
+#header#
+ <!-- Changelog -->
+ <id>{urlbase}{url}</id>
+ <link rel="self" href="{urlbase}{url}atom-log"/>
+ <link rel="alternate" href="{urlbase}{url}"/>
+ <title>#repo|escape# Changelog</title>
+ #latestentry%feedupdated#
+
+#entries%changelogentry#
+</feed>
new file mode 100644
--- /dev/null
+++ b/templates/atom/changelogentry.tmpl
@@ -0,0 +1,16 @@
+ <entry>
+  <title>#desc|strip|firstline|strip|escape#</title>
+  <id>http://www.selenic.com/mercurial/#changeset-{node}</id>
+  <link href="{urlbase}{url}rev/{node}"/>
+  <author>
+   <name>#author|person|escape#</name>
+   <email>#author|email|obfuscate#</email>
+  </author>
+  <updated>#date|rfc3339date#</updated>
+  <published>#date|rfc3339date#</published>
+  <content type="xhtml">
+   <div xmlns="http://www.w3.org/1999/xhtml">
+    <pre xml:space="preserve">#desc|escape#</pre>
+   </div>
+  </content>
+ </entry>
new file mode 100644
--- /dev/null
+++ b/templates/atom/filelog.tmpl
@@ -0,0 +1,8 @@
+#header#
+ <id>{urlbase}{url}atom-log/tip/{file|escape}</id>
+ <link rel="self" href="{urlbase}{url}atom-log/tip/{file|escape}"/>
+ <title>#repo|escape#: #file|escape# history</title>
+ #latestentry%feedupdated#
+
+#entries%changelogentry#
+</feed>
new file mode 100644
--- /dev/null
+++ b/templates/atom/header.tmpl
@@ -0,0 +1,4 @@
+Content-type: application/atom+xml; charset={encoding}
+
+<?xml version="1.0" encoding="{encoding}"?>
+<feed xmlns="http://www.w3.org/2005/Atom">
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/templates/atom/map
@@ -0,0 +1,9 @@
+default = 'changelog'
+feedupdated = '<updated>#date|rfc3339date#</updated>'
+header = header.tmpl
+changelog = changelog.tmpl
+changelogentry = changelogentry.tmpl
+filelog = filelog.tmpl
+filelogentry = filelogentry.tmpl
+tags = tags.tmpl
+tagentry = tagentry.tmpl
new file mode 100644
--- /dev/null
+++ b/templates/atom/tagentry.tmpl
@@ -0,0 +1,8 @@
+ <entry>
+  <title>#tag|escape#</title>
+  <link rel="alternate" href="{urlbase}{url}rev/{node}"/>
+  <id>http://www.selenic.com/mercurial/#tag-{node}</id>
+  <updated>#date|rfc3339date#</updated>
+  <published>#date|rfc3339date#</published>
+  <content type="text">#tag|strip|escape#</content>
+ </entry>
new file mode 100644
--- /dev/null
+++ b/templates/atom/tags.tmpl
@@ -0,0 +1,11 @@
+#header#
+ <id>{urlbase}{url}</id>
+ <link rel="self" href="{urlbase}{url}atom-tags"/>
+ <link rel="alternate" href="{urlbase}{url}tags"/>
+ <title>#repo|escape#: tags</title>
+ <summary>#repo|escape# tag history</summary>
+ <author><name>Mercurial SCM</name></author>
+ #latestentry%feedupdated#
+
+#entriesnotip%tagentry#
+</feed>
--- a/templates/changelog.tmpl
+++ b/templates/changelog.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: changelog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="#url#rss-log" title="RSS feed for #repo|escape#">
 </head>
@@ -11,6 +13,7 @@
 <a href="#url#file/#node|short#{sessionvars%urlparameter}">manifest</a>
 #archives%archiveentry#
 <a type="application/rss+xml" href="#url#rss-log">rss</a>
+<a type="application/atom+xml" href="#url#atom-log" title="Atom feed for #repo|escape#">atom</a>
 </div>
 
 <h2>changelog for #repo|escape#</h2>
--- a/templates/filelog.tmpl
+++ b/templates/filelog.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: #file|escape# history</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log/tip/#file|urlescape#" title="Atom feed for #repo|escape#:#file#">
 <link rel="alternate" type="application/rss+xml"
    href="#url#rss-log/tip/#file|urlescape#" title="RSS feed for #repo|escape#:#file#">
 </head>
@@ -13,6 +15,7 @@
 <a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a>
 <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a>
 <a type="application/rss+xml" href="#url#rss-log/tip/#file|urlescape#">rss</a>
+<a type="application/atom+xml" href="#url#atom-log/tip/#file|urlescape#" title="Atom feed for #repo|escape#:#file#">atom</a>
 </div>
 
 <h2>#file|escape# revision history</h2>
--- a/templates/gitweb/changelog.tmpl
+++ b/templates/gitweb/changelog.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: Changelog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
--- a/templates/gitweb/changeset.tmpl
+++ b/templates/gitweb/changeset.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>{repo|escape}: changeset {rev}:{node|short}</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
--- a/templates/gitweb/error.tmpl
+++ b/templates/gitweb/error.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: Error</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
--- a/templates/gitweb/fileannotate.tmpl
+++ b/templates/gitweb/fileannotate.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>{repo|escape}: {file|escape}@{node|short} (annotated)</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
--- a/templates/gitweb/filediff.tmpl
+++ b/templates/gitweb/filediff.tmpl
@@ -1,5 +1,7 @@
 {header}
 <title>{repo|escape}: diff {file|escape}</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for {repo|escape}">
 </head>
--- a/templates/gitweb/filelog.tmpl
+++ b/templates/gitweb/filelog.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: File revisions</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
--- a/templates/gitweb/filerevision.tmpl
+++ b/templates/gitweb/filerevision.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>{repo|escape}: {file|escape}@{node|short}</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
--- a/templates/gitweb/footer.tmpl
+++ b/templates/gitweb/footer.tmpl
@@ -1,6 +1,7 @@
 <div class="page_footer">
 <div class="page_footer_text">#repo|escape#</div>
-<a class="rss_logo" href="#url#rss-log">RSS</a>
+<a class="rss_logo" href="#url#rss-log">RSS</a> 
+<a class="rss_logo" href="#url#atom-log">Atom</a>
 <br />
 #motd#
 </div>
--- a/templates/gitweb/index.tmpl
+++ b/templates/gitweb/index.tmpl
@@ -14,6 +14,7 @@
         <td><a href="?sort=#sort_contact#">Contact</a></td>
         <td><a href="?sort=#sort_lastchange#">Last change</a></td>
         <td>&nbsp;</td>
+        <td>&nbsp;</td>
     <tr>
     #entries%indexentry#
 </table>
--- a/templates/gitweb/manifest.tmpl
+++ b/templates/gitweb/manifest.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: Manifest</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
@@ -23,6 +25,7 @@ manifest |
 <tr class="parity#upparity#">
 <td style="font-family:monospace">drwxr-xr-x</td>
 <td style="font-family:monospace"></td>
+<td style="font-family:monospace"></td>
 <td><a href="{url}file/#node|short##up|urlescape#{sessionvars%urlparameter}">[up]</a></td>
 <td class="link">&nbsp;</td>
 </tr>
--- a/templates/gitweb/map
+++ b/templates/gitweb/map
@@ -16,8 +16,8 @@ changelogentry = changelogentry.tmpl
 searchentry = changelogentry.tmpl
 changeset = changeset.tmpl
 manifest = manifest.tmpl
-manifestdirentry = '<tr class="parity#parity#"><td style="font-family:monospace">drwxr-xr-x</td><td style="font-family:monospace"></td><td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a></td></tr>'
-manifestfileentry = '<tr class="parity#parity#"><td style="font-family:monospace">#permissions|permissions#</td><td style="font-family:monospace" align=right>#size#</td><td class="list"><a class="list" href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a></td></tr>'
+manifestdirentry = '<tr class="parity#parity#"><td style="font-family:monospace">drwxr-xr-x</td><td style="font-family:monospace"></td><td style="font-family:monospace"></td><td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a></td></tr>'
+manifestfileentry = '<tr class="parity#parity#"><td style="font-family:monospace">#permissions|permissions#</td><td style="font-family:monospace" align=right>#date|isodate#</td><td style="font-family:monospace" align=right>#size#</td><td class="list"><a class="list" href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a></td></tr>'
 filerevision = filerevision.tmpl
 fileannotate = fileannotate.tmpl
 filediff = filediff.tmpl
@@ -52,7 +52,7 @@ branchtag = '<span class="branchtag" tit
 shortlogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><i>#author|person#</i></td><td><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}"><b>#desc|strip|firstline|escape#</b> <span class="logtags">{branches%branchtag}{tags%tagtag}</span></a></td><td class="link" nowrap><a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a></td></tr>'
 filelogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}"><b>#desc|strip|firstline|escape#</b></a></td><td class="link"><a href="{url}file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a>&nbsp;|&nbsp;<a href="{url}diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">diff</a>&nbsp;|&nbsp;<a href="{url}annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a> #rename%filelogrename#</td></tr>'
 archiveentry = ' | <a href="{url}archive/{node|short}{extension}">#type|escape#</a> '
-indexentry = '<tr class="parity#parity#"><td><a class="list" href="#url#{sessionvars%urlparameter}"><b>#name|escape#</b></a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a class="rss_logo" href="#url#rss-log">RSS</a> #archives%archiveentry#</td></tr>'
+indexentry = '<tr class="parity#parity#"><td><a class="list" href="#url#{sessionvars%urlparameter}"><b>#name|escape#</b></a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks">#archives%archiveentry#</td><td><a class="rss_logo" href="#url#rss-log">RSS</a> <a class="rss_logo" href="#url#atom-log">Atom</a></td></tr>'
 index = index.tmpl
 urlparameter = '#separator##name#=#value|urlescape#'
 hiddenformentry = '<input type="hidden" name="#name#" value="#value|escape#" />'
--- a/templates/gitweb/search.tmpl
+++ b/templates/gitweb/search.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: Search</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
--- a/templates/gitweb/shortlog.tmpl
+++ b/templates/gitweb/shortlog.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: Shortlog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
--- a/templates/gitweb/summary.tmpl
+++ b/templates/gitweb/summary.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: Summary</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
--- a/templates/gitweb/tags.tmpl
+++ b/templates/gitweb/tags.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: Tags</title>
+<link rel="alternate" type="application/atom+xml"
+   href="{url}atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="{url}rss-log" title="RSS feed for #repo|escape#">
 </head>
--- a/templates/manifest.tmpl
+++ b/templates/manifest.tmpl
@@ -17,6 +17,7 @@
 <tr class="parity#upparity#">
   <td><tt>drwxr-xr-x</tt>&nbsp;
   <td>&nbsp;
+  <td>&nbsp;
   <td><a href="#url#file/#node|short##up|urlescape#{sessionvars%urlparameter}">[up]</a>
 </tr>
 #dentries%manifestdirentry#
--- a/templates/map
+++ b/templates/map
@@ -15,8 +15,8 @@ changelogentry = changelogentry.tmpl
 searchentry = changelogentry.tmpl
 changeset = changeset.tmpl
 manifest = manifest.tmpl
-manifestdirentry = '<tr class="parity#parity#"><td><tt>drwxr-xr-x</tt>&nbsp;<td>&nbsp;<td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#/</a>'
-manifestfileentry = '<tr class="parity#parity#"><td><tt>#permissions|permissions#</tt>&nbsp;<td align=right><tt>#size#</tt>&nbsp;<td><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a>'
+manifestdirentry = '<tr class="parity#parity#"><td><tt>drwxr-xr-x</tt>&nbsp;<td>&nbsp;<td>&nbsp;<td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#/</a>'
+manifestfileentry = '<tr class="parity#parity#"><td><tt>#permissions|permissions#</tt>&nbsp;<td align=right><tt class="date">#date|isodate#</tt>&nbsp;<td align=right><tt>#size#</tt>&nbsp;<td><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a>'
 filerevision = filerevision.tmpl
 fileannotate = fileannotate.tmpl
 filediff = filediff.tmpl
@@ -47,7 +47,7 @@ filediffparent = '<tr><th class="parent"
 filelogparent = '<tr><th>parent #rev#:</th><td><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
 filediffchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="#url#rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
 filelogchild = '<tr><th>child #rev#:</th><td><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
-indexentry = '<tr class="parity#parity#"><td><a href="#url#{sessionvars%urlparameter}">#name|escape#</a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a href="#url#rss-log">RSS</a> #archives%archiveentry#</td></tr>'
+indexentry = '<tr class="parity#parity#"><td><a href="#url#{sessionvars%urlparameter}">#name|escape#</a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a href="#url#rss-log">RSS</a> <a href="#url#atom-log">Atom</a> #archives%archiveentry#</td></tr>'
 index = index.tmpl
 archiveentry = '<a href="#url#archive/#node|short##extension|urlescape#">#type|escape#</a> '
 notfound = notfound.tmpl
--- a/templates/old/changelog.tmpl
+++ b/templates/old/changelog.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: changelog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
 </head>
@@ -11,6 +13,7 @@
 <a href="?mf=#node|short#;path=/">manifest</a>
 #archives%archiveentry#
 <a type="application/rss+xml" href="?style=rss">rss</a>
+<a type="application/atom+xml" href="#url#atom-log" title="Atom feed for #repo|escape#">atom</a>
 </div>
 
 <h2>changelog for #repo|escape#</h2>
--- a/templates/old/filelog.tmpl
+++ b/templates/old/filelog.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: #file|escape# history</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log/tip/#file|urlescape#" title="Atom feed for #repo|escape#:#file#">
 <link rel="alternate" type="application/rss+xml"
    href="?fl=0;file=#file|urlescape#;style=rss" title="RSS feed for #repo|escape#:#file#">
 </head>
@@ -13,6 +15,7 @@
 <a href="?f=#node|short#;file=#file|urlescape#">file</a>
 <a href="?fa=#node|short#;file=#file|urlescape#">annotate</a>
 <a type="application/rss+xml" href="?fl=0;file=#file|urlescape#;style=rss">rss</a>
+<a type="application/atom+xml" href="#url#atom-log/tip/#file|urlescape#" title="Atom feed for #repo|escape#:#file#">atom</a>
 </div>
 
 <h2>#file|escape# revision history</h2>
--- a/templates/old/map
+++ b/templates/old/map
@@ -46,7 +46,7 @@ filediffparent = '<tr><th class="parent"
 filelogparent = '<tr><th>parent #rev#:</th><td><a href="?f=#node|short#;file=#file|urlescape#">#node|short#</a></td></tr>'
 filediffchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="?cs=#node|short#">#node|short#</a></td></tr>'
 filelogchild = '<tr><th>child #rev#:</th><td><a href="?f=#node|short#;file=#file|urlescape#">#node|short#</a></td></tr>'
-indexentry = '<tr class="parity#parity#"><td><a href="#url#">#name|escape#</a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a href="#url#?cl=tip;style=rss">RSS</a> #archives%archiveentry#</td></tr>'
+indexentry = '<tr class="parity#parity#"><td><a href="#url#">#name|escape#</a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a href="#url#?cl=tip;style=rss">RSS</a> <a href="#url#atom-log">Atom</a> #archives%archiveentry#</td></tr>'
 index = index.tmpl
 archiveentry = '<a href="#url#?ca=#node|short#;type=#type|urlescape#">#type|escape#</a> '
 notfound = notfound.tmpl
--- a/templates/old/shortlog.tmpl
+++ b/templates/old/shortlog.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: shortlog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
 </head>
@@ -11,6 +13,7 @@
 <a href="?mf=#node|short#;path=/">manifest</a>
 #archives%archiveentry#
 <a type="application/rss+xml" href="?style=rss">rss</a>
+<a type="application/atom+xml" href="#url#atom-log" title="Atom feed for #repo|escape#">atom</a>
 </div>
 
 <h2>shortlog for #repo|escape#</h2>
--- a/templates/old/tags.tmpl
+++ b/templates/old/tags.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: tags</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-tags" title="Atom feed for #repo|escape#: tags">
 <link rel="alternate" type="application/rss+xml"
    href="?cmd=tags;style=rss" title="RSS feed for #repo|escape#: tags">
 </head>
@@ -10,6 +12,7 @@
 <a href="?sl=tip">shortlog</a>
 <a href="?mf=#node|short#;path=/">manifest</a>
 <a type="application/rss+xml" href="?cmd=tags;style=rss">rss</a>
+<a type="application/atom+xml" href="#url#atom-tags">atom</a>
 </div>
 
 <h2>tags:</h2>
--- a/templates/shortlog.tmpl
+++ b/templates/shortlog.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: shortlog</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-log" title="Atom feed for #repo|escape#">
 <link rel="alternate" type="application/rss+xml"
    href="#url#rss-log" title="RSS feed for #repo|escape#">
 </head>
@@ -11,6 +13,7 @@
 <a href="#url#file/#node|short#/{sessionvars%urlparameter}">manifest</a>
 #archives%archiveentry#
 <a type="application/rss+xml" href="#url#rss-log">rss</a>
+<a type="application/rss+xml" href="#url#atom-log" title="Atom feed for #repo|escape#">atom</a>
 </div>
 
 <h2>shortlog for #repo|escape#</h2>
--- a/templates/static/style.css
+++ b/templates/static/style.css
@@ -1,5 +1,6 @@
 a { text-decoration:none; }
 .age { white-space:nowrap; }
+.date { white-space:nowrap; }
 .indexlinks { white-space:nowrap; }
 .parity0 { background-color: #dddddd; }
 .parity1 { background-color: #eeeeee; }
--- a/templates/tags.tmpl
+++ b/templates/tags.tmpl
@@ -1,5 +1,7 @@
 #header#
 <title>#repo|escape#: tags</title>
+<link rel="alternate" type="application/atom+xml"
+   href="#url#atom-tags" title="Atom feed for #repo|escape#: tags">
 <link rel="alternate" type="application/rss+xml"
    href="#url#rss-tags" title="RSS feed for #repo|escape#: tags">
 </head>
@@ -10,6 +12,7 @@
 <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a>
 <a href="#url#file/#node|short#/{sessionvars%urlparameter}">manifest</a>
 <a type="application/rss+xml" href="#url#rss-tags">rss</a>
+<a type="application/atom+xml" href="#url#atom-tags">atom</a>
 </div>
 
 <h2>tags:</h2>
--- a/tests/coverage.py
+++ b/tests/coverage.py
@@ -504,7 +504,7 @@ class coverage:
     def get_suite_spots(self, tree, spots):
         import symbol, token
         for i in range(1, len(tree)):
-            if type(tree[i]) == type(()):
+            if isinstance(tree[i], tuple):
                 if tree[i][0] == symbol.suite:
                     # Found a suite, look back for the colon and keyword.
                     lineno_colon = lineno_word = None
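
The coverage.py hunk above swaps an exact-type comparison for isinstance. A standalone illustration (not part of coverage.py) of why isinstance is the more robust, idiomatic check:

# Illustrative comparison of the two type checks (not from coverage.py).
class Point(tuple):
    pass

node = Point((1, 2))

print(type(node) == type(()))   # False: exact-type comparison ignores subclasses
print(isinstance(node, tuple))  # True: isinstance also accepts tuple subclasses
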
--- a/tests/hghave
+++ b/tests/hghave
@@ -71,6 +71,17 @@ def has_lsprof():
 def has_git():
     return matchoutput('git --version 2>&1', r'^git version')
 
+def has_svn():
+    return matchoutput('svn --version 2>&1', r'^svn, version') and \
+        matchoutput('svnadmin --version 2>&1', r'^svnadmin, version')
+
+def has_svn_bindings():
+    try:
+        import svn.core
+        return True
+    except ImportError:
+        return False
+
 checks = {
     "cvs": (has_cvs, "cvs client"),
     "cvsps": (has_cvsps, "cvsps utility"),
@@ -80,6 +91,8 @@ checks = {
     "fifo": (has_fifo, "named pipes"),
     "hotshot": (has_hotshot, "python hotshot module"),
     "lsprof": (has_lsprof, "python lsprof module"),
+    "svn": (has_svn, "subversion client and admin tools"),
+    "svn-bindings": (has_svn_bindings, "subversion python bindings"),
     "symlink": (has_symlink, "symbolic links"),
 }
 
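
The new has_svn and has_svn_bindings probes above test for the Subversion command-line tools and for the Python bindings. A minimal standalone sketch of the same detection idea using only the standard library; the matchcmd helper below is illustrative, not hghave's actual matchoutput:

import os, re

def matchcmd(cmd, regexp):
    # Run a command and report whether its first output line matches the pattern.
    pipe = os.popen(cmd)
    line = pipe.readline()
    pipe.close()
    return re.search(regexp, line) is not None

def has_svn_client():
    return matchcmd('svn --version 2>&1', r'^svn, version') and \
           matchcmd('svnadmin --version 2>&1', r'^svnadmin, version')

def has_svn_python_bindings():
    try:
        import svn.core
        return True
    except ImportError:
        return False
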
--- a/tests/run-tests.py
+++ b/tests/run-tests.py
@@ -134,7 +134,8 @@ def install_hg():
     vlog("# Performing temporary installation of HG")
     installerrs = os.path.join("tests", "install.err")
 
-    os.chdir("..") # Get back to hg root
+    # Run installer in hg root
+    os.chdir(os.path.join(os.path.dirname(sys.argv[0]), '..'))
     cmd = ('%s setup.py clean --all'
            ' install --force --home="%s" --install-lib="%s"'
            ' --install-scripts="%s" >%s 2>&1'
@@ -152,7 +153,14 @@ def install_hg():
     os.chdir(TESTDIR)
 
     os.environ["PATH"] = "%s%s%s" % (BINDIR, os.pathsep, os.environ["PATH"])
-    os.environ["PYTHONPATH"] = PYTHONDIR
+
+    pydir = os.pathsep.join([PYTHONDIR, TESTDIR])
+    pythonpath = os.environ.get("PYTHONPATH")
+    if pythonpath:
+        pythonpath = pydir + os.pathsep + pythonpath
+    else:
+        pythonpath = pydir
+    os.environ["PYTHONPATH"] = pythonpath
 
     use_correct_python()
 
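
The run-tests.py change above prepends the freshly installed library directory and the test directory to any PYTHONPATH the caller already set, instead of clobbering it. A small sketch of that prepend pattern in isolation (the example paths are placeholders):

import os

def prepend_pythonpath(*dirs):
    # Keep any PYTHONPATH the caller already had; just put the new entries first.
    pydir = os.pathsep.join(dirs)
    existing = os.environ.get("PYTHONPATH")
    if existing:
        os.environ["PYTHONPATH"] = pydir + os.pathsep + existing
    else:
        os.environ["PYTHONPATH"] = pydir

prepend_pythonpath("/tmp/hg-install/lib/python", "/path/to/hg/tests")
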
--- a/tests/test-abort-checkin.out
+++ b/tests/test-abort-checkin.out
@@ -1,8 +1,8 @@
 error: pretxncommit.nocommits hook failed: no commits allowed
-abort: no commits allowed
 transaction abort!
 rollback completed
+abort: no commits allowed
 error: pretxncommit.nocommits hook failed: no commits allowed
-abort: no commits allowed
 transaction abort!
 rollback completed
+abort: no commits allowed
--- a/tests/test-acl.out
+++ b/tests/test-acl.out
@@ -129,9 +129,9 @@ acl: acl.allow enabled, 0 entries for us
 acl: acl.deny not enabled
 acl: user fred not allowed on foo/file.txt
 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
-abort: acl: access denied for changeset ef1ea85a6374
 transaction abort!
 rollback completed
+abort: acl: access denied for changeset ef1ea85a6374
 no rollback information available
 0:6675d58eff77
 
@@ -170,9 +170,9 @@ acl: allowing changeset ef1ea85a6374
 acl: allowing changeset f9cafe1212c8
 acl: user fred not allowed on quux/file.py
 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
-abort: acl: access denied for changeset 911600dab2ae
 transaction abort!
 rollback completed
+abort: acl: access denied for changeset 911600dab2ae
 no rollback information available
 0:6675d58eff77
 
@@ -210,9 +210,9 @@ acl: acl.allow enabled, 0 entries for us
 acl: acl.deny enabled, 0 entries for user barney
 acl: user barney not allowed on foo/file.txt
 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
-abort: acl: access denied for changeset ef1ea85a6374
 transaction abort!
 rollback completed
+abort: acl: access denied for changeset ef1ea85a6374
 no rollback information available
 0:6675d58eff77
 
@@ -253,9 +253,9 @@ acl: allowing changeset ef1ea85a6374
 acl: allowing changeset f9cafe1212c8
 acl: user fred not allowed on quux/file.py
 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
-abort: acl: access denied for changeset 911600dab2ae
 transaction abort!
 rollback completed
+abort: acl: access denied for changeset 911600dab2ae
 no rollback information available
 0:6675d58eff77
 
@@ -296,9 +296,9 @@ acl: acl.deny enabled, 2 entries for use
 acl: allowing changeset ef1ea85a6374
 acl: user fred denied on foo/Bar/file.txt
 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
-abort: acl: access denied for changeset f9cafe1212c8
 transaction abort!
 rollback completed
+abort: acl: access denied for changeset f9cafe1212c8
 no rollback information available
 0:6675d58eff77
 
@@ -338,9 +338,9 @@ acl: acl.allow enabled, 0 entries for us
 acl: acl.deny enabled, 0 entries for user barney
 acl: user barney not allowed on foo/file.txt
 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
-abort: acl: access denied for changeset ef1ea85a6374
 transaction abort!
 rollback completed
+abort: acl: access denied for changeset ef1ea85a6374
 no rollback information available
 0:6675d58eff77
 
@@ -427,9 +427,9 @@ acl: allowing changeset ef1ea85a6374
 acl: allowing changeset f9cafe1212c8
 acl: user wilma not allowed on quux/file.py
 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
-abort: acl: access denied for changeset 911600dab2ae
 transaction abort!
 rollback completed
+abort: acl: access denied for changeset 911600dab2ae
 no rollback information available
 0:6675d58eff77
 
@@ -471,9 +471,9 @@ adding quux/file.py revisions
 added 3 changesets with 3 changes to 3 files
 calling hook pretxnchangegroup.acl: hgext.acl.hook
 error: pretxnchangegroup.acl hook failed: unable to open ../acl.config: No such file or directory
-abort: unable to open ../acl.config: No such file or directory
 transaction abort!
 rollback completed
+abort: unable to open ../acl.config: No such file or directory
 no rollback information available
 0:6675d58eff77
 
@@ -524,9 +524,9 @@ acl: allowing changeset ef1ea85a6374
 acl: allowing changeset f9cafe1212c8
 acl: user betty not allowed on quux/file.py
 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
-abort: acl: access denied for changeset 911600dab2ae
 transaction abort!
 rollback completed
+abort: acl: access denied for changeset 911600dab2ae
 no rollback information available
 0:6675d58eff77
 
new file mode 100755
--- /dev/null
+++ b/tests/test-add
@@ -0,0 +1,42 @@
+#!/bin/sh
+
+hg init a
+cd a
+echo a > a
+hg add -n
+hg st
+hg add
+hg st
+
+echo b > b
+hg add -n b
+hg st
+hg add b
+hg st
+echo % should fail
+hg add b
+hg st
+
+hg ci -m 0
+echo % should fail
+hg add a
+
+echo aa > a
+hg ci -m 1
+hg up 0
+echo aaa > a
+hg ci -m 2
+
+hg merge
+hg st
+echo % should fail
+hg add a
+hg st
+hg ci -m merge
+
+echo % issue683
+hg rm a
+hg st
+echo a > a
+hg add a
+hg st
new file mode 100644
--- /dev/null
+++ b/tests/test-add.out
@@ -0,0 +1,29 @@
+adding a
+? a
+adding a
+A a
+A a
+? b
+A a
+A b
+% should fail
+b already tracked!
+A a
+A b
+% should fail
+a already tracked!
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+warning: conflicts during merge.
+merging a
+merging a failed!
+0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+There are unresolved merges, you can redo the full merge using:
+  hg update -C 2
+  hg merge 1
+M a
+% should fail
+a already tracked!
+M a
+% issue683
+R a
+M a
new file mode 100755
--- /dev/null
+++ b/tests/test-alias
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+cat > $HGRCPATH <<EOF
+[extensions]
+alias=
+
+[alias]
+myinit = init
+cleanstatus = status -c
+unknown = bargle
+ambiguous = s
+recursive = recursive
+EOF
+
+echo '% basic'
+hg myinit alias
+
+echo '% unknown'
+hg unknown
+
+echo '% ambiguous'
+hg ambiguous
+
+echo '% recursive'
+hg recursive
+
+cd alias
+echo foo > foo
+hg ci -Amfoo
+
+echo '% with opts'
+hg cleanst
new file mode 100644
--- /dev/null
+++ b/tests/test-alias.out
@@ -0,0 +1,10 @@
+% basic
+% unknown
+*** [alias] unknown: command bargle is unknown
+% ambiguous
+*** [alias] ambiguous: command s is ambiguous
+% recursive
+*** [alias] recursive: circular dependency on recursive
+adding foo
+% with opts
+C foo
--- a/tests/test-annotate
+++ b/tests/test-annotate
@@ -12,18 +12,27 @@ hg ci -A -m test -u nobody -d '1 0'
 echo % annotate -c
 hg annotate -c a
 
+echo % annotate -cl
+hg annotate -cl a
+
 echo % annotate -d
 hg annotate -d a
 
 echo % annotate -n
 hg annotate -n a
 
+echo % annotate -nl
+hg annotate -nl a
+
 echo % annotate -u
 hg annotate -u a
 
 echo % annotate -cdnu
 hg annotate -cdnu a
 
+echo % annotate -cdnul
+hg annotate -cdnul a
+
 cat <<EOF >>a
 a
 a
@@ -32,28 +41,34 @@ hg ci -ma1 -d '1 0'
 hg cp a b
 hg ci -mb -d '1 0'
 cat <<EOF >> b
-b
-b
-b
+b4
+b5
+b6
 EOF
 hg ci -mb2 -d '2 0'
 
-echo % annotate b
-hg annotate b
+echo % annotate -n b
+hg annotate -n b
+echo % annotate -nl b
+hg annotate -nl b
 echo % annotate -nf b
 hg annotate -nf b
+echo % annotate -nlf b
+hg annotate -nlf b
 
 hg up -C 2
 cat <<EOF >> b
-b
+b4
 c
-b
+b5
 EOF
 hg ci -mb2.1 -d '2 0'
 hg merge
 hg ci -mmergeb -d '3 0'
 echo % annotate after merge
 hg annotate -nf b
+echo % annotate after merge with -l
+hg annotate -nlf b
 
 hg up -C 1
 hg cp a b
@@ -65,17 +80,21 @@ EOF
 hg ci -mc -d '3 0'
 hg merge
 cat <<EOF >> b
-b
+b4
 c
-b
+b5
 EOF
 echo d >> b
 hg ci -mmerge2 -d '4 0'
 echo % annotate after rename merge
 hg annotate -nf b
+echo % annotate after rename merge with -l
+hg annotate -nlf b
 
 echo % linkrev vs rev
-hg annotate -r tip a
+hg annotate -r tip -n a
+echo % linkrev vs rev with -l
+hg annotate -r tip -nl a
 
 # test issue 589
 # annotate was crashing when trying to --follow something
--- a/tests/test-annotate.out
+++ b/tests/test-annotate.out
@@ -3,28 +3,48 @@
 adding a
 % annotate -c
 8435f90966e4: a
+% annotate -cl
+8435f90966e4:1: a
 % annotate -d
 Thu Jan 01 00:00:01 1970 +0000: a
 % annotate -n
 0: a
+% annotate -nl
+0:1: a
 % annotate -u
 nobody: a
 % annotate -cdnu
 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000: a
-% annotate b
+% annotate -cdnul
+nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000:1: a
+% annotate -n b
 2: a
 2: a
 2: a
-3: b
-3: b
-3: b
+3: b4
+3: b5
+3: b6
+% annotate -nl b
+2:1: a
+2:2: a
+2:3: a
+3:4: b4
+3:5: b5
+3:6: b6
 % annotate -nf b
 0 a: a
 1 a: a
 1 a: a
-3 b: b
-3 b: b
-3 b: b
+3 b: b4
+3 b: b5
+3 b: b6
+% annotate -nlf b
+0 a:1: a
+1 a:2: a
+1 a:3: a
+3 b:4: b4
+3 b:5: b5
+3 b:6: b6
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 merging b
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -33,9 +53,16 @@ 0 files updated, 1 files merged, 0 files
 0 a: a
 1 a: a
 1 a: a
-3 b: b
+3 b: b4
 4 b: c
-3 b: b
+3 b: b5
+% annotate after merge with -l
+0 a:1: a
+1 a:2: a
+1 a:3: a
+3 b:4: b4
+4 b:5: c
+3 b:5: b5
 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 merging b
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -44,14 +71,26 @@ 0 files updated, 1 files merged, 0 files
 0 a: a
 6 b: z
 1 a: a
-3 b: b
+3 b: b4
 4 b: c
-3 b: b
+3 b: b5
 7 b: d
+% annotate after rename merge with -l
+0 a:1: a
+6 b:2: z
+1 a:3: a
+3 b:4: b4
+4 b:5: c
+3 b:5: b5
+7 b:7: d
 % linkrev vs rev
 0: a
 1: a
 1: a
+% linkrev vs rev with -l
+0:1: a
+1:2: a
+1:3: a
 % generate ABA rename configuration
 % annotate after ABA with follow
 foo: foo
--- a/tests/test-archive
+++ b/tests/test-archive
@@ -63,6 +63,7 @@ hg archive -t zip -r 2 test.zip
 unzip -t test.zip
 
 hg archive -t tar - | tar tf - | sed "s/$QTIP/TIP/"
+
 hg archive -r 0 -t tar rev-%r.tar
 if [ -f rev-0.tar ]; then
     echo 'rev-0.tar created'
new file mode 100755
--- /dev/null
+++ b/tests/test-audit-path
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+hg init
+
+echo % should fail
+hg add .hg/00changelog.i
+
+mkdir a
+echo a > a/a
+hg ci -Ama
+ln -s a b
+echo b > a/b
+
+echo % should fail
+hg add b/b
+
+echo % should succeed
+hg add b
+
+echo % should still fail - maybe
+hg add b/b
+
+exit 0
new file mode 100644
--- /dev/null
+++ b/tests/test-audit-path.out
@@ -0,0 +1,8 @@
+% should fail
+abort: path contains illegal component: .hg/00changelog.i
+adding a/a
+% should fail
+abort: path 'b/b' traverses symbolic link 'b'
+% should succeed
+% should still fail - maybe
+abort: path 'b/b' traverses symbolic link 'b'
--- a/tests/test-bad-extension.out
+++ b/tests/test-bad-extension.out
@@ -1,5 +1,4 @@
 *** failed to import extension badext: bit bucket overflow
-extension 'hgext.gpg' overrides commands: sigs sigcheck sign
 hg help [COMMAND]
 
 show help for a command, extension, or list of commands
--- a/tests/test-bundle-r.out
+++ b/tests/test-bundle-r.out
@@ -152,9 +152,9 @@ 1 files updated, 0 files merged, 0 files
 % 2
 2:d62976ca1e50
 adding changesets
-abort: unknown parent ac69c658229d!
 transaction abort!
 rollback completed
+abort: unknown parent ac69c658229d!
 % 2
 2:d62976ca1e50
 adding changesets
new file mode 100755
--- /dev/null
+++ b/tests/test-children
@@ -0,0 +1,59 @@
+#!/bin/sh
+# test children command
+
+cat <<EOF >> $HGRCPATH
+[extensions]
+hgext.children=
+EOF
+
+echo "% init"
+hg init t
+cd t
+
+echo "% no working directory"
+hg children
+
+echo % setup
+echo 0 > file0
+hg ci -qAm 0 -d '0 0'
+
+echo 1 > file1
+hg ci -qAm 1 -d '1 0'
+
+echo 2 >> file0
+hg ci -qAm 2 -d '2 0'
+
+hg co null
+echo 3 > file3
+hg ci -qAm 3 -d '3 0'
+
+echo "% hg children at revision 3 (tip)"
+hg children
+
+hg co null
+echo "% hg children at nullrev (should be 0 and 3)"
+hg children
+
+hg co 1
+echo "% hg children at revision 1 (should be 2)"
+hg children
+
+hg co 2
+echo "% hg children at revision 2 (other head)"
+hg children
+
+for i in null 0 1 2 3; do
+  echo "% hg children -r $i"
+  hg children -r $i
+done
+
+echo "% hg children -r 0 file0 (should be 2)"
+hg children -r 0 file0
+
+echo "% hg children -r 1 file0 (should be 2)"
+hg children -r 1 file0
+
+hg co 0
+echo "% hg children file0 at revision 0 (should be 2)"
+hg children file0
+
new file mode 100644
--- /dev/null
+++ b/tests/test-children.out
@@ -0,0 +1,62 @@
+% init
+% no working directory
+% setup
+0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+% hg children at revision 3 (tip)
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% hg children at nullrev (should be 0 and 3)
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% hg children at revision 1 (should be 2)
+changeset:   2:8f5eea5023c2
+user:        test
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     2
+
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% hg children at revision 2 (other head)
+% hg children -r null
+changeset:   0:4df8521a7374
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     0
+
+changeset:   3:e2962852269d
+tag:         tip
+parent:      -1:000000000000
+user:        test
+date:        Thu Jan 01 00:00:03 1970 +0000
+summary:     3
+
+% hg children -r 0
+changeset:   1:708c093edef0
+user:        test
+date:        Thu Jan 01 00:00:01 1970 +0000
+summary:     1
+
+% hg children -r 1
+changeset:   2:8f5eea5023c2
+user:        test
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     2
+
+% hg children -r 2
+% hg children -r 3
+% hg children -r 0 file0 (should be 2)
+changeset:   2:8f5eea5023c2
+user:        test
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     2
+
+% hg children -r 1 file0 (should be 2)
+changeset:   2:8f5eea5023c2
+user:        test
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     2
+
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% hg children file0 at revision 0 (should be 2)
+changeset:   2:8f5eea5023c2
+user:        test
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     2
+
--- a/tests/test-clone-pull-corruption.out
+++ b/tests/test-clone-pull-corruption.out
@@ -1,8 +1,8 @@
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 pulling from ../source
-abort: pretxncommit hook exited with status 1
 transaction abort!
 rollback completed
+abort: pretxncommit hook exited with status 1
 searching for changes
 adding changesets
 adding manifests
--- a/tests/test-commit.out
+++ b/tests/test-commit.out
@@ -1,4 +1,6 @@
 % commit date test
+transaction abort!
+rollback completed
 abort: impossible time zone offset: 4444444
 transaction abort!
 rollback completed
@@ -6,8 +8,6 @@ abort: invalid date: '1\t15.1'
 transaction abort!
 rollback completed
 abort: invalid date: 'foo bar' 
-transaction abort!
-rollback completed
 nothing changed
 % partial commit test
 trouble committing bar!
--- a/tests/test-committer.out
+++ b/tests/test-committer.out
@@ -22,7 +22,7 @@ user:        foo@bar.com
 date:        Mon Jan 12 13:46:40 1970 +0000
 summary:     commit-1
 
-abort: Please specify a username.
 transaction abort!
 rollback completed
+abort: Please specify a username.
 No username found, using user@host instead
new file mode 100755
--- /dev/null
+++ b/tests/test-convert
@@ -0,0 +1,21 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert=" >> $HGRCPATH
+
+hg init a
+cd a
+echo a > a
+hg ci -d'0 0' -Ama
+hg cp a b
+hg ci -d'1 0' -mb
+hg rm a
+hg ci -d'2 0' -mc
+hg mv b a
+hg ci -d'3 0' -md
+echo a >> a
+hg ci -d'4 0' -me
+
+cd ..
+hg convert a 2>&1 | grep -v 'subversion python bindings could not be loaded'
+hg --cwd a-hg pull ../a
--- a/tests/test-convert-cvs.out
+++ b/tests/test-convert-cvs.out
@@ -10,8 +10,8 @@ No conflicts created by this import
 U src/a
 U src/b/c
 % convert fresh repo
+initializing destination src-hg repository
 connecting to cvsrepo
-initializing destination src-hg repository
 scanning source...
 sorting...
 converting...
@@ -21,17 +21,13 @@ updating tags
 a
 c
 % commit new file revisions
-Checking in a;
 src/a,v  <--  a
 new revision: 1.2; previous revision: 1.1
-done
-Checking in b/c;
-src/b/c,v  <--  c
+src/b/c,v  <--  b/c
 new revision: 1.2; previous revision: 1.1
-done
 % convert again
+destination src-hg is a Mercurial repository
 connecting to cvsrepo
-destination src-hg is a Mercurial repository
 scanning source...
 sorting...
 converting...
--- a/tests/test-convert-git.out
+++ b/tests/test-convert-git.out
@@ -7,10 +7,10 @@ 3 t1
 2 t2.1
 1 t2.2
 0 Merge branch other
-changeset:   3:f0873470732d
+changeset:   3:69b3a302b4a1
 tag:         tip
-parent:      1:cb991dbbb06b
-parent:      2:600bef931ca4
+parent:      1:0de2a40e261b
+parent:      2:8815d3b33506
 user:        test <test@example.org>
 date:        Mon Jan 01 00:00:13 2007 +0000
 files:       a
new file mode 100755
--- /dev/null
+++ b/tests/test-convert-hg-sink
@@ -0,0 +1,53 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "hgext.convert=" >> $HGRCPATH
+
+hg init orig
+cd orig
+echo foo > foo
+echo bar > bar
+hg ci -qAm 'add foo and bar' -d '0 0'
+
+hg rm foo
+hg ci -m 'remove foo' -d '0 0'
+
+mkdir foo
+echo file > foo/file
+hg ci -qAm 'add foo/file' -d '0 0'
+
+hg tag -d '0 0' some-tag
+
+hg log
+cd ..
+
+hg convert orig new 2>&1 | grep -v 'subversion python bindings could not be loaded'
+cd new
+hg out ../orig
+
+echo '% dirstate should be empty:'
+hg debugstate
+hg parents -q
+
+hg up -C
+hg copy bar baz
+echo '% put something in the dirstate:'
+hg debugstate > debugstate
+grep baz debugstate
+
+echo '% add a new revision in the original repo'
+cd ../orig
+echo baz > baz
+hg ci -qAm 'add baz'
+
+cd ..
+hg convert orig new 2>&1 | grep -v 'subversion python bindings could not be loaded'
+cd new
+hg out ../orig
+echo '% dirstate should be the same (no output below):'
+hg debugstate > new-debugstate
+diff debugstate new-debugstate
+
+echo '% no copies'
+hg up -C
+hg debugrename baz
new file mode 100644
--- /dev/null
+++ b/tests/test-convert-hg-sink.out
@@ -0,0 +1,51 @@
+changeset:   3:593cbf6fb2b4
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     Added tag some-tag for changeset ad681a868e44
+
+changeset:   2:ad681a868e44
+tag:         some-tag
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add foo/file
+
+changeset:   1:cbba8ecc03b7
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     remove foo
+
+changeset:   0:327daa9251fa
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     add foo and bar
+
+initializing destination new repository
+scanning source...
+sorting...
+converting...
+3 add foo and bar
+2 remove foo
+1 add foo/file
+0 Added tag some-tag for changeset ad681a868e44
+comparing with ../orig
+searching for changes
+no changes found
+% dirstate should be empty:
+3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% put something in the dirstate:
+a   0         -1 unset             baz
+copy: bar -> baz
+% add a new revision in the original repo
+destination new is a Mercurial repository
+scanning source...
+sorting...
+converting...
+0 add baz
+comparing with ../orig
+searching for changes
+no changes found
+% dirstate should be the same (no output below):
+% no copies
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+baz not renamed
new file mode 100755
--- /dev/null
+++ b/tests/test-convert-hg-source
@@ -0,0 +1,33 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "hgext.convert=" >> $HGRCPATH
+
+hg init orig
+cd orig
+
+echo foo > foo
+echo bar > bar
+hg ci -qAm 'add foo bar' -d '0 0'
+
+echo >> foo
+hg ci -m 'change foo' -d '1 0'
+
+hg up -qC 0
+hg copy --after --force foo bar
+hg copy foo baz
+hg ci -m 'make bar and baz copies of foo' -d '2 0'
+
+hg merge
+hg ci -m 'merge local copy' -d '3 0'
+
+hg up -C 1
+hg merge 2
+hg ci -m 'merge remote copy' -d '4 0'
+
+cd ..
+hg convert --datesort orig new 2>&1 | grep -v 'subversion python bindings could not be loaded'
+cd new
+hg out ../orig
+
+true
new file mode 100644
--- /dev/null
+++ b/tests/test-convert-hg-source.out
@@ -0,0 +1,19 @@
+merging baz and foo
+1 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+merging foo and baz
+1 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+initializing destination new repository
+scanning source...
+sorting...
+converting...
+4 add foo bar
+3 change foo
+2 make bar and baz copies of foo
+1 merge local copy
+0 merge remote copy
+comparing with ../orig
+searching for changes
+no changes found
new file mode 100755
--- /dev/null
+++ b/tests/test-convert-svn
@@ -0,0 +1,53 @@
+#!/bin/sh
+
+"$TESTDIR/hghave" svn svn-bindings || exit 80
+
+fix_path()
+{
+    tr '\\' /
+}
+
+echo "[extensions]" >> $HGRCPATH
+echo "convert = " >> $HGRCPATH
+
+svnadmin create svn-repo
+
+echo % initial svn import
+mkdir t
+cd t
+echo a > a
+cd ..
+
+svnpath=`pwd | fix_path`
+# SVN wants all paths to start with a slash. Unfortunately,
+# Windows ones don't. Handle that.
+expr $svnpath : "\/" > /dev/null
+if [ $? -ne 0 ]; then
+    svnpath='/'$svnpath
+fi
+
+svnurl=file://$svnpath/svn-repo/trunk
+svn import -m init t $svnurl | fix_path
+
+echo % update svn repository
+svn co $svnurl t2 | fix_path
+cd t2
+echo b >> a
+echo b > b
+svn add b
+svn ci -m changea
+cd ..
+
+echo % convert to hg once
+hg convert $svnurl
+
+echo % update svn repository again
+cd t2
+echo c >> a
+echo c >> b
+svn ci -m changeb
+cd ..
+
+echo % test incremental conversion
+hg convert $svnurl
+
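
The new test builds a file:// URL for the Subversion repository and works around Windows paths lacking the leading slash that Subversion expects. A hedged Python sketch of the same normalization (the helper name and sample paths are illustrative):

def svn_file_url(path):
    # Subversion file:// URLs need a path that starts with '/';
    # Windows drive paths like C:\repo do not, so normalize first.
    path = path.replace('\\', '/')
    if not path.startswith('/'):
        path = '/' + path
    return 'file://' + path

print(svn_file_url('C:\\work\\svn-repo/trunk'))   # file:///C:/work/svn-repo/trunk
print(svn_file_url('/home/user/svn-repo/trunk'))  # file:///home/user/svn-repo/trunk
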
new file mode 100644
--- /dev/null
+++ b/tests/test-convert-svn.out
@@ -0,0 +1,32 @@
+% initial svn import
+Adding         t/a
+
+Committed revision 1.
+% update svn repository
+A    t2/a
+Checked out revision 1.
+A         b
+Sending        a
+Adding         b
+Transmitting file data ..
+Committed revision 2.
+% convert to hg once
+assuming destination trunk-hg
+initializing destination trunk-hg repository
+scanning source...
+sorting...
+converting...
+1 init
+0 changea
+% update svn repository again
+Sending        a
+Sending        b
+Transmitting file data ..
+Committed revision 3.
+% test incremental conversion
+assuming destination trunk-hg
+destination trunk-hg is a Mercurial repository
+scanning source...
+sorting...
+converting...
+0 changeb
new file mode 100644
--- /dev/null
+++ b/tests/test-convert.out
@@ -0,0 +1,14 @@
+adding a
+assuming destination a-hg
+initializing destination a-hg repository
+scanning source...
+sorting...
+converting...
+4 a
+3 b
+2 c
+1 d
+0 e
+pulling from ../a
+searching for changes
+no changes found
--- a/tests/test-debugcomplete.out
+++ b/tests/test-debugcomplete.out
@@ -110,6 +110,7 @@ rawcommit
 % Show the options for the "serve" command
 --accesslog
 --address
+--certificate
 --config
 --cwd
 --daemon
--- a/tests/test-dispatch.py
+++ b/tests/test-dispatch.py
@@ -1,32 +1,32 @@
 import os
-from mercurial import commands
+from mercurial import dispatch
 
-def dispatch(cmd):
-    """Simple wrapper around commands.dispatch()
+def testdispatch(cmd):
+    """Simple wrapper around dispatch.dispatch()
 
     Prints command and result value, but does not handle quoting.
     """
     print "running: %s" % (cmd,)
-    result = commands.dispatch(cmd.split())
+    result = dispatch.dispatch(cmd.split())
     print "result: %r" % (result,)
 
 
-dispatch("init test1")
+testdispatch("init test1")
 os.chdir('test1')
 
 # create file 'foo', add and commit
 f = file('foo', 'wb')
 f.write('foo\n')
 f.close()
-dispatch("add foo")
-dispatch("commit -m commit1 -d 2000-01-01 foo")
+testdispatch("add foo")
+testdispatch("commit -m commit1 -d 2000-01-01 foo")
 
 # append to file 'foo' and commit
 f = file('foo', 'ab')
 f.write('bar\n')
 f.close()
-dispatch("commit -m commit2 -d 2000-01-02 foo")
+testdispatch("commit -m commit2 -d 2000-01-02 foo")
 
 # check 88803a69b24 (fancyopts modified command table)
-dispatch("log -r 0")
-dispatch("log -r tip")
+testdispatch("log -r 0")
+testdispatch("log -r tip")
--- a/tests/test-encoding.out
+++ b/tests/test-encoding.out
@@ -9,9 +9,9 @@ M a
 ? latin-1
 ? latin-1-tag
 ? utf-8
-abort: decoding near ' encoded: é': 'ascii' codec can't decode byte 0xe9 in position 20: ordinal not in range(128)!
 transaction abort!
 rollback completed
+abort: decoding near ' encoded: é': 'ascii' codec can't decode byte 0xe9 in position 20: ordinal not in range(128)!
 % these should work
 marked working directory as branch é
 % ascii
--- a/tests/test-extdiff
+++ b/tests/test-extdiff
@@ -6,7 +6,9 @@ echo "extdiff=" >> $HGRCPATH
 hg init a
 cd a
 echo a > a
+echo b > b
 hg add
+# should diff cloned directories
 hg extdiff -o -r $opt
 
 echo "[extdiff]" >> $HGRCPATH
@@ -22,13 +24,17 @@ hg ci -d '0 0' -mtest1
 echo b >> a
 hg ci -d '1 0' -mtest2
 
+# should diff cloned files directly
 hg falabala -r 0:1
 
 # test diff during merge
 hg update 0
-echo b >> b
-hg add b
+echo c >> c
+hg add c
 hg ci -m "new branch" -d '1 0'
 hg update -C 1
 hg merge tip
-hg falabala || echo "diff-like tools yield a non-zero exit code"
+# should diff cloned file against wc file
+hg falabala > out || echo "diff-like tools yield a non-zero exit code"
+# cleanup the output since the wc is a tmp directory
+sed  's:\(.* \).*\(\/test-extdiff\):\1[tmp]\2:' out
--- a/tests/test-extdiff.out
+++ b/tests/test-extdiff.out
@@ -1,9 +1,7 @@
 adding a
-making snapshot of 0 files from rev 000000000000
-making snapshot of 1 files from working dir
+adding b
 Only in a: a
-making snapshot of 0 files from rev 000000000000
-making snapshot of 1 files from working dir
+Only in a: b
 diffing a.000000000000 a
 hg falabala [OPTION]... [FILE]...
 
@@ -26,14 +24,10 @@ options:
  -X --exclude  exclude names matching the given patterns
 
 use "hg -v help falabala" to show global options
-making snapshot of 1 files from rev e27a2475d60a
-making snapshot of 1 files from rev 5e49ec8d3f05
-diffing a.e27a2475d60a a.5e49ec8d3f05
+diffing a.8a5febb7f867/a a.34eed99112ab/a
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
-making snapshot of 1 files from rev 5e49ec8d3f05
-making snapshot of 1 files from working dir
-diffing a.5e49ec8d3f05 a
 diff-like tools yield a non-zero exit code
+diffing a.34eed99112ab/c [tmp]/test-extdiff/a/c
--- a/tests/test-git-export
+++ b/tests/test-git-export
@@ -78,6 +78,8 @@ hg mv dst2 dst3
 hg ci -m 'mv dst2 dst3; revert start' -d '0 0'
 
 hg diff --git -r 9:11
+echo '%  reversed'
+hg diff --git -r 11:9
 
 echo a >> foo
 hg add foo
@@ -92,12 +94,18 @@ hg ci -m 'change bar'
 echo
 echo '% file created before r1 and renamed before r2'
 hg diff --git -r -3:-1
+echo '%  reversed'
+hg diff --git -r -1:-3
 echo
 echo '% file created in r1 and renamed before r2'
 hg diff --git -r -4:-1
+echo '%  reversed'
+hg diff --git -r -1:-4
 echo
 echo '% file created after r1 and renamed before r2'
 hg diff --git -r -5:-1
+echo '%  reversed'
+hg diff --git -r -1:-5
 
 echo
 echo '% comparing with the working dir'
@@ -139,6 +147,8 @@ hg cp brand-new2 brand-new3
 hg mv brand-new2 brand-new3-2
 hg ci -m 'multiple renames/copies'
 hg diff --git -r -2 -r -1
+echo '%  reversed'
+hg diff --git -r -1 -r -2
 
 echo '% there should be a trailing TAB if there are spaces in the file name'
 echo foo > 'with spaces'
--- a/tests/test-git-export.out
+++ b/tests/test-git-export.out
@@ -75,6 +75,10 @@ rename to renamed.bin
 diff --git a/dst2 b/dst3
 rename from dst2
 rename to dst3
+%  reversed
+diff --git a/dst3 b/dst2
+rename from dst3
+rename to dst2
 
 % file created before r1 and renamed before r2
 diff --git a/foo b/bar
@@ -86,6 +90,16 @@ rename to bar
  a
  b
 +c
+%  reversed
+diff --git a/bar b/foo
+rename from bar
+rename to foo
+--- a/foo
++++ b/foo
+@@ -1,3 +1,2 @@ a
+ a
+ b
+-c
 
 % file created in r1 and renamed before r2
 diff --git a/foo b/bar
@@ -97,6 +111,16 @@ rename to bar
  a
 +b
 +c
+%  reversed
+diff --git a/bar b/foo
+rename from bar
+rename to foo
+--- a/foo
++++ b/foo
+@@ -1,3 +1,1 @@ a
+ a
+-b
+-c
 
 % file created after r1 and renamed before r2
 diff --git a/bar b/bar
@@ -107,6 +131,15 @@ new file mode 100644
 +a
 +b
 +c
+%  reversed
+diff --git a/bar b/bar
+deleted file mode 100644
+--- a/bar
++++ /dev/null
+@@ -1,3 +0,0 @@
+-a
+-b
+-c
 
 % comparing with the working dir
 % there's a copy in the working dir...
@@ -145,6 +178,16 @@ rename to brand-new3
 diff --git a/brand-new2 b/brand-new3-2
 copy from brand-new2
 copy to brand-new3-2
+%  reversed
+diff --git a/brand-new3 b/brand-new2
+rename from brand-new3
+rename to brand-new2
+diff --git a/brand-new3-2 b/brand-new3-2
+deleted file mode 100644
+--- a/brand-new3-2
++++ /dev/null
+@@ -1,1 +0,0 @@
+-
 % there should be a trailing TAB if there are spaces in the file name
 diff --git a/with spaces b/with spaces
 new file mode 100644
--- a/tests/test-hook.out
+++ b/tests/test-hook.out
@@ -60,9 +60,9 @@ precommit hook: HG_PARENT1=8ea2ef7ad3e8c
 pretxncommit hook: HG_NODE=fad284daf8c032148abaffcd745dafeceefceb61 HG_PARENT1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 
 5:fad284daf8c0
 pretxncommit.forbid hook: HG_NODE=fad284daf8c032148abaffcd745dafeceefceb61 HG_PARENT1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 
-abort: pretxncommit.forbid1 hook exited with status 1
 transaction abort!
 rollback completed
+abort: pretxncommit.forbid1 hook exited with status 1
 4:8ea2ef7ad3e8
 precommit hook: HG_PARENT1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 
 precommit.forbid hook: HG_PARENT1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 
@@ -86,9 +86,9 @@ adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
-abort: pretxnchangegroup.forbid1 hook exited with status 1
 transaction abort!
 rollback completed
+abort: pretxnchangegroup.forbid1 hook exited with status 1
 3:4c52fb2e4022
 preoutgoing hook: HG_SOURCE=pull 
 outgoing hook: HG_NODE=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 HG_SOURCE=pull 
--- a/tests/test-hup.out
+++ b/tests/test-hup.out
@@ -1,7 +1,7 @@
 0
 0
 adding changesets
-killed!
 transaction abort!
 rollback completed
+killed!
 .hg/00changelog.i .hg/journal.dirstate .hg/requires .hg/store .hg/store/00changelog.i .hg/store/00changelog.i.a
new file mode 100755
--- /dev/null
+++ b/tests/test-imerge
@@ -0,0 +1,64 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "imerge=" >> $HGRCPATH
+HGMERGE=true
+export HGMERGE
+
+hg init base
+cd base
+
+echo foo > foo
+echo bar > bar
+hg ci -Am0 -d '0 0'
+
+hg mv foo foo2
+echo foo >> foo2
+hg ci -m1 -d '1 0'
+
+hg up -C 0
+echo bar >> foo
+echo bar >> bar
+hg ci -m2 -d '2 0'
+
+echo % start imerge
+hg imerge
+
+cat foo2
+cat bar
+
+echo % status -v
+hg -v imerge st
+
+echo % next
+hg imerge next
+
+echo % merge next
+hg --traceback imerge
+
+echo % unresolve
+hg imerge unres foo
+
+echo % merge foo
+hg imerge merge foo
+
+echo % save
+echo foo > foo2
+hg imerge save ../savedmerge
+
+echo % load
+hg up -C 0
+hg imerge --traceback load ../savedmerge
+cat foo2
+
+hg ci -m'merged' -d '3 0'
+hg tip -v
+
+echo % nothing to merge -- tip
+hg imerge
+
+hg up 0
+echo % nothing to merge
+hg imerge
+
+exit 0
new file mode 100644
--- /dev/null
+++ b/tests/test-imerge.out
@@ -0,0 +1,48 @@
+adding bar
+adding foo
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% start imerge
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+U foo
+foo
+bar
+bar
+bar
+% status -v
+merging e6da46716401 and 30d266f502e7
+U foo (foo2)
+% next
+foo
+% merge next
+merging foo and foo2
+all conflicts resolved
+% unresolve
+% merge foo
+merging foo and foo2
+all conflicts resolved
+% save
+% load
+2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+R foo
+all conflicts resolved
+foo
+changeset:   3:fa9a6defdcaf
+tag:         tip
+parent:      2:e6da46716401
+parent:      1:30d266f502e7
+user:        test
+date:        Thu Jan 01 00:00:03 1970 +0000
+files:       foo foo2
+description:
+merged
+
+
+% nothing to merge -- tip
+abort: there is nothing to merge
+2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% nothing to merge
+abort: there is nothing to merge - use "hg update" instead
--- a/tests/test-import
+++ b/tests/test-import
@@ -93,6 +93,24 @@ python mkmsg.py | hg --cwd b import -
 hg --cwd b tip | grep second
 rm -r b
 
+# subject: duplicate detection, removal of [PATCH]
+cat > mkmsg2.py <<EOF
+import email.Message, sys
+msg = email.Message.Message()
+msg.set_payload('email patch\n\nnext line\n' + open('tip.patch').read())
+msg['Subject'] = '[PATCH] email patch'
+msg['From'] = 'email patcher'
+sys.stdout.write(msg.as_string())
+EOF
+
+echo '% plain diff in email, [PATCH] subject, message body with subject'
+hg clone -r0 a b
+hg --cwd a diff -r0:1 > tip.patch
+python mkmsg2.py | hg --cwd b import -
+hg --cwd b tip --template '{desc}\n'
+rm -r b
+
+
 # bug non regression test
 # importing a patch in a subdirectory failed at the commit stage
 echo line 2 >> a/d1/d2/a
--- a/tests/test-import.out
+++ b/tests/test-import.out
@@ -100,6 +100,17 @@ added 1 changesets with 2 changes to 2 f
 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 applying patch from stdin
 summary:     second change
+% plain diff in email, [PATCH] subject, message body with subject
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 2 changes to 2 files
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+applying patch from stdin
+email patch
+
+next line
 % hg import in a subdirectory
 requesting all changes
 adding changesets
--- a/tests/test-issue322.out
+++ b/tests/test-issue322.out
@@ -1,12 +1,12 @@
 % file replaced with directory
 adding a
 % should fail - would corrupt dirstate
-abort: file named 'a' already in dirstate
+abort: file 'a' in dirstate clashes with 'a/a'
 % directory replaced with file
 adding a/a
 % should fail - would corrupt dirstate
-abort: directory named 'a' already in dirstate
+abort: directory 'a' already in dirstate
 % directory replaced with file
 adding b/c/d
 % should fail - would corrupt dirstate
-abort: directory named 'b' already in dirstate
+abort: directory 'b' already in dirstate
new file mode 100755
--- /dev/null
+++ b/tests/test-issue522
@@ -0,0 +1,31 @@
+#!/bin/sh
+
+# In the merge below, the file "foo" has the same contents in both
+# parents, but if we look at the file-level history, we'll notice that
+# the version in p1 is an ancestor of the version in p2.  This test
+# makes sure that we'll use the version from p2 in the manifest of the
+# merge revision.
+
+hg init repo
+cd repo
+
+echo foo > foo
+hg ci -d '0 0' -qAm 'add foo'
+
+echo bar >> foo
+hg ci -d '0 0' -m 'change foo'
+
+hg backout -d '0 0' -r tip -m 'backout changed foo'
+
+hg up -C 0
+touch bar
+hg ci -d '0 0' -qAm 'add bar'
+
+hg merge --debug
+hg debugstate | grep foo
+hg st -A foo
+hg ci -d '0 0' -m 'merge'
+
+hg manifest --debug | grep foo
+hg debugindex .hg/store/data/foo.i
+
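
The comment in test-issue522 describes picking the p2 file version when p1's version is an ancestor of it in the file-level history. A self-contained sketch of that ancestor test on a toy history; the data structures are illustrative, not Mercurial's internals:

# Map each file revision to its parent (None for the root).
parents = {'foo@0': None, 'foo@1': 'foo@0', 'foo@2': 'foo@1'}

def is_ancestor(a, b):
    # Walk b's parent chain; a is an ancestor if the walk reaches it.
    while b is not None:
        if b == a:
            return True
        b = parents[b]
    return False

p1ver, p2ver = 'foo@1', 'foo@2'
if is_ancestor(p1ver, p2ver):
    merged = p2ver   # p2's version is strictly newer in file history; keep it
else:
    merged = p1ver
print(merged)  # foo@2
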
new file mode 100644
--- /dev/null
+++ b/tests/test-issue522.out
@@ -0,0 +1,17 @@
+reverting foo
+changeset 2:4d9e78aaceee backs out changeset 1:b515023e500e
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+resolving manifests
+ overwrite None partial False
+ ancestor bbd179dfa0a7 local 71766447bdbb+ remote 4d9e78aaceee
+ foo: remote is newer -> g
+getting foo
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+n   0         -2 unset             foo
+M foo
+c6fc755d7e68f49f880599da29f15add41f42f5a 644 foo
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       5      0       0 2ed2a3912a0b 000000000000 000000000000
+     1         5       9      1       1 6f4310b00b9a 2ed2a3912a0b 000000000000
+     2        14       5      2       2 c6fc755d7e68 6f4310b00b9a 000000000000
new file mode 100755
--- /dev/null
+++ b/tests/test-manifest
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+hg init
+echo a > a
+hg ci -Ama -d'0 0'
+mkdir b
+echo a > b/a
+hg ci -Amb -d'1 0'
+hg manifest
+hg manifest -v
+hg manifest --debug
+hg manifest -r 0
+hg manifest -r 1
+hg manifest -r tip
+
+echo % should fail
+hg manifest -r 2
+hg manifest -r tip tip
+
+hg manifest tip
new file mode 100644
--- /dev/null
+++ b/tests/test-manifest.out
@@ -0,0 +1,18 @@
+adding a
+adding b/a
+a
+b/a
+644 a
+644 b/a
+b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
+b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 b/a
+a
+a
+b/a
+a
+b/a
+% should fail
+abort: unknown revision '2'!
+abort: please specify just one revision
+a
+b/a
--- a/tests/test-merge-default
+++ b/tests/test-merge-default
@@ -34,6 +34,10 @@ echo % should succeed - 2 heads
 hg merge
 hg commit -mm2
 
+echo % should fail because at tip
+hg merge
+
+hg up 0
 echo % should fail because 1 head
 hg merge
 
--- a/tests/test-merge-default.out
+++ b/tests/test-merge-default.out
@@ -13,5 +13,8 @@ 0 files updated, 0 files merged, 0 files
 % should succeed - 2 heads
 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
+% should fail because at tip
+abort: there is nothing to merge
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 % should fail because 1 head
 abort: there is nothing to merge - use "hg update" instead
--- a/tests/test-mq
+++ b/tests/test-mq
@@ -338,6 +338,46 @@ hg qrefresh --git
 cat .hg/patches/bar
 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r .
 
+echo % refresh omitting an added file
+hg qnew baz
+echo newfile > newfile
+hg add newfile
+hg qrefresh
+hg st -A newfile
+hg qrefresh -X newfile
+hg st -A newfile
+hg revert newfile
+rm newfile
+hg qpop
+hg qdel baz
+
+echo % create a git patch
+echo a > alexander
+hg add alexander
+hg qnew -f --git addalexander
+grep diff .hg/patches/addalexander
+
+echo % create a git binary patch
+cat > writebin.py <<EOF
+import sys
+path = sys.argv[1]
+open(path, 'wb').write('BIN\x00ARY')
+EOF
+python writebin.py bucephalus
+
+python "$TESTDIR/md5sum.py" bucephalus
+hg add bucephalus
+hg qnew -f --git addbucephalus
+grep diff .hg/patches/addbucephalus
+
+echo % check binary patches can be popped and pushed
+hg qpop
+test -f bucephalus && echo % bucephalus should not be there
+hg qpush
+test -f bucephalus || echo % bucephalus should be there
+python "$TESTDIR/md5sum.py" bucephalus
+
+
 echo '% strip again'
 cd ..
 hg init strip
@@ -370,10 +410,17 @@ cd qclonesource
 echo foo > foo
 hg add foo
 hg ci -m 'add foo'
-hg qinit -c
+hg qinit
 hg qnew patch1
 echo bar >> foo
 hg qrefresh -m 'change foo'
+cd ..
+
+# repo with unversioned patch dir
+hg qclone qclonesource failure
+
+cd qclonesource
+hg qinit -c
 hg qci -m checkpoint
 qlog
 cd ..
new file mode 100755
--- /dev/null
+++ b/tests/test-mq-symlinks
@@ -0,0 +1,34 @@
+#!/bin/sh
+
+echo "[extensions]" >> $HGRCPATH
+echo "mq=" >> $HGRCPATH
+
+cat >> readlink.py <<EOF
+import errno, os, sys
+
+for f in sys.argv[1:]:
+    try:
+        print f, '->', os.readlink(f)
+    except OSError, err:
+        if err.errno != errno.EINVAL: raise
+        print f, 'not a symlink'
+EOF
+
+hg init
+hg qinit
+hg qnew base.patch
+echo a > a
+echo b > b
+hg add a b
+hg qrefresh
+python readlink.py a
+
+hg qnew symlink.patch
+rm a
+ln -s b a
+hg qrefresh --git
+python readlink.py a
+
+hg qpop
+hg qpush
+python readlink.py a
new file mode 100644
--- /dev/null
+++ b/tests/test-mq-symlinks.out
@@ -0,0 +1,6 @@
+a -> a not a symlink
+a -> b
+Now at: base.patch
+applying symlink.patch
+Now at: symlink.patch
+a -> b
--- a/tests/test-mq.out
+++ b/tests/test-mq.out
@@ -262,7 +262,8 @@ M a
 Patch queue now empty
 applying foo
 applying bar
-1 out of 1 hunk ignored -- saving rejects to file foo.rej
+file foo already exists
+1 out of 1 hunk FAILED -- saving rejects to file foo.rej
 patch failed, unable to continue (try -v)
 patch failed, rejects left in working dir
 Errors during apply, please fix and refresh bar
@@ -359,6 +360,20 @@ new file mode 100644
 @@ -0,0 +1,1 @@
 +bar
 3 barney (foo)
+% refresh omitting an added file
+C newfile
+A newfile
+Now at: bar
+% create a git patch
+diff --git a/alexander b/alexander
+% create a git binary patch
+8ba2a2f3e77b55d03051ff9c24ad65e7  bucephalus
+diff --git a/bucephalus b/bucephalus
+% check binary patches can be popped and pushed
+Now at: addalexander
+applying addbucephalus
+Now at: addbucephalus
+8ba2a2f3e77b55d03051ff9c24ad65e7  bucephalus
 % strip again
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 merging foo
@@ -409,6 +424,8 @@ date:        Thu Jan 01 00:00:00 1970 +0
 summary:     add foo
 
 % qclone
+abort: versioned patch repository not found (see qinit -c)
+adding .hg/patches/patch1
 main repo:
     rev 1: change foo
     rev 0: add foo
--- a/tests/test-nested-repo
+++ b/tests/test-nested-repo
@@ -4,16 +4,25 @@ hg init a
 cd a
 hg init b
 echo x > b/x
+
 echo '# should print nothing'
+hg add b
 hg st
-echo '# should print ? b/x'
+
+echo '# should fail'
 hg st b/x
-
 hg add b/x
 
-echo '# should print A b/x'
+echo '# should fail'
+hg add b b/x
 hg st
-echo '# should forget b/x'
-hg revert --all
-echo '# should print nothing'
+
+echo '# should arguably print nothing'
 hg st b
+
+echo a > a
+hg ci -Ama a
+
+echo '# should fail'
+hg mv a b
+hg st
--- a/tests/test-nested-repo.out
+++ b/tests/test-nested-repo.out
@@ -1,8 +1,9 @@
 # should print nothing
-# should print ? b/x
-? b/x
-# should print A b/x
-A b/x
-# should forget b/x
-forgetting b/x
-# should print nothing
+# should fail
+abort: path 'b/x' is inside repo 'b'
+abort: path 'b/x' is inside repo 'b'
+# should fail
+abort: path 'b/x' is inside repo 'b'
+# should arguably print nothing
+# should fail
+abort: path 'b/a' is inside repo 'b'
new file mode 100755
--- /dev/null
+++ b/tests/test-non-interactive-wsgi
@@ -0,0 +1,70 @@
+#!/bin/sh
+
+mkdir repo
+cd repo
+hg init
+echo foo > bar
+hg add bar
+hg commit -m "test" -d "0 0"
+hg tip
+
+cat > request.py <<EOF
+from mercurial import dispatch
+from mercurial.hgweb.hgweb_mod import hgweb
+from mercurial.hgweb.request import _wsgirequest
+from mercurial.ui import ui
+from mercurial import hg
+from StringIO import StringIO
+import sys
+
+class FileLike(object):
+    def __init__(self, real):
+        self.real = real
+    def fileno(self):
+        print >> sys.__stdout__, 'FILENO'
+        return self.real.fileno()
+    def read(self):
+        print >> sys.__stdout__, 'READ'
+        return self.real.read()
+    def readline(self):
+        print >> sys.__stdout__, 'READLINE'
+        return self.real.readline()
+    def isatty(self):
+        print >> sys.__stdout__, 'ISATTY'
+        return False
+
+sys.stdin = FileLike(sys.stdin)
+errors = StringIO()
+input = StringIO()
+output = StringIO()
+
+def startrsp(headers, data):
+	print '---- HEADERS'
+	print headers
+	print '---- DATA'
+	print data
+	return output.write
+
+env = {
+	'wsgi.version': (1, 0),
+	'wsgi.url_scheme': 'http',
+	'wsgi.errors': errors,
+	'wsgi.input': input,
+	'wsgi.multithread': False,
+	'wsgi.multiprocess': False,
+	'wsgi.run_once': False,
+	'REQUEST_METHOD': 'GET',
+	'SCRIPT_NAME': '',
+	'PATH_INFO': '',
+	'QUERY_STRING': '',
+	'SERVER_NAME': '127.0.0.1',
+	'SERVER_PORT': '20059',
+	'SERVER_PROTOCOL': 'HTTP/1.0'
+}
+
+_wsgirequest(hgweb('.'), env, startrsp)
+print '---- ERRORS'
+print errors.getvalue()
+EOF
+
+python request.py
new file mode 100644
--- /dev/null
+++ b/tests/test-non-interactive-wsgi.out
@@ -0,0 +1,12 @@
+changeset:   0:61c9426e69fe
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     test
+
+---- HEADERS
+200 Script output follows
+---- DATA
+[('content-type', 'text/html; charset=ascii')]
+---- ERRORS
+
new file mode 100755
--- /dev/null
+++ b/tests/test-parentrevspec
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+commit()
+{
+    msg=$1
+    p1=$2
+    p2=$3
+
+    if [ "$p1" ]; then
+	hg up -qC $p1
+    fi
+
+    if [ "$p2" ]; then
+	HGMERGE=true hg merge -q $p2
+    fi
+
+    echo >> foo
+
+    hg commit -d '0 0' -qAm "$msg" foo
+}
+
+hg init repo
+cd repo
+
+echo '[extensions]' > .hg/hgrc
+echo 'hgext.parentrevspec =' >> .hg/hgrc
+
+commit '0: add foo'
+commit '1: change foo 1'
+commit '2: change foo 2a'
+commit '3: change foo 3a'
+commit '4: change foo 2b' 1
+commit '5: merge' 3 4
+commit '6: change foo again'
+
+hg log --template '#rev#:#node|short# #parents#\n'
+echo
+
+lookup()
+{
+    for rev in "$@"; do
+	printf "$rev: "
+	hg id -nr $rev
+    done
+    true
+}
+
+tipnode=`hg id -ir tip`
+
+echo 'should work with tag/branch/node/rev'
+for r in tip default $tipnode 6; do
+    lookup "$r^"
+done
+echo
+
+echo 'some random lookups'
+lookup "6^^" "6^^^" "6^^^^" "6^^^^^" "6^^^^^^" "6^1" "6^2" "6^^2" "6^1^2" "6^^3"
+lookup "6~" "6~1" "6~2" "6~3" "6~4" "6~5" "6~42" "6~1^2" "6~1^2~2"
+echo
+
+echo 'with a tag "6^" pointing to rev 1'
+hg tag -l -r 1 "6^"
+lookup "6^" "6^1" "6~1" "6^^"
+echo
+
+echo 'with a tag "foo^bar" pointing to rev 2'
+hg tag -l -r 2 "foo^bar"
+lookup "foo^bar" "foo^bar^"
+
new file mode 100644
--- /dev/null
+++ b/tests/test-parentrevspec.out
@@ -0,0 +1,44 @@
+6:755d1e0d79e9 
+5:9ce2ce29723a 3:a3e00c7dbf11 4:bb4475edb621 
+4:bb4475edb621 1:5d953a1917d1 
+3:a3e00c7dbf11 
+2:befc7d89d081 
+1:5d953a1917d1 
+0:837088b6e1d9 
+
+should work with tag/branch/node/rev
+tip^: 5
+default^: 5
+755d1e0d79e9^: 5
+6^: 5
+
+some random lookups
+6^^: 3
+6^^^: 2
+6^^^^: 1
+6^^^^^: 0
+6^^^^^^: -1
+6^1: 5
+6^2: abort: unknown revision '6^2'!
+6^^2: 4
+6^1^2: 4
+6^^3: abort: unknown revision '6^^3'!
+6~: abort: unknown revision '6~'!
+6~1: 5
+6~2: 3
+6~3: 2
+6~4: 1
+6~5: 0
+6~42: -1
+6~1^2: 4
+6~1^2~2: 0
+
+with a tag "6^" pointing to rev 1
+6^: 1
+6^1: 5
+6~1: 5
+6^^: 3
+
+with a tag "foo^bar" pointing to rev 2
+foo^bar: 2
+foo^bar^: abort: unknown revision 'foo^bar^'!
--- a/tests/test-parse-date.out
+++ b/tests/test-parse-date.out
@@ -3,6 +3,8 @@ changeset 3:107ce1ee2b43 backs out chang
 merging with changeset 2:e6c3abc120e7
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
+transaction abort!
+rollback completed
 abort: invalid date: 'should fail' 
 transaction abort!
 rollback completed
@@ -10,8 +12,6 @@ abort: date exceeds 32 bits: 10000000000
 transaction abort!
 rollback completed
 abort: impossible time zone offset: 1400000
-transaction abort!
-rollback completed
 Sun Jan 15 13:30:00 2006 +0500
 Sun Jan 15 13:30:00 2006 -0800
 Sat Jul 15 13:30:00 2006 +0500
--- a/tests/test-rebuildstate.out
+++ b/tests/test-rebuildstate.out
@@ -1,7 +1,7 @@
 adding bar
 adding foo
 % state dump
-a 644          0 baz
+a   0         -1 baz
 n 644          0 foo
 r   0          0 bar
 % status
new file mode 100755
--- /dev/null
+++ b/tests/test-record
@@ -0,0 +1,266 @@
+#!/bin/sh
+
+echo "[ui]" >> $HGRCPATH
+echo "interactive=true" >> $HGRCPATH
+echo "[extensions]" >> $HGRCPATH
+echo "record=" >> $HGRCPATH
+
+echo % help
+
+hg help record
+
+hg init a
+cd a
+
+echo % select no files
+
+touch empty-rw
+hg add empty-rw
+hg record empty-rw<<EOF
+n
+EOF
+echo; hg tip -p
+
+echo % select files but no hunks
+
+hg record empty-rw<<EOF
+y
+n
+EOF
+echo; hg tip -p
+
+echo % record empty file
+
+hg record -d '0 0' -m empty empty-rw<<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo % rename empty file
+
+hg mv empty-rw empty-rename
+hg record -d '1 0' -m rename<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % copy empty file
+
+hg cp empty-rename empty-copy
+hg record -d '2 0' -m copy<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % delete empty file
+
+hg rm empty-copy
+hg record -d '3 0' -m delete<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % add binary file
+
+hg bundle --base -2 tip.bundle
+hg add tip.bundle
+hg record -d '4 0' -m binary<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % change binary file
+
+hg bundle --base -2 tip.bundle
+hg record -d '5 0' -m binary-change<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % rename and change binary file
+
+hg mv tip.bundle top.bundle
+hg bundle --base -2 top.bundle
+hg record -d '6 0' -m binary-change-rename<<EOF
+y
+EOF
+echo; hg tip -p
+
+echo % add plain file
+
+for i in 1 2 3 4 5 6 7 8 9 10; do
+    echo $i >> plain
+done
+
+hg add plain
+hg record -d '7 0' -m plain plain<<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo % modify end of plain file
+
+echo 11 >> plain
+hg record -d '8 0' -m end plain <<EOF
+y
+y
+EOF
+
+echo % modify end of plain file, no EOL
+
+hg tip --template '{node}' >> plain
+hg record -d '9 0' -m noeol plain <<EOF
+y
+y
+EOF
+
+echo % modify end of plain file, add EOL
+
+echo >> plain
+hg record -d '10 0' -m eol plain <<EOF
+y
+y
+y
+EOF
+
+echo % modify beginning, trim end, record both
+
+rm plain
+for i in 2 2 3 4 5 6 7 8 9 10; do
+  echo $i >> plain
+done
+
+hg record -d '10 0' -m begin-and-end plain <<EOF
+y
+y
+y
+EOF
+echo; hg tip -p
+
+echo % trim beginning, modify end
+
+rm plain
+for i in 4 5 6 7 8 9 10.new; do
+  echo $i >> plain
+done
+
+echo % record end
+
+hg record -d '11 0' -m end-only plain <<EOF
+y
+n
+y
+EOF
+echo; hg tip -p
+
+echo % record beginning
+
+hg record -d '12 0' -m begin-only plain <<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo % add to beginning, trim from end
+
+rm plain
+for i in 1 2 3 4 5 6 7 8 9; do
+  echo $i >> plain
+done
+
+echo % record end
+
+hg record --traceback -d '13 0' -m end-again plain<<EOF
+y
+n
+y
+EOF
+
+echo % add to beginning, middle, end
+
+rm plain
+for i in 1 2 3 4 5 5.new 5.reallynew 6 7 8 9 10 11; do
+  echo $i >> plain
+done
+
+echo % record beginning, middle
+
+hg record -d '14 0' -m middle-only plain <<EOF
+y
+y
+y
+n
+EOF
+echo; hg tip -p
+
+echo % record end
+
+hg record -d '15 0' -m end-only plain <<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+mkdir subdir
+cd subdir
+echo a > a
+hg ci -d '16 0' -Amsubdir
+
+echo a >> a
+hg record -d '16 0' -m subdir-change a <<EOF
+y
+y
+EOF
+echo; hg tip -p
+
+echo a > f1
+echo b > f2
+hg add f1 f2
+
+hg ci -mz -d '17 0'
+
+echo a >> f1
+echo b >> f2
+
+echo % help, quit
+
+hg record <<EOF
+?
+q
+EOF
+
+echo % skip
+
+hg record <<EOF
+s
+EOF
+
+echo % no
+
+hg record <<EOF
+n
+EOF
+
+echo % f, quit
+
+hg record <<EOF
+f
+q
+EOF
+
+echo % s, all
+
+hg record -d '18 0' -mx <<EOF
+s
+a
+EOF
+echo; hg tip -p
+
+echo % f
+
+hg record -d '19 0' -my <<EOF
+f
+EOF
+echo; hg tip -p
new file mode 100644
--- /dev/null
+++ b/tests/test-record.out
@@ -0,0 +1,489 @@
+% help
+hg record [OPTION]... [FILE]...
+
+interactively select changes to commit
+
+    If a list of files is omitted, all changes reported by "hg status"
+    will be candidates for recording.
+
+    You will be prompted for whether to record changes to each
+    modified file, and for files with multiple changes, for each
+    change to use.  For each query, the following responses are
+    possible:
+
+    y - record this change
+    n - skip this change
+
+    s - skip remaining changes to this file
+    f - record remaining changes to this file
+
+    d - done, skip remaining changes and files
+    a - record all changes to all remaining files
+    q - quit, recording no changes
+
+    ? - display help
+
+options:
+
+ -A --addremove  mark new/missing files as added/removed before committing
+ -I --include    include names matching the given patterns
+ -X --exclude    exclude names matching the given patterns
+ -m --message    use <text> as commit message
+ -l --logfile    read commit message from <file>
+ -d --date       record datecode as commit date
+ -u --user       record user as committer
+
+use "hg -v help record" to show global options
+% select no files
+diff --git a/empty-rw b/empty-rw
+new file mode 100644
+examine changes to 'empty-rw'? [Ynsfdaq?]  no changes to record
+
+changeset:   -1:000000000000
+tag:         tip
+user:        
+date:        Thu Jan 01 00:00:00 1970 +0000
+
+
+% select files but no hunks
+diff --git a/empty-rw b/empty-rw
+new file mode 100644
+examine changes to 'empty-rw'? [Ynsfdaq?]  transaction abort!
+rollback completed
+
+changeset:   -1:000000000000
+tag:         tip
+user:        
+date:        Thu Jan 01 00:00:00 1970 +0000
+
+
+% record empty file
+diff --git a/empty-rw b/empty-rw
+new file mode 100644
+examine changes to 'empty-rw'? [Ynsfdaq?]  
+changeset:   0:c0708cf4e46e
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:00 1970 +0000
+summary:     empty
+
+
+% rename empty file
+diff --git a/empty-rw b/empty-rename
+rename from empty-rw
+rename to empty-rename
+examine changes to 'empty-rw' and 'empty-rename'? [Ynsfdaq?]  
+changeset:   1:df251d174da3
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:01 1970 +0000
+summary:     rename
+
+
+% copy empty file
+diff --git a/empty-rename b/empty-copy
+copy from empty-rename
+copy to empty-copy
+examine changes to 'empty-rename' and 'empty-copy'? [Ynsfdaq?]  
+changeset:   2:b63ea3939f8d
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:02 1970 +0000
+summary:     copy
+
+
+% delete empty file
+diff --git a/empty-copy b/empty-copy
+deleted file mode 100644
+examine changes to 'empty-copy'? [Ynsfdaq?]  
+changeset:   3:a2546574bce9
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:03 1970 +0000
+summary:     delete
+
+
+% add binary file
+diff --git a/tip.bundle b/tip.bundle
+new file mode 100644
+this is a binary file
+examine changes to 'tip.bundle'? [Ynsfdaq?]  
+changeset:   4:9e998a545a8b
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:04 1970 +0000
+summary:     binary
+
+diff -r a2546574bce9 -r 9e998a545a8b tip.bundle
+Binary file tip.bundle has changed
+
+% change binary file
+diff --git a/tip.bundle b/tip.bundle
+this modifies a binary file (all or nothing)
+examine changes to 'tip.bundle'? [Ynsfdaq?]  
+changeset:   5:93d05561507d
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:05 1970 +0000
+summary:     binary-change
+
+diff -r 9e998a545a8b -r 93d05561507d tip.bundle
+Binary file tip.bundle has changed
+
+% rename and change binary file
+diff --git a/tip.bundle b/top.bundle
+rename from tip.bundle
+rename to top.bundle
+this modifies a binary file (all or nothing)
+examine changes to 'tip.bundle' and 'top.bundle'? [Ynsfdaq?]  
+changeset:   6:699cc1bea9aa
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:06 1970 +0000
+summary:     binary-change-rename
+
+diff -r 93d05561507d -r 699cc1bea9aa tip.bundle
+Binary file tip.bundle has changed
+diff -r 93d05561507d -r 699cc1bea9aa top.bundle
+Binary file top.bundle has changed
+
+% add plain file
+diff --git a/plain b/plain
+new file mode 100644
+examine changes to 'plain'? [Ynsfdaq?]  
+changeset:   7:118ed744216b
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:07 1970 +0000
+summary:     plain
+
+diff -r 699cc1bea9aa -r 118ed744216b plain
+--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
++++ b/plain	Thu Jan 01 00:00:07 1970 +0000
+@@ -0,0 +1,10 @@
++1
++2
++3
++4
++5
++6
++7
++8
++9
++10
+
+% modify end of plain file
+diff --git a/plain b/plain
+1 hunks, 1 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -8,3 +8,4 @@ 8
+ 8
+ 9
+ 10
++11
+record this change to 'plain'? [Ynsfdaq?]  % modify end of plain file, no EOL
+diff --git a/plain b/plain
+1 hunks, 1 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -9,3 +9,4 @@ 9
+ 9
+ 10
+ 11
++cf81a2760718a74d44c0c2eecb72f659e63a69c5
+\ No newline at end of file
+record this change to 'plain'? [Ynsfdaq?]  % modify end of plain file, add EOL
+diff --git a/plain b/plain
+1 hunks, 2 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -9,4 +9,4 @@ 9
+ 9
+ 10
+ 11
+-cf81a2760718a74d44c0c2eecb72f659e63a69c5
+\ No newline at end of file
++cf81a2760718a74d44c0c2eecb72f659e63a69c5
+record this change to 'plain'? [Ynsfdaq?]  % modify beginning, trim end, record both
+diff --git a/plain b/plain
+2 hunks, 4 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -1,4 +1,4 @@ 1
+-1
++2
+ 2
+ 3
+ 4
+record this change to 'plain'? [Ynsfdaq?]  @@ -8,5 +8,3 @@ 8
+ 8
+ 9
+ 10
+-11
+-cf81a2760718a74d44c0c2eecb72f659e63a69c5
+record this change to 'plain'? [Ynsfdaq?]  
+changeset:   11:d09ab1967dab
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:10 1970 +0000
+summary:     begin-and-end
+
+diff -r e2ecd9b0b78d -r d09ab1967dab plain
+--- a/plain	Thu Jan 01 00:00:10 1970 +0000
++++ b/plain	Thu Jan 01 00:00:10 1970 +0000
+@@ -1,4 +1,4 @@ 1
+-1
++2
+ 2
+ 3
+ 4
+@@ -8,5 +8,3 @@ 8
+ 8
+ 9
+ 10
+-11
+-cf81a2760718a74d44c0c2eecb72f659e63a69c5
+
+% trim beginning, modify end
+% record end
+diff --git a/plain b/plain
+2 hunks, 5 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -1,9 +1,6 @@ 2
+-2
+-2
+-3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+record this change to 'plain'? [Ynsfdaq?]  @@ -4,7 +1,7 @@
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+-10
++10.new
+record this change to 'plain'? [Ynsfdaq?]  
+changeset:   12:44516c9708ae
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:11 1970 +0000
+summary:     end-only
+
+diff -r d09ab1967dab -r 44516c9708ae plain
+--- a/plain	Thu Jan 01 00:00:10 1970 +0000
++++ b/plain	Thu Jan 01 00:00:11 1970 +0000
+@@ -7,4 +7,4 @@ 7
+ 7
+ 8
+ 9
+-10
++10.new
+
+% record beginning
+diff --git a/plain b/plain
+1 hunks, 3 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -1,6 +1,3 @@ 2
+-2
+-2
+-3
+ 4
+ 5
+ 6
+record this change to 'plain'? [Ynsfdaq?]  
+changeset:   13:3ebbace64a8d
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:12 1970 +0000
+summary:     begin-only
+
+diff -r 44516c9708ae -r 3ebbace64a8d plain
+--- a/plain	Thu Jan 01 00:00:11 1970 +0000
++++ b/plain	Thu Jan 01 00:00:12 1970 +0000
+@@ -1,6 +1,3 @@ 2
+-2
+-2
+-3
+ 4
+ 5
+ 6
+
+% add to beginning, trim from end
+% record end
+diff --git a/plain b/plain
+2 hunks, 4 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -1,6 +1,9 @@ 4
++1
++2
++3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+record this change to 'plain'? [Ynsfdaq?]  @@ -1,7 +4,6 @@
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+-10.new
+record this change to 'plain'? [Ynsfdaq?]  % add to beginning, middle, end
+% record beginning, middle
+diff --git a/plain b/plain
+3 hunks, 7 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -1,2 +1,5 @@ 4
++1
++2
++3
+ 4
+ 5
+record this change to 'plain'? [Ynsfdaq?]  @@ -1,6 +4,8 @@
+ 4
+ 5
++5.new
++5.reallynew
+ 6
+ 7
+ 8
+ 9
+record this change to 'plain'? [Ynsfdaq?]  @@ -3,4 +8,6 @@
+ 6
+ 7
+ 8
+ 9
++10
++11
+record this change to 'plain'? [Ynsfdaq?]  
+changeset:   15:c1c639d8b268
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:14 1970 +0000
+summary:     middle-only
+
+diff -r efc0dad7bd9f -r c1c639d8b268 plain
+--- a/plain	Thu Jan 01 00:00:13 1970 +0000
++++ b/plain	Thu Jan 01 00:00:14 1970 +0000
+@@ -1,5 +1,10 @@ 4
++1
++2
++3
+ 4
+ 5
++5.new
++5.reallynew
+ 6
+ 7
+ 8
+
+% record end
+diff --git a/plain b/plain
+1 hunks, 2 lines changed
+examine changes to 'plain'? [Ynsfdaq?]  @@ -9,3 +9,5 @@ 7
+ 7
+ 8
+ 9
++10
++11
+record this change to 'plain'? [Ynsfdaq?]  
+changeset:   16:80b74bbc7808
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:15 1970 +0000
+summary:     end-only
+
+diff -r c1c639d8b268 -r 80b74bbc7808 plain
+--- a/plain	Thu Jan 01 00:00:14 1970 +0000
++++ b/plain	Thu Jan 01 00:00:15 1970 +0000
+@@ -9,3 +9,5 @@ 7
+ 7
+ 8
+ 9
++10
++11
+
+adding subdir/a
+diff --git a/subdir/a b/subdir/a
+1 hunks, 1 lines changed
+examine changes to 'subdir/a'? [Ynsfdaq?]  @@ -1,1 +1,2 @@ a
+ a
++a
+record this change to 'subdir/a'? [Ynsfdaq?]  
+changeset:   18:33ff5c4fb017
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:16 1970 +0000
+summary:     subdir-change
+
+diff -r aecf2b2ea83c -r 33ff5c4fb017 subdir/a
+--- a/subdir/a	Thu Jan 01 00:00:16 1970 +0000
++++ b/subdir/a	Thu Jan 01 00:00:16 1970 +0000
+@@ -1,1 +1,2 @@ a
+ a
++a
+
+% help, quit
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  y - record this change
+n - skip this change
+s - skip remaining changes to this file
+f - record remaining changes to this file
+d - done, skip remaining changes and files
+a - record all changes to all remaining files
+q - quit, recording no changes
+? - display help
+examine changes to 'subdir/f1'? [Ynsfdaq?]  abort: user quit
+% skip
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  diff --git a/subdir/f2 b/subdir/f2
+1 hunks, 1 lines changed
+examine changes to 'subdir/f2'? [Ynsfdaq?]  abort: response expected
+% no
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  diff --git a/subdir/f2 b/subdir/f2
+1 hunks, 1 lines changed
+examine changes to 'subdir/f2'? [Ynsfdaq?]  abort: response expected
+% f, quit
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  diff --git a/subdir/f2 b/subdir/f2
+1 hunks, 1 lines changed
+examine changes to 'subdir/f2'? [Ynsfdaq?]  abort: user quit
+% s, all
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  diff --git a/subdir/f2 b/subdir/f2
+1 hunks, 1 lines changed
+examine changes to 'subdir/f2'? [Ynsfdaq?]  
+changeset:   20:094183e04b7c
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:18 1970 +0000
+summary:     x
+
+diff -r f9e855cd9374 -r 094183e04b7c subdir/f2
+--- a/subdir/f2	Thu Jan 01 00:00:17 1970 +0000
++++ b/subdir/f2	Thu Jan 01 00:00:18 1970 +0000
+@@ -1,1 +1,2 @@ b
+ b
++b
+
+% f
+diff --git a/subdir/f1 b/subdir/f1
+1 hunks, 1 lines changed
+examine changes to 'subdir/f1'? [Ynsfdaq?]  
+changeset:   21:38164785b0ef
+tag:         tip
+user:        test
+date:        Thu Jan 01 00:00:19 1970 +0000
+summary:     y
+
+diff -r 094183e04b7c -r 38164785b0ef subdir/f1
+--- a/subdir/f1	Thu Jan 01 00:00:18 1970 +0000
++++ b/subdir/f1	Thu Jan 01 00:00:19 1970 +0000
+@@ -1,1 +1,2 @@ a
+ a
++a
+
new file mode 100755
--- /dev/null
+++ b/tests/test-revlog-packentry
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+hg init repo
+cd repo
+
+touch foo
+hg ci -Am 'add foo'
+
+hg up -C null
+# this should be stored as a delta against rev 0
+echo foo bar baz > foo
+hg ci -Am 'add foo again'
+
+hg debugindex .hg/store/data/foo.i
new file mode 100644
--- /dev/null
+++ b/tests/test-revlog-packentry.out
@@ -0,0 +1,6 @@
+adding foo
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+adding foo
+   rev    offset  length   base linkrev nodeid       p1           p2
+     0         0       0      0       0 b80de5d13875 000000000000 000000000000
+     1         0      24      0       1 0376abec49b8 000000000000 000000000000
--- a/tests/test-symlinks
+++ b/tests/test-symlinks
@@ -72,3 +72,13 @@ hg commit -A -m 'add symlink in a/b/c su
 echo '2. clone it'
 cd ..
 hg clone test testclone
+
+echo '# git symlink diff'
+cd testclone
+hg diff --git -r null:tip
+hg export --git tip > ../sl.diff
+echo '# import git symlink diff'
+hg rm a/b/c/demo
+hg commit -m'remove link'
+hg import ../sl.diff
+hg diff --git -r 1:tip
--- a/tests/test-symlinks.out
+++ b/tests/test-symlinks.out
@@ -20,3 +20,20 @@ 1. commit a symlink
 adding a/b/c/demo
 2. clone it
 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+# git symlink diff
+diff --git a/a/b/c/demo b/a/b/c/demo
+new file mode 120000
+--- /dev/null
++++ b/a/b/c/demo
+@@ -0,0 +1,1 @@
++/path/to/symlink/source
+\ No newline at end of file
+# import git symlink diff
+applying ../sl.diff
+diff --git a/a/b/c/demo b/a/b/c/demo
+new file mode 120000
+--- /dev/null
++++ b/a/b/c/demo
+@@ -0,0 +1,1 @@
++/path/to/symlink/source
+\ No newline at end of file
--- a/tests/test-transplant.out
+++ b/tests/test-transplant.out
@@ -101,17 +101,17 @@ removing toremove
 adding bar
 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
 applying a1e30dd1b8e7
-foo
-Hunk #1 FAILED at 1.
+patching file foo
+Hunk #1 FAILED at 0
 1 out of 1 hunk FAILED -- saving rejects to file foo.rej
-patch command failed: exited with status 1
+patch failed to apply
 abort: Fix up the merge and run hg transplant --continue
 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
 applying a1e30dd1b8e7
-foo
-Hunk #1 FAILED at 1.
+patching file foo
+Hunk #1 FAILED at 0
 1 out of 1 hunk FAILED -- saving rejects to file foo.rej
-patch command failed: exited with status 1
+patch failed to apply
 abort: Fix up the merge and run hg transplant --continue
 a1e30dd1b8e7 transplanted as f1563cf27039
 skipping already applied revision 1:a1e30dd1b8e7
--- a/tests/test-ui-config
+++ b/tests/test-ui-config
@@ -1,10 +1,10 @@
 #!/usr/bin/env python
 
 import ConfigParser
-from mercurial import ui, util, cmdutil
+from mercurial import ui, util, dispatch
 
 testui = ui.ui()
-parsed = cmdutil.parseconfig([
+parsed = dispatch._parseconfig([
     'values.string=string value',
     'values.bool1=true',
     'values.bool2=false',