new file mode 100644
--- /dev/null
+++ b/.hgignore
@@ -0,0 +1,26 @@
+syntax: glob
+
+*.elc
+*.orig
+*.rej
+*~
+*.o
+*.so
+*.pyc
+*.swp
+*.prof
+tests/.coverage*
+tests/annotated
+tests/*.err
+build
+contrib/hgsh/hgsh
+dist
+doc/*.[0-9]
+doc/*.[0-9].gendoc.txt
+doc/*.[0-9].{x,ht}ml
+MANIFEST
+patches
+mercurial/__version__.py
+
+syntax: regexp
+^\.pc/
new file mode 100644
--- /dev/null
+++ b/.hgsigs
@@ -0,0 +1,1 @@
+35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0 iD8DBQBEYmO2ywK+sNU5EO8RAnaYAKCO7x15xUn5mnhqWNXqk/ehlhRt2QCfRDfY0LrUq2q4oK/KypuJYPHgq1A=
new file mode 100644
--- /dev/null
+++ b/.hgtags
@@ -0,0 +1,13 @@
+d40cc5aacc31ed673d9b5b24f98bee78c283062c 0.4f
+1c590d34bf61e2ea12c71738e5a746cd74586157 0.4e
+7eca4cfa8aad5fce9a04f7d8acadcd0452e2f34e 0.4d
+b4d0c3786ad3e47beacf8412157326a32b6d25a4 0.4c
+f40273b0ad7b3a6d3012fd37736d0611f41ecf54 0.5
+0a28dfe59f8fab54a5118c5be4f40da34a53cdb7 0.5b
+12e0fdbc57a0be78f0e817fd1d170a3615cd35da 0.6
+4ccf3de52989b14c3d84e1097f59e39a992e00bd 0.6b
+eac9c8efcd9bd8244e72fb6821f769f450457a32 0.6c
+979c049974485125e1f9357f6bbe9c1b548a64c3 0.7
+3a56574f329a368d645853e0f9e09472aee62349 0.8
+6a03cff2b0f5d30281e6addefe96b993582f2eac 0.8.1
+35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0.9
new file mode 100644
--- /dev/null
+++ b/CONTRIBUTORS
@@ -0,0 +1,39 @@
+Andrea Arcangeli <andrea at suse.de>
+Thomas Arendsen Hein <thomas at intevation.de>
+Goffredo Baroncelli <kreijack at libero.it>
+Muli Ben-Yehuda <mulix at mulix.org>
+Mikael Berthe <mikael at lilotux.net>
+Benoit Boissinot <bboissin at gmail.com>
+Vincent Danjean <vdanjean.ml at free.fr>
+Jake Edge <jake at edge2.net>
+Michael Fetterman <michael.fetterman at intel.com>
+Edouard Gomez <ed.gomez at free.fr>
+Eric Hopper <hopper at omnifarious.org>
+Alecs King <alecsk at gmail.com>
+Volker Kleinfeld <Volker.Kleinfeld at gmx.de>
+Vadim Lebedev <vadim at mbdsys.com>
+Christopher Li <hg at chrisli.org>
+Chris Mason <mason at suse.com>
+Colin McMillen <mcmillen at cs.cmu.edu>
+Wojciech Milkowski <wmilkowski at interia.pl>
+Chad Netzer <chad.netzer at gmail.com>
+Bryan O'Sullivan <bos at serpentine.com>
+Vicent Seguí Pascual <vseguip at gmail.com>
+Sean Perry <shaleh at speakeasy.net>
+Nguyen Anh Quynh <aquynh at gmail.com>
+Ollivier Robert <roberto at keltia.freenix.fr>
+Alexander Schremmer <alex at alexanderweb.de>
+Arun Sharma <arun at sharma-home.net>
+Josef "Jeff" Sipek <jeffpc at optonline.net>
+Kevin Smith <yarcs at qualitycode.com>
+TK Soh <teekaysoh at yahoo.com>
+Radoslaw Szkodzinski <astralstorm at gorzow.mm.pl>
+Samuel Tardieu <sam at rfc1149.net>
+K Thananchayan <thananck at yahoo.com>
+Andrew Thompson <andrewkt at aktzero.com>
+Michael S. Tsirkin <mst at mellanox.co.il>
+Rafael Villar Burke <pachi at mmn-arquitectos.com>
+Tristan Wibberley <tristan at wibberley.org>
+Mark Williamson <mark.williamson at cl.cam.ac.uk>
+
+If you are a contributor and don't see your name here, please let me know.
new file mode 100644
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,340 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+ 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+ <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Library General
+Public License instead of this License.
new file mode 100644
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,17 @@
+include hg
+recursive-include mercurial *.py
+include hgweb.cgi hgwebdir.cgi
+include hgeditor rewrite-log
+include tests/README tests/coverage.py tests/run-tests.py tests/md5sum.py tests/test-*[a-z0-9] tests/*.out
+prune tests/*.err
+include *.txt
+include templates/map templates/map-*[a-z0-9]
+include templates/*.tmpl
+include templates/static/*
+include doc/README doc/Makefile doc/gendoc.py doc/*.txt doc/*.html doc/*.[0-9]
+recursive-include contrib *
+include README
+include CONTRIBUTORS
+include COPYING
+include Makefile
+include MANIFEST.in
new file mode 100644
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,61 @@
+PREFIX=/usr/local
+export PREFIX
+PYTHON=python
+
+help:
+ @echo 'Commonly used make targets:'
+ @echo ' all - build program and documentation'
+ @echo ' install - install program and man pages to PREFIX ($(PREFIX))'
+ @echo ' install-home - install with setup.py install --home=HOME ($(HOME))'
+ @echo ' local - build C extensions for inplace usage'
+ @echo ' tests - run all tests in the automatic test suite'
+ @echo ' test-foo - run only specified tests (e.g. test-merge1)'
+ @echo ' dist - run all tests and create a source tarball in dist/'
+ @echo ' clean - remove files created by other targets'
+ @echo ' (except installed files or dist source tarball)'
+ @echo
+ @echo 'Example for a system-wide installation under /usr/local:'
+ @echo ' make all && su -c "make install" && hg version'
+ @echo
+ @echo 'Example for a local installation (usable in this directory):'
+ @echo ' make local && ./hg version'
+
+all: build doc
+
+local:
+ $(PYTHON) setup.py build_ext -i
+
+build:
+ $(PYTHON) setup.py build
+
+doc:
+ $(MAKE) -C doc
+
+clean:
+ -$(PYTHON) setup.py clean --all # ignore errors of this command
+ find . -name '*.py[co]' -exec rm -f '{}' ';'
+ rm -f MANIFEST mercurial/__version__.py mercurial/*.so tests/*.err
+ $(MAKE) -C doc clean
+
+install: all
+ $(PYTHON) setup.py install --prefix="$(PREFIX)" --force
+ cd doc && $(MAKE) $(MFLAGS) install
+
+install-home: all
+ $(PYTHON) setup.py install --home="$(HOME)" --force
+ cd doc && $(MAKE) $(MFLAGS) PREFIX="$(HOME)" install
+
+dist: tests dist-notests
+
+dist-notests: doc
+ TAR_OPTIONS="--owner=root --group=root --mode=u+w,go-w,a+rX-s" $(PYTHON) setup.py sdist --force-manifest
+
+tests:
+ cd tests && $(PYTHON) run-tests.py
+
+test-%:
+ cd tests && $(PYTHON) run-tests.py $@
+
+
+.PHONY: help all local build doc clean install install-home dist dist-notests tests
+
new file mode 100644
--- /dev/null
+++ b/README
@@ -0,0 +1,103 @@
+MERCURIAL QUICK-START
+
+Setting up Mercurial:
+
+    Note: some distributions fail to include bits of distutils by
+ default, you'll need python-dev to install. You'll also need a C
+ compiler and a 3-way merge tool like merge, tkdiff, or kdiff3.
+
+ First, unpack the source:
+
+ $ tar xvzf mercurial-<ver>.tar.gz
+ $ cd mercurial-<ver>
+
+ When installing, change python to python2.3 or python2.4 if 2.2 is the
+ default on your system.
+
+ To install system-wide:
+
+ $ python setup.py install --force
+
+ To install in your home directory (~/bin and ~/lib, actually), run:
+
+ $ python setup.py install --home=${HOME} --force
+ $ export PYTHONPATH=${HOME}/lib/python # (or lib64/ on some systems)
+ $ export PATH=${HOME}/bin:$PATH # add these to your .bashrc
+
+ And finally:
+
+ $ hg # test installation, show help
+
+ If you get complaints about missing modules, you probably haven't set
+ PYTHONPATH correctly.
+
+Setting up a Mercurial project:
+
+ $ hg init project # creates project directory
+ $ cd project
+ # copy files in, edit them
+ $ hg add # add all unknown files
+ $ hg remove --after # remove deleted files
+ $ hg commit # commit all changes, edit changelog entry
+
+ Mercurial will look for a file named .hgignore in the root of your
+ repository which contains a set of regular expressions to ignore in
+ file paths.
+
+Branching and merging:
+
+ $ hg clone linux linux-work # create a new branch
+ $ cd linux-work
+ $ <make changes>
+ $ hg commit
+ $ cd ../linux
+ $ hg pull ../linux-work # pull changesets from linux-work
+ $ hg merge # merge the new tip from linux-work into
+ # our working directory
+ $ hg commit # commit the result of the merge
+
+Importing patches:
+
+ Fast:
+ $ patch < ../p/foo.patch
+ $ hg commit -A
+
+ Faster:
+ $ patch < ../p/foo.patch
+ $ hg commit `lsdiff -p1 ../p/foo.patch`
+
+ Fastest:
+ $ cat ../p/patchlist | xargs hg import -p1 -b ../p
+
+Exporting a patch:
+
+ (make changes)
+ $ hg commit
+ $ hg tip
+ 28237:747a537bd090880c29eae861df4d81b245aa0190
+ $ hg export 28237 > foo.patch # export changeset 28237
+
+Network support:
+
+ # pull from the primary Mercurial repo
+ foo$ hg clone http://selenic.com/hg/
+ foo$ cd hg
+
+ # export your current repo via HTTP with browsable interface
+ foo$ hg serve -n "My repo" -p 80
+
+ # pushing changes to a remote repo with SSH
+ foo$ hg push ssh://user@example.com/~/hg/
+
+ # merge changes from a remote machine
+ bar$ hg pull http://foo/
+ bar$ hg merge # merge changes into your working directory
+
+ # Set up a CGI server on your webserver
+ foo$ cp hgweb.cgi ~/public_html/hg/index.cgi
+ foo$ emacs ~/public_html/hg/index.cgi # adjust the defaults
+
+For more info:
+
+ Documentation in doc/
+ Mercurial website at http://selenic.com/mercurial
new file mode 100644
--- /dev/null
+++ b/comparison.txt
@@ -0,0 +1,31 @@
+ Mercurial git BK (*)
+storage revlog delta compressed revisions SCCS weave
+storage naming by filename by revision hash by filename
+merge file DAGs changeset DAG file DAGs?
+consistency SHA1 SHA1 CRC
+signable? yes yes no
+
+retrieve file tip O(1) O(1) O(revs)
+add rev O(1) O(1) O(revs)
+find prev file rev O(1) O(changesets) O(revs)
+annotate file O(revs) O(changesets) O(revs)
+find file changeset O(1) O(changesets) ?
+
+checkout O(files) O(files) O(revs)?
+commit O(changes) O(changes) ?
+ 6 patches/s 6 patches/s slow
+diff working dir O(changes) O(changes) ?
+ < 1s < 1s ?
+tree diff revs O(changes) O(changes) ?
+ < 1s < 1s ?
+hardlink clone O(files) O(revisions) O(files)
+
+find remote csets O(log new) rsync: O(revisions) ?
+ git-http: O(changesets)
+pull remote csets O(patch) O(modified files) O(patch)
+
+repo growth O(patch) O(revisions) O(patch)
+ kernel history 300M 3.5G? 250M?
+lines of code 2500 6500 (+ cogito) ??
+
+* I've never used BK so this is just guesses
new file mode 100644
--- /dev/null
+++ b/contrib/bash_completion
@@ -0,0 +1,385 @@
+# bash completion for the Mercurial distributed SCM
+
+# Docs:
+#
+# If you source this file from your .bashrc, bash should be able to
+# complete a command line that uses hg with all the available commands
+# and options and sometimes even arguments.
+#
+# Mercurial allows you to define additional commands through extensions.
+# Bash should be able to automatically figure out the name of these new
+# commands and their options. If you also want to tell it how to
+# complete non-option arguments, see below for how to define an
+# _hg_cmd_foo function.
+#
+#
+# Notes about completion for specific commands:
+#
+# - the completion function for the email command from the patchbomb
+# extension will try to call _hg_emails to get a list of e-mail
+# addresses. It's up to the user to define this function. For
+# example, put the addresses of the lists that you usually patchbomb
+# in ~/.patchbomb-to and the addresses that you usually use to send
+# the patchbombs in ~/.patchbomb-from and use something like this:
+#
+# _hg_emails()
+# {
+# if [ -r ~/.patchbomb-$1 ]; then
+# cat ~/.patchbomb-$1
+# fi
+# }
+#
+#
+# Writing completion functions for additional commands:
+#
+# If it exists, the function _hg_cmd_foo will be called without
+# arguments to generate the completion candidates for the hg command
+# "foo".
+#
+# In addition to the regular completion variables provided by bash,
+# the following variables are also set:
+# - $hg - the hg program being used (e.g. /usr/bin/hg)
+# - $cmd - the name of the hg command being completed
+# - $cmd_index - the index of $cmd in $COMP_WORDS
+# - $cur - the current argument being completed
+# - $prev - the argument before $cur
+# - $global_args - "|"-separated list of global options that accept
+# an argument (e.g. '--cwd|-R|--repository')
+# - $canonical - 1 if we canonicalized $cmd before calling the function
+# 0 otherwise
+#
+
+shopt -s extglob
+
+_hg_commands()
+{
+ local commands
+ commands="$("$hg" debugcomplete "$cur" 2>/dev/null)" || commands=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$commands' -- "$cur"))
+}
+
+_hg_paths()
+{
+ local paths="$("$hg" paths 2>/dev/null | sed -e 's/ = .*$//')"
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$paths' -- "$cur"))
+}
+
+_hg_repos()
+{
+ local i
+ for i in $(compgen -d -- "$cur"); do
+ test ! -d "$i"/.hg || COMPREPLY=(${COMPREPLY[@]:-} "$i")
+ done
+}
+
+_hg_status()
+{
+ local files="$("$hg" status -n$1 . 2>/dev/null)"
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$files' -- "$cur"))
+}
+
+_hg_tags()
+{
+ local tags="$("$hg" tags -q 2>/dev/null)"
+ local IFS=$'\n'
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$tags' -- "$cur"))
+}
+
+# this is "kind of" ugly...
+_hg_count_non_option()
+{
+ local i count=0
+ local filters="$1"
+
+ for ((i=1; $i<=$COMP_CWORD; i++)); do
+ if [[ "${COMP_WORDS[i]}" != -* ]]; then
+ if [[ ${COMP_WORDS[i-1]} == @($filters|$global_args) ]]; then
+ continue
+ fi
+ count=$(($count + 1))
+ fi
+ done
+
+ echo $(($count - 1))
+}
+
+_hg()
+{
+ local cur prev cmd cmd_index opts i
+ # global options that receive an argument
+ local global_args='--cwd|-R|--repository'
+ local hg="$1"
+
+ COMPREPLY=()
+ cur="$2"
+ prev="$3"
+
+ # searching for the command
+ # (first non-option argument that doesn't follow a global option that
+ # receives an argument)
+ for ((i=1; $i<=$COMP_CWORD; i++)); do
+ if [[ ${COMP_WORDS[i]} != -* ]]; then
+ if [[ ${COMP_WORDS[i-1]} != @($global_args) ]]; then
+ cmd="${COMP_WORDS[i]}"
+ cmd_index=$i
+ break
+ fi
+ fi
+ done
+
+ if [[ "$cur" == -* ]]; then
+ opts=$("$hg" debugcomplete --options "$cmd" 2>/dev/null)
+
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$opts' -- "$cur"))
+ return
+ fi
+
+ # global options
+ case "$prev" in
+ -R|--repository)
+ _hg_repos
+ return
+ ;;
+ --cwd)
+ # Stick with default bash completion
+ return
+ ;;
+ esac
+
+ if [ -z "$cmd" ] || [ $COMP_CWORD -eq $i ]; then
+ _hg_commands
+ return
+ fi
+
+ # try to generate completion candidates for whatever command the user typed
+ local help
+ local canonical=0
+ if _hg_command_specific; then
+ return
+ fi
+
+ # canonicalize the command name and try again
+ help=$("$hg" help "$cmd" 2>/dev/null)
+ if [ $? -ne 0 ]; then
+ # Probably either the command doesn't exist or it's ambiguous
+ return
+ fi
+ cmd=${help#hg }
+ cmd=${cmd%%[$' \n']*}
+ canonical=1
+ _hg_command_specific
+}
+
+_hg_command_specific()
+{
+ if [ "$(type -t "_hg_cmd_$cmd")" = function ]; then
+ "_hg_cmd_$cmd"
+ return 0
+ fi
+
+ if [ "$cmd" != status ] && [ "$prev" = -r ] || [ "$prev" == --rev ]; then
+ if [ $canonical = 1 ]; then
+ _hg_tags
+ return 0
+ elif [[ status != "$cmd"* ]]; then
+ _hg_tags
+ return 0
+ else
+ return 1
+ fi
+ fi
+
+ case "$cmd" in
+ help)
+ _hg_commands
+ ;;
+ export|manifest|update)
+ _hg_tags
+ ;;
+ pull|push|outgoing|incoming)
+ _hg_paths
+ _hg_repos
+ ;;
+ paths)
+ _hg_paths
+ ;;
+ add)
+ _hg_status "u"
+ ;;
+ commit)
+ _hg_status "mar"
+ ;;
+ remove)
+ _hg_status "d"
+ ;;
+ forget)
+ _hg_status "a"
+ ;;
+ diff)
+ _hg_status "mar"
+ ;;
+ revert)
+ _hg_status "mard"
+ ;;
+ clone)
+ local count=$(_hg_count_non_option)
+ if [ $count = 1 ]; then
+ _hg_paths
+ fi
+ _hg_repos
+ ;;
+ debugindex|debugindexdot)
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -f -X "!*.i" -- "$cur"))
+ ;;
+ debugdata)
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -f -X "!*.d" -- "$cur"))
+ ;;
+ *)
+ return 1
+ ;;
+ esac
+
+ return 0
+}
+
+complete -o bashdefault -o default -F _hg hg 2>/dev/null \
+ || complete -o default -F _hg hg
+
+
+# Completion for commands provided by extensions
+
+# mq
+_hg_ext_mq_patchlist()
+{
+ local patches=$("$hg" $1 2>/dev/null)
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$patches' -- "$cur"))
+}
+
+_hg_ext_mq_queues()
+{
+ local root=$("$hg" root 2>/dev/null)
+ local n
+ for n in $(cd "$root"/.hg && compgen -d -- "$cur"); do
+ # I think we're usually not interested in the regular "patches" queue
+ # so just filter it.
+ if [ "$n" != patches ] && [ -e "$root/.hg/$n/series" ]; then
+ COMPREPLY=(${COMPREPLY[@]:-} "$n")
+ fi
+ done
+}
+
+_hg_cmd_qpop()
+{
+ if [[ "$prev" = @(-n|--name) ]]; then
+ _hg_ext_mq_queues
+ return
+ fi
+ _hg_ext_mq_patchlist qapplied
+}
+
+_hg_cmd_qpush()
+{
+ if [[ "$prev" = @(-n|--name) ]]; then
+ _hg_ext_mq_queues
+ return
+ fi
+ _hg_ext_mq_patchlist qunapplied
+}
+
+_hg_cmd_qdelete()
+{
+ _hg_ext_mq_patchlist qseries
+}
+
+_hg_cmd_qsave()
+{
+ if [[ "$prev" = @(-n|--name) ]]; then
+ _hg_ext_mq_queues
+ return
+ fi
+}
+
+_hg_cmd_strip()
+{
+ _hg_tags
+}
+
+_hg_cmd_qcommit()
+{
+ local root=$("$hg" root 2>/dev/null)
+ # this is run in a sub-shell, so we can't use _hg_status
+ local files=$(cd "$root/.hg/patches" 2>/dev/null &&
+ "$hg" status -nmar 2>/dev/null)
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$files' -- "$cur"))
+}
+
+
+# hbisect
+_hg_cmd_bisect()
+{
+ local i subcmd
+
+ # find the sub-command
+ for ((i=cmd_index+1; i<=COMP_CWORD; i++)); do
+ if [[ ${COMP_WORDS[i]} != -* ]]; then
+ if [[ ${COMP_WORDS[i-1]} != @($global_args) ]]; then
+ subcmd="${COMP_WORDS[i]}"
+ break
+ fi
+ fi
+ done
+
+ if [ -z "$subcmd" ] || [ $COMP_CWORD -eq $i ] || [ "$subcmd" = help ]; then
+ COMPREPLY=(${COMPREPLY[@]:-}
+ $(compgen -W 'bad good help init next reset' -- "$cur"))
+ return
+ fi
+
+ case "$subcmd" in
+ good|bad)
+ _hg_tags
+ ;;
+ esac
+
+ return
+}
+
+
+# patchbomb
+_hg_cmd_email()
+{
+ case "$prev" in
+ -c|--cc|-t|--to|-f|--from)
+ # we need an e-mail address. let the user provide a function
+ # to get them
+ if [ "$(type -t _hg_emails)" = function ]; then
+ local arg=to
+ if [[ "$prev" == @(-f|--from) ]]; then
+ arg=from
+ fi
+ local addresses=$(_hg_emails $arg)
+ COMPREPLY=(${COMPREPLY[@]:-}
+ $(compgen -W '$addresses' -- "$cur"))
+ fi
+ return
+ ;;
+ -m|--mbox)
+ # fallback to standard filename completion
+ return
+ ;;
+ -s|--subject)
+ # free form string
+ return
+ ;;
+ esac
+
+ _hg_tags
+ return
+}
+
+
+# gpg
+_hg_cmd_sign()
+{
+ _hg_tags
+}
new file mode 100755
--- /dev/null
+++ b/contrib/buildrpm
@@ -0,0 +1,49 @@
+#!/bin/sh
+#
+# Build a Mercurial RPM in place.
+#
+# Bryan O'Sullivan <bos@serpentine.com>
+
+root="`hg root 2>/dev/null`"
+specfile=contrib/mercurial.spec
+
+if [ -z "$root" ]; then
+ echo 'You are not inside a Mercurial repository!' 1>&2
+ exit 1
+fi
+
+rpmdir=/tmp/"`basename $root | sed 's/ /_/'`"-rpm
+
+cd "$root"
+rm -rf $rpmdir
+mkdir -p $rpmdir/RPMS
+hg clone "$root" $rpmdir/BUILD
+
+if [ ! -f $specfile ]; then
+ echo "Cannot find $specfile!" 1>&2
+ exit 1
+fi
+
+tmpspec=/tmp/`basename "$specfile"`.$$
+# Use the most recent tag as the version.
+version=`hg tags | perl -e 'while(<STDIN>){if(/^(\d\S+)/){print$1;exit}}'`
+# Compute the release number as the difference in revision numbers
+# between the tip and the most recent tag.
+release=`hg tags | perl -e 'while(<STDIN>){/^(\S+)\s+(\d+)/;if($1eq"tip"){$t=$2}else{print$t-$2+1;exit}}'`
+tip=`hg -q tip`
+
+# Beat up the spec file
+sed -e 's,^Source:.*,Source: /dev/null,' \
+ -e "s,^Version:.*,Version: $version," \
+ -e "s,^Release:.*,Release: $release," \
+ -e "s,^%prep.*,Changeset: $tip\n\0," \
+ -e 's,^%setup.*,,' \
+ $specfile > $tmpspec
+
+rpmbuild --define "_topdir $rpmdir" -bb $tmpspec
+if [ $? = 0 ]; then
+ rm -rf $tmpspec $rpmdir/BUILD
+ mv $rpmdir/RPMS/*/* $rpmdir && rm -r $rpmdir/RPMS
+ echo
+ echo "Packages are in $rpmdir"
+fi
new file mode 100755
--- /dev/null
+++ b/contrib/convert-repo
@@ -0,0 +1,289 @@
+#!/usr/bin/env python
+#
+# This is a generalized framework for converting between SCM
+# repository formats.
+#
+# In its current form, it's hardcoded to convert incrementally between
+# git and Mercurial.
+#
+# To use, you must first import the first git version into Mercurial,
+# and establish a mapping between the git commit hash and the hash in
+# Mercurial for that version. This mapping is kept in a simple text
+# file with lines like so:
+#
+# <git hash> <mercurial hash>
+#
+# To convert the rest of the repo, run:
+#
+# convert-repo <git-dir> <hg-dir> <mapfile>
+#
+# This updates the mapfile on each commit copied, so it can be
+# interrupted and can be run repeatedly to copy new commits.
+
+import sys, os, zlib, sha, time
+from mercurial import hg, ui, util
+
+class convert_git:
+ def __init__(self, path):
+ self.path = path
+
+ def getheads(self):
+ return [file(self.path + "/HEAD").read()[:-1]]
+
+ def catfile(self, rev, type):
+ if rev == "0" * 40: raise IOError()
+ fh = os.popen("GIT_DIR=%s git-cat-file %s %s 2>/dev/null" % (self.path, type, rev))
+ return fh.read()
+
+ def getfile(self, name, rev):
+ return self.catfile(rev, "blob")
+
+ def getchanges(self, version):
+ fh = os.popen("GIT_DIR=%s git-diff-tree --root -m -r %s" % (self.path, version))
+ changes = []
+ for l in fh:
+ if "\t" not in l: continue
+ m, f = l[:-1].split("\t")
+ m = m.split()
+ h = m[3]
+ p = (m[1] == "100755")
+ changes.append((f, h, p))
+ return changes
+
+ def getcommit(self, version):
+ c = self.catfile(version, "commit") # read the commit hash
+ end = c.find("\n\n")
+ message = c[end+2:]
+ l = c[:end].splitlines()
+ manifest = l[0].split()[1]
+ parents = []
+ for e in l[1:]:
+ n,v = e.split(" ", 1)
+ if n == "author":
+ p = v.split()
+ tm, tz = p[-2:]
+ author = " ".join(p[:-2])
+ if author[0] == "<": author = author[1:-1]
+ if n == "committer":
+ p = v.split()
+ tm, tz = p[-2:]
+ committer = " ".join(p[:-2])
+ if committer[0] == "<": committer = committer[1:-1]
+ message += "\ncommitter: %s\n" % v
+ if n == "parent": parents.append(v)
+
+ tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
+ tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
+ date = tm + " " + str(tz)
+ return (parents, author, date, message)
+
+ def gettags(self):
+ tags = {}
+ for f in os.listdir(self.path + "/refs/tags"):
+ try:
+ h = file(self.path + "/refs/tags/" + f).read().strip()
+                c = self.catfile(h, "tag") # read the tag object
+ h = c.splitlines()[0].split()[1]
+ tags[f] = h
+ except:
+ pass
+ return tags
+
+class convert_mercurial:
+ def __init__(self, path):
+ self.path = path
+ u = ui.ui()
+ self.repo = hg.repository(u, path)
+
+ def getheads(self):
+ h = self.repo.changelog.heads()
+ return [ hg.hex(x) for x in h ]
+
+ def putfile(self, f, e, data):
+ self.repo.wfile(f, "w").write(data)
+ if self.repo.dirstate.state(f) == '?':
+ self.repo.dirstate.update([f], "a")
+
+ util.set_exec(self.repo.wjoin(f), e)
+
+ def delfile(self, f):
+ try:
+ os.unlink(self.repo.wjoin(f))
+ #self.repo.remove([f])
+ except:
+ pass
+
+ def putcommit(self, files, parents, author, dest, text):
+ seen = {}
+ pl = []
+ for p in parents:
+ if p not in seen:
+ pl.append(p)
+ seen[p] = 1
+ parents = pl
+
+ if len(parents) < 2: parents.append("0" * 40)
+ if len(parents) < 2: parents.append("0" * 40)
+ p2 = parents.pop(0)
+
+ while parents:
+ p1 = p2
+ p2 = parents.pop(0)
+ self.repo.rawcommit(files, text, author, dest,
+ hg.bin(p1), hg.bin(p2))
+ text = "(octopus merge fixup)\n"
+ p2 = hg.hex(self.repo.changelog.tip())
+
+ return p2
+
+ def puttags(self, tags):
+ try:
+ old = self.repo.wfile(".hgtags").read()
+ oldlines = old.splitlines(1)
+ oldlines.sort()
+ except:
+ oldlines = []
+
+ k = tags.keys()
+ k.sort()
+ newlines = []
+ for tag in k:
+ newlines.append("%s %s\n" % (tags[tag], tag))
+
+ newlines.sort()
+
+ if newlines != oldlines:
+ #print "updating tags"
+ f = self.repo.wfile(".hgtags", "w")
+ f.write("".join(newlines))
+ f.close()
+ if not oldlines: self.repo.add([".hgtags"])
+ date = "%s 0" % int(time.mktime(time.gmtime()))
+ self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
+ date, self.repo.changelog.tip(), hg.nullid)
+ return hg.hex(self.repo.changelog.tip())
+
+class convert:
+ def __init__(self, source, dest, mapfile):
+ self.source = source
+ self.dest = dest
+ self.mapfile = mapfile
+ self.commitcache = {}
+
+ self.map = {}
+ try:
+ for l in file(self.mapfile):
+ sv, dv = l[:-1].split()
+ self.map[sv] = dv
+ except IOError:
+ pass
+
+ def walktree(self, heads):
+ visit = heads
+ known = {}
+ parents = {}
+ while visit:
+ n = visit.pop(0)
+ if n in known or n in self.map: continue
+ known[n] = 1
+ self.commitcache[n] = self.source.getcommit(n)
+ cp = self.commitcache[n][0]
+ for p in cp:
+ parents.setdefault(n, []).append(p)
+ visit.append(p)
+
+ return parents
+
+ def toposort(self, parents):
+ visit = parents.keys()
+ seen = {}
+ children = {}
+
+ while visit:
+ n = visit.pop(0)
+ if n in seen: continue
+ seen[n] = 1
+ pc = 0
+ if n in parents:
+ for p in parents[n]:
+ if p not in self.map: pc += 1
+ visit.append(p)
+ children.setdefault(p, []).append(n)
+ if not pc: root = n
+
+ s = []
+ removed = {}
+ visit = children.keys()
+ while visit:
+ n = visit.pop(0)
+ if n in removed: continue
+ dep = 0
+ if n in parents:
+ for p in parents[n]:
+ if p in self.map: continue
+ if p not in removed:
+ # we're still dependent
+ visit.append(n)
+ dep = 1
+ break
+
+ if not dep:
+ # all n's parents are in the list
+ removed[n] = 1
+ s.append(n)
+ if n in children:
+ for c in children[n]:
+ visit.insert(0, c)
+
+ return s
+
+ def copy(self, rev):
+ p, a, d, t = self.commitcache[rev]
+ files = self.source.getchanges(rev)
+
+ for f,v,e in files:
+ try:
+ data = self.source.getfile(f, v)
+ except IOError, inst:
+ self.dest.delfile(f)
+ else:
+ self.dest.putfile(f, e, data)
+
+ r = [self.map[v] for v in p]
+ f = [f for f,v,e in files]
+ self.map[rev] = self.dest.putcommit(f, r, a, d, t)
+ file(self.mapfile, "a").write("%s %s\n" % (rev, self.map[rev]))
+
+ def convert(self):
+ heads = self.source.getheads()
+ parents = self.walktree(heads)
+ t = self.toposort(parents)
+ t = [n for n in t if n not in self.map]
+ num = len(t)
+ c = None
+
+ for c in t:
+ num -= 1
+ desc = self.commitcache[c][3].splitlines()[0]
+ #print num, desc
+ self.copy(c)
+
+ tags = self.source.gettags()
+ ctags = {}
+ for k in tags:
+ v = tags[k]
+ if v in self.map:
+ ctags[k] = self.map[v]
+
+ if c and ctags:
+ nrev = self.dest.puttags(ctags)
+ # write another hash correspondence to override the previous
+ # one so we don't end up with extra tag heads
+ file(self.mapfile, "a").write("%s %s\n" % (c, nrev))
+
+gitpath, hgpath, mapfile = sys.argv[1:]
+if os.path.isdir(gitpath + "/.git"):
+ gitpath += "/.git"
+
+c = convert(convert_git(gitpath), convert_mercurial(hgpath), mapfile)
+c.convert()
new file mode 100755
--- /dev/null
+++ b/contrib/darcs2hg.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+# Encoding: iso-8859-1
+# vim: tw=80 ts=4 sw=4 noet
+# -----------------------------------------------------------------------------
+# Project : Basic Darcs to Mercurial conversion script
+# -----------------------------------------------------------------------------
+# Author : Sebastien Pierre <sebastien@xprima.com>
+# Creation : 24-May-2006
+# Last mod : 26-May-2006
+# History :
+# 26-May-2006 - Updated
+# 24-May-2006 - First implementation
+# -----------------------------------------------------------------------------
+
+import os, sys
+import tempfile
+import xml.dom.minidom as xml_dom
+from time import strptime, mktime
+
+DARCS_REPO = None
+HG_REPO = None
+
+USAGE = """\
+%s DARCSREPO HGREPO
+
+ Converts the given Darcs repository to a new Mercurial repository. The given
+ HGREPO must not exist, as it will be created and filled up (this will avoid
+	overwriting valuable data).
+
+""" % (os.path.basename(sys.argv[0]))
+
+# ------------------------------------------------------------------------------
+#
+# Utilities
+#
+# ------------------------------------------------------------------------------
+
+def cmd(text, path=None):
+ """Executes a command, in the given directory (if any), and returns the
+ command result as a string."""
+ cwd = None
+ if path:
+ path = os.path.abspath(path)
+ cwd = os.getcwd()
+ os.chdir(path)
+ print text
+ res = os.popen(text).read()
+ if path:
+ os.chdir(cwd)
+ return res
+
+def writefile(path, data):
+ """Writes the given data into the given file."""
+ f = file(path, "w") ; f.write(data) ; f.close()
+
+# ------------------------------------------------------------------------------
+#
+# Darcs interface
+#
+# ------------------------------------------------------------------------------
+
+def darcs_changes(darcsRepo):
+ """Gets the changes list from the given darcs repository. This returns the
+ chronological list of changes as (change name, change summary)."""
+ changes = cmd("darcs changes --reverse --xml-output", darcsRepo)
+ doc = xml_dom.parseString(changes)
+ res = []
+ for patch_node in doc.childNodes[0].childNodes:
+ name = filter(lambda n:n.nodeName == "name", patch_node.childNodes)
+ comm = filter(lambda n:n.nodeName == "comment", patch_node.childNodes)
+ if not name:continue
+ else: name = name[0].childNodes[0].data
+ if not comm: comm = ""
+ else: comm = comm[0].childNodes[0].data
+ author = patch_node.getAttribute("author")
+ date = patch_node.getAttribute("date")
+ yield author, date, name, comm
+
+def darcs_pull(hg_repo, darcs_repo, change):
+ cmd("darcs pull '%s' --all --patches='%s'" % (darcs_repo, change), hg_repo)
+
+# ------------------------------------------------------------------------------
+#
+# Mercurial interface
+#
+# ------------------------------------------------------------------------------
+
+def hg_commit( hg_repo, text, author, date ):
+ fd, tmpfile = tempfile.mkstemp(prefix="darcs2hg_")
+ writefile(tmpfile, text)
+ cmd("hg add -X _darcs", hg_repo)
+ cmd("hg remove -X _darcs --after", hg_repo)
+ cmd("hg commit -l %s -u '%s' -d '%s 0'" % (tmpfile, author, date), hg_repo)
+ os.unlink(tmpfile)
+
+# ------------------------------------------------------------------------------
+#
+# Main
+#
+# ------------------------------------------------------------------------------
+
+if __name__ == "__main__":
+ args = sys.argv[1:]
+ # We parse the arguments
+ if len(args) == 2:
+ darcs_repo = os.path.abspath(args[0])
+ hg_repo = os.path.abspath(args[1])
+ else:
+ print USAGE
+ sys.exit(-1)
+ # Initializes the target repo
+ if not os.path.isdir(darcs_repo + "/_darcs"):
+		print "No darcs directory found at: " + darcs_repo
+ sys.exit(-1)
+ if not os.path.isdir(hg_repo):
+ os.mkdir(hg_repo)
+ else:
+ print "Given HG repository must not exist. It will be created"
+ sys.exit(-1)
+ cmd("hg init '%s'" % (hg_repo))
+ cmd("darcs initialize", hg_repo)
+ # Get the changes from the Darcs repository
+ for author, date, summary, description in darcs_changes(darcs_repo):
+ text = summary + "\n" + description
+ darcs_pull(hg_repo, darcs_repo, summary)
+ epoch = int(mktime(strptime(date, '%Y%m%d%H%M%S')))
+ hg_commit(hg_repo, text, author, epoch)
+
+# EOF
+
new file mode 100644
index 0000000000000000000000000000000000000000..2872cbdff68dbb52ff89565e3cdea704100df8f9
GIT binary patch
literal 2058
zc%1Fjc~BEq90%~P)~ao(_5cyJQWYdf+i*%_uu!6*rVxR0C32r35C{+;T!8?A0D({-
z7$7&h-a6j5jym4gcwgf+<9Oc=-dB?b{llYwb^2GoJMX=j_j&u9x4Ul^7+}5}N5k9?
zMc)u$1OSS;IhxyKSm!^MfPer51_pw`V1UVFg2iGXBqRiEHXC7KVF(WoM?^#fA|oRa
z6%~b;m>6(49B{c@#Ky)VE-nu7@$pDVNI+s@B9fAlker;1l#~>trluk-Ee+}E>Bz{)
z0FTE*W@aY%d_J<WvXGsf4S_&_oSYow=H@~u6e2G#5Bd4|C@3g^NF+jGVIhi&icnl!
z46#^@l9CdXmX@NdtPJJl<*2BrfJ7oeWo0F*s;VHBN>N>1jhdPo)YjHQCX=DAt`7C}
z^^nWuXlQ6aV`C$lnwp?cDA3&8jFy%bD3waIwzi_Jtqtw%?NF&y=<Mu-TCGM`R~Ndw
zyV29r1C2%ltyT-2P6xeS4}-w~qtOVH$pnHRK$0Zp%$b9^bLV2-ym^>Ee?As0Sb&8K
z7h=((MOeIeF_tV@f~8BBV%f4~SiXEYR;*Zol`B_b)v8rky?Qm)tXYG#Yu94kx^-B;
zemyp9*no{2H)7MKP1wA7Gq!Bmf~{M(V%xTD*uH%`cI?=JojZ48*REaIy?ZzI?Ae38
zd-r1BzJ1uge?JZ!IDmr(58}|FLpXf+FpeBKf}=-|;@GicIDY&%PMkP_lP6E&)TvWA
zefl)coH>KDXV2o?xpO#w{yZ*RxPXfnFXGarOSpXbGOk>?f~!}r;@Y)qxPJXQZrr$m
zn>TOb)~#E(efu`<+_{6hckklfy?eNS|2`f(c!0jXK0JK*5RV=`!sEw}@#M)9Jbn5U
z&z?QQ^XJd;;>8QReEAZuUcJKW*RMZ%UjNU(8}gq6GQYmDO<JQ9=9x4_8lqIH6pG1W
zp}bQfoh=k<LDGX}vRL6VS}ox!N)&M>sA+OiPW#hO;HqH@3}&bL{oy{g*gsW1v7C`w
z&oog5&0Q=q&+BKB*JQ4rD;5!ANpF@+rsm5e9is01(&kD5spfU|##sFQ5GGS*FF}&J
zB<%;BQ1Te=)ZmgX!n*%=qmA@uF{h3jFtU#Lpc{PM>}<w3N7J2Vf6z&TQlV%}wvUZ2
zC9F=E{{aT#&0v`z2$NetuE6T1qG12=Zsq#I8D0*QG81WRDd1PBSkw6VR&S0E`R2Fj
zVe)8-PXd#v>qraob8}%u%}DQe)f69BPoJuGFWYgRv}$!|5c5}iyNMpuxPB)UZj;7l
znaI}hj?O8@Fgxc^Zb?q?u)*v%uJiHw&b(Knj|)Yl`r45ZXC%pvAo#!aDgHBadI`fc
zZzoa3DEnXJmWSYc;{DeB7%@N1N=v0rp^6HI4F5r9akkCmw@#lrErRAi@$jUJ<aV}>
z9E15KQAbVA>+N^ik6(T38^9Rmzz`X{7$d#7iiS|vFDbz)qW^x!fu8(q?sOZvh>&=X
zcJ_Alcei!O2>e}Rb%u*w5kcxl4)Z7=2<fCjgFhYg+2;f8CL~yWc8HH}ypHJda2Oq{
zH7^&3PES%Q*@02Yzw7sK=r&X*o;rHe(C^*TOxEwy-&wrQZTKu(d#+>*W5gFOv-Izt
ge^Q@8o9N6`XwrQtlwbw%o?9weiuDig_<xzd0OoeZ+W-In
new file mode 100644
--- /dev/null
+++ b/contrib/git-viz/git-cat-file
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+op=`basename $0 | sed -e 's/^git-//'`
+exec hgit $op "$@"
+
new file mode 100644
--- /dev/null
+++ b/contrib/git-viz/git-diff-tree
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+op=`basename $0 | sed -e 's/^git-//'`
+exec hgit $op "$@"
+
new file mode 100644
--- /dev/null
+++ b/contrib/git-viz/git-rev-list
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+op=`basename $0 | sed -e 's/^git-//'`
+exec hgit $op "$@"
+
new file mode 100644
--- /dev/null
+++ b/contrib/git-viz/git-rev-tree
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+op=`basename $0 | sed -e 's/^git-//'`
+exec hgit $op "$@"
+
new file mode 100644
--- /dev/null
+++ b/contrib/git-viz/hg-viz
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+set -e
+
+if test x"$1" != x ; then
+ cd $1
+fi
+
+if [ ! -d ".hg" ]; then
+ echo "${1:-.} is not a mercurial repository" 1>&2
+ echo "Aborting" 1>&2
+ exit 1
+fi
+if [ ! -d ".git" ]; then
+ mkdir -v ".git"
+fi
+if [ -e ".git/HEAD" ]; then
+ if [ ! -e ".git/HEAD.hg-viz-save" ]; then
+ mv -v ".git/HEAD" ".git/HEAD.hg-viz-save"
+ else
+ rm -vf ".git/HEAD"
+ fi
+fi
+hg history | head -1 | awk -F: '{print $3}' > .git/HEAD
+git-viz
+
new file mode 100755
--- /dev/null
+++ b/contrib/hg-ssh
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+#
+# Copyright 2005, 2006 by Intevation GmbH <intevation@intevation.de>
+# Author(s):
+# Thomas Arendsen Hein <thomas@intevation.de>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+"""
+hg-ssh - a wrapper for ssh access to a limited set of mercurial repos
+
+To be used in ~/.ssh/authorized_keys with the "command" option, see sshd(8):
+command="hg-ssh path/to/repo1 /path/to/repo2 ~/repo3 ~user/repo4" ssh-dss ...
+(probably together with these other useful options:
+ no-port-forwarding,no-X11-forwarding,no-agent-forwarding)
+
+This allows pull/push over ssh to the repositories given as arguments.
+
+If all your repositories are subdirectories of a common directory, you can
+allow shorter paths with:
+command="cd path/to/my/repositories && hg-ssh repo1 subdir/repo2"
+
+You can use pattern matching of your normal shell, e.g.:
+command="cd repos && hg-ssh user/thomas/* projects/{mercurial,foo}"
+"""
+
+from mercurial import commands
+
+import sys, os
+
+cwd = os.getcwd()
+allowed_paths = [os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
+ for path in sys.argv[1:]]
+orig_cmd = os.getenv('SSH_ORIGINAL_COMMAND', '?')
+
+if orig_cmd.startswith('hg -R ') and orig_cmd.endswith(' serve --stdio'):
+ path = orig_cmd[6:-14]
+ repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
+ if repo in allowed_paths:
+ commands.dispatch(['-R', repo, 'serve', '--stdio'])
+ else:
+ sys.stderr.write("Illegal repository %r\n" % repo)
+ sys.exit(-1)
+else:
+ sys.stderr.write("Illegal command %r\n" % orig_cmd)
+ sys.exit(-1)
+
new file mode 100755
--- /dev/null
+++ b/contrib/hgdiff
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+
+import os, sys, struct, stat
+import difflib
+import re
+from optparse import OptionParser
+from mercurial.bdiff import bdiff, blocks
+from mercurial.mdiff import bunidiff
+
+VERSION="0.2"
+usage = "usage: %prog [options] file1 file2"
+parser = OptionParser(usage=usage)
+
+parser.add_option("-d", "--difflib", action="store_true", default=False)
+parser.add_option('-x', '--count', default=1)
+parser.add_option('-c', '--context', type="int", default=3)
+parser.add_option('-p', '--show-c-function', action="store_true", default=False)
+parser.add_option('-w', '--ignore-all-space', action="store_true",
+ default=False)
+
+(options, args) = parser.parse_args()
+
+if not args:
+ parser.print_help()
+ sys.exit(1)
+
+# simple utility function to put all the
+# files from a directory tree into a dict
+def buildlist(names, top):
+ tlen = len(top)
+ for root, dirs, files in os.walk(top):
+ l = root[tlen + 1:]
+ for x in files:
+ p = os.path.join(root, x)
+ st = os.lstat(p)
+ if stat.S_ISREG(st.st_mode):
+ names[os.path.join(l, x)] = (st.st_dev, st.st_ino)
+
+def diff_files(file1, file2):
+ if file1 == None:
+ b = file(file2).read().splitlines(1)
+ l1 = "--- %s\n" % (file2)
+ l2 = "+++ %s\n" % (file2)
+ l3 = "@@ -0,0 +1,%d @@\n" % len(b)
+ l = [l1, l2, l3] + ["+" + e for e in b]
+ elif file2 == None:
+ a = file(file1).read().splitlines(1)
+ l1 = "--- %s\n" % (file1)
+ l2 = "+++ %s\n" % (file1)
+ l3 = "@@ -1,%d +0,0 @@\n" % len(a)
+ l = [l1, l2, l3] + ["-" + e for e in a]
+ else:
+ t1 = file(file1).read()
+ t2 = file(file2).read()
+ l1 = t1.splitlines(1)
+ l2 = t2.splitlines(1)
+ if options.difflib:
+ l = difflib.unified_diff(l1, l2, file1, file2)
+ else:
+ l = bunidiff(t1, t2, l1, l2, file1, file2, context=options.context,
+ showfunc=options.show_c_function,
+ ignorews=options.ignore_all_space)
+ for x in l:
+ if x[-1] != '\n':
+ x += "\n\ No newline at end of file\n"
+ print x,
+
+file1 = args[0]
+file2 = args[1]
+
+if os.path.isfile(file1) and os.path.isfile(file2):
+ diff_files(file1, file2)
+elif os.path.isdir(file1):
+ if not os.path.isdir(file2):
+ sys.stderr.write("file types don't match\n")
+ sys.exit(1)
+
+ d1 = {}
+ d2 = {}
+
+ buildlist(d1, file1)
+ buildlist(d2, file2)
+ keys = d1.keys()
+ keys.sort()
+ for x in keys:
+ if x not in d2:
+ f2 = None
+ else:
+ f2 = os.path.join(file2, x)
+ st1 = d1[x]
+ st2 = d2[x]
+ del d2[x]
+ if st1[0] == st2[0] and st1[1] == st2[1]:
+ sys.stderr.write("%s is a hard link\n" % x)
+ continue
+ x = os.path.join(file1, x)
+ diff_files(x, f2)
+ keys = d2.keys()
+ keys.sort()
+ for x in keys:
+ f1 = None
+ x = os.path.join(file2, x)
+ diff_files(f1, x)
+
new file mode 100755
--- /dev/null
+++ b/contrib/hgk
@@ -0,0 +1,3654 @@
+#!/usr/bin/env wish
+
+# Copyright (C) 2005 Paul Mackerras. All rights reserved.
+# This program is free software; it may be used, copied, modified
+# and distributed under the terms of the GNU General Public Licence,
+# either version 2, or (at your option) any later version.
+
+# Return the repository metadata directory: $GIT_DIR from the environment
+# if set (gitk heritage), otherwise Mercurial's ".hg".
+proc gitdir {} {
+ global env
+ if {[info exists env(GIT_DIR)]} {
+ return $env(GIT_DIR)
+ } else {
+ return ".hg"
+ }
+}
+
+# Start reading commits asynchronously. Resolves $rargs via
+# "hg debug-rev-parse", then opens a pipe to "hg debug-rev-list" and
+# arranges for getcommitlines to be called as output becomes readable.
+proc getcommits {rargs} {
+ global commits commfd phase canv mainfont env
+ global startmsecs nextupdate ncmupdate
+ global ctext maincursor textcursor leftover
+
+ # check that we can find a .git directory somewhere...
+ set gitdir [gitdir]
+ if {![file isdirectory $gitdir]} {
+ error_popup "Cannot find the git directory \"$gitdir\"."
+ exit 1
+ }
+ set commits {}
+ set phase getcommits
+ set startmsecs [clock clicks -milliseconds]
+ set nextupdate [expr $startmsecs + 100]
+ set ncmupdate 1
+ if [catch {
+ set parse_args [concat --default HEAD $rargs]
+ set parsed_args [split [eval exec hg debug-rev-parse $parse_args] "\n"]
+ }] {
+ # if git-rev-parse failed for some reason...
+ if {$rargs == {}} {
+ set rargs HEAD
+ }
+ set parsed_args $rargs
+ }
+ if [catch {
+ set commfd [open "|hg debug-rev-list --header --topo-order --parents $parsed_args" r]
+ } err] {
+ puts stderr "Error executing hg debug-rev-list: $err"
+ exit 1
+ }
+ set leftover {}
+ # non-blocking so the GUI stays responsive while commits stream in
+ fconfigure $commfd -blocking 0 -translation lf
+ fileevent $commfd readable [list getcommitlines $commfd]
+ $canv delete all
+ $canv create text 3 3 -anchor nw -text "Reading commits..." \
+ -font $mainfont -tags textitems
+ . config -cursor watch
+ settextcursor watch
+}
+
+# fileevent callback: consume available output from the rev-list pipe.
+# Records are NUL-separated; the first line of each record is the list of
+# 40-hex-digit ids (commit followed by its parents), the rest is the
+# commit header/comment, which is handed to parsecommit and drawn.
+proc getcommitlines {commfd} {
+ global commits parents cdate children
+ global commitlisted phase commitinfo nextupdate
+ global stopped redisplaying leftover
+
+ set stuff [read $commfd]
+ if {$stuff == {}} {
+ if {![eof $commfd]} return
+ # set it blocking so we wait for the process to terminate
+ fconfigure $commfd -blocking 1
+ if {![catch {close $commfd} err]} {
+ after idle finishcommits
+ return
+ }
+ if {[string range $err 0 4] == "usage"} {
+ set err \
+{Gitk: error reading commits: bad arguments to git-rev-list.
+(Note: arguments to gitk are passed to git-rev-list
+to allow selection of commits to be displayed.)}
+ } else {
+ set err "Error reading commits: $err"
+ }
+ error_popup $err
+ exit 1
+ }
+ set start 0
+ while 1 {
+ set i [string first "\0" $stuff $start]
+ if {$i < 0} {
+ # incomplete record: stash the tail for the next callback
+ append leftover [string range $stuff $start end]
+ return
+ }
+ set cmit [string range $stuff $start [expr {$i - 1}]]
+ if {$start == 0} {
+ set cmit "$leftover$cmit"
+ set leftover {}
+ }
+ set start [expr {$i + 1}]
+ set j [string first "\n" $cmit]
+ set ok 0
+ if {$j >= 0} {
+ set ids [string range $cmit 0 [expr {$j - 1}]]
+ set ok 1
+ foreach id $ids {
+ if {![regexp {^[0-9a-f]{40}$} $id]} {
+ set ok 0
+ break
+ }
+ }
+ }
+ if {!$ok} {
+ set shortcmit $cmit
+ if {[string length $shortcmit] > 80} {
+ set shortcmit "[string range $shortcmit 0 80]..."
+ }
+ error_popup "Can't parse hg debug-rev-list output: {$shortcmit}"
+ exit 1
+ }
+ set id [lindex $ids 0]
+ set olds [lrange $ids 1 end]
+ set cmit [string range $cmit [expr {$j + 1}] end]
+ lappend commits $id
+ set commitlisted($id) 1
+ parsecommit $id $cmit 1 [lrange $ids 1 end]
+ drawcommit $id
+ # keep the UI updating periodically while we stream
+ if {[clock clicks -milliseconds] >= $nextupdate} {
+ doupdate 1
+ }
+ while {$redisplaying} {
+ set redisplaying 0
+ if {$stopped == 1} {
+ set stopped 0
+ set phase "getcommits"
+ foreach id $commits {
+ drawcommit $id
+ if {$stopped} break
+ if {[clock clicks -milliseconds] >= $nextupdate} {
+ doupdate 1
+ }
+ }
+ }
+ }
+ }
+}
+
+# Process pending Tk events and schedule the next refresh. While doing so,
+# temporarily disable the pipe's fileevent (if $reading) to avoid
+# re-entering getcommitlines from inside "update".
+proc doupdate {reading} {
+ global commfd nextupdate numcommits ncmupdate
+
+ if {$reading} {
+ fileevent $commfd readable {}
+ }
+ update
+ set nextupdate [expr {[clock clicks -milliseconds] + 100}]
+ # update less often as the number of commits grows
+ if {$numcommits < 100} {
+ set ncmupdate [expr {$numcommits + 1}]
+ } elseif {$numcommits < 10000} {
+ set ncmupdate [expr {$numcommits + 10}]
+ } else {
+ set ncmupdate [expr {$numcommits + 100}]
+ }
+ if {$reading} {
+ fileevent $commfd readable [list getcommitlines $commfd]
+ }
+}
+
+# Fetch a single commit's header via "hg debug-cat-file" and parse it.
+# Silently returns if the command fails (e.g. unknown id).
+proc readcommit {id} {
+ if [catch {set contents [exec hg debug-cat-file commit $id]}] return
+ parsecommit $id $contents 0 {}
+}
+
+# Parse a git-style commit header ($contents) into the commitinfo($id)
+# list: {headline author audate committer comdate comment}. Also links
+# $id to its parents ($olds) in the parents/children bookkeeping arrays.
+# $listed is 1 when the text came from rev-list (comment not indented).
+proc parsecommit {id contents listed olds} {
+ global commitinfo children nchildren parents nparents cdate ncleft
+
+ set inhdr 1
+ set comment {}
+ set headline {}
+ set auname {}
+ set audate {}
+ set comname {}
+ set comdate {}
+ if {![info exists nchildren($id)]} {
+ set children($id) {}
+ set nchildren($id) 0
+ set ncleft($id) 0
+ }
+ set parents($id) $olds
+ set nparents($id) [llength $olds]
+ foreach p $olds {
+ if {![info exists nchildren($p)]} {
+ set children($p) [list $id]
+ set nchildren($p) 1
+ set ncleft($p) 1
+ } elseif {[lsearch -exact $children($p) $id] < 0} {
+ lappend children($p) $id
+ incr nchildren($p)
+ incr ncleft($p)
+ }
+ }
+ foreach line [split $contents "\n"] {
+ if {$inhdr} {
+ set line [split $line]
+ if {$line == {}} {
+ # blank line ends the header; the comment follows
+ set inhdr 0
+ } else {
+ set tag [lindex $line 0]
+ if {$tag == "author"} {
+ # timestamp is the second-to-last word; name is the rest
+ set x [expr {[llength $line] - 2}]
+ set audate [lindex $line $x]
+ set auname [join [lrange $line 1 [expr {$x - 1}]]]
+ } elseif {$tag == "committer"} {
+ set x [expr {[llength $line] - 2}]
+ set comdate [lindex $line $x]
+ set comname [join [lrange $line 1 [expr {$x - 1}]]]
+ }
+ }
+ } else {
+ if {$comment == {}} {
+ set headline [string trim $line]
+ } else {
+ append comment "\n"
+ }
+ if {!$listed} {
+ # git-rev-list indents the comment by 4 spaces;
+ # if we got this via git-cat-file, add the indentation
+ append comment " "
+ }
+ append comment $line
+ }
+ }
+ if {$audate != {}} {
+ set audate [clock format $audate -format "%Y-%m-%d %H:%M:%S"]
+ }
+ if {$comdate != {}} {
+ set cdate($id) $comdate
+ set comdate [clock format $comdate -format "%Y-%m-%d %H:%M:%S"]
+ }
+ set commitinfo($id) [list $headline $auname $audate \
+ $comname $comdate $comment]
+}
+
+# Populate tagids/idtags from "hg tags" output. Each line has the form
+# "<name> <rev>:<node>"; we keep the node part as the tag's commit id.
+proc readrefs {} {
+ global tagids idtags headids idheads tagcontents
+
+ set tags [exec hg tags]
+ # NOTE(review): '\n' as the split set also splits on the quote
+ # character; it appears to work because hg tag output has none.
+ set lines [split $tags '\n']
+ foreach f $lines {
+ set f [regexp -all -inline {\S+} $f]
+ set direct [lindex $f 0]
+ set full [lindex $f 1]
+ set sha [split $full ':']
+ set tag [lindex $sha 1]
+ lappend tagids($direct) $tag
+ lappend idtags($tag) $direct
+ }
+}
+
+# Recursively read ref files under <gitdir>/$base, recording each 40-hex
+# id into otherrefids/idotherrefs under the display name "$dname<file>".
+# Subdirectories listed in $excl are skipped at the top level only.
+proc readotherrefs {base dname excl} {
+ global otherrefids idotherrefs
+
+ set git [gitdir]
+ set files [glob -nocomplain -types f [file join $git $base *]]
+ foreach f $files {
+ catch {
+ set fd [open $f r]
+ set line [read $fd 40]
+ if {[regexp {^[0-9a-f]{40}} $line id]} {
+ set name "$dname[file tail $f]"
+ set otherrefids($name) $id
+ lappend idotherrefs($id) $name
+ }
+ close $fd
+ }
+ }
+ set dirs [glob -nocomplain -types d [file join $git $base *]]
+ foreach d $dirs {
+ set dir [file tail $d]
+ if {[lsearch -exact $excl $dir] >= 0} continue
+ readotherrefs [file join $base $dir] "$dname$dir/" {}
+ }
+}
+
+# Show a modal error dialog with $msg and block until it is dismissed.
+proc error_popup msg {
+ set w .error
+ toplevel $w
+ wm transient $w .
+ message $w.m -text $msg -justify center -aspect 400
+ pack $w.m -side top -fill x -padx 20 -pady 20
+ button $w.ok -text OK -command "destroy $w"
+ pack $w.ok -side bottom -fill x
+ bind $w <Visibility> "grab $w; focus $w"
+ tkwait window $w
+}
+
+# Build the entire main window: menu bar, the three graph/author/date
+# canvases, the SHA1/find toolbar, the commit-detail text pane, the file
+# list, and all key/mouse bindings and context menus.
+proc makewindow {} {
+ global canv canv2 canv3 linespc charspc ctext cflist textfont
+ global findtype findtypemenu findloc findstring fstring geometry
+ global entries sha1entry sha1string sha1but
+ global maincursor textcursor curtextcursor
+ global rowctxmenu gaudydiff mergemax
+
+ # menu bar
+ menu .bar
+ .bar add cascade -label "File" -menu .bar.file
+ menu .bar.file
+ .bar.file add command -label "Reread references" -command rereadrefs
+ .bar.file add command -label "Quit" -command doquit
+ menu .bar.help
+ .bar add cascade -label "Help" -menu .bar.help
+ .bar.help add command -label "About gitk" -command about
+ . configure -menu .bar
+
+ # default geometry when no saved ~/.gitk settings exist
+ if {![info exists geometry(canv1)]} {
+ set geometry(canv1) [expr 45 * $charspc]
+ set geometry(canv2) [expr 30 * $charspc]
+ set geometry(canv3) [expr 15 * $charspc]
+ set geometry(canvh) [expr 25 * $linespc + 4]
+ set geometry(ctextw) 80
+ set geometry(ctexth) 30
+ set geometry(cflistw) 30
+ }
+ panedwindow .ctop -orient vertical
+ if {[info exists geometry(width)]} {
+ .ctop conf -width $geometry(width) -height $geometry(height)
+ set texth [expr {$geometry(height) - $geometry(canvh) - 56}]
+ set geometry(ctexth) [expr {($texth - 8) /
+ [font metrics $textfont -linespace]}]
+ }
+ # top half: the three synchronized commit-list canvases
+ frame .ctop.top
+ frame .ctop.top.bar
+ pack .ctop.top.bar -side bottom -fill x
+ set cscroll .ctop.top.csb
+ scrollbar $cscroll -command {allcanvs yview} -highlightthickness 0
+ pack $cscroll -side right -fill y
+ panedwindow .ctop.top.clist -orient horizontal -sashpad 0 -handlesize 4
+ pack .ctop.top.clist -side top -fill both -expand 1
+ .ctop add .ctop.top
+ set canv .ctop.top.clist.canv
+ canvas $canv -height $geometry(canvh) -width $geometry(canv1) \
+ -bg white -bd 0 \
+ -yscrollincr $linespc -yscrollcommand "$cscroll set"
+ .ctop.top.clist add $canv
+ set canv2 .ctop.top.clist.canv2
+ canvas $canv2 -height $geometry(canvh) -width $geometry(canv2) \
+ -bg white -bd 0 -yscrollincr $linespc
+ .ctop.top.clist add $canv2
+ set canv3 .ctop.top.clist.canv3
+ canvas $canv3 -height $geometry(canvh) -width $geometry(canv3) \
+ -bg white -bd 0 -yscrollincr $linespc
+ .ctop.top.clist add $canv3
+ bind .ctop.top.clist <Configure> {resizeclistpanes %W %w}
+
+ # SHA1 entry and navigation buttons in the toolbar
+ set sha1entry .ctop.top.bar.sha1
+ set entries $sha1entry
+ set sha1but .ctop.top.bar.sha1label
+ button $sha1but -text "SHA1 ID: " -state disabled -relief flat \
+ -command gotocommit -width 8
+ $sha1but conf -disabledforeground [$sha1but cget -foreground]
+ pack .ctop.top.bar.sha1label -side left
+ entry $sha1entry -width 40 -font $textfont -textvariable sha1string
+ trace add variable sha1string write sha1change
+ pack $sha1entry -side left -pady 2
+
+ image create bitmap bm-left -data {
+ #define left_width 16
+ #define left_height 16
+ static unsigned char left_bits[] = {
+ 0x00, 0x00, 0xc0, 0x01, 0xe0, 0x00, 0x70, 0x00, 0x38, 0x00, 0x1c, 0x00,
+ 0x0e, 0x00, 0xff, 0x7f, 0xff, 0x7f, 0xff, 0x7f, 0x0e, 0x00, 0x1c, 0x00,
+ 0x38, 0x00, 0x70, 0x00, 0xe0, 0x00, 0xc0, 0x01};
+ }
+ image create bitmap bm-right -data {
+ #define right_width 16
+ #define right_height 16
+ static unsigned char right_bits[] = {
+ 0x00, 0x00, 0xc0, 0x01, 0x80, 0x03, 0x00, 0x07, 0x00, 0x0e, 0x00, 0x1c,
+ 0x00, 0x38, 0xff, 0x7f, 0xff, 0x7f, 0xff, 0x7f, 0x00, 0x38, 0x00, 0x1c,
+ 0x00, 0x0e, 0x00, 0x07, 0x80, 0x03, 0xc0, 0x01};
+ }
+ button .ctop.top.bar.leftbut -image bm-left -command goback \
+ -state disabled -width 26
+ pack .ctop.top.bar.leftbut -side left -fill y
+ button .ctop.top.bar.rightbut -image bm-right -command goforw \
+ -state disabled -width 26
+ pack .ctop.top.bar.rightbut -side left -fill y
+
+ # find controls
+ button .ctop.top.bar.findbut -text "Find" -command dofind
+ pack .ctop.top.bar.findbut -side left
+ set findstring {}
+ set fstring .ctop.top.bar.findstring
+ lappend entries $fstring
+ entry $fstring -width 30 -font $textfont -textvariable findstring
+ pack $fstring -side left -expand 1 -fill x
+ set findtype Exact
+ set findtypemenu [tk_optionMenu .ctop.top.bar.findtype \
+ findtype Exact IgnCase Regexp]
+ set findloc "All fields"
+ tk_optionMenu .ctop.top.bar.findloc findloc "All fields" Headline \
+ Comments Author Committer Files Pickaxe
+ pack .ctop.top.bar.findloc -side right
+ pack .ctop.top.bar.findtype -side right
+ # for making sure type==Exact whenever loc==Pickaxe
+ trace add variable findloc write findlocchange
+
+ # bottom half: commit details text pane (left) and file list (right)
+ panedwindow .ctop.cdet -orient horizontal
+ .ctop add .ctop.cdet
+ frame .ctop.cdet.left
+ set ctext .ctop.cdet.left.ctext
+ text $ctext -bg white -state disabled -font $textfont \
+ -width $geometry(ctextw) -height $geometry(ctexth) \
+ -yscrollcommand ".ctop.cdet.left.sb set" \
+ -xscrollcommand ".ctop.cdet.left.hb set" -wrap none
+ scrollbar .ctop.cdet.left.sb -command "$ctext yview"
+ scrollbar .ctop.cdet.left.hb -orient horizontal -command "$ctext xview"
+ pack .ctop.cdet.left.sb -side right -fill y
+ pack .ctop.cdet.left.hb -side bottom -fill x
+ pack $ctext -side left -fill both -expand 1
+ .ctop.cdet add .ctop.cdet.left
+
+ # text tags used to colour diff output
+ $ctext tag conf filesep -font [concat $textfont bold] -back "#aaaaaa"
+ if {$gaudydiff} {
+ $ctext tag conf hunksep -back blue -fore white
+ $ctext tag conf d0 -back "#ff8080"
+ $ctext tag conf d1 -back green
+ } else {
+ $ctext tag conf hunksep -fore blue
+ $ctext tag conf d0 -fore red
+ $ctext tag conf d1 -fore "#00a000"
+ $ctext tag conf m0 -fore red
+ $ctext tag conf m1 -fore blue
+ $ctext tag conf m2 -fore green
+ $ctext tag conf m3 -fore purple
+ $ctext tag conf m4 -fore brown
+ $ctext tag conf mmax -fore darkgrey
+ set mergemax 5
+ $ctext tag conf mresult -font [concat $textfont bold]
+ $ctext tag conf msep -font [concat $textfont bold]
+ $ctext tag conf found -back yellow
+ }
+
+ frame .ctop.cdet.right
+ set cflist .ctop.cdet.right.cfiles
+ listbox $cflist -bg white -selectmode extended -width $geometry(cflistw) \
+ -yscrollcommand ".ctop.cdet.right.sb set"
+ scrollbar .ctop.cdet.right.sb -command "$cflist yview"
+ pack .ctop.cdet.right.sb -side right -fill y
+ pack $cflist -side left -fill both -expand 1
+ .ctop.cdet add .ctop.cdet.right
+ bind .ctop.cdet <Configure> {resizecdetpanes %W %w}
+
+ pack .ctop -side top -fill both -expand 1
+
+ # global key and mouse bindings
+ bindall <1> {selcanvline %W %x %y}
+ #bindall <B1-Motion> {selcanvline %W %x %y}
+ bindall <ButtonRelease-4> "allcanvs yview scroll -5 units"
+ bindall <ButtonRelease-5> "allcanvs yview scroll 5 units"
+ bindall <2> "allcanvs scan mark 0 %y"
+ bindall <B2-Motion> "allcanvs scan dragto 0 %y"
+ bind . <Key-Up> "selnextline -1"
+ bind . <Key-Down> "selnextline 1"
+ bind . <Key-Prior> "allcanvs yview scroll -1 pages"
+ bind . <Key-Next> "allcanvs yview scroll 1 pages"
+ bindkey <Key-Delete> "$ctext yview scroll -1 pages"
+ bindkey <Key-BackSpace> "$ctext yview scroll -1 pages"
+ bindkey <Key-space> "$ctext yview scroll 1 pages"
+ bindkey p "selnextline -1"
+ bindkey n "selnextline 1"
+ bindkey b "$ctext yview scroll -1 pages"
+ bindkey d "$ctext yview scroll 18 units"
+ bindkey u "$ctext yview scroll -18 units"
+ bindkey / {findnext 1}
+ bindkey <Key-Return> {findnext 0}
+ bindkey ? findprev
+ bindkey f nextfile
+ bind . <Control-q> doquit
+ bind . <Control-w> doquit
+ bind . <Control-f> dofind
+ bind . <Control-g> {findnext 0}
+ bind . <Control-r> findprev
+ bind . <Control-equal> {incrfont 1}
+ bind . <Control-KP_Add> {incrfont 1}
+ bind . <Control-minus> {incrfont -1}
+ bind . <Control-KP_Subtract> {incrfont -1}
+ bind $cflist <<ListboxSelect>> listboxsel
+ bind . <Destroy> {savestuff %W}
+ bind . <Button-1> "click %W"
+ bind $fstring <Key-Return> dofind
+ bind $sha1entry <Key-Return> gotocommit
+ bind $sha1entry <<PasteSelection>> clearsha1
+
+ set maincursor [. cget -cursor]
+ set textcursor [$ctext cget -cursor]
+ set curtextcursor $textcursor
+
+ # right-click context menu on a commit row
+ set rowctxmenu .rowctxmenu
+ menu $rowctxmenu -tearoff 0
+ $rowctxmenu add command -label "Diff this -> selected" \
+ -command {diffvssel 0}
+ $rowctxmenu add command -label "Diff selected -> this" \
+ -command {diffvssel 1}
+ $rowctxmenu add command -label "Make patch" -command mkpatch
+ $rowctxmenu add command -label "Create tag" -command mktag
+ $rowctxmenu add command -label "Write commit to file" -command writecommit
+}
+
+# when we make a key binding for the toplevel, make sure
+# it doesn't get triggered when that key is pressed in the
+# find string entry widget.
+proc bindkey {ev script} {
+ global entries
+ bind . $ev $script
+ # restore the Entry class behaviour (falling back to <Key>) and stop
+ # the event from propagating to the toplevel binding
+ set escript [bind Entry $ev]
+ if {$escript == {}} {
+ set escript [bind Entry <Key>]
+ }
+ foreach e $entries {
+ bind $e $ev "$escript; break"
+ }
+}
+
+# set the focus back to the toplevel for any click outside
+# the entry widgets
+proc click {w} {
+ global entries
+ foreach e $entries {
+ if {$w == $e} return
+ }
+ focus .
+}
+
+# Persist fonts, options and window geometry to ~/.gitk on exit.
+# Writes to ~/.gitk-new first, then renames, so a failure mid-write
+# cannot corrupt an existing settings file. Runs at most once.
+proc savestuff {w} {
+ global canv canv2 canv3 ctext cflist mainfont textfont
+ global stuffsaved findmergefiles gaudydiff maxgraphpct
+ global maxwidth
+
+ if {$stuffsaved} return
+ if {![winfo viewable .]} return
+ catch {
+ set f [open "~/.gitk-new" w]
+ puts $f [list set mainfont $mainfont]
+ puts $f [list set textfont $textfont]
+ puts $f [list set findmergefiles $findmergefiles]
+ puts $f [list set gaudydiff $gaudydiff]
+ puts $f [list set maxgraphpct $maxgraphpct]
+ puts $f [list set maxwidth $maxwidth]
+ puts $f "set geometry(width) [winfo width .ctop]"
+ puts $f "set geometry(height) [winfo height .ctop]"
+ puts $f "set geometry(canv1) [expr [winfo width $canv]-2]"
+ puts $f "set geometry(canv2) [expr [winfo width $canv2]-2]"
+ puts $f "set geometry(canv3) [expr [winfo width $canv3]-2]"
+ puts $f "set geometry(canvh) [expr [winfo height $canv]-2]"
+ set wid [expr {([winfo width $ctext] - 8) \
+ / [font measure $textfont "0"]}]
+ puts $f "set geometry(ctextw) $wid"
+ set wid [expr {([winfo width $cflist] - 11) \
+ / [font measure [$cflist cget -font] "0"]}]
+ puts $f "set geometry(cflistw) $wid"
+ close $f
+ file rename -force "~/.gitk-new" "~/.gitk"
+ }
+ set stuffsaved 1
+}
+
+# <Configure> handler for the three-pane commit list: rescale both sash
+# positions proportionally to the new width $w, with minimum widths so
+# no pane collapses entirely.
+proc resizeclistpanes {win w} {
+ global oldwidth
+ if [info exists oldwidth($win)] {
+ set s0 [$win sash coord 0]
+ set s1 [$win sash coord 1]
+ if {$w < 60} {
+ set sash0 [expr {int($w/2 - 2)}]
+ set sash1 [expr {int($w*5/6 - 2)}]
+ } else {
+ set factor [expr {1.0 * $w / $oldwidth($win)}]
+ set sash0 [expr {int($factor * [lindex $s0 0])}]
+ set sash1 [expr {int($factor * [lindex $s1 0])}]
+ if {$sash0 < 30} {
+ set sash0 30
+ }
+ if {$sash1 < $sash0 + 20} {
+ set sash1 [expr $sash0 + 20]
+ }
+ if {$sash1 > $w - 10} {
+ set sash1 [expr $w - 10]
+ if {$sash0 > $sash1 - 20} {
+ set sash0 [expr $sash1 - 20]
+ }
+ }
+ }
+ $win sash place 0 $sash0 [lindex $s0 1]
+ $win sash place 1 $sash1 [lindex $s1 1]
+ }
+ set oldwidth($win) $w
+}
+
+# <Configure> handler for the two-pane detail area: same proportional
+# sash rescaling as resizeclistpanes, but with a single sash.
+proc resizecdetpanes {win w} {
+ global oldwidth
+ if [info exists oldwidth($win)] {
+ set s0 [$win sash coord 0]
+ if {$w < 60} {
+ set sash0 [expr {int($w*3/4 - 2)}]
+ } else {
+ set factor [expr {1.0 * $w / $oldwidth($win)}]
+ set sash0 [expr {int($factor * [lindex $s0 0])}]
+ if {$sash0 < 45} {
+ set sash0 45
+ }
+ if {$sash0 > $w - 15} {
+ set sash0 [expr $w - 15]
+ }
+ }
+ $win sash place 0 $sash0 [lindex $s0 1]
+ }
+ set oldwidth($win) $w
+}
+
+# Apply the same canvas subcommand to all three commit-list canvases.
+proc allcanvs args {
+ global canv canv2 canv3
+ eval $canv $args
+ eval $canv2 $args
+ eval $canv3 $args
+}
+
+# Bind $event to $action on all three commit-list canvases.
+proc bindall {event action} {
+ global canv canv2 canv3
+ bind $canv $event $action
+ bind $canv2 $event $action
+ bind $canv3 $event $action
+}
+
+# Show (or raise, if already open) the non-modal About dialog.
+proc about {} {
+ set w .about
+ if {[winfo exists $w]} {
+ raise $w
+ return
+ }
+ toplevel $w
+ wm title $w "About gitk"
+ message $w.m -text {
+Gitk version 1.2
+
+Copyright 2005 Paul Mackerras
+
+Use and redistribute under the terms of the GNU General Public License} \
+ -justify center -aspect 400
+ pack $w.m -side top -fill x -padx 20 -pady 20
+ button $w.ok -text Close -command "destroy $w"
+ pack $w.ok -side bottom
+}
+
+# Choose a drawing colour for commit $id. A commit on a simple chain
+# inherits its only child's colour; otherwise colours already used by
+# crossing or adjacent lines are collected as "bad" and avoided so that
+# neighbouring graph lines remain distinguishable.
+proc assigncolor {id} {
+ global commitinfo colormap commcolors colors nextcolor
+ global parents nparents children nchildren
+ global cornercrossings crossings
+
+ if [info exists colormap($id)] return
+ set ncolors [llength $colors]
+ if {$nparents($id) <= 1 && $nchildren($id) == 1} {
+ set child [lindex $children($id) 0]
+ if {[info exists colormap($child)]
+ && $nparents($child) == 1} {
+ set colormap($id) $colormap($child)
+ return
+ }
+ }
+ set badcolors {}
+ if {[info exists cornercrossings($id)]} {
+ foreach x $cornercrossings($id) {
+ if {[info exists colormap($x)]
+ && [lsearch -exact $badcolors $colormap($x)] < 0} {
+ lappend badcolors $colormap($x)
+ }
+ }
+ if {[llength $badcolors] >= $ncolors} {
+ set badcolors {}
+ }
+ }
+ set origbad $badcolors
+ if {[llength $badcolors] < $ncolors - 1} {
+ if {[info exists crossings($id)]} {
+ foreach x $crossings($id) {
+ if {[info exists colormap($x)]
+ && [lsearch -exact $badcolors $colormap($x)] < 0} {
+ lappend badcolors $colormap($x)
+ }
+ }
+ if {[llength $badcolors] >= $ncolors} {
+ set badcolors $origbad
+ }
+ }
+ set origbad $badcolors
+ }
+ if {[llength $badcolors] < $ncolors - 1} {
+ foreach child $children($id) {
+ if {[info exists colormap($child)]
+ && [lsearch -exact $badcolors $colormap($child)] < 0} {
+ lappend badcolors $colormap($child)
+ }
+ if {[info exists parents($child)]} {
+ foreach p $parents($child) {
+ if {[info exists colormap($p)]
+ && [lsearch -exact $badcolors $colormap($p)] < 0} {
+ lappend badcolors $colormap($p)
+ }
+ }
+ }
+ }
+ if {[llength $badcolors] >= $ncolors} {
+ set badcolors $origbad
+ }
+ }
+ for {set i 0} {$i <= $ncolors} {incr i} {
+ set c [lindex $colors $nextcolor]
+ if {[incr nextcolor] >= $ncolors} {
+ set nextcolor 0
+ }
+ # BUGFIX: lsearch returns -1 (truthy) when $c is NOT in $badcolors
+ # and its index (0 is falsy, >=1 truthy) when it is, so using the
+ # raw result as a boolean accepted bad colours and rejected good
+ # ones. Compare against -1 explicitly: stop at the first colour
+ # that is absent from the bad list.
+ if {[lsearch -exact $badcolors $c] < 0} break
+ }
+ set colormap($id) $c
+}
+
+# Reset all per-redraw graph state: clear the canvases, line bookkeeping
+# and display list, and restore each node's remaining-children count.
+proc initgraph {} {
+ global canvy canvy0 lineno numcommits nextcolor linespc
+ global mainline mainlinearrow sidelines
+ global nchildren ncleft
+ global displist nhyperspace
+
+ allcanvs delete all
+ set nextcolor 0
+ set canvy $canvy0
+ set lineno -1
+ set numcommits 0
+ catch {unset mainline}
+ catch {unset mainlinearrow}
+ catch {unset sidelines}
+ foreach id [array names nchildren] {
+ set ncleft($id) $nchildren($id)
+ }
+ set displist {}
+ set nhyperspace 0
+}
+
+# Attach hover/click handlers to canvas item $t belonging to commit $id.
+proc bindline {t id} {
+ global canv
+
+ $canv bind $t <Enter> "lineenter %x %y $id"
+ $canv bind $t <Motion> "linemotion %x %y $id"
+ $canv bind $t <Leave> "lineleave $id"
+ $canv bind $t <Button-1> "lineclick %x %y $id 1"
+}
+
+# Redraw the graph lines for commit $id: its main line plus any side
+# lines. $xtra widens the stroke (used for hover highlighting).
+proc drawlines {id xtra} {
+ global mainline mainlinearrow sidelines lthickness colormap canv
+
+ $canv delete lines.$id
+ if {[info exists mainline($id)]} {
+ set t [$canv create line $mainline($id) \
+ -width [expr {($xtra + 1) * $lthickness}] \
+ -fill $colormap($id) -tags lines.$id \
+ -arrow $mainlinearrow($id)]
+ $canv lower $t
+ bindline $t $id
+ }
+ if {[info exists sidelines($id)]} {
+ # each sideline entry is {coords thickness arrow}
+ foreach ls $sidelines($id) {
+ set coords [lindex $ls 0]
+ set thick [lindex $ls 1]
+ set arrow [lindex $ls 2]
+ set t [$canv create line $coords -fill $colormap($id) \
+ -width [expr {($thick + $xtra) * $lthickness}] \
+ -arrow $arrow -tags lines.$id]
+ $canv lower $t
+ bindline $t $id
+ }
+ }
+}
+
+# level here is an index in displist
+# Draw one commit row: the node circle on the graph canvas plus the
+# headline, author and date texts on the three canvases. Also records
+# per-row state (lineid, idpos, currentparents, ...) used by later rows.
+proc drawcommitline {level} {
+ global parents children nparents displist
+ global canv canv2 canv3 mainfont namefont canvy linespc
+ global lineid linehtag linentag linedtag commitinfo
+ global colormap numcommits currentparents dupparents
+ global idtags idline idheads idotherrefs
+ global lineno lthickness mainline mainlinearrow sidelines
+ global commitlisted rowtextx idpos lastuse displist
+ global oldnlines olddlevel olddisplist
+
+ incr numcommits
+ incr lineno
+ set id [lindex $displist $level]
+ set lastuse($id) $lineno
+ set lineid($lineno) $id
+ set idline($id) $lineno
+ # listed commits get a blue node, unlisted (boundary) ones white
+ set ofill [expr {[info exists commitlisted($id)]? "blue": "white"}]
+ if {![info exists commitinfo($id)]} {
+ readcommit $id
+ if {![info exists commitinfo($id)]} {
+ set commitinfo($id) {"No commit information available"}
+ set nparents($id) 0
+ }
+ }
+ assigncolor $id
+ set currentparents {}
+ set dupparents {}
+ if {[info exists commitlisted($id)] && [info exists parents($id)]} {
+ foreach p $parents($id) {
+ if {[lsearch -exact $currentparents $p] < 0} {
+ lappend currentparents $p
+ } else {
+ # remember that this parent was listed twice
+ lappend dupparents $p
+ }
+ }
+ }
+ set x [xcoord $level $level $lineno]
+ set y1 $canvy
+ set canvy [expr $canvy + $linespc]
+ allcanvs conf -scrollregion \
+ [list 0 0 0 [expr $y1 + 0.5 * $linespc + 2]]
+ if {[info exists mainline($id)]} {
+ lappend mainline($id) $x $y1
+ if {$mainlinearrow($id) ne "none"} {
+ set mainline($id) [trimdiagstart $mainline($id)]
+ }
+ }
+ drawlines $id 0
+ # the commit node itself
+ set orad [expr {$linespc / 3}]
+ set t [$canv create oval [expr $x - $orad] [expr $y1 - $orad] \
+ [expr $x + $orad - 1] [expr $y1 + $orad - 1] \
+ -fill $ofill -outline black -width 1]
+ $canv raise $t
+ $canv bind $t <1> {selcanvline {} %x %y}
+ set xt [xcoord [llength $displist] $level $lineno]
+ if {[llength $currentparents] > 2} {
+ set xt [expr {$xt + ([llength $currentparents] - 2) * $linespc}]
+ }
+ set rowtextx($lineno) $xt
+ set idpos($id) [list $x $xt $y1]
+ if {[info exists idtags($id)] || [info exists idheads($id)]
+ || [info exists idotherrefs($id)]} {
+ set xt [drawtags $id $x $xt $y1]
+ }
+ set headline [lindex $commitinfo($id) 0]
+ set name [lindex $commitinfo($id) 1]
+ set date [lindex $commitinfo($id) 2]
+ set linehtag($lineno) [$canv create text $xt $y1 -anchor w \
+ -text $headline -font $mainfont ]
+ $canv bind $linehtag($lineno) <Button-3> "rowmenu %X %Y $id"
+ set linentag($lineno) [$canv2 create text 3 $y1 -anchor w \
+ -text $name -font $namefont]
+ set linedtag($lineno) [$canv3 create text 3 $y1 -anchor w \
+ -text $date -font $mainfont]
+
+ set olddlevel $level
+ set olddisplist $displist
+ set oldnlines [llength $displist]
+}
+
+# Draw tag (yellow pointed box), head (green box) and other-ref markers
+# for commit $id starting at x-position $xt on row y $y1. Returns the
+# new x position where the headline text should begin.
+proc drawtags {id x xt y1} {
+ global idtags idheads idotherrefs
+ global linespc lthickness
+ global canv mainfont idline rowtextx
+
+ set marks {}
+ set ntags 0
+ set nheads 0
+ if {[info exists idtags($id)]} {
+ set marks $idtags($id)
+ set ntags [llength $marks]
+ }
+ if {[info exists idheads($id)]} {
+ set marks [concat $marks $idheads($id)]
+ set nheads [llength $idheads($id)]
+ }
+ if {[info exists idotherrefs($id)]} {
+ set marks [concat $marks $idotherrefs($id)]
+ }
+ if {$marks eq {}} {
+ return $xt
+ }
+
+ # first pass: measure each label and compute its x position
+ set delta [expr {int(0.5 * ($linespc - $lthickness))}]
+ set yt [expr $y1 - 0.5 * $linespc]
+ set yb [expr $yt + $linespc - 1]
+ set xvals {}
+ set wvals {}
+ foreach tag $marks {
+ set wid [font measure $mainfont $tag]
+ lappend xvals $xt
+ lappend wvals $wid
+ set xt [expr {$xt + $delta + $wid + $lthickness + $linespc}]
+ }
+ set t [$canv create line $x $y1 [lindex $xvals end] $y1 \
+ -width $lthickness -fill black -tags tag.$id]
+ $canv lower $t
+ # second pass: draw the shapes and label text
+ foreach tag $marks x $xvals wid $wvals {
+ set xl [expr $x + $delta]
+ set xr [expr $x + $delta + $wid + $lthickness]
+ if {[incr ntags -1] >= 0} {
+ # draw a tag
+ set t [$canv create polygon $x [expr $yt + $delta] $xl $yt \
+ $xr $yt $xr $yb $xl $yb $x [expr $yb - $delta] \
+ -width 1 -outline black -fill yellow -tags tag.$id]
+ $canv bind $t <1> [list showtag $tag 1]
+ set rowtextx($idline($id)) [expr {$xr + $linespc}]
+ } else {
+ # draw a head or other ref
+ if {[incr nheads -1] >= 0} {
+ set col green
+ } else {
+ set col "#ddddff"
+ }
+ set xl [expr $xl - $delta/2]
+ $canv create polygon $x $yt $xr $yt $xr $yb $x $yb \
+ -width 1 -outline black -fill $col -tags tag.$id
+ }
+ set t [$canv create text $xl $y1 -anchor w -text $tag \
+ -font $mainfont -tags tag.$id]
+ if {$ntags >= 0} {
+ $canv bind $t <1> [list showtag $tag 1]
+ }
+ }
+ return $xt
+}
+
+# Record that commit $id's line crosses the lines of the commits between
+# display columns $lo and $hi (exclusive); the one at column $corner is a
+# corner crossing. Both sides of each crossing are recorded, and later
+# consulted by assigncolor to keep crossing lines in different colours.
+proc notecrossings {id lo hi corner} {
+ global olddisplist crossings cornercrossings
+
+ for {set i $lo} {[incr i] < $hi} {} {
+ set p [lindex $olddisplist $i]
+ if {$p == {}} continue
+ if {$i == $corner} {
+ if {![info exists cornercrossings($id)]
+ || [lsearch -exact $cornercrossings($id) $p] < 0} {
+ lappend cornercrossings($id) $p
+ }
+ if {![info exists cornercrossings($p)]
+ || [lsearch -exact $cornercrossings($p) $id] < 0} {
+ lappend cornercrossings($p) $id
+ }
+ } else {
+ if {![info exists crossings($id)]
+ || [lsearch -exact $crossings($id) $p] < 0} {
+ lappend crossings($id) $p
+ }
+ if {![info exists crossings($p)]
+ || [lsearch -exact $crossings($p) $id] < 0} {
+ lappend crossings($p) $id
+ }
+ }
+ }
+}
+
+# Canvas x coordinate of display column $i on row $ln, where $level is
+# the column of the commit being drawn; columns past $level use the wider
+# xspc2 spacing.
+proc xcoord {i level ln} {
+ global canvx0 xspc1 xspc2
+
+ set x [expr {$canvx0 + $i * $xspc1($ln)}]
+ if {$i > 0 && $i == $level} {
+ set x [expr {$x + 0.5 * ($xspc2 - $xspc1($ln))}]
+ } elseif {$i > $level} {
+ set x [expr {$x + $xspc2 - $xspc1($ln)}]
+ }
+ return $x
+}
+
+# it seems Tk can't draw arrows on the end of diagonal line segments...
+# Drop trailing diagonal segments (neither purely horizontal nor purely
+# vertical) from a coordinate list so the arrowhead lands on a straight one.
+proc trimdiagend {line} {
+ while {[llength $line] > 4} {
+ set x1 [lindex $line end-3]
+ set y1 [lindex $line end-2]
+ set x2 [lindex $line end-1]
+ set y2 [lindex $line end]
+ if {($x1 == $x2) != ($y1 == $y2)} break
+ set line [lreplace $line end-1 end]
+ }
+ return $line
+}
+
+# Mirror of trimdiagend: drop leading diagonal segments so an arrowhead
+# at the start of the line sits on a straight segment.
+proc trimdiagstart {line} {
+ while {[llength $line] > 4} {
+ set x1 [lindex $line 0]
+ set y1 [lindex $line 1]
+ set x2 [lindex $line 2]
+ set y2 [lindex $line 3]
+ if {($x1 == $x2) != ($y1 == $y2)} break
+ set line [lreplace $line 0 1]
+ }
+ return $line
+}
+
+proc drawslants {id needonscreen nohs} {
+ global canv mainline mainlinearrow sidelines
+ global canvx0 canvy xspc1 xspc2 lthickness
+ global currentparents dupparents
+ global lthickness linespc canvy colormap lineno geometry
+ global maxgraphpct maxwidth
+ global displist onscreen lastuse
+ global parents commitlisted
+ global oldnlines olddlevel olddisplist
+ global nhyperspace numcommits nnewparents
+
+ if {$lineno < 0} {
+ lappend displist $id
+ set onscreen($id) 1
+ return 0
+ }
+
+ set y1 [expr {$canvy - $linespc}]
+ set y2 $canvy
+
+ # work out what we need to get back on screen
+ set reins {}
+ if {$onscreen($id) < 0} {
+ # next to do isn't displayed, better get it on screen...
+ lappend reins [list $id 0]
+ }
+ # make sure all the previous commits's parents are on the screen
+ foreach p $currentparents {
+ if {$onscreen($p) < 0} {
+ lappend reins [list $p 0]
+ }
+ }
+ # bring back anything requested by caller
+ if {$needonscreen ne {}} {
+ lappend reins $needonscreen
+ }
+
+ # try the shortcut
+ if {$currentparents == $id && $onscreen($id) == 0 && $reins eq {}} {
+ set dlevel $olddlevel
+ set x [xcoord $dlevel $dlevel $lineno]
+ set mainline($id) [list $x $y1]
+ set mainlinearrow($id) none
+ set lastuse($id) $lineno
+ set displist [lreplace $displist $dlevel $dlevel $id]
+ set onscreen($id) 1
+ set xspc1([expr {$lineno + 1}]) $xspc1($lineno)
+ return $dlevel
+ }
+
+ # update displist
+ set displist [lreplace $displist $olddlevel $olddlevel]
+ set j $olddlevel
+ foreach p $currentparents {
+ set lastuse($p) $lineno
+ if {$onscreen($p) == 0} {
+ set displist [linsert $displist $j $p]
+ set onscreen($p) 1
+ incr j
+ }
+ }
+ if {$onscreen($id) == 0} {
+ lappend displist $id
+ set onscreen($id) 1
+ }
+
+ # remove the null entry if present
+ set nullentry [lsearch -exact $displist {}]
+ if {$nullentry >= 0} {
+ set displist [lreplace $displist $nullentry $nullentry]
+ }
+
+ # bring back the ones we need now (if we did it earlier
+ # it would change displist and invalidate olddlevel)
+ foreach pi $reins {
+ # test again in case of duplicates in reins
+ set p [lindex $pi 0]
+ if {$onscreen($p) < 0} {
+ set onscreen($p) 1
+ set lastuse($p) $lineno
+ set displist [linsert $displist [lindex $pi 1] $p]
+ incr nhyperspace -1
+ }
+ }
+
+ set lastuse($id) $lineno
+
+ # see if we need to make any lines jump off into hyperspace
+ set displ [llength $displist]
+ if {$displ > $maxwidth} {
+ set ages {}
+ foreach x $displist {
+ lappend ages [list $lastuse($x) $x]
+ }
+ set ages [lsort -integer -index 0 $ages]
+ set k 0
+ while {$displ > $maxwidth} {
+ set use [lindex $ages $k 0]
+ set victim [lindex $ages $k 1]
+ if {$use >= $lineno - 5} break
+ incr k
+ if {[lsearch -exact $nohs $victim] >= 0} continue
+ set i [lsearch -exact $displist $victim]
+ set displist [lreplace $displist $i $i]
+ set onscreen($victim) -1
+ incr nhyperspace
+ incr displ -1
+ if {$i < $nullentry} {
+ incr nullentry -1
+ }
+ set x [lindex $mainline($victim) end-1]
+ lappend mainline($victim) $x $y1
+ set line [trimdiagend $mainline($victim)]
+ set arrow "last"
+ if {$mainlinearrow($victim) ne "none"} {
+ set line [trimdiagstart $line]
+ set arrow "both"
+ }
+ lappend sidelines($victim) [list $line 1 $arrow]
+ unset mainline($victim)
+ }
+ }
+
+ set dlevel [lsearch -exact $displist $id]
+
+ # If we are reducing, put in a null entry
+ if {$displ < $oldnlines} {
+ # does the next line look like a merge?
+ # i.e. does it have > 1 new parent?
+ if {$nnewparents($id) > 1} {
+ set i [expr {$dlevel + 1}]
+ } elseif {$nnewparents([lindex $olddisplist $olddlevel]) == 0} {
+ set i $olddlevel
+ if {$nullentry >= 0 && $nullentry < $i} {
+ incr i -1
+ }
+ } elseif {$nullentry >= 0} {
+ set i $nullentry
+ while {$i < $displ
+ && [lindex $olddisplist $i] == [lindex $displist $i]} {
+ incr i
+ }
+ } else {
+ set i $olddlevel
+ if {$dlevel >= $i} {
+ incr i
+ }
+ }
+ if {$i < $displ} {
+ set displist [linsert $displist $i {}]
+ incr displ
+ if {$dlevel >= $i} {
+ incr dlevel
+ }
+ }
+ }
+
+ # decide on the line spacing for the next line
+ set lj [expr {$lineno + 1}]
+ set maxw [expr {$maxgraphpct * $geometry(canv1) / 100}]
+ if {$displ <= 1 || $canvx0 + $displ * $xspc2 <= $maxw} {
+ set xspc1($lj) $xspc2
+ } else {
+ set xspc1($lj) [expr {($maxw - $canvx0 - $xspc2) / ($displ - 1)}]
+ if {$xspc1($lj) < $lthickness} {
+ set xspc1($lj) $lthickness
+ }
+ }
+
+ foreach idi $reins {
+ set id [lindex $idi 0]
+ set j [lsearch -exact $displist $id]
+ set xj [xcoord $j $dlevel $lj]
+ set mainline($id) [list $xj $y2]
+ set mainlinearrow($id) first
+ }
+
+ set i -1
+ foreach id $olddisplist {
+ incr i
+ if {$id == {}} continue
+ if {$onscreen($id) <= 0} continue
+ set xi [xcoord $i $olddlevel $lineno]
+ if {$i == $olddlevel} {
+ foreach p $currentparents {
+ set j [lsearch -exact $displist $p]
+ set coords [list $xi $y1]
+ set xj [xcoord $j $dlevel $lj]
+ if {$xj < $xi - $linespc} {
+ lappend coords [expr {$xj + $linespc}] $y1
+ notecrossings $p $j $i [expr {$j + 1}]
+ } elseif {$xj > $xi + $linespc} {
+ lappend coords [expr {$xj - $linespc}] $y1
+ notecrossings $p $i $j [expr {$j - 1}]
+ }
+ if {[lsearch -exact $dupparents $p] >= 0} {
+ # draw a double-width line to indicate the doubled parent
+ lappend coords $xj $y2
+ lappend sidelines($p) [list $coords 2 none]
+ if {![info exists mainline($p)]} {
+ set mainline($p) [list $xj $y2]
+ set mainlinearrow($p) none
+ }
+ } else {
+ # normal case, no parent duplicated
+ set yb $y2
+ set dx [expr {abs($xi - $xj)}]
+ if {0 && $dx < $linespc} {
+ set yb [expr {$y1 + $dx}]
+ }
+ if {![info exists mainline($p)]} {
+ if {$xi != $xj} {
+ lappend coords $xj $yb
+ }
+ set mainline($p) $coords
+ set mainlinearrow($p) none
+ } else {
+ lappend coords $xj $yb
+ if {$yb < $y2} {
+ lappend coords $xj $y2
+ }
+ lappend sidelines($p) [list $coords 1 none]
+ }
+ }
+ }
+ } else {
+ set j $i
+ if {[lindex $displist $i] != $id} {
+ set j [lsearch -exact $displist $id]
+ }
+ if {$j != $i || $xspc1($lineno) != $xspc1($lj)
+ || ($olddlevel < $i && $i < $dlevel)
+ || ($dlevel < $i && $i < $olddlevel)} {
+ set xj [xcoord $j $dlevel $lj]
+ lappend mainline($id) $xi $y1 $xj $y2
+ }
+ }
+ }
+ return $dlevel
+}
+
+# Locate x within llist, where each element may itself be a list.
+# Returns the index of the first element that either is x or
+# contains x as a member, or -1 when nothing matches.
+proc llsearch {llist x} {
+    for {set idx 0} {$idx < [llength $llist]} {incr idx} {
+        set elem [lindex $llist $idx]
+        if {$elem == $x} {
+            return $idx
+        }
+        if {[lsearch -exact $elem $x] >= 0} {
+            return $idx
+        }
+    }
+    return -1
+}
+
+# Draw as many undrawn commits from displayorder as possible.
+# reading is 1 while commits are still arriving from hg, 0 once the
+# full list is known.  For each commit this computes which lines may
+# need to come back from "hyperspace" (lines hidden because the graph
+# exceeded maxwidth) and which must not be sent there, then delegates
+# the actual drawing to drawslants/drawcommitline.
+proc drawmore {reading} {
+    global displayorder numcommits ncmupdate nextupdate
+    global stopped nhyperspace parents commitlisted
+    global maxwidth onscreen displist currentparents olddlevel
+
+    set n [llength $displayorder]
+    while {$numcommits < $n} {
+        set id [lindex $displayorder $numcommits]
+        # look ahead up to 10 commits when planning hyperspace moves
+        set ctxend [expr {$numcommits + 10}]
+        if {!$reading && $ctxend > $n} {
+            set ctxend $n
+        }
+        # dlist = predicted display list after this commit is drawn
+        set dlist {}
+        if {$numcommits > 0} {
+            set dlist [lreplace $displist $olddlevel $olddlevel]
+            set i $olddlevel
+            foreach p $currentparents {
+                if {$onscreen($p) == 0} {
+                    set dlist [linsert $dlist $i $p]
+                    incr i
+                }
+            }
+        }
+        set nohs {}
+        set reins {}
+        set isfat [expr {[llength $dlist] > $maxwidth}]
+        if {$nhyperspace > 0 || $isfat} {
+            # need the full lookahead window; wait for more input if short
+            if {$ctxend > $n} break
+            # work out what to bring back from hyperspace and
+            # what we don't want to send into hyperspace
+            set room 1
+            for {set k $numcommits} {$k < $ctxend} {incr k} {
+                set x [lindex $displayorder $k]
+                set i [llsearch $dlist $x]
+                if {$i < 0} {
+                    set i [llength $dlist]
+                    lappend dlist $x
+                }
+                # ids used soon must stay on screen
+                if {[lsearch -exact $nohs $x] < 0} {
+                    lappend nohs $x
+                }
+                # bring back at most one hidden line per drawn commit
+                if {$reins eq {} && $onscreen($x) < 0 && $room} {
+                    set reins [list $x $i]
+                }
+                set newp {}
+                if {[info exists commitlisted($x)]} {
+                    set right 0
+                    foreach p $parents($x) {
+                        if {[llsearch $dlist $p] < 0} {
+                            lappend newp $p
+                            if {[lsearch -exact $nohs $p] < 0} {
+                                lappend nohs $p
+                            }
+                            if {$reins eq {} && $onscreen($p) < 0 && $room} {
+                                set reins [list $p [expr {$i + $right}]]
+                            }
+                        }
+                        set right 1
+                    }
+                }
+                # replace x in its slot by its not-yet-seen parents
+                set l [lindex $dlist $i]
+                if {[llength $l] == 1} {
+                    set l $newp
+                } else {
+                    set j [lsearch -exact $l $x]
+                    set l [concat [lreplace $l $j $j] $newp]
+                }
+                set dlist [lreplace $dlist $i $i $l]
+                # no room to reinstate lines once the graph stops narrowing
+                if {$room && $isfat && [llength $newp] <= 1} {
+                    set room 0
+                }
+            }
+        }
+
+        set dlevel [drawslants $id $reins $nohs]
+        drawcommitline $dlevel
+        # periodically service the event loop so the UI stays responsive
+        if {[clock clicks -milliseconds] >= $nextupdate
+            && $numcommits >= $ncmupdate} {
+            doupdate $reading
+            if {$stopped} break
+        }
+    }
+}
+
+# Replace todo[level] (just added to displayorder) by its not-yet-queued
+# parents.  level here is an index in todo.  Returns 0 when the cheap
+# single-parent shortcut applied (the slot was reused in place), 1 when
+# the todo list was rebuilt and the caller must re-run decidenext.
+proc updatetodo {level noshortcut} {
+    global ncleft todo nnewparents
+    global commitlisted parents onscreen
+
+    set id [lindex $todo $level]
+    # olds = this commit's parents, de-duplicated
+    set olds {}
+    if {[info exists commitlisted($id)]} {
+        foreach p $parents($id) {
+            if {[lsearch -exact $olds $p] < 0} {
+                lappend olds $p
+            }
+        }
+    }
+    # shortcut: a sole parent with exactly one unprocessed child can
+    # take over this slot directly
+    if {!$noshortcut && [llength $olds] == 1} {
+        set p [lindex $olds 0]
+        if {$ncleft($p) == 1 && [lsearch -exact $todo $p] < 0} {
+            set ncleft($p) 0
+            set todo [lreplace $todo $level $level $p]
+            set onscreen($p) 0
+            set nnewparents($id) 1
+            return 0
+        }
+    }
+
+    # general case: drop id and insert each new parent at its position
+    set todo [lreplace $todo $level $level]
+    set i $level
+    set n 0
+    foreach p $olds {
+        incr ncleft($p) -1
+        set k [lsearch -exact $todo $p]
+        if {$k < 0} {
+            set todo [linsert $todo $i $p]
+            set onscreen($p) 0
+            incr i
+            incr n
+        }
+    }
+    # remember how many parents first appeared here (used by drawslants)
+    set nnewparents($id) $n
+
+    return 1
+}
+
+# Choose which todo entry to draw next: a commit is eligible once all
+# of its children have been processed (ncleft == 0).  In datemode the
+# eligible commit with the latest commit date wins; otherwise the last
+# eligible entry found scanning from the end wins.  Returns the todo
+# index, -1 if nothing is eligible, or {} if noread is set and a
+# needed commit hasn't been read yet.
+proc decidenext {{noread 0}} {
+    global ncleft todo
+    global datemode cdate
+    global commitinfo
+
+    # choose which one to do next time around
+    set todol [llength $todo]
+    set level -1
+    set latest {}
+    for {set k $todol} {[incr k -1] >= 0} {} {
+        set p [lindex $todo $k]
+        if {$ncleft($p) == 0} {
+            if {$datemode} {
+                # date comparison requires the commit info to be loaded
+                if {![info exists commitinfo($p)]} {
+                    if {$noread} {
+                        return {}
+                    }
+                    readcommit $p
+                }
+                if {$latest == {} || $cdate($p) > $latest} {
+                    set level $k
+                    set latest $cdate($p)
+                }
+            } else {
+                set level $k
+                break
+            }
+        }
+    }
+    if {$level < 0} {
+        # should be impossible in a consistent graph; report it
+        if {$todo != {}} {
+            puts "ERROR: none of the pending commits can be done yet:"
+            foreach p $todo {
+                puts "  $p ($ncleft($p))"
+            }
+        }
+        return -1
+    }
+
+    return $level
+}
+
+# Incrementally draw commit id as it arrives from hg.  Appends every
+# commit that becomes ready (all children done and already read) to
+# displayorder, then draws them.
+#
+# Fix: commitlisted was missing from the global declarations, so the
+# [info exists commitlisted($id)] test below always saw an undefined
+# local variable and the batching loop exited after a single commit.
+# Also dropped the duplicate declaration of todo.
+proc drawcommit {id} {
+    global phase todo nchildren datemode nextupdate
+    global numcommits ncmupdate displayorder onscreen commitlisted
+
+    if {$phase != "incrdraw"} {
+        # first commit: reset all incremental-drawing state
+        set phase incrdraw
+        set displayorder {}
+        set todo {}
+        initgraph
+    }
+    # commits with no children are roots of the drawing order
+    if {$nchildren($id) == 0} {
+        lappend todo $id
+        set onscreen($id) 0
+    }
+    set level [decidenext 1]
+    if {$level == {} || $id != [lindex $todo $level]} {
+        # not this commit's turn yet; it will be picked up later
+        return
+    }
+    while 1 {
+        lappend displayorder [lindex $todo $level]
+        if {[updatetodo $level $datemode]} {
+            set level [decidenext 1]
+            if {$level == {}} break
+        }
+        set id [lindex $todo $level]
+        # stop once we reach a commit that hasn't been read yet
+        if {![info exists commitlisted($id)]} {
+            break
+        }
+    }
+    drawmore 1
+}
+
+# Called when hg has delivered the last commit: either finish drawing
+# the graph, or show a placeholder if nothing was selected at all.
+# Restores the normal mouse cursors in both cases.
+proc finishcommits {} {
+    global phase
+    global canv mainfont ctext maincursor textcursor
+
+    if {$phase != "incrdraw"} {
+        # no commit ever arrived; clear the canvas and say so
+        $canv delete all
+        $canv create text 3 3 -anchor nw -text "No commits selected" \
+            -font $mainfont -tags textitems
+        set phase {}
+    } else {
+        drawrest
+    }
+    . config -cursor $maincursor
+    settextcursor $textcursor
+}
+
+# Set the text pane cursor to c and remember it.  If someone else
+# (e.g. the sha1-link hover handler) has changed the widget's cursor
+# in the meantime, leave the widget alone and only record c as the
+# cursor to restore later.
+proc settextcursor {c} {
+    global ctext curtextcursor
+
+    set current [$ctext cget -cursor]
+    if {$current == $curtextcursor} {
+        $ctext config -cursor $c
+    }
+    set curtextcursor $c
+}
+
+# Draw the whole graph from scratch, assuming displayorder is already
+# fully populated (the non-incremental path, used on redisplay).
+proc drawgraph {} {
+    global nextupdate startmsecs ncmupdate
+    global displayorder onscreen
+
+    if {$displayorder == {}} return
+    set startmsecs [clock clicks -milliseconds]
+    # schedule the first event-loop service pass 100ms out
+    set nextupdate [expr $startmsecs + 100]
+    set ncmupdate 1
+    initgraph
+    foreach id $displayorder {
+        set onscreen($id) 0
+    }
+    drawmore 0
+}
+
+# Finish drawing after the last commit has been read: flush everything
+# still in todo into displayorder, draw it, and handle a pending
+# redisplay request (window resize etc.) if one was queued.
+proc drawrest {} {
+    global phase stopped redisplaying selectedline
+    global datemode todo displayorder
+    global numcommits ncmupdate
+    global nextupdate startmsecs
+
+    set level [decidenext]
+    if {$level >= 0} {
+        set phase drawgraph
+        while 1 {
+            lappend displayorder [lindex $todo $level]
+            set hard [updatetodo $level $datemode]
+            if {$hard} {
+                set level [decidenext]
+                if {$level < 0} break
+            }
+        }
+        drawmore 0
+    }
+    set phase {}
+    set drawmsecs [expr [clock clicks -milliseconds] - $startmsecs]
+    #puts "overall $drawmsecs ms for $numcommits commits"
+    if {$redisplaying} {
+        # restore the selection after a full redraw
+        if {$stopped == 0 && [info exists selectedline]} {
+            selectline $selectedline 0
+        }
+        if {$stopped == 1} {
+            # draw was interrupted by another redisplay request; go again
+            set stopped 0
+            after idle drawgraph
+        } else {
+            set redisplaying 0
+        }
+    }
+}
+
+# Return the match positions of the current search string in f as a
+# list of {start end} index pairs (inclusive), honouring the current
+# find type: Regexp, IgnCase (foundstring is already lowercased by
+# dofind) or exact substring.
+proc findmatches {f} {
+    global findtype foundstring foundstrlen
+    if {$findtype == "Regexp"} {
+        return [regexp -indices -all -inline $foundstring $f]
+    }
+    set haystack $f
+    if {$findtype == "IgnCase"} {
+        set haystack [string tolower $f]
+    }
+    set result {}
+    set pos 0
+    while 1 {
+        set hit [string first $foundstring $haystack $pos]
+        if {$hit < 0} break
+        lappend result [list $hit [expr {$hit + $foundstrlen - 1}]]
+        set pos [expr {$hit + $foundstrlen}]
+    }
+    return $result
+}
+
+# Run a search with the current find settings (findstring, findtype,
+# findloc): highlight every match in the visible columns, collect the
+# matching line numbers in matchinglines, and select the first match
+# after the currently selected line.  Pickaxe and Files searches are
+# handed off to external hg processes instead.
+proc dofind {} {
+    global findtype findloc findstring markedmatches commitinfo
+    global numcommits lineid linehtag linentag linedtag
+    global mainfont namefont canv canv2 canv3 selectedline
+    global matchinglines foundstring foundstrlen
+
+    # kill any running search process and clear previous highlights
+    stopfindproc
+    unmarkmatches
+    focus .
+    set matchinglines {}
+    if {$findloc == "Pickaxe"} {
+        findpatches
+        return
+    }
+    if {$findtype == "IgnCase"} {
+        set foundstring [string tolower $findstring]
+    } else {
+        set foundstring $findstring
+    }
+    set foundstrlen [string length $findstring]
+    if {$foundstrlen == 0} return
+    if {$findloc == "Files"} {
+        findfiles
+        return
+    }
+    if {![info exists selectedline]} {
+        set oldsel -1
+    } else {
+        set oldsel $selectedline
+    }
+    set didsel 0
+    # commitinfo fields, in order, matched against $findloc
+    set fldtypes {Headline Author Date Committer CDate Comment}
+    for {set l 0} {$l < $numcommits} {incr l} {
+        set id $lineid($l)
+        set info $commitinfo($id)
+        set doesmatch 0
+        foreach f $info ty $fldtypes {
+            if {$findloc != "All fields" && $findloc != $ty} {
+                continue
+            }
+            set matches [findmatches $f]
+            if {$matches == {}} continue
+            set doesmatch 1
+            # only the three visible columns get canvas highlights
+            if {$ty == "Headline"} {
+                markmatches $canv $l $f $linehtag($l) $matches $mainfont
+            } elseif {$ty == "Author"} {
+                markmatches $canv2 $l $f $linentag($l) $matches $namefont
+            } elseif {$ty == "Date"} {
+                markmatches $canv3 $l $f $linedtag($l) $matches $mainfont
+            }
+        }
+        if {$doesmatch} {
+            lappend matchinglines $l
+            # jump to the first match below the old selection
+            if {!$didsel && $l > $oldsel} {
+                findselectline $l
+                set didsel 1
+            }
+        }
+    }
+    if {$matchinglines == {}} {
+        bell
+    } elseif {!$didsel} {
+        # no match below the old selection: wrap to the first match
+        findselectline [lindex $matchinglines 0]
+    }
+}
+
+# Select line l as a search result and, when the search covers
+# comments, highlight the matches inside the comment text pane.
+proc findselectline {l} {
+    global findloc commentend ctext
+    selectline $l 1
+    if {$findloc == "All fields" || $findloc == "Comments"} {
+        # highlight the matches in the comments
+        set f [$ctext get 1.0 $commentend]
+        set matches [findmatches $f]
+        foreach match $matches {
+            set start [lindex $match 0]
+            # findmatches returns inclusive end indices; tag wants end+1
+            set end [expr [lindex $match 1] + 1]
+            $ctext tag add found "1.0 + $start c" "1.0 + $end c"
+        }
+    }
+}
+
+# Advance the selection to the next matching line below the current
+# one.  If no search results exist yet, optionally (re)start the
+# search; if there is no later match, just beep.
+proc findnext {restart} {
+    global matchinglines selectedline
+    if {![info exists matchinglines]} {
+        if {$restart} {
+            dofind
+        }
+        return
+    }
+    if {![info exists selectedline]} return
+    set next {}
+    foreach candidate $matchinglines {
+        if {$candidate > $selectedline} {
+            set next $candidate
+            break
+        }
+    }
+    if {$next ne {}} {
+        findselectline $next
+    } else {
+        bell
+    }
+}
+
+# Move the selection to the closest matching line above the current
+# one, starting a fresh search when no results exist yet; beep when
+# there is no earlier match.
+proc findprev {} {
+    global matchinglines selectedline
+    if {![info exists matchinglines]} {
+        dofind
+        return
+    }
+    if {![info exists selectedline]} return
+    set best {}
+    foreach candidate $matchinglines {
+        if {$candidate >= $selectedline} break
+        set best $candidate
+    }
+    if {$best == {}} {
+        bell
+    } else {
+        findselectline $best
+    }
+}
+
+# Variable trace on findloc: Pickaxe search only supports exact
+# matching, so force the find type to Exact and grey out the
+# IgnCase/Regexp menu entries while Pickaxe is selected.
+proc findlocchange {name ix op} {
+    global findloc findtype findtypemenu
+    if {$findloc == "Pickaxe"} {
+        set findtype Exact
+        set state disabled
+    } else {
+        set state normal
+    }
+    # entries 1 and 2 are IgnCase and Regexp
+    $findtypemenu entryconf 1 -state $state
+    $findtypemenu entryconf 2 -state $state
+}
+
+# Tear down any running external search process.  done is 1 when the
+# process exited on its own (no need to kill it).  Also clears the
+# search-in-progress state and restores the normal cursors, unless an
+# incremental draw is still using the busy cursor.
+proc stopfindproc {{done 0}} {
+    global findprocpid findprocfile findids
+    global ctext findoldcursor phase maincursor textcursor
+    global findinprogress
+
+    catch {unset findids}
+    if {[info exists findprocpid]} {
+        if {!$done} {
+            catch {exec kill $findprocpid}
+        }
+        catch {close $findprocfile}
+        unset findprocpid
+    }
+    if {[info exists findinprogress]} {
+        unset findinprogress
+        if {$phase != "incrdraw"} {
+            . config -cursor $maincursor
+            settextcursor $textcursor
+        }
+    }
+}
+
+# Start a Pickaxe search: feed every commit id (beginning just after
+# the selected line and wrapping around) to an hg debug-diff-tree
+# process with -S, whose output is consumed asynchronously by
+# readfindproc.
+proc findpatches {} {
+    global findstring selectedline numcommits
+    global findprocpid findprocfile
+    global finddidsel ctext lineid findinprogress
+    global findinsertpos
+
+    if {$numcommits == 0} return
+
+    # make a list of all the ids to search, starting at the one
+    # after the selected line (if any)
+    if {[info exists selectedline]} {
+        set l $selectedline
+    } else {
+        set l -1
+    }
+    set inputids {}
+    for {set i 0} {$i < $numcommits} {incr i} {
+        if {[incr l] >= $numcommits} {
+            set l 0
+        }
+        append inputids $lineid($l) "\n"
+    }
+
+    if {[catch {
+        set f [open [list | hg debug-diff-tree --stdin -s -r -S$findstring \
+                         << $inputids] r]
+    } err]} {
+        error_popup "Error starting search process: $err"
+        return
+    }
+
+    set findinsertpos end
+    set findprocfile $f
+    set findprocpid [pid $f]
+    # read results as they arrive without blocking the UI
+    fconfigure $f -blocking 0
+    fileevent $f readable readfindproc
+    set finddidsel 0
+    . config -cursor watch
+    settextcursor watch
+    set findinprogress 1
+}
+
+# Consume one line of output from the Pickaxe search process: each
+# line is expected to start with a 40-hex commit id, which is marked
+# as a match.  Handles EOF and malformed output by shutting the
+# search down.
+#
+# Fix: the parse-error message referred to git-diff-tree, but the
+# process actually run is hg debug-diff-tree (see findpatches and the
+# matching message in readfilediffs).
+proc readfindproc {} {
+    global findprocfile finddidsel
+    global idline matchinglines findinsertpos
+
+    set n [gets $findprocfile line]
+    if {$n < 0} {
+        if {[eof $findprocfile]} {
+            # search finished; beep if nothing was ever selected
+            stopfindproc 1
+            if {!$finddidsel} {
+                bell
+            }
+        }
+        return
+    }
+    if {![regexp {^[0-9a-f]{40}} $line id]} {
+        error_popup "Can't parse hg debug-diff-tree output: $line"
+        stopfindproc
+        return
+    }
+    if {![info exists idline($id)]} {
+        puts stderr "spurious id: $id"
+        return
+    }
+    set l $idline($id)
+    insertmatch $l $id
+}
+
+# Record line l (commit id) as a search match arriving from an
+# external search process.  Matches normally arrive in search order
+# and are appended; once one wraps around to the top, findinsertpos
+# switches to an index and later matches are inserted in order.
+# Highlights the headline and selects the first match found.
+proc insertmatch {l id} {
+    global matchinglines findinsertpos finddidsel
+
+    if {$findinsertpos == "end"} {
+        if {$matchinglines != {} && $l < [lindex $matchinglines 0]} {
+            # wrapped around: start inserting at the front
+            set matchinglines [linsert $matchinglines 0 $l]
+            set findinsertpos 1
+        } else {
+            lappend matchinglines $l
+        }
+    } else {
+        set matchinglines [linsert $matchinglines $findinsertpos $l]
+        incr findinsertpos
+    }
+    markheadline $l $id
+    if {!$finddidsel} {
+        findselectline $l
+        set finddidsel 1
+    }
+}
+
+# Start a Files search: for every commit (wrapping from just after
+# the selection), fetch the list of changed files via one batched
+# hg debug-diff-tree --stdin process, then scan those lists with
+# findcont.  Merge commits are skipped unless findmergefiles is set.
+proc findfiles {} {
+    global selectedline numcommits lineid ctext
+    global ffileline finddidsel parents nparents
+    global findinprogress findstartline findinsertpos
+    global treediffs fdiffids fdiffsneeded fdiffpos
+    global findmergefiles
+
+    if {$numcommits == 0} return
+
+    if {[info exists selectedline]} {
+        set l [expr {$selectedline + 1}]
+    } else {
+        set l 0
+    }
+    set ffileline $l
+    set findstartline $l
+    # collect the (id, parent) pairs whose file lists we don't have yet
+    set diffsneeded {}
+    set fdiffsneeded {}
+    while 1 {
+        set id $lineid($l)
+        if {$findmergefiles || $nparents($id) == 1} {
+            foreach p $parents($id) {
+                if {![info exists treediffs([list $id $p])]} {
+                    append diffsneeded "$id $p\n"
+                    lappend fdiffsneeded [list $id $p]
+                }
+            }
+        }
+        if {[incr l] >= $numcommits} {
+            set l 0
+        }
+        if {$l == $findstartline} break
+    }
+
+    # start off a diff-listing process if needed
+    if {$diffsneeded ne {}} {
+        if {[catch {
+            set df [open [list | hg debug-diff-tree -r --stdin << $diffsneeded] r]
+        } err ]} {
+            error_popup "Error starting search process: $err"
+            return
+        }
+        catch {unset fdiffids}
+        set fdiffpos 0
+        fconfigure $df -blocking 0
+        fileevent $df readable [list readfilediffs $df]
+    }
+
+    set finddidsel 0
+    set findinsertpos end
+    set id $lineid($l)
+    set p [lindex $parents($id) 0]
+    . config -cursor watch
+    settextcursor watch
+    set findinprogress 1
+    # begin scanning; findcont suspends itself when a diff is missing
+    findcont [list $id $p]
+    update
+}
+
+# Consume one line of hg debug-diff-tree output for the Files search.
+# A "<id> (from <parent>)" header starts a new group of diffs; lines
+# beginning with ":" name one changed file (field 5).  On EOF, flush
+# the last group and report errors or diffs that never arrived.
+proc readfilediffs {df} {
+    global findids fdiffids fdiffs
+
+    set n [gets $df line]
+    if {$n < 0} {
+        if {[eof $df]} {
+            donefilediff
+            if {[catch {close $df} err]} {
+                stopfindproc
+                bell
+                error_popup "Error in hg debug-diff-tree: $err"
+            } elseif {[info exists findids]} {
+                # findcont was still waiting on a diff we never got
+                set ids $findids
+                stopfindproc
+                bell
+                error_popup "Couldn't find diffs for {$ids}"
+            }
+        }
+        return
+    }
+    if {[regexp {^([0-9a-f]{40}) \(from ([0-9a-f]{40})\)} $line match id p]} {
+        # start of a new string of diffs
+        donefilediff
+        set fdiffids [list $id $p]
+        set fdiffs {}
+    } elseif {[string match ":*" $line]} {
+        lappend fdiffs [lindex $line 5]
+    }
+}
+
+# Finish the current group of file diffs read by readfilediffs: store
+# it in treediffs, mark any requested pairs that produced no output
+# as empty, and resume findcont if it was waiting for one of them.
+proc donefilediff {} {
+    global fdiffids fdiffs treediffs findids
+    global fdiffsneeded fdiffpos
+
+    if {[info exists fdiffids]} {
+        while {[lindex $fdiffsneeded $fdiffpos] ne $fdiffids
+               && $fdiffpos < [llength $fdiffsneeded]} {
+            # the diff process doesn't output anything for a commit
+            # which doesn't change anything
+            set nullids [lindex $fdiffsneeded $fdiffpos]
+            set treediffs($nullids) {}
+            if {[info exists findids] && $nullids eq $findids} {
+                unset findids
+                findcont $nullids
+            }
+            incr fdiffpos
+        }
+        incr fdiffpos
+
+        if {![info exists treediffs($fdiffids)]} {
+            set treediffs($fdiffids) $fdiffs
+        }
+        if {[info exists findids] && $fdiffids eq $findids} {
+            unset findids
+            findcont $fdiffids
+        }
+    }
+}
+
+# Continue the Files search from the (commit, parent) pair ids.  For
+# each commit, scan the changed-file lists against each parent; if a
+# needed list isn't available yet, stash the pair in findids and the
+# current line in ffileline and return — donefilediff resumes us when
+# the data arrives.  Stops after wrapping back to findstartline.
+proc findcont {ids} {
+    global findids treediffs parents nparents
+    global ffileline findstartline finddidsel
+    global lineid numcommits matchinglines findinprogress
+    global findmergefiles
+
+    set id [lindex $ids 0]
+    set p [lindex $ids 1]
+    set pi [lsearch -exact $parents($id) $p]
+    set l $ffileline
+    while 1 {
+        if {$findmergefiles || $nparents($id) == 1} {
+            if {![info exists treediffs($ids)]} {
+                # suspend until this diff has been read
+                set findids $ids
+                set ffileline $l
+                return
+            }
+            set doesmatch 0
+            foreach f $treediffs($ids) {
+                set x [findmatches $f]
+                if {$x != {}} {
+                    set doesmatch 1
+                    break
+                }
+            }
+            if {$doesmatch} {
+                insertmatch $l $id
+                # matched: skip this commit's remaining parents
+                set pi $nparents($id)
+            }
+        } else {
+            # merge commit with findmergefiles off: skip it entirely
+            set pi $nparents($id)
+        }
+        if {[incr pi] >= $nparents($id)} {
+            # done with this commit's parents; move to the next commit
+            set pi 0
+            if {[incr l] >= $numcommits} {
+                set l 0
+            }
+            if {$l == $findstartline} break
+        }
+        set p [lindex $parents($id) $pi]
+        set ids [list $id $p]
+    }
+    stopfindproc
+    if {!$finddidsel} {
+        bell
+    }
+}
+
+# mark a commit as matching by putting a yellow background
+# behind the headline
+# (dropped the unused mainfont/commitinfo global declarations; the
+# body only reads canv and linehtag)
+proc markheadline {l id} {
+    global canv linehtag
+
+    # lay a yellow rectangle under the headline text item, lowered so
+    # the text itself stays visible
+    set bbox [$canv bbox $linehtag($l)]
+    set t [$canv create rect $bbox -outline {} -tags matches -fill yellow]
+    $canv lower $t
+}
+
+# mark the bits of a headline, author or date that match a find string
+# matches is a list of {start end} character index pairs (inclusive)
+# into str; each is converted to pixel offsets via font measurement
+# and highlighted with a lowered yellow rectangle on canv.
+proc markmatches {canv l str tag matches font} {
+    set bbox [$canv bbox $tag]
+    set x0 [lindex $bbox 0]
+    set y0 [lindex $bbox 1]
+    set y1 [lindex $bbox 3]
+    foreach match $matches {
+        set start [lindex $match 0]
+        set end [lindex $match 1]
+        if {$start > $end} continue
+        # pixel offsets of the match within the rendered string
+        set xoff [font measure $font [string range $str 0 [expr $start-1]]]
+        set xlen [font measure $font [string range $str 0 [expr $end]]]
+        set t [$canv create rect [expr $x0+$xoff] $y0 [expr $x0+$xlen+2] $y1 \
+                   -outline {} -tags matches -fill yellow]
+        $canv lower $t
+    }
+}
+
+# Remove all search-match highlights and forget the recorded match
+# state (the state may legitimately not exist, hence the catches).
+proc unmarkmatches {} {
+    global matchinglines findids
+    catch {unset matchinglines}
+    catch {unset findids}
+    allcanvs delete matches
+}
+
+# Mouse-click handler on the canvases: convert the click's y position
+# (plus current scroll offset) into a commit line number and select
+# it.  Clicks on the graph canvas left of the commit text are ignored.
+proc selcanvline {w x y} {
+    global canv canvy0 ctext linespc
+    global lineid linehtag linentag linedtag rowtextx
+    set ymax [lindex [$canv cget -scrollregion] 3]
+    if {$ymax == {}} return
+    # translate widget y into canvas y using the scroll fraction
+    set yfrac [lindex [$canv yview] 0]
+    set y [expr {$y + $yfrac * $ymax}]
+    set l [expr {int(($y - $canvy0) / $linespc + 0.5)}]
+    if {$l < 0} {
+        set l 0
+    }
+    if {$w eq $canv} {
+        # ignore clicks in the graph area, left of the headline text
+        if {![info exists rowtextx($l)] || $x < $rowtextx($l)} return
+    }
+    unmarkmatches
+    selectline $l 1
+}
+
+# Return a short human-readable descriptor for commit p: the id
+# followed by its headline in parentheses, or "..." when that
+# commit's info has not been read yet.
+proc commit_descriptor {p} {
+    global commitinfo
+    if {[info exists commitinfo($p)]} {
+        set headline [lindex $commitinfo($p) 0]
+    } else {
+        set headline "..."
+    }
+    return "$p ($headline)"
+}
+
+# append some text to the ctext widget, and make any SHA1 ID
+# that we know about be a clickable link.
+proc appendwithlinks {text} {
+    global ctext idline linknum
+
+    set start [$ctext index "end - 1c"]
+    $ctext insert end $text
+    $ctext insert end "\n"
+    # find every 40-hex-digit token in the inserted text
+    set links [regexp -indices -all -inline {[0-9a-f]{40}} $text]
+    foreach l $links {
+        set s [lindex $l 0]
+        set e [lindex $l 1]
+        set linkid [string range $text $s $e]
+        # only ids we actually display become links
+        if {![info exists idline($linkid)]} continue
+        incr e
+        $ctext tag add link "$start + $s c" "$start + $e c"
+        # each link gets its own tag so it can carry its own binding
+        $ctext tag add link$linknum "$start + $s c" "$start + $e c"
+        $ctext tag bind link$linknum <1> [list selectline $idline($linkid) 1]
+        incr linknum
+    }
+    $ctext tag conf link -foreground blue -underline 1
+    $ctext tag bind link <Enter> { %W configure -cursor hand2 }
+    $ctext tag bind link <Leave> { %W configure -cursor $curtextcursor }
+}
+
+# Select commit line l: highlight it on all three canvases, scroll it
+# into view, record it in the navigation history (when isnew), put
+# its id in the sha1 entry, and fill the detail pane with author,
+# committer, tags, parents/children and the commit message, then kick
+# off the diff display.
+proc selectline {l isnew} {
+    global canv canv2 canv3 ctext commitinfo selectedline
+    global lineid linehtag linentag linedtag
+    global canvy0 linespc parents nparents children
+    global cflist currentid sha1entry
+    global commentend idtags idline linknum
+
+    $canv delete hover
+    normalline
+    # nothing to do for a line we never drew
+    if {![info exists lineid($l)] || ![info exists linehtag($l)]} return
+    # selection highlight: one lowered rectangle per canvas
+    $canv delete secsel
+    set t [eval $canv create rect [$canv bbox $linehtag($l)] -outline {{}} \
+               -tags secsel -fill [$canv cget -selectbackground]]
+    $canv lower $t
+    $canv2 delete secsel
+    set t [eval $canv2 create rect [$canv2 bbox $linentag($l)] -outline {{}} \
+               -tags secsel -fill [$canv2 cget -selectbackground]]
+    $canv2 lower $t
+    $canv3 delete secsel
+    set t [eval $canv3 create rect [$canv3 bbox $linedtag($l)] -outline {{}} \
+               -tags secsel -fill [$canv3 cget -selectbackground]]
+    $canv3 lower $t
+    # scroll so the selected line is visible, with a little margin
+    set y [expr {$canvy0 + $l * $linespc}]
+    set ymax [lindex [$canv cget -scrollregion] 3]
+    set ytop [expr {$y - $linespc - 1}]
+    set ybot [expr {$y + $linespc + 1}]
+    set wnow [$canv yview]
+    set wtop [expr [lindex $wnow 0] * $ymax]
+    set wbot [expr [lindex $wnow 1] * $ymax]
+    set wh [expr {$wbot - $wtop}]
+    set newtop $wtop
+    if {$ytop < $wtop} {
+        if {$ybot < $wtop} {
+            # far off-screen above: center the line
+            set newtop [expr {$y - $wh / 2.0}]
+        } else {
+            set newtop $ytop
+            if {$newtop > $wtop - $linespc} {
+                set newtop [expr {$wtop - $linespc}]
+            }
+        }
+    } elseif {$ybot > $wbot} {
+        if {$ytop > $wbot} {
+            # far off-screen below: center the line
+            set newtop [expr {$y - $wh / 2.0}]
+        } else {
+            set newtop [expr {$ybot - $wh}]
+            if {$newtop < $wtop + $linespc} {
+                set newtop [expr {$wtop + $linespc}]
+            }
+        }
+    }
+    if {$newtop != $wtop} {
+        if {$newtop < 0} {
+            set newtop 0
+        }
+        allcanvs yview moveto [expr $newtop * 1.0 / $ymax]
+    }
+
+    if {$isnew} {
+        addtohistory [list selectline $l 0]
+    }
+
+    set selectedline $l
+
+    set id $lineid($l)
+    set currentid $id
+    $sha1entry delete 0 end
+    $sha1entry insert 0 $id
+    $sha1entry selection from 0
+    $sha1entry selection to end
+
+    # rebuild the detail pane
+    $ctext conf -state normal
+    $ctext delete 0.0 end
+    set linknum 0
+    $ctext mark set fmark.0 0.0
+    $ctext mark gravity fmark.0 left
+    set info $commitinfo($id)
+    $ctext insert end "Author: [lindex $info 1] [lindex $info 2]\n"
+    $ctext insert end "Committer: [lindex $info 3] [lindex $info 4]\n"
+    if {[info exists idtags($id)]} {
+        $ctext insert end "Tags:"
+        foreach tag $idtags($id) {
+            $ctext insert end " $tag"
+        }
+        $ctext insert end "\n"
+    }
+
+    set comment {}
+    if {[info exists parents($id)]} {
+        foreach p $parents($id) {
+            append comment "Parent: [commit_descriptor $p]\n"
+        }
+    }
+    if {[info exists children($id)]} {
+        foreach c $children($id) {
+            append comment "Child: [commit_descriptor $c]\n"
+        }
+    }
+    append comment "\n"
+    append comment [lindex $info 5]
+
+    # make anything that looks like a SHA1 ID be a clickable link
+    appendwithlinks $comment
+
+    $ctext tag delete Comments
+    $ctext tag remove found 1.0 end
+    $ctext conf -state disabled
+    set commentend [$ctext index "end - 1c"]
+
+    $cflist delete 0 end
+    $cflist insert end "Comments"
+    if {$nparents($id) == 1} {
+        startdiff [concat $id $parents($id)]
+    } elseif {$nparents($id) > 1} {
+        mergediff $id
+    }
+}
+
+# Move the selection one line up (dir = -1) or down (dir = 1),
+# clearing any search highlights first.
+proc selnextline {dir} {
+    global selectedline
+    if {![info exists selectedline]} return
+    unmarkmatches
+    selectline [expr {$selectedline + $dir}] 1
+}
+
+# Drop the current selection: remove the highlight rectangles and
+# forget which line was selected (if any).
+proc unselectline {} {
+    global selectedline
+
+    allcanvs delete secsel
+    catch {unset selectedline}
+}
+
+# Push cmd (a script that restores a view, e.g. a selectline call)
+# onto the navigation history at the current position, truncating any
+# forward entries, and update the back/forward button states.
+proc addtohistory {cmd} {
+    global history historyindex
+
+    # don't record the same state twice in a row
+    if {$historyindex > 0
+        && [lindex $history [expr {$historyindex - 1}]] == $cmd} {
+        return
+    }
+
+    if {$historyindex < [llength $history]} {
+        # navigating from the middle: discard the forward tail
+        set history [lreplace $history $historyindex end $cmd]
+    } else {
+        lappend history $cmd
+    }
+    incr historyindex
+    if {$historyindex > 1} {
+        .ctop.top.bar.leftbut conf -state normal
+    } else {
+        .ctop.top.bar.leftbut conf -state disabled
+    }
+    .ctop.top.bar.rightbut conf -state disabled
+}
+
+# Navigate one step back in the history and update the back/forward
+# button states accordingly.
+proc goback {} {
+    global history historyindex
+
+    if {$historyindex > 1} {
+        incr historyindex -1
+        # re-run the recorded restore command for the previous view
+        set cmd [lindex $history [expr {$historyindex - 1}]]
+        eval $cmd
+        .ctop.top.bar.rightbut conf -state normal
+    }
+    if {$historyindex <= 1} {
+        .ctop.top.bar.leftbut conf -state disabled
+    }
+}
+
+# Navigate one step forward in the history and update the
+# back/forward button states accordingly.
+proc goforw {} {
+    global history historyindex
+
+    if {$historyindex < [llength $history]} {
+        # re-run the recorded restore command for the next view
+        set cmd [lindex $history $historyindex]
+        incr historyindex
+        eval $cmd
+        .ctop.top.bar.leftbut conf -state normal
+    }
+    if {$historyindex >= [llength $history]} {
+        .ctop.top.bar.rightbut conf -state disabled
+    }
+}
+
+# Show the diff display for merge commit id: compute the greatest
+# common ancestor of its parents, then either reuse the cached list
+# of interesting files or start fetching the needed tree diffs.
+proc mergediff {id} {
+    global parents diffmergeid diffmergegca mergefilelist diffpindex
+
+    set diffmergeid $id
+    set diffpindex -1
+    set diffmergegca [findgca $parents($id)]
+    if {[info exists mergefilelist($id)]} {
+        # already computed for this merge; just display it
+        if {$mergefilelist($id) ne {}} {
+            showmergediff
+        }
+    } else {
+        contmergediff {}
+    }
+}
+
+# Compute the greatest common ancestor of a list of commit ids by
+# folding the list pairwise through hg debug-merge-base.  Returns {}
+# if any merge-base invocation fails (or the list is empty).
+proc findgca {ids} {
+    set gca {}
+    foreach id $ids {
+        if {$gca eq {}} {
+            set gca $id
+            continue
+        }
+        if {[catch {set gca [exec hg debug-merge-base $gca $id]} err]} {
+            return {}
+        }
+    }
+    return $gca
+}
+
+# Continue assembling the merge diff for diffmergeid.  ids is the
+# (child, parent) or (parent, gca) pair whose tree diff just became
+# available ({} on the first call).  Requests each missing tree diff
+# in turn (returning until it arrives), then computes the list of
+# "interesting" files and displays it.
+proc contmergediff {ids} {
+    global diffmergeid diffpindex parents nparents diffmergegca
+    global treediffs mergefilelist diffids treepending
+
+    # diff the child against each of the parents, and diff
+    # each of the parents against the GCA.
+    while 1 {
+        if {[lindex $ids 0] == $diffmergeid && $diffmergegca ne {}} {
+            # just got child-vs-parent; now fetch that parent vs the GCA
+            set ids [list [lindex $ids 1] $diffmergegca]
+        } else {
+            if {[incr diffpindex] >= $nparents($diffmergeid)} break
+            set p [lindex $parents($diffmergeid) $diffpindex]
+            set ids [list $diffmergeid $p]
+        }
+        if {![info exists treediffs($ids)]} {
+            # suspend until gettreediffs delivers this pair
+            set diffids $ids
+            if {![info exists treepending]} {
+                gettreediffs $ids
+            }
+            return
+        }
+    }
+
+    # If a file in some parent is different from the child and also
+    # different from the GCA, then it's interesting.
+    # If we don't have a GCA, then a file is interesting if it is
+    # different from the child in all the parents.
+    if {$diffmergegca ne {}} {
+        set files {}
+        foreach p $parents($diffmergeid) {
+            set gcadiffs $treediffs([list $p $diffmergegca])
+            foreach f $treediffs([list $diffmergeid $p]) {
+                if {[lsearch -exact $files $f] < 0
+                    && [lsearch -exact $gcadiffs $f] >= 0} {
+                    lappend files $f
+                }
+            }
+        }
+        set files [lsort $files]
+    } else {
+        # no GCA: intersect the changed-file lists of all the parents
+        set p [lindex $parents($diffmergeid) 0]
+        set files $treediffs([list $diffmergeid $p])
+        for {set i 1} {$i < $nparents($diffmergeid) && $files ne {}} {incr i} {
+            set p [lindex $parents($diffmergeid) $i]
+            set df $treediffs([list $diffmergeid $p])
+            set nf {}
+            foreach f $files {
+                if {[lsearch -exact $df $f] >= 0} {
+                    lappend nf $f
+                }
+            }
+            set files $nf
+        }
+    }
+
+    set mergefilelist($diffmergeid) $files
+    if {$files ne {}} {
+        showmergediff
+    }
+}
+
+# Display the merge diff for diffmergeid: list the interesting files
+# in cflist and start one hg debug-diff-tree -p process per parent,
+# whose output streams into getmergediffline.
+#
+# Fix: env was not declared global, so "set env(GIT_DIFF_OPTS) ..."
+# wrote a proc-local array and the setting never reached the child
+# processes' environment.
+proc showmergediff {} {
+    global cflist diffmergeid mergefilelist parents
+    global diffopts diffinhunk currentfile currenthunk filelines
+    global diffblocked groupfilelast mergefds groupfilenum grouphunks
+    global env
+
+    set files $mergefilelist($diffmergeid)
+    foreach f $files {
+        $cflist insert end $f
+    }
+    set env(GIT_DIFF_OPTS) $diffopts
+    set flist {}
+    # reset all per-merge parsing state
+    catch {unset currentfile}
+    catch {unset currenthunk}
+    catch {unset filelines}
+    catch {unset groupfilenum}
+    catch {unset grouphunks}
+    set groupfilelast -1
+    foreach p $parents($diffmergeid) {
+        set cmd [list | hg debug-diff-tree -p $p $diffmergeid]
+        set cmd [concat $cmd $mergefilelist($diffmergeid)]
+        if {[catch {set f [open $cmd r]} err]} {
+            error_popup "Error getting diffs: $err"
+            # close any pipes already opened for earlier parents
+            foreach f $flist {
+                catch {close $f}
+            }
+            return
+        }
+        lappend flist $f
+        set ids [list $diffmergeid $p]
+        set mergefds($ids) $f
+        set diffinhunk($ids) 0
+        set diffblocked($ids) 0
+        fconfigure $f -blocking 0
+        fileevent $f readable [list getmergediffline $f $ids $diffmergeid]
+    }
+}
+
+# Consume one line of diff output for the (child, parent) pair ids.
+# Parses unified-diff hunks into per-line filelines entries and hunk
+# records; diffinhunk($ids) tracks the parser state (0 = outside a
+# hunk, 1 = in a context/deletion run, 2 = in a change run).
+# Completed hunks are handed to processhunks, which may block this
+# stream until the other parents catch up.
+proc getmergediffline {f ids id} {
+    global diffmergeid diffinhunk diffoldlines diffnewlines
+    global currentfile currenthunk
+    global diffoldstart diffnewstart diffoldlno diffnewlno
+    global diffblocked mergefilelist
+    global noldlines nnewlines difflcounts filelines
+
+    set n [gets $f line]
+    if {$n < 0} {
+        # no complete line yet; only proceed on real EOF
+        if {![eof $f]} return
+    }
+
+    if {!([info exists diffmergeid] && $diffmergeid == $id)} {
+        # the user has moved on to a different commit; drop this stream
+        if {$n < 0} {
+            close $f
+        }
+        return
+    }
+
+    if {$diffinhunk($ids) != 0} {
+        set fi $currentfile($ids)
+        if {$n > 0 && [regexp {^[-+ \\]} $line match]} {
+            # continuing an existing hunk
+            set line [string range $line 1 end]
+            set p [lindex $ids 1]
+            # record the line text on the side(s) it belongs to
+            if {$match eq "-" || $match eq " "} {
+                set filelines($p,$fi,$diffoldlno($ids)) $line
+                incr diffoldlno($ids)
+            }
+            if {$match eq "+" || $match eq " "} {
+                set filelines($id,$fi,$diffnewlno($ids)) $line
+                incr diffnewlno($ids)
+            }
+            if {$match eq " "} {
+                # context line: close a change run if one was open
+                if {$diffinhunk($ids) == 2} {
+                    lappend difflcounts($ids) \
+                        [list $noldlines($ids) $nnewlines($ids)]
+                    set noldlines($ids) 0
+                    set diffinhunk($ids) 1
+                }
+                incr noldlines($ids)
+            } elseif {$match eq "-" || $match eq "+"} {
+                # change line: close a context run if one was open
+                if {$diffinhunk($ids) == 1} {
+                    lappend difflcounts($ids) [list $noldlines($ids)]
+                    set noldlines($ids) 0
+                    set nnewlines($ids) 0
+                    set diffinhunk($ids) 2
+                }
+                if {$match eq "-"} {
+                    incr noldlines($ids)
+                } else {
+                    incr nnewlines($ids)
+                }
+            }
+            # and if it's \ No newline at end of line, then what?
+            return
+        }
+        # end of a hunk: flush the final run and emit the hunk record
+        if {$diffinhunk($ids) == 1 && $noldlines($ids) != 0} {
+            lappend difflcounts($ids) [list $noldlines($ids)]
+        } elseif {$diffinhunk($ids) == 2
+                  && ($noldlines($ids) != 0 || $nnewlines($ids) != 0)} {
+            lappend difflcounts($ids) [list $noldlines($ids) $nnewlines($ids)]
+        }
+        set currenthunk($ids) [list $currentfile($ids) \
+                                   $diffoldstart($ids) $diffnewstart($ids) \
+                                   $diffoldlno($ids) $diffnewlno($ids) \
+                                   $difflcounts($ids)]
+        set diffinhunk($ids) 0
+        # -1 = need to block, 0 = unblocked, 1 = is blocked
+        set diffblocked($ids) -1
+        processhunks
+        if {$diffblocked($ids) == -1} {
+            # processhunks didn't consume our hunk; stop reading until it does
+            fileevent $f readable {}
+            set diffblocked($ids) 1
+        }
+    }
+
+    if {$n < 0} {
+        # eof
+        if {!$diffblocked($ids)} {
+            close $f
+            # emit a sentinel hunk past the last file so processhunks
+            # knows this parent's stream is done
+            set currentfile($ids) [llength $mergefilelist($diffmergeid)]
+            set currenthunk($ids) [list $currentfile($ids) 0 0 0 0 {}]
+            processhunks
+        }
+    } elseif {[regexp {^diff --git a/(.*) b/} $line match fname]} {
+        # start of a new file
+        set currentfile($ids) \
+            [lsearch -exact $mergefilelist($diffmergeid) $fname]
+    } elseif {[regexp {^@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@(.*)} \
+                   $line match f1l f1c f2l f2c rest]} {
+        if {[info exists currentfile($ids)] && $currentfile($ids) >= 0} {
+            # start of a new hunk
+            if {$f1l == 0 && $f1c == 0} {
+                set f1l 1
+            }
+            if {$f2l == 0 && $f2c == 0} {
+                set f2l 1
+            }
+            set diffinhunk($ids) 1
+            set diffoldstart($ids) $f1l
+            set diffnewstart($ids) $f2l
+            set diffoldlno($ids) $f1l
+            set diffnewlno($ids) $f2l
+            set difflcounts($ids) {}
+            set noldlines($ids) 0
+            set nnewlines($ids) 0
+        }
+    }
+}
+
+# Merge the per-parent hunk streams: repeatedly pick the earliest
+# pending hunk (by file index, then line number), group overlapping
+# hunks from different parents together, and hand each finished group
+# to processgroup.  Returns as soon as any parent has no pending hunk
+# (that stream will call us again when one arrives).
+proc processhunks {} {
+    global diffmergeid parents nparents currenthunk
+    global mergefilelist diffblocked mergefds
+    global grouphunks grouplinestart grouplineend groupfilenum
+
+    set nfiles [llength $mergefilelist($diffmergeid)]
+    while 1 {
+        set fi $nfiles
+        set lno 0
+        # look for the earliest hunk
+        foreach p $parents($diffmergeid) {
+            set ids [list $diffmergeid $p]
+            # can't decide an ordering until every parent has a hunk
+            if {![info exists currenthunk($ids)]} return
+            set i [lindex $currenthunk($ids) 0]
+            set l [lindex $currenthunk($ids) 2]
+            if {$i < $fi || ($i == $fi && $l < $lno)} {
+                set fi $i
+                set lno $l
+                set pi $p
+            }
+        }
+
+        if {$fi < $nfiles} {
+            set ids [list $diffmergeid $pi]
+            set hunk $currenthunk($ids)
+            unset currenthunk($ids)
+            # we consumed this parent's hunk; let its stream flow again
+            if {$diffblocked($ids) > 0} {
+                fileevent $mergefds($ids) readable \
+                    [list getmergediffline $mergefds($ids) $ids $diffmergeid]
+            }
+            set diffblocked($ids) 0
+
+            if {[info exists groupfilenum] && $groupfilenum == $fi
+                && $lno <= $grouplineend} {
+                # add this hunk to the pending group
+                lappend grouphunks($pi) $hunk
+                set endln [lindex $hunk 4]
+                if {$endln > $grouplineend} {
+                    set grouplineend $endln
+                }
+                continue
+            }
+        }
+
+        # succeeding stuff doesn't belong in this group, so
+        # process the group now
+        if {[info exists groupfilenum]} {
+            processgroup
+            unset groupfilenum
+            unset grouphunks
+        }
+
+        # fi == nfiles means only sentinel (EOF) hunks remain
+        if {$fi >= $nfiles} break
+
+        # start a new group
+        set groupfilenum $fi
+        set grouphunks($pi) [list $hunk]
+        set grouplinestart $lno
+        set grouplineend [lindex $hunk 4]
+    }
+}
+
+# processgroup -- render one group of overlapping hunks (collected by
+# processhunks) into the details pane as a combined merge diff.  Emits
+# a file-separator header on entry to a new file, a combined "@@" hunk
+# header with one "-start,len" per parent, then the lines tagged per
+# parent colour (m0..mmax) or as merge result (mresult).  diffoffset($p)
+# tracks how far each parent's line numbering has drifted from the
+# merge result's numbering.
+proc processgroup {} {
+    global groupfilelast groupfilenum difffilestart
+    global mergefilelist diffmergeid ctext filelines
+    global parents diffmergeid diffoffset
+    global grouphunks grouplinestart grouplineend nparents
+    global mergemax
+
+    $ctext conf -state normal
+    set id $diffmergeid
+    set f $groupfilenum
+    if {$groupfilelast != $f} {
+        # first group for this file: emit the centred header line and
+        # set the fmark.N mark so the file list can jump here
+        $ctext insert end "\n"
+        set here [$ctext index "end - 1c"]
+        set difffilestart($f) $here
+        set mark fmark.[expr {$f + 1}]
+        $ctext mark set $mark $here
+        $ctext mark gravity $mark left
+        set header [lindex $mergefilelist($id) $f]
+        set l [expr {(78 - [string length $header]) / 2}]
+        set pad [string range "----------------------------------------" 1 $l]
+        $ctext insert end "$pad $header $pad\n" filesep
+        set groupfilelast $f
+        foreach p $parents($id) {
+            set diffoffset($p) 0
+        }
+    }
+
+    $ctext insert end "@@" msep
+    set nlines [expr {$grouplineend - $grouplinestart}]
+    set events {}
+    set pnum 0
+    # reconstruct each parent's version of the group span in
+    # filelines($p,$f,...) and turn its hunks into "events":
+    # {newstart newend parentnum oldcount newcount}
+    foreach p $parents($id) {
+        set startline [expr {$grouplinestart + $diffoffset($p)}]
+        set ol $startline
+        set nl $grouplinestart
+        if {[info exists grouphunks($p)]} {
+            foreach h $grouphunks($p) {
+                set l [lindex $h 2]
+                if {$nl < $l} {
+                    for {} {$nl < $l} {incr nl} {
+                        set filelines($p,$f,$ol) $filelines($id,$f,$nl)
+                        incr ol
+                    }
+                }
+                foreach chunk [lindex $h 5] {
+                    if {[llength $chunk] == 2} {
+                        # {oldcount newcount} = a changed region
+                        set olc [lindex $chunk 0]
+                        set nlc [lindex $chunk 1]
+                        set nnl [expr {$nl + $nlc}]
+                        lappend events [list $nl $nnl $pnum $olc $nlc]
+                        incr ol $olc
+                        set nl $nnl
+                    } else {
+                        # single count = a run of context lines
+                        incr ol [lindex $chunk 0]
+                        incr nl [lindex $chunk 0]
+                    }
+                }
+            }
+        }
+        if {$nl < $grouplineend} {
+            for {} {$nl < $grouplineend} {incr nl} {
+                set filelines($p,$f,$ol) $filelines($id,$f,$nl)
+                incr ol
+            }
+        }
+        set nlines [expr {$ol - $startline}]
+        $ctext insert end " -$startline,$nlines" msep
+        incr pnum
+    }
+
+    set nlines [expr {$grouplineend - $grouplinestart}]
+    $ctext insert end " +$grouplinestart,$nlines @@\n" msep
+
+    # walk the events in result-line order, coalescing overlapping ones
+    set events [lsort -integer -index 0 $events]
+    set nevents [llength $events]
+    set nmerge $nparents($diffmergeid)
+    set l $grouplinestart
+    for {set i 0} {$i < $nevents} {set i $j} {
+        set nl [lindex $events $i 0]
+        while {$l < $nl} {
+            $ctext insert end " $filelines($id,$f,$l)\n"
+            incr l
+        }
+        set e [lindex $events $i]
+        set enl [lindex $e 1]
+        set j $i
+        set active {}
+        while 1 {
+            set pnum [lindex $e 2]
+            set olc [lindex $e 3]
+            set nlc [lindex $e 4]
+            if {![info exists delta($pnum)]} {
+                set delta($pnum) [expr {$olc - $nlc}]
+                lappend active $pnum
+            } else {
+                incr delta($pnum) [expr {$olc - $nlc}]
+            }
+            if {[incr j] >= $nevents} break
+            set e [lindex $events $j]
+            if {[lindex $e 0] >= $enl} break
+            if {[lindex $e 1] > $enl} {
+                set enl [lindex $e 1]
+            }
+        }
+        set nlc [expr {$enl - $l}]
+        set ncol mresult
+        set bestpn -1
+        if {[llength $active] == $nmerge - 1} {
+            # no diff for one of the parents, i.e. it's identical
+            for {set pnum 0} {$pnum < $nmerge} {incr pnum} {
+                if {![info exists delta($pnum)]} {
+                    if {$pnum < $mergemax} {
+                        lappend ncol m$pnum
+                    } else {
+                        lappend ncol mmax
+                    }
+                    break
+                }
+            }
+        } elseif {[llength $active] == $nmerge} {
+            # all parents are different, see if one is very similar
+            set bestsim 30
+            for {set pnum 0} {$pnum < $nmerge} {incr pnum} {
+                set sim [similarity $pnum $l $nlc $f \
+                             [lrange $events $i [expr {$j-1}]]]
+                if {$sim > $bestsim} {
+                    set bestsim $sim
+                    set bestpn $pnum
+                }
+            }
+            if {$bestpn >= 0} {
+                lappend ncol m$bestpn
+            }
+        }
+        set pnum -1
+        foreach p $parents($id) {
+            incr pnum
+            if {![info exists delta($pnum)] || $pnum == $bestpn} continue
+            set olc [expr {$nlc + $delta($pnum)}]
+            set ol [expr {$l + $diffoffset($p)}]
+            incr diffoffset($p) $delta($pnum)
+            unset delta($pnum)
+            for {} {$olc > 0} {incr olc -1} {
+                $ctext insert end "-$filelines($p,$f,$ol)\n" m$pnum
+                incr ol
+            }
+        }
+        set endl [expr {$l + $nlc}]
+        if {$bestpn >= 0} {
+            # show this pretty much as a normal diff
+            set p [lindex $parents($id) $bestpn]
+            set ol [expr {$l + $diffoffset($p)}]
+            incr diffoffset($p) $delta($bestpn)
+            unset delta($bestpn)
+            for {set k $i} {$k < $j} {incr k} {
+                set e [lindex $events $k]
+                if {[lindex $e 2] != $bestpn} continue
+                set nl [lindex $e 0]
+                set ol [expr {$ol + $nl - $l}]
+                for {} {$l < $nl} {incr l} {
+                    $ctext insert end "+$filelines($id,$f,$l)\n" $ncol
+                }
+                set c [lindex $e 3]
+                for {} {$c > 0} {incr c -1} {
+                    $ctext insert end "-$filelines($p,$f,$ol)\n" m$bestpn
+                    incr ol
+                }
+                set nl [lindex $e 1]
+                for {} {$l < $nl} {incr l} {
+                    $ctext insert end "+$filelines($id,$f,$l)\n" mresult
+                }
+            }
+        }
+        for {} {$l < $endl} {incr l} {
+            $ctext insert end "+$filelines($id,$f,$l)\n" $ncol
+        }
+    }
+    # trailing context after the last event
+    while {$l < $grouplineend} {
+        $ctext insert end " $filelines($id,$f,$l)\n"
+        incr l
+    }
+    $ctext conf -state disabled
+}
+
+# similarity -- score (0..100) how similar parent $pnum's version of
+# lines l..l+nlc of file $f is to the merge result, given the change
+# events for this span.  Characters in unchanged lines count as "same",
+# characters in added/removed lines count as "diff"; the result is
+# 200*same / (2*same + diff), i.e. 100 for identical spans.
+proc similarity {pnum l nlc f events} {
+    global diffmergeid parents diffoffset filelines
+
+    set id $diffmergeid
+    set p [lindex $parents($id) $pnum]
+    set ol [expr {$l + $diffoffset($p)}]
+    set endl [expr {$l + $nlc}]
+    set same 0
+    set diff 0
+    foreach e $events {
+        if {[lindex $e 2] != $pnum} continue
+        set nl [lindex $e 0]
+        set ol [expr {$ol + $nl - $l}]
+        # context lines before this event count toward "same"
+        # (the extra incr counts the newline character)
+        for {} {$l < $nl} {incr l} {
+            incr same [string length $filelines($id,$f,$l)]
+            incr same
+        }
+        set oc [lindex $e 3]
+        for {} {$oc > 0} {incr oc -1} {
+            incr diff [string length $filelines($p,$f,$ol)]
+            incr diff
+            incr ol
+        }
+        set nl [lindex $e 1]
+        for {} {$l < $nl} {incr l} {
+            incr diff [string length $filelines($id,$f,$l)]
+            incr diff
+        }
+    }
+    for {} {$l < $endl} {incr l} {
+        incr same [string length $filelines($id,$f,$l)]
+        incr same
+    }
+    if {$same == 0} {
+        return 0
+    }
+    return [expr {200 * $same / (2 * $same + $diff)}]
+}
+
+# startdiff -- begin displaying the diff for the {child parent} pair
+# in $ids.  Clears any merge-diff state, then either reuses a cached
+# tree diff or starts an asynchronous fetch of one.
+proc startdiff {ids} {
+    global treediffs diffids treepending diffmergeid
+
+    set diffids $ids
+    catch {unset diffmergeid}
+    if {[info exists treediffs($ids)]} {
+        # tree diff already cached; fill the file list immediately
+        addtocflist $ids
+    } elseif {![info exists treepending]} {
+        # no fetch currently in flight, so start one
+        gettreediffs $ids
+    }
+}
+
+# addtocflist -- append every changed file for this id pair to the
+# file listbox, then start fetching the actual blob diffs.
+proc addtocflist {ids} {
+    global treediffs cflist
+    foreach fname $treediffs($ids) {
+        $cflist insert end $fname
+    }
+    getblobdiffs $ids
+}
+
+# gettreediffs -- start an asynchronous "hg debug-diff-tree" listing
+# the files that differ between the two revisions in $ids; the output
+# is consumed line-by-line by gettreediffline.  Fails silently if the
+# pipe cannot be opened.
+proc gettreediffs {ids} {
+    global treediff parents treepending
+    set treepending $ids
+    set treediff {}
+    set id [lindex $ids 0]
+    set p [lindex $ids 1]
+    # brace the if-expression: an unbraced expression undergoes an
+    # extra round of substitution and bypasses byte-compilation
+    if {[catch {set gdtf [open "|hg debug-diff-tree -r $p $id" r]}]} return
+    fconfigure $gdtf -blocking 0
+    fileevent $gdtf readable [list gettreediffline $gdtf $ids]
+}
+
+# gettreediffline -- fileevent handler for the debug-diff-tree pipe:
+# accumulate one changed filename per line; on EOF cache the list in
+# treediffs($ids) and either restart for a newer selection or go on
+# to populate the display.
+proc gettreediffline {gdtf ids} {
+    global treediff treediffs treepending diffids diffmergeid
+
+    set n [gets $gdtf line]
+    if {$n < 0} {
+        if {![eof $gdtf]} return
+        close $gdtf
+        set treediffs($ids) $treediff
+        unset treepending
+        if {$ids != $diffids} {
+            # user has selected something else meanwhile; fetch that instead
+            gettreediffs $diffids
+        } else {
+            if {[info exists diffmergeid]} {
+                contmergediff $ids
+            } else {
+                addtocflist $ids
+            }
+        }
+        return
+    }
+    # NOTE(review): the line is treated as a Tcl list and field 6 taken
+    # as the filename -- filenames containing list-special characters
+    # (braces, quotes) could misparse; confirm against the hg output format
+    set file [lindex $line 5]
+    lappend treediff $file
+}
+
+# getblobdiffs -- start an asynchronous "hg debug-diff-tree -p" to get
+# the actual patch text for the id pair; getblobdiffline renders it.
+proc getblobdiffs {ids} {
+    global diffopts blobdifffd diffids env curdifftag curtagstart
+    global difffilestart nextupdate diffinhdr treediffs
+
+    set id [lindex $ids 0]
+    set p [lindex $ids 1]
+    # diff options are passed via the environment (git-compatible name;
+    # presumably honoured by hg debug-diff-tree -- TODO confirm)
+    set env(GIT_DIFF_OPTS) $diffopts
+    set cmd [list | hg debug-diff-tree -r -p -C $p $id]
+    if {[catch {set bdf [open $cmd r]} err]} {
+        puts "error getting diffs: $err"
+        return
+    }
+    set diffinhdr 0
+    fconfigure $bdf -blocking 0
+    set blobdifffd($ids) $bdf
+    set curdifftag Comments
+    set curtagstart 0.0
+    catch {unset difffilestart}
+    fileevent $bdf readable [list getblobdiffline $bdf $diffids]
+    # throttle UI updates to roughly every 100 ms
+    set nextupdate [expr {[clock clicks -milliseconds] + 100}]
+}
+
+# getblobdiffline -- fileevent handler for the patch pipe: parse one
+# line of diff output and render it into the details pane with the
+# appropriate tags (file separators, hunk separators, added/removed
+# lines).  Periodically yields to the event loop to keep the UI live.
+proc getblobdiffline {bdf ids} {
+    global diffids blobdifffd ctext curdifftag curtagstart
+    global diffnexthead diffnextnote difffilestart
+    global nextupdate diffinhdr treediffs
+    global gaudydiff
+
+    set n [gets $bdf line]
+    if {$n < 0} {
+        if {[eof $bdf]} {
+            close $bdf
+            if {$ids == $diffids && $bdf == $blobdifffd($ids)} {
+                $ctext tag add $curdifftag $curtagstart end
+            }
+        }
+        return
+    }
+    # discard stale output if the user has moved to a different diff
+    if {$ids != $diffids || $bdf != $blobdifffd($ids)} {
+        return
+    }
+    $ctext conf -state normal
+    if {[regexp {^diff --git a/(.*) b/(.*)} $line match fname newname]} {
+        # start of a new file
+        $ctext insert end "\n"
+        $ctext tag add $curdifftag $curtagstart end
+        set curtagstart [$ctext index "end - 1c"]
+        set header $newname
+        set here [$ctext index "end - 1c"]
+        set i [lsearch -exact $treediffs($diffids) $fname]
+        if {$i >= 0} {
+            set difffilestart($i) $here
+            incr i
+            $ctext mark set fmark.$i $here
+            $ctext mark gravity fmark.$i left
+        }
+        if {$newname != $fname} {
+            # renamed/copied file: mark the destination name too
+            set i [lsearch -exact $treediffs($diffids) $newname]
+            if {$i >= 0} {
+                set difffilestart($i) $here
+                incr i
+                $ctext mark set fmark.$i $here
+                $ctext mark gravity fmark.$i left
+            }
+        }
+        set curdifftag "f:$fname"
+        $ctext tag delete $curdifftag
+        set l [expr {(78 - [string length $header]) / 2}]
+        set pad [string range "----------------------------------------" 1 $l]
+        $ctext insert end "$pad $header $pad\n" filesep
+        set diffinhdr 1
+    } elseif {[regexp {^(---|\+\+\+)} $line]} {
+        set diffinhdr 0
+    } elseif {[regexp {^@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@(.*)} \
+                   $line match f1l f1c f2l f2c rest]} {
+        if {$gaudydiff} {
+            $ctext insert end "\t" hunksep
+            $ctext insert end " $f1l " d0 " $f2l " d1
+            $ctext insert end " $rest \n" hunksep
+        } else {
+            $ctext insert end "$line\n" hunksep
+        }
+        set diffinhdr 0
+    } else {
+        set x [string range $line 0 0]
+        if {$x == "-" || $x == "+"} {
+            # tag is 0 for removals (d0), 1 for additions (d1)
+            set tag [expr {$x == "+"}]
+            if {$gaudydiff} {
+                set line [string range $line 1 end]
+            }
+            $ctext insert end "$line\n" d$tag
+        } elseif {$x == " "} {
+            if {$gaudydiff} {
+                set line [string range $line 1 end]
+            }
+            $ctext insert end "$line\n"
+        } elseif {$diffinhdr || $x == "\\"} {
+            # e.g. "\ No newline at end of file"
+            $ctext insert end "$line\n" filesep
+        } else {
+            # Something else we don't recognize
+            if {$curdifftag != "Comments"} {
+                $ctext insert end "\n"
+                $ctext tag add $curdifftag $curtagstart end
+                set curtagstart [$ctext index "end - 1c"]
+                set curdifftag Comments
+            }
+            $ctext insert end "$line\n" filesep
+        }
+    }
+    $ctext conf -state disabled
+    if {[clock clicks -milliseconds] >= $nextupdate} {
+        incr nextupdate 100
+        fileevent $bdf readable {}
+        update
+        # build the callback with [list ...] (as done everywhere else)
+        # instead of a double-quoted string, so $ids survives intact
+        # regardless of its contents
+        fileevent $bdf readable [list getblobdiffline $bdf $ids]
+    }
+}
+
+# nextfile -- scroll the details pane to the start of the next file
+# after the one currently at the top of the view (if any).
+proc nextfile {} {
+    global difffilestart ctext
+    set here [$ctext index @0,0]
+    for {set i 0} {[info exists difffilestart($i)]} {incr i} {
+        if {[$ctext compare $difffilestart($i) > $here]} {
+            # keep the smallest start position that is still below "here"
+            if {![info exists pos]
+                || [$ctext compare $difffilestart($i) < $pos]} {
+                set pos $difffilestart($i)
+            }
+        }
+    }
+    if {[info exists pos]} {
+        $ctext yview $pos
+    }
+}
+
+# listboxsel -- when a file is selected in the file list, scroll the
+# details pane to that file's fmark.N mark (set when its diff began).
+proc listboxsel {} {
+    global ctext cflist currentid
+    if {![info exists currentid]} return
+    set sel [lsort [$cflist curselection]]
+    if {$sel eq {}} return
+    set first [lindex $sel 0]
+    # the mark may not exist yet if the diff is still being read
+    catch {$ctext yview fmark.$first}
+}
+
+# setcoords -- derive the graph layout metrics (line spacing, character
+# width, canvas origin, line thickness, column spacings) from the
+# current main font.
+proc setcoords {} {
+    global linespc charspc canvx0 canvy0 mainfont
+    global xspc1 xspc2 lthickness
+
+    set linespc [font metrics $mainfont -linespace]
+    set charspc [font measure $mainfont "m"]
+    # brace the expr arguments: braced expressions are byte-compiled
+    # and avoid double substitution (matches the style used elsewhere)
+    set canvy0 [expr {3 + 0.5 * $linespc}]
+    set canvx0 [expr {3 + 0.5 * $linespc}]
+    set lthickness [expr {int($linespc / 9) + 1}]
+    set xspc1(0) $linespc
+    set xspc2 $linespc
+}
+
+# redisplay -- redraw the commit graph after a layout-affecting change
+# (e.g. a font resize).  If a draw is in progress, just flag it to
+# stop and restart; otherwise redraw immediately.
+proc redisplay {} {
+    global stopped redisplaying phase
+    if {$stopped > 1} return
+    if {$phase == "getcommits"} return
+    set redisplaying 1
+    if {$phase == "drawgraph" || $phase == "incrdraw"} {
+        set stopped 1
+    } else {
+        drawgraph
+    }
+}
+
+# incrfont -- grow or shrink all three fonts by $inc points (negative
+# to shrink), update every widget that uses them, and trigger a
+# redisplay of the graph.
+proc incrfont {inc} {
+    global mainfont namefont textfont ctext canv phase
+    global stopped entries
+    unmarkmatches
+    # font specs are {family size ...}; element 1 is the point size
+    set mainfont [lreplace $mainfont 1 1 [expr {[lindex $mainfont 1] + $inc}]]
+    set namefont [lreplace $namefont 1 1 [expr {[lindex $namefont 1] + $inc}]]
+    set textfont [lreplace $textfont 1 1 [expr {[lindex $textfont 1] + $inc}]]
+    setcoords
+    $ctext conf -font $textfont
+    $ctext tag conf filesep -font [concat $textfont bold]
+    foreach e $entries {
+        $e conf -font $mainfont
+    }
+    if {$phase == "getcommits"} {
+        $canv itemconf textitems -font $mainfont
+    }
+    redisplay
+}
+
+# clearsha1 -- clear the SHA1 entry widget, but only when it holds a
+# full 40-character id (i.e. one that was filled in automatically).
+proc clearsha1 {} {
+    global sha1entry sha1string
+    set len [string length $sha1string]
+    if {$len == 40} {
+        $sha1entry delete 0 end
+    }
+}
+
+# sha1change -- variable trace callback on sha1string (n1/n2/op are
+# the standard trace arguments, unused): enable the "Goto" button only
+# when the entry differs from the currently selected commit.
+proc sha1change {n1 n2 op} {
+    global sha1string currentid sha1but
+    if {$sha1string == {}
+        || ([info exists currentid] && $sha1string == $currentid)} {
+        set state disabled
+    } else {
+        set state normal
+    }
+    # avoid reconfiguring the button if nothing changed
+    if {[$sha1but cget -state] == $state} return
+    if {$state == "normal"} {
+        $sha1but conf -state normal -relief raised -text "Goto: "
+    } else {
+        $sha1but conf -state disabled -relief flat -text "SHA1 ID: "
+    }
+}
+
+# gotocommit -- jump to the commit named in the SHA1 entry.  Accepts a
+# tag name, a full id, or an unambiguous short (4-39 hex digit) id
+# prefix; pops up an error if the name is unknown or ambiguous.
+proc gotocommit {} {
+    global sha1string currentid idline tagids
+    global lineid numcommits
+
+    if {$sha1string == {}
+        || ([info exists currentid] && $sha1string == $currentid)} return
+    if {[info exists tagids($sha1string)]} {
+        set id $tagids($sha1string)
+    } else {
+        set id [string tolower $sha1string]
+        if {[regexp {^[0-9a-f]{4,39}$} $id]} {
+            # short id: scan all displayed commits for prefix matches
+            set matches {}
+            for {set l 0} {$l < $numcommits} {incr l} {
+                if {[string match $id* $lineid($l)]} {
+                    lappend matches $lineid($l)
+                }
+            }
+            if {$matches ne {}} {
+                if {[llength $matches] > 1} {
+                    error_popup "Short SHA1 id $id is ambiguous"
+                    return
+                }
+                set id [lindex $matches 0]
+            }
+        }
+    }
+    if {[info exists idline($id)]} {
+        selectline $idline($id) 1
+        return
+    }
+    if {[regexp {^[0-9a-fA-F]{4,}$} $sha1string]} {
+        set type "SHA1 id"
+    } else {
+        set type "Tag"
+    }
+    error_popup "$type $sha1string is not known"
+}
+
+# lineenter -- mouse entered a graph line for commit $id: remember the
+# position and (re)start the 500 ms timer that shows the hover tip.
+proc lineenter {x y id} {
+    global hoverx hovery hoverid hovertimer
+    global commitinfo canv
+
+    if {![info exists commitinfo($id)]} return
+    set hoverx $x
+    set hovery $y
+    set hoverid $id
+    if {[info exists hovertimer]} {
+        after cancel $hovertimer
+    }
+    set hovertimer [after 500 linehover]
+    $canv delete hover
+}
+
+# linemotion -- pointer moved while over the same line: update the
+# hover position and restart the hover timer.
+proc linemotion {x y id} {
+    global hoverx hovery hoverid hovertimer
+
+    if {[info exists hoverid] && $id == $hoverid} {
+        set hoverx $x
+        set hovery $y
+        if {[info exists hovertimer]} {
+            after cancel $hovertimer
+        }
+        set hovertimer [after 500 linehover]
+    }
+}
+
+# lineleave -- pointer left the line: remove any hover tip and cancel
+# the pending hover timer.
+proc lineleave {id} {
+    global hoverid hovertimer canv
+
+    if {[info exists hoverid] && $id == $hoverid} {
+        $canv delete hover
+        if {[info exists hovertimer]} {
+            after cancel $hovertimer
+            unset hovertimer
+        }
+        unset hoverid
+    }
+}
+
+# linehover -- hover timer fired: draw a tooltip (yellow rectangle plus
+# the commit's headline) near the remembered pointer position.
+proc linehover {} {
+    global hoverx hovery hoverid hovertimer
+    global canv linespc lthickness
+    global commitinfo mainfont
+
+    set text [lindex $commitinfo($hoverid) 0]
+    set ymax [lindex [$canv cget -scrollregion] 3]
+    if {$ymax == {}} return
+    # convert widget coordinates to canvas coordinates
+    set yfrac [lindex [$canv yview] 0]
+    set x [expr {$hoverx + 2 * $linespc}]
+    set y [expr {$hovery + $yfrac * $ymax - $linespc / 2}]
+    set x0 [expr {$x - 2 * $lthickness}]
+    set y0 [expr {$y - 2 * $lthickness}]
+    set x1 [expr {$x + [font measure $mainfont $text] + 2 * $lthickness}]
+    set y1 [expr {$y + $linespc + 2 * $lthickness}]
+    set t [$canv create rectangle $x0 $y0 $x1 $y1 \
+               -fill \#ffff80 -outline black -width 1 -tags hover]
+    $canv raise $t
+    set t [$canv create text $x $y -anchor nw -text $text -tags hover]
+    $canv raise $t
+}
+
+# clickisonarrow -- determine whether a click at canvas height $y on
+# commit $id's line landed on one of its truncation arrows.  Returns
+# "up", "down", or {} if the click was not near an arrow end.
+proc clickisonarrow {id y} {
+    global mainline mainlinearrow sidelines lthickness
+
+    # hit slop around the arrow tip, scaled with line thickness
+    set thresh [expr {2 * $lthickness + 6}]
+    if {[info exists mainline($id)]} {
+        if {$mainlinearrow($id) ne "none"} {
+            if {abs([lindex $mainline($id) 1] - $y) < $thresh} {
+                return "up"
+            }
+        }
+    }
+    if {[info exists sidelines($id)]} {
+        foreach ls $sidelines($id) {
+            set coords [lindex $ls 0]
+            set arrow [lindex $ls 2]
+            if {$arrow eq "first" || $arrow eq "both"} {
+                if {abs([lindex $coords 1] - $y) < $thresh} {
+                    return "up"
+                }
+            }
+            if {$arrow eq "last" || $arrow eq "both"} {
+                if {abs([lindex $coords end] - $y) < $thresh} {
+                    return "down"
+                }
+            }
+        }
+    }
+    return {}
+}
+
+# arrowjump -- the user clicked a truncation arrow: scroll the canvas
+# so the continuation of commit $id's line in direction $dirn ("up" or
+# "down") is centred in the view.
+proc arrowjump {id dirn y} {
+    global mainline sidelines canv
+
+    # find the nearest line endpoint beyond $y in the given direction
+    set yt {}
+    if {$dirn eq "down"} {
+        if {[info exists mainline($id)]} {
+            set y1 [lindex $mainline($id) 1]
+            if {$y1 > $y} {
+                set yt $y1
+            }
+        }
+        if {[info exists sidelines($id)]} {
+            foreach ls $sidelines($id) {
+                set y1 [lindex $ls 0 1]
+                if {$y1 > $y && ($yt eq {} || $y1 < $yt)} {
+                    set yt $y1
+                }
+            }
+        }
+    } else {
+        if {[info exists sidelines($id)]} {
+            foreach ls $sidelines($id) {
+                set y1 [lindex $ls 0 end]
+                if {$y1 < $y && ($yt eq {} || $y1 > $yt)} {
+                    set yt $y1
+                }
+            }
+        }
+    }
+    if {$yt eq {}} return
+    set ymax [lindex [$canv cget -scrollregion] 3]
+    if {$ymax eq {} || $ymax <= 0} return
+    # centre the target y in the visible span
+    set view [$canv yview]
+    set yspan [expr {[lindex $view 1] - [lindex $view 0]}]
+    set yfrac [expr {$yt / $ymax - $yspan / 2}]
+    if {$yfrac < 0} {
+        set yfrac 0
+    }
+    $canv yview moveto $yfrac
+}
+
+# lineclick -- handle a click on the graph line of (parent) commit $id.
+# If the click hit a truncation arrow, jump along the line instead;
+# otherwise highlight the line and show the commit's details and
+# children in the details pane.  $isnew is 0 when replaying from the
+# history list (coordinates are then already canvas-relative).
+proc lineclick {x y id isnew} {
+    global ctext commitinfo children cflist canv thickerline
+
+    unmarkmatches
+    unselectline
+    normalline
+    $canv delete hover
+    # draw this line thicker than normal
+    drawlines $id 1
+    set thickerline $id
+    if {$isnew} {
+        set ymax [lindex [$canv cget -scrollregion] 3]
+        if {$ymax eq {}} return
+        set yfrac [lindex [$canv yview] 0]
+        set y [expr {$y + $yfrac * $ymax}]
+    }
+    set dirn [clickisonarrow $id $y]
+    if {$dirn ne {}} {
+        arrowjump $id $dirn $y
+        return
+    }
+
+    if {$isnew} {
+        addtohistory [list lineclick $x $y $id 0]
+    }
+    # fill the details pane with info about this line
+    $ctext conf -state normal
+    $ctext delete 0.0 end
+    $ctext tag conf link -foreground blue -underline 1
+    $ctext tag bind link <Enter> { %W configure -cursor hand2 }
+    $ctext tag bind link <Leave> { %W configure -cursor $curtextcursor }
+    $ctext insert end "Parent:\t"
+    $ctext insert end $id [list link link0]
+    $ctext tag bind link0 <1> [list selbyid $id]
+    set info $commitinfo($id)
+    $ctext insert end "\n\t[lindex $info 0]\n"
+    $ctext insert end "\tAuthor:\t[lindex $info 1]\n"
+    $ctext insert end "\tDate:\t[lindex $info 2]\n"
+    if {[info exists children($id)]} {
+        $ctext insert end "\nChildren:"
+        set i 0
+        foreach child $children($id) {
+            incr i
+            set info $commitinfo($child)
+            $ctext insert end "\n\t"
+            $ctext insert end $child [list link link$i]
+            $ctext tag bind link$i <1> [list selbyid $child]
+            $ctext insert end "\n\t[lindex $info 0]"
+            $ctext insert end "\n\tAuthor:\t[lindex $info 1]"
+            $ctext insert end "\n\tDate:\t[lindex $info 2]\n"
+        }
+    }
+    $ctext conf -state disabled
+
+    $cflist delete 0 end
+}
+
+# normalline -- undo the thick highlight applied by lineclick, if any.
+proc normalline {} {
+    global thickerline
+    if {[info exists thickerline]} {
+        drawlines $thickerline 0
+        unset thickerline
+    }
+}
+
+# selbyid -- select the graph row showing commit $id, if that commit
+# is currently displayed.
+proc selbyid {id} {
+    global idline
+    if {![info exists idline($id)]} return
+    selectline $idline($id) 1
+}
+
+# mstime -- return the elapsed time in seconds (formatted with
+# millisecond precision) since the first call to this proc.
+proc mstime {} {
+    global startmstime
+    if {![info exists startmstime]} {
+        set startmstime [clock clicks -milliseconds]
+    }
+    # was "clock click", which only worked via Tcl's unambiguous
+    # subcommand-prefix matching; spell the subcommand out in full
+    return [format "%.3f" [expr {([clock clicks -milliseconds] - $startmstime) / 1000.0}]]
+}
+
+# rowmenu -- post the per-row context menu for commit $id at screen
+# position (x, y).  The first three entries (which compare against the
+# selected commit) are disabled when $id is itself the selection.
+proc rowmenu {x y id} {
+    global rowctxmenu idline selectedline rowmenuid
+
+    if {![info exists selectedline] || $idline($id) eq $selectedline} {
+        set state disabled
+    } else {
+        set state normal
+    }
+    $rowctxmenu entryconfigure 0 -state $state
+    $rowctxmenu entryconfigure 1 -state $state
+    $rowctxmenu entryconfigure 2 -state $state
+    set rowmenuid $id
+    tk_popup $rowctxmenu $x $y
+}
+
+# diffvssel -- show the diff between the context-menu commit and the
+# selected commit; $dirn chooses which of the two is treated as "new".
+proc diffvssel {dirn} {
+    global rowmenuid selectedline lineid
+
+    if {![info exists selectedline]} return
+    if {$dirn} {
+        set oldid $lineid($selectedline)
+        set newid $rowmenuid
+    } else {
+        set oldid $rowmenuid
+        set newid $lineid($selectedline)
+    }
+    addtohistory [list doseldiff $oldid $newid]
+    doseldiff $oldid $newid
+}
+
+# doseldiff -- set up the details pane with "From"/"To" header links
+# for the two commits, then start the actual diff between them.
+proc doseldiff {oldid newid} {
+    global ctext cflist
+    global commitinfo
+
+    $ctext conf -state normal
+    $ctext delete 0.0 end
+    $ctext mark set fmark.0 0.0
+    $ctext mark gravity fmark.0 left
+    $cflist delete 0 end
+    $cflist insert end "Top"
+    $ctext insert end "From "
+    $ctext tag conf link -foreground blue -underline 1
+    $ctext tag bind link <Enter> { %W configure -cursor hand2 }
+    $ctext tag bind link <Leave> { %W configure -cursor $curtextcursor }
+    $ctext tag bind link0 <1> [list selbyid $oldid]
+    $ctext insert end $oldid [list link link0]
+    $ctext insert end "\n "
+    $ctext insert end [lindex $commitinfo($oldid) 0]
+    $ctext insert end "\n\nTo "
+    $ctext tag bind link1 <1> [list selbyid $newid]
+    $ctext insert end $newid [list link link1]
+    $ctext insert end "\n "
+    $ctext insert end [lindex $commitinfo($newid) 0]
+    $ctext insert end "\n"
+    $ctext conf -state disabled
+    $ctext tag delete Comments
+    $ctext tag remove found 1.0 end
+    startdiff [list $newid $oldid]
+}
+
+# mkpatch -- pop up the "Generate patch" dialog, pre-filled with the
+# currently selected commit as "From" and the context-menu commit as
+# "To", plus an auto-numbered output filename.
+proc mkpatch {} {
+    global rowmenuid currentid commitinfo patchtop patchnum
+
+    if {![info exists currentid]} return
+    set oldid $currentid
+    set oldhead [lindex $commitinfo($oldid) 0]
+    set newid $rowmenuid
+    set newhead [lindex $commitinfo($newid) 0]
+    set top .patch
+    set patchtop $top
+    catch {destroy $top}
+    toplevel $top
+    label $top.title -text "Generate patch"
+    grid $top.title - -pady 10
+    label $top.from -text "From:"
+    entry $top.fromsha1 -width 40 -relief flat
+    $top.fromsha1 insert 0 $oldid
+    $top.fromsha1 conf -state readonly
+    grid $top.from $top.fromsha1 -sticky w
+    entry $top.fromhead -width 60 -relief flat
+    $top.fromhead insert 0 $oldhead
+    $top.fromhead conf -state readonly
+    grid x $top.fromhead -sticky w
+    label $top.to -text "To:"
+    entry $top.tosha1 -width 40 -relief flat
+    $top.tosha1 insert 0 $newid
+    $top.tosha1 conf -state readonly
+    grid $top.to $top.tosha1 -sticky w
+    entry $top.tohead -width 60 -relief flat
+    $top.tohead insert 0 $newhead
+    $top.tohead conf -state readonly
+    grid x $top.tohead -sticky w
+    button $top.rev -text "Reverse" -command mkpatchrev -padx 5
+    grid $top.rev x -pady 10
+    label $top.flab -text "Output file:"
+    entry $top.fname -width 60
+    $top.fname insert 0 [file normalize "patch$patchnum.patch"]
+    incr patchnum
+    grid $top.flab $top.fname -sticky w
+    frame $top.buts
+    button $top.buts.gen -text "Generate" -command mkpatchgo
+    button $top.buts.can -text "Cancel" -command mkpatchcan
+    grid $top.buts.gen $top.buts.can
+    grid columnconfigure $top.buts 0 -weight 1 -uniform a
+    grid columnconfigure $top.buts 1 -weight 1 -uniform a
+    grid $top.buts - -pady 10 -sticky ew
+    focus $top.fname
+}
+
+# mkpatchrev -- "Reverse" button: swap the From and To fields of the
+# patch dialog (entries are readonly, so toggle state to edit them).
+proc mkpatchrev {} {
+    global patchtop
+
+    set oldid [$patchtop.fromsha1 get]
+    set oldhead [$patchtop.fromhead get]
+    set newid [$patchtop.tosha1 get]
+    set newhead [$patchtop.tohead get]
+    foreach e [list fromsha1 fromhead tosha1 tohead] \
+        v [list $newid $newhead $oldid $oldhead] {
+        $patchtop.$e conf -state normal
+        $patchtop.$e delete 0 end
+        $patchtop.$e insert 0 $v
+        $patchtop.$e conf -state readonly
+    }
+}
+
+# mkpatchgo -- "Generate" button: run hg debug-diff-tree in the
+# background, writing the patch to the chosen file, then close the
+# dialog.
+proc mkpatchgo {} {
+    global patchtop
+
+    set oldid [$patchtop.fromsha1 get]
+    set newid [$patchtop.tosha1 get]
+    set fname [$patchtop.fname get]
+    if {[catch {exec hg debug-diff-tree -p $oldid $newid >$fname &} err]} {
+        error_popup "Error creating patch: $err"
+    }
+    catch {destroy $patchtop}
+    unset patchtop
+}
+
+# mkpatchcan -- "Cancel" button: dismiss the patch dialog.
+proc mkpatchcan {} {
+    global patchtop
+
+    catch {destroy $patchtop}
+    unset patchtop
+}
+
+# mktag -- pop up the "Create tag" dialog for the context-menu commit.
+proc mktag {} {
+    global rowmenuid mktagtop commitinfo
+
+    set top .maketag
+    set mktagtop $top
+    catch {destroy $top}
+    toplevel $top
+    label $top.title -text "Create tag"
+    grid $top.title - -pady 10
+    label $top.id -text "ID:"
+    entry $top.sha1 -width 40 -relief flat
+    $top.sha1 insert 0 $rowmenuid
+    $top.sha1 conf -state readonly
+    grid $top.id $top.sha1 -sticky w
+    entry $top.head -width 60 -relief flat
+    $top.head insert 0 [lindex $commitinfo($rowmenuid) 0]
+    $top.head conf -state readonly
+    grid x $top.head -sticky w
+    label $top.tlab -text "Tag name:"
+    entry $top.tag -width 60
+    grid $top.tlab $top.tag -sticky w
+    frame $top.buts
+    button $top.buts.gen -text "Create" -command mktaggo
+    button $top.buts.can -text "Cancel" -command mktagcan
+    grid $top.buts.gen $top.buts.can
+    grid columnconfigure $top.buts 0 -weight 1 -uniform a
+    grid columnconfigure $top.buts 1 -weight 1 -uniform a
+    grid $top.buts - -pady 10 -sticky ew
+    focus $top.tag
+}
+
+# domktag -- validate the tag dialog's fields, run "hg tag", and on
+# success record the new tag locally and redraw the commit's tags.
+proc domktag {} {
+    global mktagtop env tagids idtags
+
+    set id [$mktagtop.sha1 get]
+    set tag [$mktagtop.tag get]
+    if {$tag == {}} {
+        error_popup "No tag name specified"
+        return
+    }
+    if {[info exists tagids($tag)]} {
+        error_popup "Tag \"$tag\" already exists"
+        return
+    }
+    if {[catch {
+        set out [exec hg tag -r $id $tag]
+    } err]} {
+        error_popup "Error creating tag: $err"
+        return
+    }
+
+    set tagids($tag) $id
+    lappend idtags($id) $tag
+    redrawtags $id
+}
+
+# redrawtags -- redraw the tag/head decorations for commit $id on the
+# canvas and shift its headline text to make room; reselect the line
+# if it is the current selection so the details stay consistent.
+proc redrawtags {id} {
+    global canv linehtag idline idpos selectedline
+
+    if {![info exists idline($id)]} return
+    $canv delete tag.$id
+    set xt [eval drawtags $id $idpos($id)]
+    $canv coords $linehtag($idline($id)) $xt [lindex $idpos($id) 2]
+    if {[info exists selectedline] && $selectedline == $idline($id)} {
+        selectline $selectedline 0
+    }
+}
+
+# mktagcan -- dismiss the "Create tag" dialog.
+proc mktagcan {} {
+    global mktagtop
+
+    catch {destroy $mktagtop}
+    unset mktagtop
+}
+
+# mktaggo -- "Create" button: make the tag, then close the dialog.
+proc mktaggo {} {
+    domktag
+    mktagcan
+}
+
+# writecommit -- pop up the "Write commit to file" dialog for the
+# context-menu commit, with an editable command (default wrcomcmd)
+# and output filename.
+proc writecommit {} {
+    global rowmenuid wrcomtop commitinfo wrcomcmd
+
+    set top .writecommit
+    set wrcomtop $top
+    catch {destroy $top}
+    toplevel $top
+    label $top.title -text "Write commit to file"
+    grid $top.title - -pady 10
+    label $top.id -text "ID:"
+    entry $top.sha1 -width 40 -relief flat
+    $top.sha1 insert 0 $rowmenuid
+    $top.sha1 conf -state readonly
+    grid $top.id $top.sha1 -sticky w
+    entry $top.head -width 60 -relief flat
+    $top.head insert 0 [lindex $commitinfo($rowmenuid) 0]
+    $top.head conf -state readonly
+    grid x $top.head -sticky w
+    label $top.clab -text "Command:"
+    entry $top.cmd -width 60 -textvariable wrcomcmd
+    grid $top.clab $top.cmd -sticky w -pady 10
+    label $top.flab -text "Output file:"
+    entry $top.fname -width 60
+    $top.fname insert 0 [file normalize "commit-[string range $rowmenuid 0 6]"]
+    grid $top.flab $top.fname -sticky w
+    frame $top.buts
+    button $top.buts.gen -text "Write" -command wrcomgo
+    button $top.buts.can -text "Cancel" -command wrcomcan
+    grid $top.buts.gen $top.buts.can
+    grid columnconfigure $top.buts 0 -weight 1 -uniform a
+    grid columnconfigure $top.buts 1 -weight 1 -uniform a
+    grid $top.buts - -pady 10 -sticky ew
+    focus $top.fname
+}
+
+# wrcomgo -- "Write" button: pipe the commit id through the user's
+# command via sh, redirecting to the chosen file, then close the
+# dialog.  NOTE(review): $id and the command are interpolated into a
+# shell string; ids are hex so this is safe in practice, but the
+# command field is free-form.
+proc wrcomgo {} {
+    global wrcomtop
+
+    set id [$wrcomtop.sha1 get]
+    set cmd "echo $id | [$wrcomtop.cmd get]"
+    set fname [$wrcomtop.fname get]
+    if {[catch {exec sh -c $cmd >$fname &} err]} {
+        error_popup "Error writing commit: $err"
+    }
+    catch {destroy $wrcomtop}
+    unset wrcomtop
+}
+
+# wrcomcan -- dismiss the "Write commit to file" dialog.
+proc wrcomcan {} {
+    global wrcomtop
+
+    catch {destroy $wrcomtop}
+    unset wrcomtop
+}
+
+# listrefs -- return a three-element list {tags heads otherrefs} of
+# the refs that point at commit $id; each element may be empty.
+proc listrefs {id} {
+    global idtags idheads idotherrefs
+
+    set tags {}
+    if {[info exists idtags($id)]} {
+        set tags $idtags($id)
+    }
+    set heads {}
+    if {[info exists idheads($id)]} {
+        set heads $idheads($id)
+    }
+    set others {}
+    if {[info exists idotherrefs($id)]} {
+        set others $idotherrefs($id)
+    }
+    return [list $tags $heads $others]
+}
+
+# rereadrefs -- re-read all refs from the repository and redraw the
+# decorations of every commit whose set of refs actually changed.
+proc rereadrefs {} {
+    global idtags idheads idotherrefs
+    global tagids headids otherrefids
+
+    # snapshot the refs of every currently-decorated commit
+    set refids [concat [array names idtags] \
+                    [array names idheads] [array names idotherrefs]]
+    foreach id $refids {
+        if {![info exists ref($id)]} {
+            set ref($id) [listrefs $id]
+        }
+    }
+    # drop the old ref maps and reload them
+    foreach v {tagids idtags headids idheads otherrefids idotherrefs} {
+        catch {unset $v}
+    }
+    readrefs
+    # redraw any commit whose refs differ from the snapshot
+    set refids [lsort -unique [concat $refids [array names idtags] \
+                                   [array names idheads] [array names idotherrefs]]]
+    foreach id $refids {
+        set v [listrefs $id]
+        if {![info exists ref($id)] || $ref($id) != $v} {
+            redrawtags $id
+        }
+    }
+}
+
+# showtag -- display a tag's contents (or a minimal Tag/Id summary if
+# no contents are cached) in the details pane.  $isnew is 0 when
+# replaying from the history list.
+proc showtag {tag isnew} {
+    global ctext cflist tagcontents tagids linknum
+
+    if {$isnew} {
+        addtohistory [list showtag $tag 0]
+    }
+    $ctext conf -state normal
+    $ctext delete 0.0 end
+    set linknum 0
+    if {[info exists tagcontents($tag)]} {
+        set text $tagcontents($tag)
+    } else {
+        set text "Tag: $tag\nId: $tagids($tag)"
+    }
+    appendwithlinks $text
+    $ctext conf -state disabled
+    $cflist delete 0 end
+}
+
+# doquit -- terminate the application; setting "stopped" high tells
+# any in-progress background readers to bail out.
+proc doquit {} {
+    global stopped
+    set stopped 100
+    destroy .
+}
+
+# defaults...
+set datemode 0
+set boldnames 0
+set diffopts "-U 5 -p"
+# command used by the "Write commit to file" dialog
+set wrcomcmd "hg debug-diff-tree --stdin -p --pretty"
+
+set mainfont {Helvetica 9}
+set textfont {Courier 9}
+set findmergefiles 0
+set gaudydiff 0
+set maxgraphpct 50
+set maxwidth 16
+
+set colors {green red blue magenta darkgrey brown orange}
+
+# user overrides for any of the above
+catch {source ~/.gitk}
+
+set namefont $mainfont
+if {$boldnames} {
+    lappend namefont bold
+}
+
+# parse command-line options: -b = bold names, -d = date mode;
+# anything else is collected and passed through to the revision query
+set revtreeargs {}
+foreach arg $argv {
+    switch -regexp -- $arg {
+        "^$" { }
+        "^-b" { set boldnames 1 }
+        "^-d" { set datemode 1 }
+        default {
+            lappend revtreeargs $arg
+        }
+    }
+}
+
+set history {}
+set historyindex 0
+
+set stopped 0
+set redisplaying 0
+set stuffsaved 0
+set patchnum 0
+setcoords
+makewindow
+readrefs
+getcommits $revtreeargs
new file mode 100644
--- /dev/null
+++ b/contrib/hgk.py
@@ -0,0 +1,339 @@
+# Minimal support for git commands on an hg repository
+#
+# Copyright 2005 Chris Mason <mason@suse.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import time, sys, signal, os
+from mercurial import hg, mdiff, fancyopts, commands, ui, util
+
+def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
+ changes=None, text=False):
+ def date(c):
+ return time.asctime(time.gmtime(c[2][0]))
+
+ if not changes:
+ changes = repo.changes(node1, node2, files, match=match)
+ modified, added, removed, deleted, unknown = changes
+ if files:
+ modified, added, removed = map(lambda x: filterfiles(files, x),
+ (modified, added, removed))
+
+ if not modified and not added and not removed:
+ return
+
+ if node2:
+ change = repo.changelog.read(node2)
+ mmap2 = repo.manifest.read(change[0])
+ date2 = date(change)
+ def read(f):
+ return repo.file(f).read(mmap2[f])
+ else:
+ date2 = time.asctime()
+ if not node1:
+ node1 = repo.dirstate.parents()[0]
+ def read(f):
+ return repo.wfile(f).read()
+
+ change = repo.changelog.read(node1)
+ mmap = repo.manifest.read(change[0])
+ date1 = date(change)
+
+ for f in modified:
+ to = None
+ if f in mmap:
+ to = repo.file(f).read(mmap[f])
+ tn = read(f)
+ fp.write("diff --git a/%s b/%s\n" % (f, f))
+ fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
+ for f in added:
+ to = None
+ tn = read(f)
+ fp.write("diff --git /dev/null b/%s\n" % (f))
+ fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
+ for f in removed:
+ to = repo.file(f).read(mmap[f])
+ tn = None
+ fp.write("diff --git a/%s /dev/null\n" % (f))
+ fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
+
+def difftree(ui, repo, node1=None, node2=None, **opts):
+ """diff trees from two commits"""
+ def __difftree(repo, node1, node2):
+ def date(c):
+ return time.asctime(time.gmtime(c[2][0]))
+
+ if node2:
+ change = repo.changelog.read(node2)
+ mmap2 = repo.manifest.read(change[0])
+ modified, added, removed, deleted, unknown = repo.changes(node1, node2)
+ def read(f): return repo.file(f).read(mmap2[f])
+ date2 = date(change)
+ else:
+ date2 = time.asctime()
+ modified, added, removed, deleted, unknown = repo.changes(node1)
+ if not node1:
+ node1 = repo.dirstate.parents()[0]
+ def read(f): return file(os.path.join(repo.root, f)).read()
+
+ change = repo.changelog.read(node1)
+ mmap = repo.manifest.read(change[0])
+ date1 = date(change)
+ empty = "0" * 40;
+
+ for f in modified:
+ # TODO get file permissions
+ print ":100664 100664 %s %s M\t%s\t%s" % (hg.hex(mmap[f]),
+ hg.hex(mmap2[f]), f, f)
+ for f in added:
+ print ":000000 100664 %s %s N\t%s\t%s" % (empty, hg.hex(mmap2[f]), f, f)
+ for f in removed:
+ print ":100664 000000 %s %s D\t%s\t%s" % (hg.hex(mmap[f]), empty, f, f)
+ ##
+
+ while True:
+ if opts['stdin']:
+ try:
+ line = raw_input().split(' ')
+ node1 = line[0]
+ if len(line) > 1:
+ node2 = line[1]
+ else:
+ node2 = None
+ except EOFError:
+ break
+ node1 = repo.lookup(node1)
+ if node2:
+ node2 = repo.lookup(node2)
+ else:
+ node2 = node1
+ node1 = repo.changelog.parents(node1)[0]
+ if opts['patch']:
+ if opts['pretty']:
+ catcommit(repo, node2, "")
+ dodiff(sys.stdout, ui, repo, node1, node2)
+ else:
+ __difftree(repo, node1, node2)
+ if not opts['stdin']:
+ break
+
+def catcommit(repo, n, prefix, changes=None):
+ nlprefix = '\n' + prefix;
+ (p1, p2) = repo.changelog.parents(n)
+ (h, h1, h2) = map(hg.hex, (n, p1, p2))
+ (i1, i2) = map(repo.changelog.rev, (p1, p2))
+ if not changes:
+ changes = repo.changelog.read(n)
+ print "tree %s" % (hg.hex(changes[0]))
+ if i1 != -1: print "parent %s" % (h1)
+ if i2 != -1: print "parent %s" % (h2)
+ date_ar = changes[2]
+ date = int(float(date_ar[0]))
+ lines = changes[4].splitlines()
+ if lines[-1].startswith('committer:'):
+ committer = lines[-1].split(': ')[1].rstrip()
+ else:
+ committer = changes[1]
+
+ print "author %s %s %s" % (changes[1], date, date_ar[1])
+ print "committer %s %s %s" % (committer, date, date_ar[1])
+ print ""
+ if prefix != "":
+ print "%s%s" % (prefix, changes[4].replace('\n', nlprefix).strip())
+ else:
+ print changes[4]
+ if prefix:
+ sys.stdout.write('\0')
+
+def base(ui, repo, node1, node2):
+ """Output common ancestor information"""
+ node1 = repo.lookup(node1)
+ node2 = repo.lookup(node2)
+ n = repo.changelog.ancestor(node1, node2)
+ print hg.hex(n)
+
+def catfile(ui, repo, type=None, r=None, **opts):
+ """cat a specific revision"""
+ # in stdin mode, every line except the commit is prefixed with two
+# spaces. This way our caller can find the commit without magic
+ # strings
+ #
+ prefix = ""
+ if opts['stdin']:
+ try:
+ (type, r) = raw_input().split(' ');
+ prefix = " "
+ except EOFError:
+ return
+
+ else:
+ if not type or not r:
+ ui.warn("cat-file: type or revision not supplied\n")
+ commands.help_(ui, 'cat-file')
+
+ while r:
+ if type != "commit":
+ sys.stderr.write("aborting hg cat-file only understands commits\n")
+ sys.exit(1);
+ n = repo.lookup(r)
+ catcommit(repo, n, prefix)
+ if opts['stdin']:
+ try:
+ (type, r) = raw_input().split(' ');
+ except EOFError:
+ break
+ else:
+ break
+
+# git rev-tree is a confusing thing. You can supply a number of
+# commit sha1s on the command line, and it walks the commit history
+# telling you which commits are reachable from the supplied ones via
+# a bitmask based on arg position.
+# you can specify a commit to stop at by starting the sha1 with ^
+def revtree(args, repo, full="tree", maxnr=0, parents=False):
+ def chlogwalk():
+ ch = repo.changelog
+ count = ch.count()
+ i = count
+ l = [0] * 100
+ chunk = 100
+ while True:
+ if chunk > i:
+ chunk = i
+ i = 0
+ else:
+ i -= chunk
+
+ for x in xrange(0, chunk):
+ if i + x >= count:
+ l[chunk - x:] = [0] * (chunk - x)
+ break
+ if full != None:
+ l[x] = ch.read(ch.node(i + x))
+ else:
+ l[x] = 1
+ for x in xrange(chunk-1, -1, -1):
+ if l[x] != 0:
+ yield (i + x, full != None and l[x] or None)
+ if i == 0:
+ break
+
+ # calculate and return the reachability bitmask for sha
+ def is_reachable(ar, reachable, sha):
+ if len(ar) == 0:
+ return 1
+ mask = 0
+ for i in range(len(ar)):
+ if sha in reachable[i]:
+ mask |= 1 << i
+
+ return mask
+
+ reachable = []
+ stop_sha1 = []
+ want_sha1 = []
+ count = 0
+
+ # figure out which commits they are asking for and which ones they
+ # want us to stop on
+ for i in range(len(args)):
+ if args[i].startswith('^'):
+ s = repo.lookup(args[i][1:])
+ stop_sha1.append(s)
+ want_sha1.append(s)
+ elif args[i] != 'HEAD':
+ want_sha1.append(repo.lookup(args[i]))
+
+ # calculate the graph for the supplied commits
+ for i in range(len(want_sha1)):
+ reachable.append({});
+ n = want_sha1[i];
+ visit = [n];
+ reachable[i][n] = 1
+ while visit:
+ n = visit.pop(0)
+ if n in stop_sha1:
+ continue
+ for p in repo.changelog.parents(n):
+ if p not in reachable[i]:
+ reachable[i][p] = 1
+ visit.append(p)
+ if p in stop_sha1:
+ continue
+
+ # walk the repository looking for commits that are in our
+ # reachability graph
+ #for i in range(repo.changelog.count()-1, -1, -1):
+ for i, changes in chlogwalk():
+ n = repo.changelog.node(i)
+ mask = is_reachable(want_sha1, reachable, n)
+ if mask:
+ parentstr = ""
+ if parents:
+ pp = repo.changelog.parents(n)
+ if pp[0] != hg.nullid:
+ parentstr += " " + hg.hex(pp[0])
+ if pp[1] != hg.nullid:
+ parentstr += " " + hg.hex(pp[1])
+ if not full:
+ print hg.hex(n) + parentstr
+ elif full == "commit":
+ print hg.hex(n) + parentstr
+ catcommit(repo, n, ' ', changes)
+ else:
+ (p1, p2) = repo.changelog.parents(n)
+ (h, h1, h2) = map(hg.hex, (n, p1, p2))
+ (i1, i2) = map(repo.changelog.rev, (p1, p2))
+
+ date = changes[2][0]
+ print "%s %s:%s" % (date, h, mask),
+ mask = is_reachable(want_sha1, reachable, p1)
+ if i1 != -1 and mask > 0:
+ print "%s:%s " % (h1, mask),
+ mask = is_reachable(want_sha1, reachable, p2)
+ if i2 != -1 and mask > 0:
+ print "%s:%s " % (h2, mask),
+ print ""
+ if maxnr and count >= maxnr:
+ break
+ count += 1
+
+# git rev-list tries to order things by date, and has the ability to stop
+# at a given commit without walking the whole repo. TODO add the stop
+# parameter
+def revlist(ui, repo, *revs, **opts):
+ """print revisions"""
+ if opts['header']:
+ full = "commit"
+ else:
+ full = None
+ copy = [x for x in revs]
+ revtree(copy, repo, full, opts['max_count'], opts['parents'])
+
+def view(ui, repo, *etc):
+ "start interactive history viewer"
+ os.chdir(repo.root)
+ os.system(ui.config("hgk", "path", "hgk") + " " + " ".join(etc))
+
+cmdtable = {
+ "view": (view, [], 'hg view'),
+ "debug-diff-tree": (difftree, [('p', 'patch', None, 'generate patch'),
+ ('r', 'recursive', None, 'recursive'),
+ ('P', 'pretty', None, 'pretty'),
+ ('s', 'stdin', None, 'stdin'),
+ ('C', 'copy', None, 'detect copies'),
+ ('S', 'search', "", 'search')],
+ "hg git-diff-tree [options] node1 node2"),
+ "debug-cat-file": (catfile, [('s', 'stdin', None, 'stdin')],
+ "hg debug-cat-file [options] type file"),
+ "debug-merge-base": (base, [], "hg debug-merge-base node node"),
+ "debug-rev-list": (revlist, [('H', 'header', None, 'header'),
+ ('t', 'topo-order', None, 'topo-order'),
+ ('p', 'parents', None, 'parents'),
+ ('n', 'max-count', 0, 'max-count')],
+ "hg debug-rev-list [options] revs"),
+}
+
+def reposetup(ui, repo):
+ pass
new file mode 100644
--- /dev/null
+++ b/contrib/hgsh/Makefile
@@ -0,0 +1,13 @@
+CC := gcc
+CFLAGS := -g -O2 -Wall -Werror
+
+prefix ?= /usr/bin
+
+hgsh: hgsh.o
+ $(CC) -o $@ $<
+
+install: hgsh
+ install -m755 hgsh $(prefix)
+
+clean:
+ rm -f *.o hgsh
new file mode 100644
--- /dev/null
+++ b/contrib/hgsh/hgsh.c
@@ -0,0 +1,372 @@
+/*
+ * hgsh.c - restricted login shell for mercurial
+ *
+ * Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+ *
+ * This software may be used and distributed according to the terms of the
+ * GNU General Public License, incorporated herein by reference.
+ *
+ * this program is a login shell for a dedicated mercurial user account. it
+ * only allows a few actions:
+ *
+ * 1. run hg in server mode on specific repository. no other hg commands
+ * are allowed. we try to verify that repo to be accessed exists under
+ * given top-level directory.
+ *
+ * 2. (optional) forward ssh connection from firewall/gateway machine to
+ * "real" mercurial host, to let users outside intranet pull and push
+ * changes through firewall.
+ *
+ * 3. (optional) run normal shell, to allow to "su" to mercurial user, use
+ * "sudo" to run programs as that user, or run cron jobs as that user.
+ *
+ * only tested on linux yet. patches for non-linux systems welcome.
+ */
+
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE /* for asprintf */
+#endif
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <sysexits.h>
+#include <unistd.h>
+
+/*
+ * user config.
+ *
+ * if you see a hostname below, just use first part of hostname. example,
+ * if you have host named foo.bar.com, use "foo".
+ */
+
+/*
+ * HG_GATEWAY: hostname of gateway/firewall machine that people outside your
+ * intranet ssh into if they need to ssh to other machines. if you do not
+ * have such machine, set to NULL.
+ */
+#ifndef HG_GATEWAY
+#define HG_GATEWAY "gateway"
+#endif
+
+/*
+ * HG_HOST: hostname of mercurial server. if any machine is allowed, set to
+ * NULL.
+ */
+#ifndef HG_HOST
+#define HG_HOST "mercurial"
+#endif
+
+/*
+ * HG_USER: username to log in from HG_GATEWAY to HG_HOST. if gateway and
+ * host username are same, set to NULL.
+ */
+#ifndef HG_USER
+#define HG_USER "hg"
+#endif
+
+/*
+ * HG_ROOT: root of tree full of mercurial repos. if you do not want to
+ * validate location of repo when someone is trying to access it, set to NULL.
+ */
+#ifndef HG_ROOT
+#define HG_ROOT "/home/hg/repos"
+#endif
+
+/*
+ * HG: path to the mercurial executable to run.
+ */
+#ifndef HG
+#define HG "/home/hg/bin/hg"
+#endif
+
+/*
+ * HG_SHELL: shell to use for actions like "sudo" and "su" access to
+ * mercurial user, and cron jobs. if you want to make these things
+ * impossible, set to NULL.
+ */
+#ifndef HG_SHELL
+#define HG_SHELL NULL
+// #define HG_SHELL "/bin/bash"
+#endif
+
+/*
+ * HG_HELP: some way for users to get support if they have problem. if they
+ * should not get helpful message, set to NULL.
+ */
+#ifndef HG_HELP
+#define HG_HELP "please contact support@example.com for help."
+#endif
+
+/*
+ * SSH: path to ssh executable to run, if forwarding from HG_GATEWAY to
+ * HG_HOST. if you want to use rsh instead (why?), you need to modify
+ * arguments it is called with. see forward_through_gateway.
+ */
+#ifndef SSH
+#define SSH "/usr/bin/ssh"
+#endif
+
+/*
+ * tell whether to print command that is to be executed. useful for
+ * debugging. should not interfere with mercurial operation, since
+ * mercurial only cares about stdin and stdout, and this prints to stderr.
+ */
+static const int debug = 0;
+
+static void print_cmdline(int argc, char **argv)
+{
+ FILE *fp = stderr;
+ int i;
+
+ fputs("command: ", fp);
+
+ for (i = 0; i < argc; i++) {
+ char *spc = strpbrk(argv[i], " \t\r\n");
+ if (spc) {
+ fputc('\'', fp);
+ }
+ fputs(argv[i], fp);
+ if (spc) {
+ fputc('\'', fp);
+ }
+ if (i < argc - 1) {
+ fputc(' ', fp);
+ }
+ }
+ fputc('\n', fp);
+ fflush(fp);
+}
+
+static void usage(const char *reason, int exitcode)
+{
+ char *hg_help = HG_HELP;
+
+ if (reason) {
+ fprintf(stderr, "*** Error: %s.\n", reason);
+ }
+ fprintf(stderr, "*** This program has been invoked incorrectly.\n");
+ if (hg_help) {
+ fprintf(stderr, "*** %s\n", hg_help);
+ }
+ exit(exitcode ? exitcode : EX_USAGE);
+}
+
+/*
+ * run on gateway host to make another ssh connection, to "real" mercurial
+ * server. it sends its command line unmodified to far end.
+ *
+ * never called if HG_GATEWAY is NULL.
+ */
+static void forward_through_gateway(int argc, char **argv)
+{
+ char *ssh = SSH;
+ char *hg_host = HG_HOST;
+ char *hg_user = HG_USER;
+ char **nargv = alloca((10 + argc) * sizeof(char *));
+ int i = 0, j;
+
+ nargv[i++] = ssh;
+ nargv[i++] = "-q";
+ nargv[i++] = "-T";
+ nargv[i++] = "-x";
+ if (hg_user) {
+ nargv[i++] = "-l";
+ nargv[i++] = hg_user;
+ }
+ nargv[i++] = hg_host;
+
+ /*
+ * sshd called us with added "-c", because it thinks we are a shell.
+ * drop it if we find it.
+ */
+ j = 1;
+ if (j < argc && strcmp(argv[j], "-c") == 0) {
+ j++;
+ }
+
+ for (; j < argc; i++, j++) {
+ nargv[i] = argv[j];
+ }
+ nargv[i] = NULL;
+
+ if (debug) {
+ print_cmdline(i, nargv);
+ }
+
+ execv(ssh, nargv);
+ perror(ssh);
+ exit(EX_UNAVAILABLE);
+}
+
+/*
+ * run shell. let administrator "su" to mercurial user's account to do
+ * administrative works.
+ *
+ * never called if HG_SHELL is NULL.
+ */
+static void run_shell(int argc, char **argv)
+{
+ char *hg_shell = HG_SHELL;
+ char **nargv;
+ char *c;
+ int i;
+
+ nargv = alloca((argc + 3) * sizeof(char *));
+ c = strrchr(hg_shell, '/');
+
+ /* tell "real" shell it is login shell, if needed. */
+
+ if (argv[0][0] == '-' && c) {
+ nargv[0] = strdup(c);
+ if (nargv[0] == NULL) {
+ perror("malloc");
+ exit(EX_OSERR);
+ }
+ nargv[0][0] = '-';
+ } else {
+ nargv[0] = hg_shell;
+ }
+
+ for (i = 1; i < argc; i++) {
+ nargv[i] = argv[i];
+ }
+ nargv[i] = NULL;
+
+ if (debug) {
+ print_cmdline(i, nargv);
+ }
+
+ execv(hg_shell, nargv);
+ perror(hg_shell);
+ exit(EX_OSFILE);
+}
+
+/*
+ * paranoid wrapper, runs hg executable in server mode.
+ */
+static void serve_data(int argc, char **argv)
+{
+ char *hg_root = HG_ROOT;
+ char *repo, *abspath;
+ char *nargv[6];
+ struct stat st;
+ size_t repolen;
+ int i;
+
+ /*
+ * check argv for looking okay. we should be invoked with argv
+ * resembling like this:
+ *
+ * hgsh
+ * -c
+ * hg -R some/path serve --stdio
+ *
+ * the "-c" is added by sshd, because it thinks we are login shell.
+ */
+
+ if (argc != 3) {
+ goto badargs;
+ }
+
+ if (strcmp(argv[1], "-c") != 0) {
+ goto badargs;
+ }
+
+ if (sscanf(argv[2], "hg -R %as serve --stdio", &repo) != 1) {
+ goto badargs;
+ }
+
+ repolen = repo ? strlen(repo) : 0;
+
+ if (repolen == 0) {
+ goto badargs;
+ }
+
+ if (hg_root) {
+ if (asprintf(&abspath, "%s/%s/.hg/data", hg_root, repo) == -1) {
+ goto badargs;
+ }
+
+ /*
+ * attempt to stop break out from inside the repository tree. could
+ * do something more clever here, because e.g. we could traverse a
+ * symlink that looks safe, but really breaks us out of tree.
+ */
+
+ if (strstr(abspath, "/../") != NULL) {
+ goto badargs;
+ }
+
+ /* verify that we really are looking at valid repo. */
+
+ if (stat(abspath, &st) == -1) {
+ perror(repo);
+ exit(EX_DATAERR);
+ }
+
+ if (chdir(hg_root) == -1) {
+ perror(hg_root);
+ exit(EX_SOFTWARE);
+ }
+ }
+
+ i = 0;
+ nargv[i++] = HG;
+ nargv[i++] = "-R";
+ nargv[i++] = repo;
+ nargv[i++] = "serve";
+ nargv[i++] = "--stdio";
+ nargv[i] = NULL;
+
+ if (debug) {
+ print_cmdline(i, nargv);
+ }
+
+ execv(HG, nargv);
+ perror(HG);
+ exit(EX_UNAVAILABLE);
+
+badargs:
+ /* print useless error message. */
+
+ usage("invalid arguments", EX_DATAERR);
+}
+
+int main(int argc, char **argv)
+{
+ char host[1024];
+ char *c;
+
+ if (gethostname(host, sizeof(host)) == -1) {
+ perror("gethostname");
+ exit(EX_OSERR);
+ }
+
+ if ((c = strchr(host, '.')) != NULL) {
+ *c = '\0';
+ }
+
+ if (getenv("SSH_CLIENT")) {
+ char *hg_gateway = HG_GATEWAY;
+ char *hg_host = HG_HOST;
+
+ if (hg_gateway && strcmp(host, hg_gateway) == 0) {
+ forward_through_gateway(argc, argv);
+ }
+
+ if (hg_host && strcmp(host, hg_host) != 0) {
+ usage("invoked on unexpected host", EX_USAGE);
+ }
+
+ serve_data(argc, argv);
+ } else if (HG_SHELL) {
+ run_shell(argc, argv);
+ } else {
+ usage("invalid arguments", EX_DATAERR);
+ }
+
+ return 0;
+}
new file mode 100644
--- /dev/null
+++ b/contrib/macosx/Readme.html
@@ -0,0 +1,38 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <meta http-equiv="Content-Style-Type" content="text/css">
+ <title></title>
+ <style type="text/css">
+ p.p1 {margin: 0.0px 0.0px 0.0px 0.0px; font: 14.0px Helvetica}
+ p.p2 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica; min-height: 14.0px}
+ p.p3 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica}
+ p.p4 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica; color: #000fed}
+ span.s1 {text-decoration: underline}
+ span.s2 {font: 12.0px Courier}
+ </style>
+</head>
+<body>
+<p class="p1"><b>Before you install</b></p>
+<p class="p2"><br></p>
+<p class="p3">This is <i>not</i> a stand-alone version of Mercurial.</p>
+<p class="p2"><br></p>
+<p class="p3">To use it, you must have the “official unofficial” MacPython 2.4.1 installed.</p>
+<p class="p2"><br></p>
+<p class="p3">You can download MacPython 2.4.1 from here:</p>
+<p class="p4"><span class="s1"><a href="http://python.org/ftp/python/2.4.1/MacPython-OSX-2.4.1-1.dmg">http://python.org/ftp/python/2.4.1/MacPython-OSX-2.4.1-1.dmg</a></span></p>
+<p class="p2"><br></p>
+<p class="p3">For more information on MacPython, go here:</p>
+<p class="p4"><span class="s1"><a href="http://undefined.org/python/">http://undefined.org/python</a></span></p>
+<p class="p2"><br></p>
+<p class="p1"><b>After you install</b></p>
+<p class="p2"><br></p>
+<p class="p3">This package installs the <span class="s2">hg</span> executable in <span class="s2">/usr/local/bin</span>. This directory may not be in your shell's search path. Don't forget to check.</p>
+<p class="p2"><br></p>
+<p class="p1"><b>Reporting problems</b></p>
+<p class="p2"><br></p>
+<p class="p3">If you run into any problems, please file a bug online:</p>
+<p class="p3"><a href="http://www.selenic.com/mercurial/bts">http://www.selenic.com/mercurial/bts</a></p>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/contrib/macosx/Welcome.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <meta http-equiv="Content-Style-Type" content="text/css">
+ <title></title>
+ <style type="text/css">
+ p.p1 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica}
+ p.p2 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica; min-height: 14.0px}
+ </style>
+</head>
+<body>
+<p class="p1">This is a prepackaged release of <a href="http://www.selenic.com/mercurial">Mercurial</a> for Mac OS X.</p>
+<p class="p2"><br></p>
+<p class="p1">It is based on Mercurial 0.8.</p>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/contrib/macosx/macosx-build.txt
@@ -0,0 +1,11 @@
+to build a new macosx binary package:
+
+install macpython from http://undefined.org/python/
+
+install py2app from http://pythonmac.org/packages/
+
+make sure /usr/local/bin is in your path
+
+run bdist_mpkg in top-level hg directory
+
+find packaged stuff in dist directory
new file mode 100644
--- /dev/null
+++ b/contrib/mercurial.el
@@ -0,0 +1,1135 @@
+;;; mercurial.el --- Emacs support for the Mercurial distributed SCM
+
+;; Copyright (C) 2005 Bryan O'Sullivan
+
+;; Author: Bryan O'Sullivan <bos@serpentine.com>
+
+;; mercurial.el is free software; you can redistribute it and/or
+;; modify it under the terms of version 2 of the GNU General Public
+;; License as published by the Free Software Foundation.
+
+;; mercurial.el is distributed in the hope that it will be useful, but
+;; WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+;; General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with mercurial.el, GNU Emacs, or XEmacs; see the file COPYING
+;; (`C-h C-l'). If not, write to the Free Software Foundation, Inc.,
+;; 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+;;; Commentary:
+
+;; mercurial.el builds upon Emacs's VC mode to provide flexible
+;; integration with the Mercurial distributed SCM tool.
+
+;; To get going as quickly as possible, load mercurial.el into Emacs and
+;; type `C-c h h'; this runs hg-help-overview, which prints a helpful
+;; usage overview.
+
+;; Much of the inspiration for mercurial.el comes from Rajesh
+;; Vaidheeswarran's excellent p4.el, which does an admirably thorough
+;; job for the commercial Perforce SCM product. In fact, substantial
+;; chunks of code are adapted from p4.el.
+
+;; This code has been developed under XEmacs 21.5, and may not work as
+;; well under GNU Emacs (albeit tested under 21.4). Patches to
+;; enhance the portability of this code, fix bugs, and add features
+;; are most welcome. You can clone a Mercurial repository for this
+;; package from http://www.serpentine.com/hg/hg-emacs
+
+;; Please send problem reports and suggestions to bos@serpentine.com.
+
+
+;;; Code:
+
+(require 'advice)
+(require 'cl)
+(require 'diff-mode)
+(require 'easymenu)
+(require 'executable)
+(require 'vc)
+
+
+;;; XEmacs has view-less, while GNU Emacs has view. Joy.
+
+(condition-case nil
+ (require 'view-less)
+ (error nil))
+(condition-case nil
+ (require 'view)
+ (error nil))
+
+
+;;; Variables accessible through the custom system.
+
+(defgroup mercurial nil
+ "Mercurial distributed SCM."
+ :group 'tools)
+
+(defcustom hg-binary
+ (or (executable-find "hg")
+ (dolist (path '("~/bin/hg" "/usr/bin/hg" "/usr/local/bin/hg"))
+ (when (file-executable-p path)
+ (return path))))
+ "The path to Mercurial's hg executable."
+ :type '(file :must-match t)
+ :group 'mercurial)
+
+(defcustom hg-mode-hook nil
+ "Hook run when a buffer enters hg-mode."
+ :type 'sexp
+ :group 'mercurial)
+
+(defcustom hg-commit-mode-hook nil
+ "Hook run when a buffer is created to prepare a commit."
+ :type 'sexp
+ :group 'mercurial)
+
+(defcustom hg-pre-commit-hook nil
+ "Hook run before a commit is performed.
+If you want to prevent the commit from proceeding, raise an error."
+ :type 'sexp
+ :group 'mercurial)
+
+(defcustom hg-log-mode-hook nil
+ "Hook run after a buffer is filled with log information."
+ :type 'sexp
+ :group 'mercurial)
+
+(defcustom hg-global-prefix "\C-ch"
+ "The global prefix for Mercurial keymap bindings."
+ :type 'sexp
+ :group 'mercurial)
+
+(defcustom hg-commit-allow-empty-message nil
+ "Whether to allow changes to be committed with empty descriptions."
+ :type 'boolean
+ :group 'mercurial)
+
+(defcustom hg-commit-allow-empty-file-list nil
+ "Whether to allow changes to be committed without any modified files."
+ :type 'boolean
+ :group 'mercurial)
+
+(defcustom hg-rev-completion-limit 100
+ "The maximum number of revisions that hg-read-rev will offer to complete.
+This affects memory usage and performance when prompting for revisions
+in a repository with a lot of history."
+ :type 'integer
+ :group 'mercurial)
+
+(defcustom hg-log-limit 50
+ "The maximum number of revisions that hg-log will display."
+ :type 'integer
+ :group 'mercurial)
+
+(defcustom hg-update-modeline t
+ "Whether to update the modeline with the status of a file after every save.
+Set this to nil on platforms with poor process management, such as Windows."
+ :type 'boolean
+ :group 'mercurial)
+
+(defcustom hg-incoming-repository "default"
+ "The repository from which changes are pulled from by default.
+This should be a symbolic repository name, since it is used for all
+repository-related commands."
+ :type 'string
+ :group 'mercurial)
+
+(defcustom hg-outgoing-repository "default-push"
+ "The repository to which changes are pushed to by default.
+This should be a symbolic repository name, since it is used for all
+repository-related commands."
+ :type 'string
+ :group 'mercurial)
+
+
+;;; Other variables.
+
+(defconst hg-running-xemacs (string-match "XEmacs" emacs-version)
+ "Is mercurial.el running under XEmacs?")
+
+(defvar hg-mode nil
+ "Is this file managed by Mercurial?")
+(make-variable-buffer-local 'hg-mode)
+(put 'hg-mode 'permanent-local t)
+
+(defvar hg-status nil)
+(make-variable-buffer-local 'hg-status)
+(put 'hg-status 'permanent-local t)
+
+(defvar hg-prev-buffer nil)
+(make-variable-buffer-local 'hg-prev-buffer)
+(put 'hg-prev-buffer 'permanent-local t)
+
+(defvar hg-root nil)
+(make-variable-buffer-local 'hg-root)
+(put 'hg-root 'permanent-local t)
+
+(defvar hg-output-buffer-name "*Hg*"
+ "The name to use for Mercurial output buffers.")
+
+(defvar hg-file-history nil)
+(defvar hg-repo-history nil)
+(defvar hg-rev-history nil)
+
+
+;;; Random constants.
+
+(defconst hg-commit-message-start
+ "--- Enter your commit message. Type `C-c C-c' to commit. ---\n")
+
+(defconst hg-commit-message-end
+ "--- Files in bold will be committed. Click to toggle selection. ---\n")
+
+
+;;; hg-mode keymap.
+
+(defvar hg-mode-map (make-sparse-keymap))
+(define-key hg-mode-map "\C-xv" 'hg-prefix-map)
+
+(defvar hg-prefix-map
+ (let ((map (copy-keymap vc-prefix-map)))
+ (if (functionp 'set-keymap-name)
+ (set-keymap-name map 'hg-prefix-map)); XEmacs
+ map)
+ "This keymap overrides some default vc-mode bindings.")
+(fset 'hg-prefix-map hg-prefix-map)
+(define-key hg-prefix-map "=" 'hg-diff)
+(define-key hg-prefix-map "c" 'hg-undo)
+(define-key hg-prefix-map "g" 'hg-annotate)
+(define-key hg-prefix-map "l" 'hg-log)
+(define-key hg-prefix-map "n" 'hg-commit-start)
+;; (define-key hg-prefix-map "r" 'hg-update)
+(define-key hg-prefix-map "u" 'hg-revert-buffer)
+(define-key hg-prefix-map "~" 'hg-version-other-window)
+
+(add-minor-mode 'hg-mode 'hg-mode hg-mode-map)
+
+
+;;; Global keymap.
+
+(global-set-key "\C-xvi" 'hg-add)
+
+(defvar hg-global-map (make-sparse-keymap))
+(fset 'hg-global-map hg-global-map)
+(global-set-key hg-global-prefix 'hg-global-map)
+(define-key hg-global-map "," 'hg-incoming)
+(define-key hg-global-map "." 'hg-outgoing)
+(define-key hg-global-map "<" 'hg-pull)
+(define-key hg-global-map "=" 'hg-diff-repo)
+(define-key hg-global-map ">" 'hg-push)
+(define-key hg-global-map "?" 'hg-help-overview)
+(define-key hg-global-map "A" 'hg-addremove)
+(define-key hg-global-map "U" 'hg-revert)
+(define-key hg-global-map "a" 'hg-add)
+(define-key hg-global-map "c" 'hg-commit-start)
+(define-key hg-global-map "f" 'hg-forget)
+(define-key hg-global-map "h" 'hg-help-overview)
+(define-key hg-global-map "i" 'hg-init)
+(define-key hg-global-map "l" 'hg-log-repo)
+(define-key hg-global-map "r" 'hg-root)
+(define-key hg-global-map "s" 'hg-status)
+(define-key hg-global-map "u" 'hg-update)
+
+
+;;; View mode keymap.
+
+(defvar hg-view-mode-map
+ (let ((map (copy-keymap (if (boundp 'view-minor-mode-map)
+ view-minor-mode-map
+ view-mode-map))))
+ (if (functionp 'set-keymap-name)
+ (set-keymap-name map 'hg-view-mode-map)); XEmacs
+ map))
+(fset 'hg-view-mode-map hg-view-mode-map)
+(define-key hg-view-mode-map
+ (if hg-running-xemacs [button2] [mouse-2])
+ 'hg-buffer-mouse-clicked)
+
+
+;;; Commit mode keymaps.
+
+(defvar hg-commit-mode-map (make-sparse-keymap))
+(define-key hg-commit-mode-map "\C-c\C-c" 'hg-commit-finish)
+(define-key hg-commit-mode-map "\C-c\C-k" 'hg-commit-kill)
+(define-key hg-commit-mode-map "\C-xv=" 'hg-diff-repo)
+
+(defvar hg-commit-mode-file-map (make-sparse-keymap))
+(define-key hg-commit-mode-file-map
+ (if hg-running-xemacs [button2] [mouse-2])
+ 'hg-commit-mouse-clicked)
+(define-key hg-commit-mode-file-map " " 'hg-commit-toggle-file)
+(define-key hg-commit-mode-file-map "\r" 'hg-commit-toggle-file)
+
+
+;;; Convenience functions.
+
+(defsubst hg-binary ()
+ (if hg-binary
+ hg-binary
+ (error "No `hg' executable found!")))
+
+(defsubst hg-replace-in-string (str regexp newtext &optional literal)
+ "Replace all matches in STR for REGEXP with NEWTEXT string.
+Return the new string. Optional LITERAL non-nil means do a literal
+replacement.
+
+This function bridges yet another pointless impedance gap between
+XEmacs and GNU Emacs."
+ (if (fboundp 'replace-in-string)
+ (replace-in-string str regexp newtext literal)
+ (replace-regexp-in-string regexp newtext str nil literal)))
+
+(defsubst hg-strip (str)
+ "Strip leading and trailing blank lines from a string."
+ (hg-replace-in-string (hg-replace-in-string str "[\r\n][ \t\r\n]*\\'" "")
+ "\\`[ \t\r\n]*[\r\n]" ""))
+
+(defsubst hg-chomp (str)
+ "Strip trailing newlines from a string."
+ (hg-replace-in-string str "[\r\n]+\\'" ""))
+
+(defun hg-run-command (command &rest args)
+ "Run the shell command COMMAND, returning (EXIT-CODE . COMMAND-OUTPUT).
+The list ARGS contains a list of arguments to pass to the command."
+ (let* (exit-code
+ (output
+ (with-output-to-string
+ (with-current-buffer
+ standard-output
+ (setq exit-code
+ (apply 'call-process command nil t nil args))))))
+ (cons exit-code output)))
+
+(defun hg-run (command &rest args)
+ "Run the Mercurial command COMMAND, returning (EXIT-CODE . COMMAND-OUTPUT)."
+ (apply 'hg-run-command (hg-binary) command args))
+
+(defun hg-run0 (command &rest args)
+ "Run the Mercurial command COMMAND, returning its output.
+If the command does not exit with a zero status code, raise an error."
+ (let ((res (apply 'hg-run-command (hg-binary) command args)))
+ (if (not (eq (car res) 0))
+ (error "Mercurial command failed %s - exit code %s"
+ (cons command args)
+ (car res))
+ (cdr res))))
+
+(defun hg-sync-buffers (path)
+ "Sync buffers visiting PATH with their on-disk copies.
+If PATH is not being visited, but is under the repository root, sync
+all buffers visiting files in the repository."
+ (let ((buf (find-buffer-visiting path)))
+ (if buf
+ (with-current-buffer buf
+ (vc-buffer-sync))
+ (hg-do-across-repo path
+ (vc-buffer-sync)))))
+
+(defun hg-buffer-commands (pnt)
+ "Use the properties of a character to do something sensible.
+A `file' text property at PNT opens that file; a `rev' property
+shows the diff of that revision of the viewed file."
+ (interactive "d")
+ ;; The `date', `user' and `host' properties were fetched but never
+ ;; used, so those bindings are gone.  `hg-diff' accepts only
+ ;; (PATH &optional REV1 REV2); the previous code passed a fourth
+ ;; buffer argument, which would signal wrong-number-of-arguments.
+ (let ((rev (get-char-property pnt 'rev))
+ (file (get-char-property pnt 'file)))
+ (cond
+ (file
+ (find-file-other-window file))
+ (rev
+ (hg-diff hg-view-file-name rev rev))
+ ((message "I don't know how to do that yet")))))
+
+;; XEmacs and GNU Emacs expose mouse-event positions through different
+;; APIs; `hg-running-xemacs' selects the right one.
+(defsubst hg-event-point (event)
+ "Return the character position of the mouse event EVENT."
+ (if hg-running-xemacs
+ (event-point event)
+ (posn-point (event-start event))))
+
+;; Companion to `hg-event-point', for the window of a mouse event.
+(defsubst hg-event-window (event)
+ "Return the window over which mouse event EVENT occurred."
+ (if hg-running-xemacs
+ (event-window event)
+ (posn-window (event-start event))))
+
+(defun hg-buffer-mouse-clicked (event)
+ "Translate the mouse clicks in a HG log buffer to character events.
+These are then handed off to `hg-buffer-commands'.
+
+Handle frickin' frackin' gratuitous event-related incompatibilities."
+ (interactive "e")
+ ;; Select the clicked window first so `hg-buffer-commands' acts on
+ ;; the buffer the user actually clicked in.
+ (select-window (hg-event-window event))
+ (hg-buffer-commands (hg-event-point event)))
+
+;; Compatibility shim: on Emacsen lacking `view-minor-mode', fall back
+;; to plain `view-mode', ignoring the exit-action arguments.
+(unless (fboundp 'view-minor-mode)
+ (defun view-minor-mode (prev-buffer exit-func)
+ (view-mode)))
+
+(defsubst hg-abbrev-file-name (file)
+ "Portable wrapper around abbreviate-file-name."
+ (if hg-running-xemacs
+ ;; XEmacs's `abbreviate-file-name' takes a second argument here;
+ ;; presumably it enables home-directory substitution -- TODO
+ ;; confirm against the XEmacs documentation.
+ (abbreviate-file-name file t)
+ (abbreviate-file-name file)))
+
+(defun hg-read-file-name (&optional prompt default)
+ "Read a file or directory name, or a pattern, to use with a command.
+Without a prefix argument, return DEFAULT (or the current buffer's
+file name) when one exists, skipping the prompt."
+ (save-excursion
+ ;; Follow the chain of `hg-prev-buffer' references back to the
+ ;; buffer the user started in, so defaults come from there.
+ (while hg-prev-buffer
+ (set-buffer hg-prev-buffer))
+ (let ((path (or default (buffer-file-name))))
+ (if (or (not path) current-prefix-arg)
+ (expand-file-name
+ ;; The call is built with list* and eval because the
+ ;; trailing history argument is only passed on XEmacs.
+ (eval (list* 'read-file-name
+ (format "File, directory or pattern%s: "
+ (or prompt ""))
+ (and path (file-name-directory path))
+ nil nil
+ (and path (file-name-nondirectory path))
+ (if hg-running-xemacs
+ (cons (quote 'hg-file-history) nil)
+ nil))))
+ path))))
+
+(defun hg-read-number (&optional prompt default)
+ "Read an integer value.
+Without a prefix argument, return DEFAULT when it is non-nil,
+skipping the prompt."
+ (save-excursion
+ (if (or (not default) current-prefix-arg)
+ (string-to-number
+ ;; Built with list* and eval so the initial-input argument is
+ ;; only passed to `read-string' when a DEFAULT exists.
+ (eval (list* 'read-string
+ (or prompt "")
+ (if default (cons (format "%d" default) nil) nil))))
+ default)))
+
+(defun hg-read-config ()
+ "Return an alist of (key . value) pairs of Mercurial config data.
+Each key is of the form (section . name)."
+ (let (items)
+ ;; Each line of "hg debugconfig" output has the form
+ ;; section.name=value; ITEMS accumulates the parsed entries and is
+ ;; returned by dolist.
+ (dolist (line (split-string (hg-chomp (hg-run0 "debugconfig")) "\n") items)
+ ;; NOTE(review): the `string-match' result is not checked; a line
+ ;; without "=" would reuse the previous line's match data.
+ (string-match "^\\([^=]*\\)=\\(.*\\)" line)
+ (let* ((left (substring line (match-beginning 1) (match-end 1)))
+ (right (substring line (match-beginning 2) (match-end 2)))
+ (key (split-string left "\\."))
+ ;; Un-escape newlines that hg encodes as the two
+ ;; characters backslash-n.
+ (value (hg-replace-in-string right "\\\\n" "\n" t)))
+ (setq items (cons (cons (cons (car key) (cadr key)) value) items))))))
+
+(defun hg-config-section (section config)
+ "Return an alist of (name . value) pairs for SECTION of CONFIG.
+CONFIG is an alist of ((SECTION . NAME) . VALUE) entries, as
+produced by `hg-read-config'."
+ (let ((result nil))
+ (dolist (entry config)
+ (let ((key (car entry))
+ (value (cdr entry)))
+ (when (equal (car key) section)
+ (push (cons (cdr key) value) result))))
+ result))
+
+(defun hg-string-starts-with (sub str)
+ "Indicate whether string STR starts with the substring or character SUB."
+ (if (stringp sub)
+ ;; Substring case: STR must be at least as long as SUB and open
+ ;; with exactly SUB.
+ (let ((n (length sub)))
+ (and (<= n (length str))
+ (string= (substring str 0 n) sub)))
+ ;; Character case: compare against the first element of STR.
+ (and (> (length str) 0)
+ (equal (elt str 0) sub))))
+
+(defun hg-complete-repo (string predicate all)
+ "Attempt to complete a repository name.
+We complete on either symbolic names from Mercurial's config or real
+directory names from the file system. We do not penalise URLs."
+ ;; First consult `hg-repo-completion-table', which
+ ;; `hg-read-repo-name' let-binds (dynamically) around its
+ ;; completing-read call; fall back to directory completion on disk.
+ (or (if all
+ (all-completions string hg-repo-completion-table predicate)
+ (try-completion string hg-repo-completion-table predicate))
+ (let* ((str (expand-file-name string))
+ (dir (file-name-directory str))
+ (file (file-name-nondirectory str)))
+ (if all
+ ;; Collect every completion that names a directory,
+ ;; dropping the "./" entry.
+ (let (completions)
+ (dolist (name (delete "./" (file-name-all-completions file dir))
+ completions)
+ (let ((path (concat dir name)))
+ (when (file-directory-p path)
+ (setq completions (cons name completions))))))
+ (let ((comp (file-name-completion file dir)))
+ (if comp
+ (hg-abbrev-file-name (concat dir comp))))))))
+
+(defun hg-read-repo-name (&optional prompt initial-contents default)
+ "Read the location of a repository.
+Without a prefix argument, simply return DEFAULT."
+ (save-excursion
+ ;; Follow `hg-prev-buffer' back to the buffer the user started in.
+ (while hg-prev-buffer
+ (set-buffer hg-prev-buffer))
+ ;; Dynamically bound here; read by `hg-complete-repo' during
+ ;; completion.
+ (let (hg-repo-completion-table)
+ (if current-prefix-arg
+ (progn
+ ;; Seed the table with each symbolic name from [paths], and
+ ;; also with the path's target unless it starts with the
+ ;; directory separator (i.e. is an absolute local path).
+ (dolist (path (hg-config-section "paths" (hg-read-config)))
+ (setq hg-repo-completion-table
+ (cons (cons (car path) t) hg-repo-completion-table))
+ ;; NOTE(review): `directory-sep-char' is an XEmacs/older
+ ;; Emacs variable -- confirm it exists on all supported
+ ;; Emacsen.
+ (unless (hg-string-starts-with directory-sep-char (cdr path))
+ (setq hg-repo-completion-table
+ (cons (cons (cdr path) t) hg-repo-completion-table))))
+ (completing-read (format "Repository%s: " (or prompt ""))
+ 'hg-complete-repo
+ nil
+ nil
+ initial-contents
+ 'hg-repo-history
+ default))
+ default))))
+
+(defun hg-read-rev (&optional prompt default)
+ "Read a revision or tag, offering completions.
+Without a prefix argument, simply return DEFAULT (or \"tip\")."
+ (save-excursion
+ ;; Follow `hg-prev-buffer' back to the buffer the user started in.
+ (while hg-prev-buffer
+ (set-buffer hg-prev-buffer))
+ (let ((rev (or default "tip")))
+ (if current-prefix-arg
+ ;; Completion candidates: the numbers and ids of the last
+ ;; `hg-rev-completion-limit' revisions ("-q log" output is
+ ;; split on both newlines and colons) ...
+ (let ((revs (split-string
+ (hg-chomp
+ (hg-run0 "-q" "log" "-r"
+ (format "-%d:tip" hg-rev-completion-limit)))
+ "[\n:]")))
+ ;; ... plus every tag name reported by "hg tags".
+ (dolist (line (split-string (hg-chomp (hg-run0 "tags")) "\n"))
+ (setq revs (cons (car (split-string line "\\s-")) revs)))
+ (completing-read (format "Revision%s (%s): "
+ (or prompt "")
+ (or default "tip"))
+ ;; NOTE(review): `map' is the Common Lisp
+ ;; sequence function -- confirm cl is loaded
+ ;; at runtime on all supported Emacsen.
+ (map 'list 'cons revs revs)
+ nil
+ nil
+ nil
+ 'hg-rev-history
+ (or default "tip")))
+ rev))))
+
+(defmacro hg-do-across-repo (path &rest body)
+ "Execute BODY in every hg-managed buffer in PATH's repository.
+A buffer qualifies when its `hg-status' is non-nil and its file lives
+under the same repository root as PATH."
+ ;; gensym'd names keep BODY from capturing the macro's bindings.
+ (let ((root-name (gensym "root-"))
+ (buf-name (gensym "buf-")))
+ `(let ((,root-name (hg-root ,path)))
+ (save-excursion
+ (dolist (,buf-name (buffer-list))
+ (set-buffer ,buf-name)
+ (when (and hg-status (equal (hg-root buffer-file-name) ,root-name))
+ ,@body))))))
+
+;; Indent BODY like a one-argument special form.
+(put 'hg-do-across-repo 'lisp-indent-function 1)
+
+
+;;; View mode bits.
+
+(defun hg-exit-view-mode (buf)
+ "Exit from hg-view-mode.
+We delete the current window if entering hg-view-mode split the
+current frame."
+ ;; Only delete a window when more than one exists, so the frame's
+ ;; last window survives.
+ (when (and (eq buf (current-buffer))
+ (> (length (window-list)) 1))
+ (delete-window))
+ (when (buffer-live-p buf)
+ (kill-buffer buf)))
+
+(defun hg-view-mode (prev-buffer &optional file-name)
+ "Prepare the current buffer for read-only viewing of hg output.
+PREV-BUFFER is the buffer to return to when the view is dismissed.
+If FILE-NAME is given, record it buffer-locally in
+`hg-view-file-name' for use by commands in the view buffer."
+ (goto-char (point-min))
+ (set-buffer-modified-p nil)
+ (toggle-read-only t)
+ ;; Arrange for `hg-exit-view-mode' to clean up when the view quits.
+ (view-minor-mode prev-buffer 'hg-exit-view-mode)
+ (use-local-map hg-view-mode-map)
+ (setq truncate-lines t)
+ (when file-name
+ (set (make-local-variable 'hg-view-file-name)
+ (hg-abbrev-file-name file-name))))
+
+(defun hg-file-status (file)
+ "Return status of FILE, or nil if FILE does not exist or is unmanaged."
+ (let* ((s (hg-run "status" file))
+ (exit (car s))
+ (output (cdr s)))
+ (if (= exit 0)
+ ;; "hg status" prefixes each line with a two-character code;
+ ;; map it to a symbol.  "? " (unknown) maps to nil, and empty
+ ;; output (no match in the table) means the file is unmodified.
+ (let ((state (assoc (substring output 0 (min (length output) 2))
+ '(("M " . modified)
+ ("A " . added)
+ ("R " . removed)
+ ("? " . nil)))))
+ (if state
+ (cdr state)
+ 'normal)))))
+
+;; Return the chomped output of "hg -q tip", split on ":".  Callers in
+;; this file use only the first element -- presumably the revision
+;; number, with the changeset id following; TODO confirm the format.
+(defun hg-tip ()
+ (split-string (hg-chomp (hg-run0 "-q" "tip")) ":"))
+
+(defmacro hg-view-output (args &rest body)
+ "Execute BODY in a clean buffer, then quickly display that buffer.
+If the buffer contains one line, its contents are displayed in the
+minibuffer. Otherwise, the buffer is displayed in view-mode.
+ARGS is of the form (BUFFER-NAME &optional FILE), where BUFFER-NAME is
+the name of the buffer to create, and FILE is the name of the file
+being viewed."
+ (let ((prev-buf (gensym "prev-buf-"))
+ (v-b-name (car args))
+ (v-m-rest (cdr args)))
+ `(let ((view-buf-name ,v-b-name)
+ (,prev-buf (current-buffer)))
+ ;; Create-kill-create guarantees a fresh, empty buffer even if
+ ;; one with this name already existed.
+ (get-buffer-create view-buf-name)
+ (kill-buffer view-buf-name)
+ (get-buffer-create view-buf-name)
+ (set-buffer view-buf-name)
+ (save-excursion
+ ,@body)
+ ;; NOTE(review): `case' comes from cl -- confirm it is
+ ;; available at macro-expansion time.
+ (case (count-lines (point-min) (point-max))
+ ((0)
+ (kill-buffer view-buf-name)
+ (message "(No output)"))
+ ((1)
+ ;; A single line fits in the echo area; drop the buffer.
+ (let ((msg (hg-chomp (buffer-substring (point-min) (point-max)))))
+ (kill-buffer view-buf-name)
+ (message "%s" msg)))
+ (t
+ (pop-to-buffer view-buf-name)
+ (setq hg-prev-buffer ,prev-buf)
+ ;; FILE, when supplied, is forwarded to `hg-view-mode'.
+ (hg-view-mode ,prev-buf ,@v-m-rest))))))
+
+;; Indent BODY like a one-argument special form.
+(put 'hg-view-output 'lisp-indent-function 1)
+
+;;; Context save and restore across revert.
+
+(defun hg-position-context (pos)
+ "Return information to help find the given position again.
+The result is a list (POS TEXT FIXUP): TEXT is the buffer text from
+just before POS to at most 98 characters after it, and FIXUP is the
+distance from POS to the end of TEXT."
+ (let* ((end (min (point-max) (+ pos 98))))
+ (list pos
+ (buffer-substring (max (point-min) (- pos 2)) end)
+ (- end pos))))
+
+(defun hg-buffer-context ()
+ "Return information to help restore a user's editing context.
+This is useful across reverts and merges, where a context is likely
+to have moved a little, but not really changed.
+Returns (POINT-CONTEXT MARK-CONTEXT), each from
+`hg-position-context'."
+ (let ((point-context (hg-position-context (point)))
+ ;; NOTE(review): `mark-marker' returns a marker object, which
+ ;; is non-nil even when no mark is set -- confirm behaviour in
+ ;; a buffer whose mark has never been set.
+ (mark-context (let ((mark (mark-marker)))
+ (and mark (hg-position-context mark)))))
+ (list point-context mark-context)))
+
+(defun hg-find-context (ctx)
+ "Attempt to find a context in the given buffer.
+Always returns a valid, hopefully sane, position.
+CTX is a (POS TEXT FIXUP) list from `hg-position-context'."
+ (let ((pos (nth 0 ctx))
+ (str (nth 1 ctx))
+ (fixup (nth 2 ctx)))
+ (save-excursion
+ ;; Search for the remembered text starting well before the old
+ ;; position; when found, step back over the FIXUP characters
+ ;; that followed POS, otherwise fall back to POS itself.
+ (goto-char (max (point-min) (- pos 15000)))
+ (if (and (not (equal str ""))
+ (search-forward str nil t))
+ (- (point) fixup)
+ (max pos (point-min))))))
+
+(defun hg-restore-context (ctx)
+ "Attempt to restore the user's editing context.
+CTX is a (POINT-CONTEXT MARK-CONTEXT) list from `hg-buffer-context'."
+ (let ((point-context (nth 0 ctx))
+ (mark-context (nth 1 ctx)))
+ (goto-char (hg-find-context point-context))
+ (when mark-context
+ (set-mark (hg-find-context mark-context)))))
+
+
+;;; Hooks.
+
+(defun hg-mode-line (&optional force)
+ "Update the modeline with the current status of a file.
+An update occurs if optional argument FORCE is non-nil,
+hg-update-modeline is non-nil, or we have not yet checked the state of
+the file.  Returns the file's status symbol, or nil."
+ (when (and (hg-root) (or force hg-update-modeline (not hg-mode)))
+ (let ((status (hg-file-status buffer-file-name)))
+ ;; hg-mode doubles as the mode-line string: tip revision plus a
+ ;; one-letter status suffix, or nil when the file is unmanaged.
+ (setq hg-status status
+ hg-mode (and status (concat " Hg:"
+ (car (hg-tip))
+ (cdr (assq status
+ '((normal . "")
+ (removed . "r")
+ (added . "a")
+ (modified . "m")))))))
+ status)))
+
+;; The docstring below doubles as user help: `hg-help-overview'
+;; inserts (documentation 'hg-mode) into its help buffer.  The body
+;; itself only runs `hg-mode-hook'.
+(defun hg-mode (&optional toggle)
+ "Minor mode for Mercurial distributed SCM integration.
+
+The Mercurial mode user interface is based on that of VC mode, so if
+you're already familiar with VC, the same keybindings and functions
+will generally work.
+
+Below is a list of many common SCM tasks. In the list, `G/L'
+indicates whether a key binding is global (G) to a repository or local
+(L) to a file. Many commands take a prefix argument.
+
+SCM Task G/L Key Binding Command Name
+-------- --- ----------- ------------
+Help overview (what you are reading) G C-c h h hg-help-overview
+
+Tell Mercurial to manage a file G C-c h a hg-add
+Commit changes to current file only L C-x v n hg-commit-start
+Undo changes to file since commit L C-x v u hg-revert-buffer
+
+Diff file vs last checkin L C-x v = hg-diff
+
+View file change history L C-x v l hg-log
+View annotated file L C-x v a hg-annotate
+
+Diff repo vs last checkin G C-c h = hg-diff-repo
+View status of files in repo G C-c h s hg-status
+Commit all changes G C-c h c hg-commit-start
+
+Undo all changes since last commit G C-c h U hg-revert
+View repo change history G C-c h l hg-log-repo
+
+See changes that can be pulled G C-c h , hg-incoming
+Pull changes G C-c h < hg-pull
+Update working directory after pull G C-c h u hg-update
+See changes that can be pushed G C-c h . hg-outgoing
+Push changes G C-c h > hg-push"
+ (run-hooks 'hg-mode-hook))
+
+;; Enable hg integration when a visited file is hg-managed
+;; (`hg-mode-line' returns the file's status, nil otherwise).
+(defun hg-find-file-hook ()
+ (when (hg-mode-line)
+ (hg-mode)))
+
+(add-hook 'find-file-hooks 'hg-find-file-hook)
+
+;; After saving, refresh the modeline; if the file just became
+;; hg-managed (its status appeared), enable hg-mode too.
+(defun hg-after-save-hook ()
+ (let ((old-status hg-status))
+ (hg-mode-line)
+ (if (and (not old-status) hg-status)
+ (hg-mode))))
+
+(add-hook 'after-save-hook 'hg-after-save-hook)
+
+
+;;; User interface functions.
+
+(defun hg-help-overview ()
+ "This is an overview of the Mercurial SCM mode for Emacs.
+
+You can find the source code, license (GPL v2), and credits for this
+code by typing `M-x find-library mercurial RET'."
+ (interactive)
+ (hg-view-output ("Mercurial Help Overview")
+ ;; This function's own docstring forms the help header ...
+ (insert (documentation 'hg-help-overview))
+ (let ((pos (point)))
+ ;; ... followed by hg-mode's docstring, minus its first line.
+ (insert (documentation 'hg-mode))
+ (goto-char pos)
+ (kill-line))))
+
+(defun hg-add (path)
+ "Add PATH to the Mercurial repository on the next commit.
+With a prefix argument, prompt for the path to add."
+ (interactive (list (hg-read-file-name " to add")))
+ (let ((buf (current-buffer))
+ ;; Only refresh the modeline when the added file is the one
+ ;; shown in the current buffer.
+ (update (equal buffer-file-name path)))
+ (hg-view-output (hg-output-buffer-name)
+ (apply 'call-process (hg-binary) nil t nil (list "add" path)))
+ (when update
+ (with-current-buffer buf
+ (hg-mode-line)))))
+
+(defun hg-addremove ()
+ "Not yet implemented."
+ (interactive)
+ (error "not implemented"))
+
+(defun hg-annotate ()
+ "Not yet implemented."
+ (interactive)
+ (error "not implemented"))
+
+(defun hg-commit-toggle-file (pos)
+ "Toggle whether or not the file at POS will be committed."
+ (interactive "d")
+ (save-excursion
+ (goto-char pos)
+ (let ((face (get-text-property pos 'face))
+ (inhibit-read-only t)
+ bol)
+ (beginning-of-line)
+ ;; Despite its name, bol skips the 4-column status prefix and
+ ;; points at the file name (the same +4 offset is used by
+ ;; hg-commit-finish and hg-commit-start).
+ (setq bol (+ (point) 4))
+ (end-of-line)
+ ;; Files displayed in bold are selected for commit; toggling
+ ;; adds or removes the bold face on the file name.
+ (if (eq face 'bold)
+ (progn
+ (remove-text-properties bol (point) '(face nil))
+ (message "%s will not be committed"
+ (buffer-substring bol (point))))
+ (add-text-properties bol (point) '(face bold))
+ (message "%s will be committed"
+ (buffer-substring bol (point)))))))
+
+(defun hg-commit-mouse-clicked (event)
+ "Toggle whether or not the file under the mouse EVENT will be committed."
+ (interactive "@e")
+ (hg-commit-toggle-file (hg-event-point event)))
+
+(defun hg-commit-kill ()
+ "Kill the commit currently being prepared.
+Asks for confirmation when the commit buffer has been modified."
+ (interactive)
+ (when (or (not (buffer-modified-p)) (y-or-n-p "Really kill this commit? "))
+ ;; Grab the originating buffer before killing this one.
+ (let ((buf hg-prev-buffer))
+ (kill-buffer nil)
+ (switch-to-buffer buf))))
+
+(defun hg-commit-finish ()
+ "Finish preparing a commit, and perform the actual commit.
+The hook hg-pre-commit-hook is run before anything else is done. If
+the commit message is empty and hg-commit-allow-empty-message is nil,
+an error is raised. If the list of files to commit is empty and
+hg-commit-allow-empty-file-list is nil, an error is raised."
+ (interactive)
+ ;; hg-root is set buffer-locally in the commit buffer by
+ ;; hg-commit-start.
+ (let ((root hg-root))
+ (save-excursion
+ (run-hooks 'hg-pre-commit-hook)
+ (goto-char (point-min))
+ (search-forward hg-commit-message-start)
+ (let (message files)
+ ;; The commit message is the text between the start and end
+ ;; marker strings.
+ (let ((start (point)))
+ (goto-char (point-max))
+ (search-backward hg-commit-message-end)
+ (setq message (hg-strip (buffer-substring start (point)))))
+ (when (and (= (length message) 0)
+ (not hg-commit-allow-empty-message))
+ (error "Cannot proceed - commit message is empty"))
+ (forward-line 1)
+ (beginning-of-line)
+ ;; Below the end marker, each line lists one file after a
+ ;; 4-column status prefix; bold face marks files selected for
+ ;; commit.
+ (while (< (point) (point-max))
+ (let ((pos (+ (point) 4)))
+ (end-of-line)
+ (when (eq (get-text-property pos 'face) 'bold)
+ (end-of-line)
+ (setq files (cons (buffer-substring pos (point)) files))))
+ (forward-line 1))
+ (when (and (= (length files) 0)
+ (not hg-commit-allow-empty-file-list))
+ (error "Cannot proceed - no files to commit"))
+ (setq message (concat message "\n"))
+ (apply 'hg-run0 "--cwd" hg-root "commit" "-m" message files))
+ ;; Return to the buffer the commit was started from, then
+ ;; refresh the modeline across the whole repository.
+ (let ((buf hg-prev-buffer))
+ (kill-buffer nil)
+ (switch-to-buffer buf))
+ (hg-do-across-repo root
+ (hg-mode-line)))))
+
+(defun hg-commit-mode ()
+ "Mode for describing a commit of changes to a Mercurial repository.
+This involves two actions: describing the changes with a commit
+message, and choosing the files to commit.
+
+To describe the commit, simply type some text in the designated area.
+
+By default, all modified, added and removed files are selected for
+committing. Files that will be committed are displayed in bold face\;
+those that will not are displayed in normal face.
+
+To toggle whether a file will be committed, move the cursor over a
+particular file and hit space or return. Alternatively, middle click
+on the file.
+
+Key bindings
+------------
+\\[hg-commit-finish] proceed with commit
+\\[hg-commit-kill] kill commit
+
+\\[hg-diff-repo] view diff of pending changes"
+ (interactive)
+ ;; A hand-rolled major mode borrowing text-mode's tables; set up
+ ;; manually rather than via define-derived-mode.
+ (use-local-map hg-commit-mode-map)
+ (set-syntax-table text-mode-syntax-table)
+ (setq local-abbrev-table text-mode-abbrev-table
+ major-mode 'hg-commit-mode
+ mode-name "Hg-Commit")
+ (set-buffer-modified-p nil)
+ (setq buffer-undo-list nil)
+ (run-hooks 'text-mode-hook 'hg-commit-mode-hook))
+
+(defun hg-commit-start ()
+ "Prepare a commit of changes to the repository containing the current file."
+ (interactive)
+ ;; Follow `hg-prev-buffer' back to the buffer the user started in.
+ (while hg-prev-buffer
+ (set-buffer hg-prev-buffer))
+ (let ((root (hg-root))
+ (prev-buffer (current-buffer))
+ modified-files)
+ (unless root
+ (error "Cannot commit outside a repository!"))
+ (hg-sync-buffers root)
+ ;; "-arm" lists added, removed and modified files.
+ (setq modified-files (hg-chomp (hg-run0 "--cwd" root "status" "-arm")))
+ (when (and (= (length modified-files) 0)
+ (not hg-commit-allow-empty-file-list))
+ (error "No pending changes to commit"))
+ (let* ((buf-name (format "*Mercurial: Commit %s*" root)))
+ (pop-to-buffer (get-buffer-create buf-name))
+ ;; Only populate an empty buffer, so an in-progress commit
+ ;; message survives re-invocation.
+ (when (= (point-min) (point-max))
+ (set (make-local-variable 'hg-root) root)
+ (setq hg-prev-buffer prev-buffer)
+ (insert "\n")
+ (let ((bol (point)))
+ (insert hg-commit-message-end)
+ (add-text-properties bol (point) '(face bold-italic)))
+ (let ((file-area (point)))
+ (insert modified-files)
+ (goto-char file-area)
+ ;; Pad each status prefix with an extra space, then mark the
+ ;; file names (taken to start 4 columns in, matching the +4
+ ;; offsets in hg-commit-toggle-file and hg-commit-finish) as
+ ;; selected (bold) and mouse-sensitive.
+ (while (< (point) (point-max))
+ (let ((bol (point)))
+ (forward-char 1)
+ (insert " ")
+ (end-of-line)
+ (add-text-properties (+ bol 4) (point)
+ '(face bold mouse-face highlight)))
+ (forward-line 1))
+ (goto-char file-area)
+ (add-text-properties (point) (point-max)
+ `(keymap ,hg-commit-mode-file-map))
+ (goto-char (point-min))
+ (insert hg-commit-message-start)
+ (add-text-properties (point-min) (point) '(face bold-italic))
+ (insert "\n\n")
+ (forward-line -1)
+ ;; Make both marker strings read-only so the message area and
+ ;; the file list stay delimited.
+ (save-excursion
+ (goto-char (point-max))
+ (search-backward hg-commit-message-end)
+ (add-text-properties (match-beginning 0) (point-max)
+ '(read-only t))
+ (goto-char (point-min))
+ (search-forward hg-commit-message-start)
+ (add-text-properties (match-beginning 0) (match-end 0)
+ '(read-only t)))
+ (hg-commit-mode))))))
+
+(defun hg-diff (path &optional rev1 rev2)
+ "Show the differences between REV1 and REV2 of PATH.
+When called interactively, the default behaviour is to treat REV1 as
+the tip revision, REV2 as the current edited version of the file, and
+PATH as the file edited in the current buffer.
+With a prefix argument, prompt for all of these.
+Returns non-nil when the diff was not empty."
+ (interactive (list (hg-read-file-name " to diff")
+ (hg-read-rev " to start with")
+ ;; The 'working-dir sentinel means "no REV2": diff
+ ;; against the working copy.
+ (let ((rev2 (hg-read-rev " to end with" 'working-dir)))
+ (and (not (eq rev2 'working-dir)) rev2))))
+ (hg-sync-buffers path)
+ (let ((a-path (hg-abbrev-file-name path))
+ (r1 (or rev1 "tip"))
+ diff)
+ (hg-view-output ((cond
+ ((and (equal r1 "tip") (not rev2))
+ (format "Mercurial: Diff against tip of %s" a-path))
+ ((equal r1 rev2)
+ (format "Mercurial: Diff of rev %s of %s" r1 a-path))
+ (t
+ (format "Mercurial: Diff from rev %s to %s of %s"
+ r1 (or rev2 "Current") a-path))))
+ ;; Without REV2, diff the working copy against R1.
+ (if rev2
+ (call-process (hg-binary) nil t nil "diff" "-r" r1 "-r" rev2 path)
+ (call-process (hg-binary) nil t nil "diff" "-r" r1 path))
+ (diff-mode)
+ ;; A non-empty output buffer means there were differences.
+ (setq diff (not (= (point-min) (point-max))))
+ (font-lock-fontify-buffer))
+ diff))
+
+(defun hg-diff-repo ()
+ "Show the differences between the working copy and the tip revision."
+ (interactive)
+ ;; Diffing the repository root diffs everything under it.
+ (hg-diff (hg-root)))
+
+(defun hg-forget (path)
+ "Lose track of PATH, which has been added, but not yet committed.
+This will prevent the file from being incorporated into the Mercurial
+repository on the next commit.
+With a prefix argument, prompt for the path to forget."
+ (interactive (list (hg-read-file-name " to forget")))
+ (let ((buf (current-buffer))
+ ;; Only refresh the modeline when the forgotten file is the
+ ;; one shown in the current buffer.
+ (update (equal buffer-file-name path)))
+ (hg-view-output (hg-output-buffer-name)
+ (apply 'call-process (hg-binary) nil t nil (list "forget" path)))
+ (when update
+ (with-current-buffer buf
+ (hg-mode-line)))))
+
+(defun hg-incoming (&optional repo)
+ "Display changesets present in REPO that are not present locally.
+With a prefix argument, prompt for the repository; otherwise REPO
+defaults to `hg-incoming-repository'."
+ ;; Pass the default repository through to `hg-read-repo-name',
+ ;; matching `hg-outgoing'; previously no default was supplied, so
+ ;; REPO was always nil without a prefix argument (the `or' forms
+ ;; below compensated, making this change behavior-compatible).
+ (interactive (list (hg-read-repo-name " where changes would come from" nil
+ hg-incoming-repository)))
+ (hg-view-output ((format "Mercurial: Incoming from %s to %s"
+ (hg-abbrev-file-name (hg-root))
+ (hg-abbrev-file-name
+ (or repo hg-incoming-repository))))
+ (call-process (hg-binary) nil t nil "incoming"
+ (or repo hg-incoming-repository))
+ (hg-log-mode)))
+
+(defun hg-init ()
+ "Not yet implemented."
+ (interactive)
+ (error "not implemented"))
+
+(defun hg-log-mode ()
+ "Mode for viewing a Mercurial change log.
+Removes the leading \"searching for changes\" line when present."
+ (goto-char (point-min))
+ (when (looking-at "^searching for changes")
+ ;; NOTE(review): `kill-entire-line' is an XEmacs command --
+ ;; confirm a compatibility definition exists for GNU Emacs.
+ (kill-entire-line))
+ (run-hooks 'hg-log-mode-hook))
+
+(defun hg-log (path &optional rev1 rev2 log-limit)
+ "Display the revision history of PATH.
+History is displayed between REV1 and REV2.
+Number of displayed changesets is limited to LOG-LIMIT.
+REV1 defaults to the tip, while
+REV2 defaults to `hg-rev-completion-limit' changes from the tip revision.
+LOG-LIMIT defaults to `hg-log-limit'.
+With a prefix argument, prompt for each parameter."
+ (interactive (list (hg-read-file-name " to log")
+ (hg-read-rev " to start with"
+ "tip")
+ (hg-read-rev " to end with"
+ (format "%d" (- hg-rev-completion-limit)))
+ (hg-read-number "Output limited to: "
+ hg-log-limit)))
+ (let ((a-path (hg-abbrev-file-name path))
+ ;; Non-interactively, an omitted REV1 means the last
+ ;; `hg-rev-completion-limit' changes up to REV2 (or tip).
+ (r1 (or rev1 (format "-%d" hg-rev-completion-limit)))
+ (r2 (or rev2 rev1 "tip"))
+ (limit (format "%d" (or log-limit hg-log-limit))))
+ (hg-view-output ((if (equal r1 r2)
+ (format "Mercurial: Log of rev %s of %s" rev1 a-path)
+ (format
+ "Mercurial: at most %s log(s) from rev %s to %s of %s"
+ limit r1 r2 a-path)))
+ ;; Built with list* and eval so PATH is only passed to hg when
+ ;; it names something strictly below the repository root.
+ (eval (list* 'call-process (hg-binary) nil t nil
+ "log"
+ "-r" (format "%s:%s" r1 r2)
+ "-l" limit
+ (if (> (length path) (length (hg-root path)))
+ (cons path nil)
+ nil)))
+ (hg-log-mode))))
+
+(defun hg-log-repo (path &optional rev1 rev2 log-limit)
+ "Display the revision history of the repository containing PATH.
+History is displayed between REV1 and REV2.
+Number of displayed changesets is limited to LOG-LIMIT,
+REV1 defaults to the tip, while
+REV2 defaults to `hg-rev-completion-limit' changes from the tip revision.
+LOG-LIMIT defaults to `hg-log-limit'.
+With a prefix argument, prompt for each parameter."
+ (interactive (list (hg-read-file-name " to log")
+ (hg-read-rev " to start with"
+ "tip")
+ (hg-read-rev " to end with"
+ (format "%d" (- hg-rev-completion-limit)))
+ (hg-read-number "Output limited to: "
+ hg-log-limit)))
+ ;; Delegate to `hg-log' on the repository root.
+ (hg-log (hg-root path) rev1 rev2 log-limit))
+
+(defun hg-outgoing (&optional repo)
+ "Display changesets present locally that are not present in REPO.
+REPO defaults to `hg-outgoing-repository'."
+ (interactive (list (hg-read-repo-name " where changes would go to" nil
+ hg-outgoing-repository)))
+ (hg-view-output ((format "Mercurial: Outgoing from %s to %s"
+ (hg-abbrev-file-name (hg-root))
+ (hg-abbrev-file-name
+ (or repo hg-outgoing-repository))))
+ (call-process (hg-binary) nil t nil "outgoing"
+ (or repo hg-outgoing-repository))
+ ;; The output is a change log; reuse hg-log-mode's cleanup.
+ (hg-log-mode)))
+
+(defun hg-pull (&optional repo)
+ "Pull changes from repository REPO.
+This does not update the working directory."
+ ;; NOTE(review): unlike `hg-outgoing', no default repository is
+ ;; passed to `hg-read-repo-name', so REPO is nil without a prefix
+ ;; argument; the `or' forms below fall back to
+ ;; `hg-incoming-repository'.
+ (interactive (list (hg-read-repo-name " to pull from")))
+ (hg-view-output ((format "Mercurial: Pull to %s from %s"
+ (hg-abbrev-file-name (hg-root))
+ (hg-abbrev-file-name
+ (or repo hg-incoming-repository))))
+ (call-process (hg-binary) nil t nil "pull"
+ (or repo hg-incoming-repository))))
+
+(defun hg-push (&optional repo)
+ "Push changes to repository REPO.
+Without a prefix argument, REPO is nil and
+`hg-outgoing-repository' is used instead."
+ (interactive (list (hg-read-repo-name " to push to")))
+ (hg-view-output ((format "Mercurial: Push from %s to %s"
+ (hg-abbrev-file-name (hg-root))
+ (hg-abbrev-file-name
+ (or repo hg-outgoing-repository))))
+ (call-process (hg-binary) nil t nil "push"
+ (or repo hg-outgoing-repository))))
+
+(defun hg-revert-buffer-internal ()
+ "Revert the current buffer's file and reload it, keeping context.
+Uses `hg-buffer-context' / `hg-restore-context' so point and mark
+stay near where they were."
+ (let ((ctx (hg-buffer-context)))
+ (message "Reverting %s..." buffer-file-name)
+ (hg-run0 "revert" buffer-file-name)
+ (revert-buffer t t t)
+ (hg-restore-context ctx)
+ (hg-mode-line)
+ (message "Reverting %s...done" buffer-file-name)))
+
+(defun hg-revert-buffer ()
+ "Revert current buffer's file back to the latest committed version.
+If the file has not changed, nothing happens. Otherwise, this
+displays a diff and asks for confirmation before reverting."
+ (interactive)
+ (let ((vc-suppress-confirm nil)
+ (obuf (current-buffer))
+ diff)
+ (vc-buffer-sync)
+ ;; NOTE(review): the confirmation prompt and diff-window cleanup
+ ;; live in unwind-protect's cleanup forms, so they run even if
+ ;; `hg-diff' is quit; "Revert cancelled" aborts before any revert
+ ;; happens.
+ (unwind-protect
+ (setq diff (hg-diff buffer-file-name))
+ (when diff
+ (unless (yes-or-no-p "Discard changes? ")
+ (error "Revert cancelled")))
+ (when diff
+ (let ((buf (current-buffer)))
+ (delete-window (selected-window))
+ (kill-buffer buf))))
+ (set-buffer obuf)
+ (when diff
+ (hg-revert-buffer-internal))))
+
+(defun hg-root (&optional path)
+ "Return the root of the repository that contains the given path.
+If the path is outside a repository, return nil.
+When called interactively, the root is printed. A prefix argument
+prompts for a path to check."
+ (interactive (list (hg-read-file-name)))
+ ;; The variable hg-root holds a cached root (made buffer-local by
+ ;; hg-commit-start); recompute only when PATH is given or no cached
+ ;; value exists.
+ (if (or path (not hg-root))
+ ;; Walk up the directory tree with cl's `do' loop until a ".hg"
+ ;; directory is found, or the directory stops changing (the
+ ;; filesystem root has been reached).
+ (let ((root (do ((prev nil dir)
+ (dir (file-name-directory (or path buffer-file-name ""))
+ (file-name-directory (directory-file-name dir))))
+ ((equal prev dir))
+ (when (file-directory-p (concat dir ".hg"))
+ (return dir)))))
+ (when (interactive-p)
+ (if root
+ (message "The root of this repository is `%s'." root)
+ (message "The path `%s' is not in a Mercurial repository."
+ (hg-abbrev-file-name path))))
+ root)
+ hg-root))
+
+(defun hg-status (path)
+ "Print revision control status of a file or directory.
+With prefix argument, prompt for the path to give status for.
+Names are displayed relative to the repository root."
+ (interactive (list (hg-read-file-name " for status" (hg-root))))
+ (let ((root (hg-root)))
+ (hg-view-output ((format "Mercurial: Status of %s in %s"
+ ;; Buffer title shows PATH relative to the
+ ;; root, or "*" for the root itself.
+ (let ((name (substring (expand-file-name path)
+ (length root))))
+ (if (> (length name) 0)
+ name
+ "*"))
+ (hg-abbrev-file-name root)))
+ (apply 'call-process (hg-binary) nil t nil
+ (list "--cwd" root "status" path)))))
+
+(defun hg-undo ()
+ "Not yet implemented."
+ (interactive)
+ (error "not implemented"))
+
+(defun hg-update ()
+ "Not yet implemented."
+ (interactive)
+ (error "not implemented"))
+
+(defun hg-version-other-window ()
+ "Not yet implemented."
+ (interactive)
+ (error "not implemented"))
+
+
+(provide 'mercurial)
+
+
+;;; Local Variables:
+;;; prompt-to-byte-compile: nil
+;;; end:
new file mode 100644
--- /dev/null
+++ b/contrib/mercurial.spec
@@ -0,0 +1,42 @@
+# RPM package specification for Mercurial.
+Summary: Mercurial -- a distributed SCM
+Name: mercurial
+Version: 0.8
+Release: 0
+License: GPL
+Group: Development/Tools
+Source: http://www.selenic.com/mercurial/release/%{name}-%{version}.tar.gz
+URL: http://www.selenic.com/mercurial
+BuildRoot: /tmp/build.%{name}-%{version}-%{release}
+
+# Ask the system python for its MAJOR.MINOR version, then derive the
+# site-packages directories that the install step populates.
+%define pythonver %(python -c 'import sys;print ".".join(map(str, sys.version_info[:2]))')
+%define pythonlib %{_libdir}/python%{pythonver}/site-packages/%{name}
+%define hgext %{_libdir}/python%{pythonver}/site-packages/hgext
+
+%description
+Mercurial is a fast, lightweight source control management system designed
+for efficient handling of very large distributed projects.
+
+%prep
+rm -rf $RPM_BUILD_ROOT
+%setup -q
+
+%build
+python setup.py build
+
+%install
+python setup.py install --root $RPM_BUILD_ROOT
+
+%clean
+rm -rf $RPM_BUILD_ROOT
+
+# Package the distutils-installed modules plus docs and CGI scripts.
+%files
+%defattr(-,root,root,-)
+%doc doc/* *.cgi
+%dir %{pythonlib}
+%dir %{hgext}
+%{_bindir}/hgmerge
+%{_bindir}/hg
+%{pythonlib}/templates
+%{pythonlib}/*.py*
+%{pythonlib}/*.so
+%{hgext}/*.py*
new file mode 100644
--- /dev/null
+++ b/contrib/tcsh_completion
@@ -0,0 +1,42 @@
+#
+# tcsh completion for Mercurial
+#
+# This file has been auto-generated by tcsh_completion_build.sh for
+# Mercurial Distributed SCM (version 325c07fd2ebd)
+#
+# Copyright (C) 2005 TK Soh.
+#
+# This is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free
+# Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+
+# Complete 'hg': directory names after the repository-selection
+# options, global options after a leading dash, and command
+# names/aliases in the first argument position.
+complete hg \
+ 'n/--cwd/d/' 'n/-R/d/' 'n/--repository/d/' \
+ 'C/-/( -R --repository \
+ --cwd \
+ -y --noninteractive \
+ -q --quiet \
+ -v --verbose \
+ --debug \
+ --debugger \
+ --traceback \
+ --time \
+ --profile \
+ --version \
+ -h --help)/' \
+ 'p/1/(add addremove annotate bundle cat \
+ clone commit ci copy cp \
+ debugancestor debugcheckstate debugconfig debugdata debugindex \
+ debugindexdot debugrename debugstate debugwalk diff \
+ export forget grep heads help \
+ identify id import patch incoming \
+ in init locate log history \
+ manifest outgoing out parents paths \
+ pull push rawcommit recover remove \
+ rm rename mv revert root \
+ serve status tag tags tip \
+ unbundle undo update up checkout \
+ co verify version)/'
+
new file mode 100644
--- /dev/null
+++ b/contrib/tcsh_completion_build.sh
@@ -0,0 +1,73 @@
+#!/bin/sh
+
+#
+# tcsh_completion_build.sh - script to generate tcsh completion
+#
+#
+# Copyright (C) 2005 TK Soh.
+#
+# This is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free
+# Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+#
+# Description
+# -----------
+# This script generates a tcsh source file to support completion
+# of Mercurial commands and options.
+#
+# Instruction:
+# -----------
+# Run this script to generate the tcsh source file, and source
+# the file to add command completion support for Mercurial.
+#
+# tcsh% tcsh_completion_build.sh FILE
+# tcsh% source FILE
+#
+# If FILE is not specified, tcsh_completion will be generated.
+#
+# Bugs:
+# ----
+# 1. command specific options are not supported
+# 2. hg commands must be specified immediately after 'hg'.
+#
+
+tcsh_file=${1-tcsh_completion}
+
+hg_commands=`hg --debug help | \
+ sed -e '1,/^list of commands:/d' \
+ -e '/^global options:/,$d' \
+ -e '/^ [^ ]/!d; s/[,:]//g;' | \
+ xargs -n5 | \
+ sed -e '$!s/$/ \\\\/g; 2,$s/^ */ /g'`
+
+hg_global_options=`hg -v help | \
+ sed -e '1,/global/d;/^ *-/!d; s/ [^- ].*//' | \
+ sed -e 's/ *$//; $!s/$/ \\\\/g; 2,$s/^ */ /g'`
+
+hg_version=`hg version | sed -e '1q'`
+
+script_name=`basename $0`
+
+cat > $tcsh_file <<END
+#
+# tcsh completion for Mercurial
+#
+# This file has been auto-generated by $script_name for
+# $hg_version
+#
+# Copyright (C) 2005 TK Soh.
+#
+# This is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free
+# Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+
+complete hg \\
+ 'n/--cwd/d/' 'n/-R/d/' 'n/--repository/d/' \\
+ 'C/-/($hg_global_options)/' \\
+ 'p/1/($hg_commands)/'
+
+END
new file mode 100644
--- /dev/null
+++ b/contrib/vim/hg-menu.vim
@@ -0,0 +1,93 @@
+" vim600: set foldmethod=marker:
+" =============================================================================
+" Name Of File: hg-menu.vim
+" Description: Interface to Mercurial Version Control.
+" Author: Steve Borho (modified Jeff Lanzarotta's RCS script)
+" Date: Wednesday, October 5, 2005
+" Version: 0.1.0
+" Copyright: None.
+" Usage: These command and gui menu displays useful hg functions
+" Configuration: Your hg executable must be in your path.
+" =============================================================================
+
+" Section: Init {{{1
+" Guard: bail out if this plugin has already been sourced.
+if exists("loaded_hg_menu")
+  finish
+endif
+let loaded_hg_menu = 1
+
+" Section: Menu Options {{{1
+" GUI-only menu entries; terminal users get the key mappings defined below.
+if has("gui")
+" amenu H&G.Commit\ File<Tab>,ci :!hg commit %<CR>:e!<CR>
+" amenu H&G.Commit\ All<Tab>,call :!hg commit<CR>:e!<CR>
+" amenu H&G.-SEP1- <nul>
+  amenu H&G.Add<Tab>\\add :!hg add %<CR><CR>
+  amenu H&G.Forget\ Add<Tab>\\fgt :!hg forget %<CR><CR>
+  amenu H&G.Show\ Differences<Tab>\\diff :call ShowResults("FileDiff", "hg\ diff")<CR><CR>
+  amenu H&G.Revert\ to\ Last\ Version<Tab>\\revert :!hg revert %<CR>:e!<CR>
+  amenu H&G.Show\ History<Tab>\\log :call ShowResults("FileLog", "hg\ log")<CR><CR>
+  amenu H&G.Annotate<Tab>\\an :call ShowResults("annotate", "hg\ annotate")<CR><CR>
+  amenu H&G.-SEP1- <nul>
+  amenu H&G.Repo\ Status<Tab>\\stat :call ShowResults("RepoStatus", "hg\ status")<CR><CR>
+  amenu H&G.Pull<Tab>\\pull :!hg pull<CR>:e!<CR>
+  amenu H&G.Update<Tab>\\upd :!hg update<CR>:e!<CR>
+endif
+
+" Section: Mappings {{{1
+" Key mappings mirroring the menu entries above.
+if(v:version >= 600)
+  " The default Leader is \ 'backslash'
+  map <Leader>add :!hg add %<CR><CR>
+  map <Leader>fgt :!hg forget %<CR><CR>
+  map <Leader>diff :call ShowResults("FileDiff", "hg\ diff")<CR><CR>
+  map <Leader>revert :!hg revert %<CR>:e!<CR>
+  map <Leader>log :call ShowResults("FileLog", "hg\ log")<CR><CR>
+  map <Leader>an :call ShowResults("annotate", "hg\ annotate")<CR><CR>
+  map <Leader>stat :call ShowResults("RepoStatus", "hg\ status")<CR><CR>
+  map <Leader>upd :!hg update<CR>:e!<CR>
+  map <Leader>pull :!hg pull<CR>:e!<CR>
+else
+  " pre 6.0, the default Leader was a comma
+  map ,add :!hg add %<CR><CR>
+  map ,fgt :!hg forget %<CR><CR>
+  map ,diff :call ShowResults("FileDiff", "hg\ diff")<CR><CR>
+  " BUG FIX: pass the current file (%) like the <Leader> mapping and the
+  " menu entry do; without it 'hg revert' reverts the whole working dir.
+  map ,revert :!hg revert %<CR>:e!<CR>
+  map ,log :call ShowResults("FileLog", "hg\ log")<CR><CR>
+  map ,an :call ShowResults("annotate", "hg\ annotate")<CR><CR>
+  map ,stat :call ShowResults("RepoStatus", "hg\ status")<CR><CR>
+  map ,upd :!hg update<CR>:e!<CR>
+  map ,pull :!hg pull<CR>:e!<CR>
+endif
+
+" Section: Functions {{{1
+" Run an hg command (a:cmdName) against the current file and show its
+" output in a read-only scratch buffer named a:bufferName.
+function! ShowResults(bufferName, cmdName)
+  " Modify the shortmess option:
+  "    A  don't give the "ATTENTION" message when an existing swap file
+  "       is found.
+  set shortmess+=A
+
+  " Get the name of the current buffer.
+  let currentBuffer = bufname("%")
+
+  " If a buffer with the name a:bufferName already exists, delete it.
+  if bufexists(a:bufferName)
+    execute 'bd! ' a:bufferName
+  endif
+
+  " Create a new buffer.
+  execute 'new ' a:bufferName
+
+  " Execute the command, reading its output into the new buffer.
+  execute 'r!' a:cmdName ' ' currentBuffer
+
+  " Make it so that the buffer can't be edited.
+  setlocal nomodified
+  setlocal nomodifiable
+  setlocal readonly
+
+  " Go to the beginning of the buffer.
+  execute "normal 1G"
+
+  " Restore the shortmess option.
+  set shortmess-=A
+endfunction
new file mode 100644
--- /dev/null
+++ b/contrib/vim/patchreview.txt
@@ -0,0 +1,97 @@
+*patchreview.txt* Vim global plugin for doing single or multipatch code reviews
+
+ Author: Manpreet Singh (junkblocker-CAT-yahoo-DOG-com)
+ (Replace -CAT- and -DOG- with @ and . first)
+ Copyright (C) 2006 by Manpreet Singh
+ License : This file is placed in the public domain.
+
+=============================================================================
+
+CONTENTS *patchreview* *patchreview-contents*
+
+ 1. Contents.........................................: |patchreview-contents|
+ 2. Introduction.....................................: |patchreview-intro|
+ 3. PatchReview options..............................: |patchreview-options|
+ 4. PatchReview Usage................................: |patchreview-usage|
+ 4.1 PatchReview Usage............................: |:PatchReview|
+ 4.2 PatchReview Usage............................: |:PatchReviewCleanup|
+
+=============================================================================
+
+PatchReview Introduction *patchreview-intro*
+
+The Patch Review plugin allows single or multipatch code review to be done in
+VIM. VIM provides the |:diffpatch| command to do single file reviews but can
+not handle patch files containing multiple patches as is common with software
+development projects. This plugin provides that missing functionality. It also
+tries to improve on |:diffpatch|'s behaviour of creating the patched files in
+the same directory as original file which can lead to project workspace
+pollution.
+
+=============================================================================
+
+PatchReview Options *patchreview-options*
+
+ g:patchreview_filterdiff : Optional path to filterdiff binary. PatchReview
+ tries to locate filterdiff on system path
+ automatically. If the binary is not on system
+ path, this option tell PatchReview the full path
+ to the binary. This option, if specified,
+ overrides the default filterdiff binary on the
+ path.
+
+ examples:
+ (On Windows with Cygwin)
+
+ let g:patchreview_filterdiff = 'c:\\cygwin\\bin\\filterdiff.exe'
+
+ (On *nix systems)
+
+ let g:patchreview_filterdiff = '/usr/bin/filterdiff'
+
+ g:patchreview_patch : Optional path to patch binary. PatchReview tries
+ to locate patch on system path automatically. If
+ the binary is not on system path, this option
+ tell PatchReview the full path to the binary.
+ This option, if specified, overrides the default
+ patch binary on the path.
+
+ examples:
+ (On Windows with Cygwin)
+
+ let g:patchreview_patch = 'c:\\cygwin\\bin\\patch.exe'
+
+ (On *nix systems)
+
+ let g:patchreview_patch = '/usr/bin/gpatch'
+
+
+ g:patchreview_tmpdir : Optional path where the plugin can save temporary
+ files. If this is not specified, the plugin tries to
+ use TMP, TEMP and TMPDIR environment variables in
+ succession.
+
+ examples:
+ (On Windows) let g:patchreview_tmpdir = 'c:\\tmp'
+ (On *nix systems) let g:patchreview_tmpdir = '~/tmp'
+
+=============================================================================
+
+PatchReview Usage *patchreview-usage*
+ *:PatchReview*
+
+ :PatchReview patchfile_path [optional_source_directory]
+
+ Perform a patch review in the current directory based on the supplied
+ patchfile_path. If optional_source_directory is specified, patchreview is
+   done on that directory. Otherwise, the current directory is assumed to be
+ the source directory.
+ *:PatchReviewCleanup*
+
+ :PatchReviewCleanup
+
+ After you are done using the :PatchReview command, you can cleanup the
+ temporary files in the temporary directory using this command.
+
+=============================================================================
+vim: ft=help:ts=2:sts=2:sw=2:tw=78:tw=78
new file mode 100644
--- /dev/null
+++ b/contrib/vim/patchreview.vim
@@ -0,0 +1,332 @@
+" Vim global plugin for doing single or multipatch code reviews"{{{
+
+" Version : 0.1 "{{{
+" Last Modified : Thu 25 May 2006 10:15:11 PM PDT
+" Author : Manpreet Singh (junkblocker AT yahoo DOT com)
+" Copyright : 2006 by Manpreet Singh
+" License : This file is placed in the public domain.
+"
+" History : 0.1 - First released
+"}}}
+" Documentation: "{{{
+" ===========================================================================
+" This plugin allows single or multipatch code reviews to be done in VIM. Vim
+" has :diffpatch command to do single file reviews but can not handle patch
+" files containing multiple patches. This plugin provides that missing
+" functionality and doesn't require the original file to be open.
+"
+" Installing: "{{{
+"
+" For a quick start...
+"
+" Requirements: "{{{
+"
+" 1) (g)vim 7.0 or higher built with +diff option.
+" 2) patch and patchutils ( http://cyberelk.net/tim/patchutils/ ) installed
+"    for your OS. For windows it is available from Cygwin (
+" http://www.cygwin.com ) or GnuWin32 ( http://gnuwin32.sourceforge.net/
+" ).
+""}}}
+" Install: "{{{
+"
+" 1) Extract this in your $VIM/vimfiles or $HOME/.vim directory and restart
+" vim.
+"
+" 2) Make sure that you have filterdiff from patchutils and patch commands
+" installed.
+"
+" 3) Optionally, specify the locations of filterdiff and patch commands and
+" location of a temporary directory to use in your .vimrc.
+"
+" let g:patchreview_filterdiff = '/path/to/filterdiff'
+" let g:patchreview_patch = '/path/to/patch'
+" let g:patchreview_tmpdir = '/tmp/or/something'
+"
+" 4) Optionally, generate help tags to use help
+"
+" :helptags ~/.vim/doc
+" or
+" :helptags c:\vim\vimfiles\doc
+""}}}
+""}}}
+" Usage: "{{{
+"
+" :PatchReview path_to_submitted_patchfile [optional_source_directory]
+"
+" after review is done
+"
+" :PatchReviewCleanup
+"
+" See :help patchreview for details after you've created help tags.
+""}}}
+"}}}
+" Code "{{{
+
+" Enabled only during development "{{{
+" unlet! g:loaded_patchreview " DEBUG
+" unlet! g:patchreview_tmpdir " DEBUG
+" unlet! g:patchreview_filterdiff " DEBUG
+" unlet! g:patchreview_patch " DEBUG
+"}}}
+
+" load only once "{{{
+if exists('g:loaded_patchreview')
+  finish
+endif
+let g:loaded_patchreview=1
+" Name of the scratch buffer used for all of this plugin's messages.
+let s:msgbufname = 'Patch Review Messages'
+"}}}
+
+" Wipe the message buffer if its window is visible, then return to the
+" previously focused window.
+function! <SID>PR_wipeMsgBuf() "{{{
+  let s:winnum = bufwinnr(s:msgbufname)
+  if s:winnum != -1 " If the window is already open, jump to it
+    let s:cur_winnr = winnr()
+    " NOTE(review): if the message window is the *current* window nothing
+    " is wiped -- looks intentional (avoids wiping out from under the
+    " cursor), but confirm.
+    if winnr() != s:winnum
+      exe s:winnum . 'wincmd w'
+      exe 'bw'
+      exe s:cur_winnr . 'wincmd w'
+    endif
+  endif
+endfunction
+"}}}
+
+" Append a message to the plugin's message buffer, opening it in a small
+" bottom split when it is not already visible.
+function! <SID>PR_echo(...) "{{{
+  " Usage: PR_echo(msg, [return_to_original_window_flag])
+  "        default return_to_original_window_flag = 0
+  "
+  let s:cur_winnr = winnr()
+  let s:winnum = bufwinnr(s:msgbufname)
+  if s:winnum != -1 " If the window is already open, jump to it
+    if winnr() != s:winnum
+      exe s:winnum . 'wincmd w'
+    endif
+  else
+    " Not visible: reuse the existing buffer if there is one, otherwise
+    " create a new one with the message-buffer name.
+    let s:bufnum = bufnr(s:msgbufname)
+    if s:bufnum == -1
+      let s:wcmd = s:msgbufname
+    else
+      let s:wcmd = '+buffer' . s:bufnum
+    endif
+    exe 'silent! botright 5split ' . s:wcmd
+  endif
+  " Mark the buffer as a throwaway, unlisted scratch buffer.
+  setlocal modifiable
+  setlocal buftype=nofile
+  setlocal bufhidden=delete
+  setlocal noswapfile
+  setlocal nowrap
+  setlocal nobuflisted
+  if a:0 != 0
+    silent! $put =a:1
+  endif
+  exe ':$'
+  setlocal nomodifiable
+  if a:0 > 1 && a:2
+    exe s:cur_winnr . 'wincmd w'
+  endif
+endfunction
+"}}}
+
+" Return 1 if the binary a:BinaryName is usable. It may be specified
+" explicitly via g:patchreview_<BinaryName>; otherwise it must be found on
+" the system path, in which case the global is set for later use.
+function! <SID>PR_checkBinary(BinaryName) "{{{
+  " Verify that BinaryName is specified or available
+  if ! exists('g:patchreview_' . a:BinaryName)
+    if executable(a:BinaryName)
+      let g:patchreview_{a:BinaryName} = a:BinaryName
+      return 1
+    else
+      call s:PR_echo('g:patchreview_' . a:BinaryName . ' is not defined and could not be found on path. Please define it in your .vimrc.')
+      return 0
+    endif
+  elseif ! executable(g:patchreview_{a:BinaryName})
+    " BUG FIX: was g:patchreview_{a.BinaryName} -- '.' instead of the 'a:'
+    " scope prefix, which raises an undefined-variable error at runtime.
+    call s:PR_echo('Specified g:patchreview_' . a:BinaryName . ' [' . g:patchreview_{a:BinaryName} . '] is not executable.')
+    return 0
+  else
+    return 1
+  endif
+endfunction
+"}}}
+
+" Ensure g:patchreview_tmpdir names a writable temporary directory,
+" falling back to $TMP, $TEMP, then $TMPDIR. Returns 1 on success, 0 on
+" failure; error messages are suppressed when a:Quiet is set.
+function! <SID>PR_GetTempDirLocation(Quiet) "{{{
+  if exists('g:patchreview_tmpdir')
+    if ! isdirectory(g:patchreview_tmpdir) || ! filewritable(g:patchreview_tmpdir)
+      if ! a:Quiet
+        call s:PR_echo('Temporary directory specified by g:patchreview_tmpdir [' . g:patchreview_tmpdir . '] is not accessible.')
+      endif
+      " BUG FIX: the 'return 0' used to sit inside the a:Quiet check, so
+      " quiet callers got a success result despite an unusable directory.
+      return 0
+    endif
+  elseif exists("$TMP") && isdirectory($TMP) && filewritable($TMP)
+    let g:patchreview_tmpdir = $TMP
+  elseif exists("$TEMP") && isdirectory($TEMP) && filewritable($TEMP)
+    let g:patchreview_tmpdir = $TEMP
+  elseif exists("$TMPDIR") && isdirectory($TMPDIR) && filewritable($TMPDIR)
+    let g:patchreview_tmpdir = $TMPDIR
+  else
+    if ! a:Quiet
+      call s:PR_echo('Could not figure out a temporary directory to use. Please specify g:patchreview_tmpdir in your .vimrc.')
+    endif
+    " BUG FIX: same as above -- report failure even in quiet mode.
+    return 0
+  endif
+  let g:patchreview_tmpdir = g:patchreview_tmpdir . '/'
+  let g:patchreview_tmpdir = substitute(g:patchreview_tmpdir, '\\', '/', 'g')
+  " BUG FIX: '/+$' matched a literal '/+'; '/\+$' collapses any run of
+  " trailing slashes down to the single one appended above.
+  let g:patchreview_tmpdir = substitute(g:patchreview_tmpdir, '/\+$', '/', '')
+  if has('win32')
+    let g:patchreview_tmpdir = substitute(g:patchreview_tmpdir, '/', '\\', 'g')
+  endif
+  return 1
+endfunction
+"}}}
+
+" Start a review session: for every file touched by the patch at a:1
+" (optionally rooted at source directory a:2), open a tab containing a
+" vertical diffsplit of the file against its patched copy; any rejects are
+" shown in an extra window in that tab.
+function! <SID>PatchReview(...) "{{{
+  " VIM 7+ required"{{{
+  if version < 700
+    call s:PR_echo('This plugin needs VIM 7 or higher')
+    return
+  endif
+"}}}
+
+  let s:save_shortmess = &shortmess
+  set shortmess+=aW
+  call s:PR_wipeMsgBuf()
+
+  " Check passed arguments "{{{
+  if a:0 == 0
+    call s:PR_echo('PatchReview command needs at least one argument specifying a patchfile path.')
+    let &shortmess = s:save_shortmess
+    return
+  endif
+  if a:0 >= 1 && a:0 <= 2
+    " NOTE(review): expand()'s second argument is the boolean 'nosuf' flag,
+    " so ':p' here is merely truthy; the path is only made absolute further
+    " down via fnamemodify(). Confirm this is the intended usage.
+    let s:PatchFilePath = expand(a:1, ':p')
+    if ! filereadable(s:PatchFilePath)
+      call s:PR_echo('File [' . s:PatchFilePath . '] is not accessible.')
+      let &shortmess = s:save_shortmess
+      return
+    endif
+    if a:0 == 2
+      let s:SrcDirectory = expand(a:2, ':p')
+      if ! isdirectory(s:SrcDirectory)
+        call s:PR_echo('[' . s:SrcDirectory . '] is not a directory')
+        let &shortmess = s:save_shortmess
+        return
+      endif
+      try
+        exe 'cd ' . s:SrcDirectory
+      catch /^.*E344.*/
+        call s:PR_echo('Could not change to directory [' . s:SrcDirectory . ']')
+        let &shortmess = s:save_shortmess
+        return
+      endtry
+    endif
+  else
+    call s:PR_echo('PatchReview command needs at most two arguments: patchfile path and optional source directory path.')
+    let &shortmess = s:save_shortmess
+    return
+  endif
+"}}}
+
+  " Verify that filterdiff and patch are specified or available "{{{
+  if ! s:PR_checkBinary('filterdiff') || ! s:PR_checkBinary('patch')
+    let &shortmess = s:save_shortmess
+    return
+  endif
+
+  let s:retval = s:PR_GetTempDirLocation(0)
+  if ! s:retval
+    let &shortmess = s:save_shortmess
+    return
+  endif
+"}}}
+
+  " Requirements met, now execute "{{{
+  let s:PatchFilePath = fnamemodify(s:PatchFilePath, ':p')
+  call s:PR_echo('Patch file : ' . s:PatchFilePath)
+  call s:PR_echo('Source directory: ' . getcwd())
+  call s:PR_echo('------------------')
+  " filterdiff --list -s prints one "<type> <path>" line per touched file,
+  " where <type> is ! (modified), + (added) or - (deleted).
+  let s:theFilterDiffCommand = '' . g:patchreview_filterdiff . ' --list -s ' . s:PatchFilePath
+  let s:theFilesString = system(s:theFilterDiffCommand)
+  let s:theFilesList = split(s:theFilesString, '[\r\n]')
+  for s:filewithchangetype in s:theFilesList
+    if s:filewithchangetype !~ '^[!+-] '
+      " BUG FIX: the message said 'due to understood change' but this branch
+      " runs exactly when the change type was NOT understood.
+      call s:PR_echo('*** Skipping review generation due to unsupported change type for [' . s:filewithchangetype . ']', 1)
+      continue
+    endif
+    unlet! s:RelativeFilePath
+    let s:RelativeFilePath = substitute(s:filewithchangetype, '^. ', '', '')
+    let s:RelativeFilePath = substitute(s:RelativeFilePath, '^[a-z][^\\\/]*[\\\/]' , '' , '')
+    if s:filewithchangetype =~ '^! '
+      let s:msgtype = 'Modification : '
+    elseif s:filewithchangetype =~ '^+ '
+      let s:msgtype = 'Addition : '
+    elseif s:filewithchangetype =~ '^- '
+      let s:msgtype = 'Deletion : '
+    endif
+    let s:bufnum = bufnr(s:RelativeFilePath)
+    if buflisted(s:bufnum) && getbufvar(s:bufnum, '&mod')
+      call s:PR_echo('Old buffer for file [' . s:RelativeFilePath . '] exists in modified state. Skipping review.', 1)
+      continue
+    endif
+    let s:tmpname = substitute(s:RelativeFilePath, '/', '_', 'g')
+    let s:tmpname = substitute(s:tmpname, '\\', '_', 'g')
+    let s:tmpname = g:patchreview_tmpdir . 'PatchReview.' . s:tmpname . '.' . strftime('%Y%m%d%H%M%S')
+    if has('win32')
+      let s:tmpname = substitute(s:tmpname, '/', '\\', 'g')
+    endif
+    " Remember every temporary file so :PatchReviewCleanup can remove it.
+    if ! exists('s:patchreview_tmpfiles')
+      let s:patchreview_tmpfiles = []
+    endif
+    let s:patchreview_tmpfiles = s:patchreview_tmpfiles + [s:tmpname]
+
+    " Extract this file's hunks into s:tmpname, then apply them with patch.
+    let s:filterdiffcmd = '!' . g:patchreview_filterdiff . ' -i ' . s:RelativeFilePath . ' ' . s:PatchFilePath . ' > ' . s:tmpname
+    silent! exe s:filterdiffcmd
+    if s:filewithchangetype =~ '^+ '
+      " Added file: patch against the platform's null device.
+      if has('win32')
+        let s:inputfile = 'nul'
+      else
+        let s:inputfile = '/dev/null'
+      endif
+    else
+      let s:inputfile = expand(s:RelativeFilePath, ':p')
+    endif
+    silent exe '!' . g:patchreview_patch . ' -o ' . s:tmpname . '.file ' . s:inputfile . ' < ' . s:tmpname
+    let s:origtabpagenr = tabpagenr()
+    silent! exe 'tabedit ' . s:RelativeFilePath
+    silent! exe 'vert diffsplit ' . s:tmpname . '.file'
+    if filereadable(s:tmpname . '.file.rej')
+      silent! exe 'topleft 5split ' . s:tmpname . '.file.rej'
+      call s:PR_echo(s:msgtype . '*** REJECTED *** ' . s:RelativeFilePath, 1)
+    else
+      call s:PR_echo(s:msgtype . ' ' . s:RelativeFilePath, 1)
+    endif
+    silent! exe 'tabn ' . s:origtabpagenr
+  endfor
+  call s:PR_echo('-----')
+  call s:PR_echo('Done.')
+  let &shortmess = s:save_shortmess
+"}}}
+endfunction
+"}}}
+
+" Delete all PatchReview.* temporary files left in the temporary directory
+" by previous :PatchReview runs.
+function! <SID>PatchReviewCleanup() "{{{
+  " Quiet lookup (argument 1): emit no errors if no tmpdir is usable.
+  let s:retval = s:PR_GetTempDirLocation(1)
+  if s:retval && exists('g:patchreview_tmpdir') && isdirectory(g:patchreview_tmpdir) && filewritable(g:patchreview_tmpdir)
+    let s:zefilestr = globpath(g:patchreview_tmpdir, 'PatchReview.*')
+    let s:theFilesList = split(s:zefilestr, '\m[\r\n]\+')
+    for s:thefile in s:theFilesList
+      call delete(s:thefile)
+    endfor
+  endif
+endfunction
+"}}}
+
+" Commands "{{{
+"============================================================================
+" :PatchReview patchfile [source_dir] -- start a review session
+command! -nargs=* -complete=file PatchReview call s:PatchReview (<f-args>)
+
+
+" :PatchReviewCleanup -- remove this plugin's temporary files
+command! -nargs=0 PatchReviewCleanup call s:PatchReviewCleanup ()
+"}}}
+"}}}
+
+" vim: textwidth=78 nowrap tabstop=2 shiftwidth=2 softtabstop=2 expandtab
+" vim: filetype=vim encoding=latin1 fileformat=unix foldlevel=0 foldmethod=marker
+"}}}
new file mode 100644
--- /dev/null
+++ b/contrib/win32/ReadMe.html
@@ -0,0 +1,146 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+ <head>
+ <title>Mercurial for Windows</title>
+ <meta http-equiv="Content-Type" content="text/html;charset=utf-8" >
+ <style type="text/css">
+ <!--
+ .indented
+ {
+ padding-left: 10pt;
+ }
+ -->
+ </style>
+ </head>
+
+ <body>
+ <h1>Mercurial version 0.9 for Windows</h1>
+
+ <p>Welcome to Mercurial for Windows!</p>
+
+ <p>Mercurial is a command-line application. You must run it from
+ the Windows command prompt (or if you're hard core, a <a
+ href="http://www.mingw.org/">MinGW</a> shell).</p>
+
+    <div class="indented"><p><i>Note: the standard <a
+    href="http://www.mingw.org/">MinGW</a> msys startup script uses
+    rxvt which has problems setting up standard input and output.
+    Running bash directly works correctly.</i></p></div>
+
+ <p>For documentation, please visit the <a
+ href="http://www.selenic.com/mercurial">Mercurial web site</a>.</p>
+
+ <p>By default, Mercurial installs to <tt>C:\Mercurial</tt>. The
+ Mercurial command is called <tt>hg.exe</tt>. To run this
+ command, the install directory must be in your search path.</p>
+
+ <h2>Setting your search path temporarily</h2>
+
+ <p>To set your search path temporarily, type the following into a
+ command prompt window:</p>
+
+ <pre>
+set PATH=C:\Mercurial;%PATH%
+</pre>
+
+ <h2>Setting your search path permanently</h2>
+
+ <p>To set your search path permanently, perform the following
+ steps. These instructions are for Windows NT, 2000 and XP.</p>
+
+ <ol>
+ <li>Open the Control Panel. Under Windows XP, select the
+ "Classic View".</li>
+
+ <li>Double-click on the "System" control panel.</li>
+
+ <li>Click on the "Advanced" tab.</li>
+
+ <li>Click on "Environment Variables". You'll find this near the
+ bottom of the window.</li>
+
+ <li>Under "System variables", you will see "Path". Double-click
+ it.</li>
+
+ <li>Edit "Variable value". Each path element is separated by a
+ semicolon (";") character. Append a semicolon to the end of the
+ list, followed by the path where you installed Mercurial
+ (e.g. <tt>C:\Mercurial</tt>).</li>
+
+ <li>Click on the various "OK" buttons until you've completely
+ exited from the System control panel.</li>
+
+ <li>Log out and log back in, or restart your system.</li>
+
+ <li>The next time you run the Windows command prompt, you will be
+ able to run the <tt>hg</tt> command without any special
+ help.</li>
+ </ol>
+
+ <h1>Testing Mercurial after you've installed it</h1>
+
+ <p>The easiest way to check that Mercurial is installed properly is to
+ just type the following at the command prompt:</p>
+
+ <pre>
+hg
+</pre>
+
+ <p>This command should print a useful help message. If it does,
+ other Mercurial commands should work fine for you.</p>
+
+ <h1>Configuration notes</h1>
+ <p>The default editor for commit messages is 'vi'. You can set the EDITOR
+ (or HGEDITOR) environment variable to specify your preference or set it in
+ mercurial.ini:</p>
+ <pre>
+[ui]
+editor = whatever
+</pre>
+
+
+ <h1>Reporting problems</h1>
+
+ <p>Before you report any problems, please consult the <a
+ href="http://www.selenic.com/mercurial">Mercurial web site</a> and
+ see if your question is already in our list of <a
+ href="http://www.selenic.com/mercurial/wiki/index.cgi/FAQ">Frequently
+ Answered Questions</a> (the "FAQ").
+
+ <p>If you cannot find an answer to your question, please feel
+ free to send mail to the Mercurial mailing list, at <a
+ href="mailto:mercurial@selenic.com">mercurial@selenic.com</a>.
+ <b>Remember</b>, the more useful information you include in your
+ report, the easier it will be for us to help you!</p>
+
+ <p>If you are IRC-savvy, that's usually the fastest way to get
+ help. Go to <tt>#mercurial</tt> on
+ <tt>irc.freenode.net</tt>.</p>
+
+ <h1>Author and copyright information</h1>
+
+ <p>Mercurial was written by <a href="http://www.selenic.com">Matt
+ Mackall</a>, and is maintained by Matt and a team of
+ volunteers.</p>
+
+ <p>The Windows installer was written by <a
+ href="http://www.serpentine.com/blog">Bryan
+ O'Sullivan</a>.</p>
+
+ <p>Mercurial is Copyright 2005, 2006 Matt Mackall and others. See the
+ <tt>Contributors.txt</tt> file for a list of contributors.</p>
+
+ <p>Mercurial is free software; you can redistribute it and/or
+ modify it under the terms of the <a
+ href="http://www.gnu.org/copyleft/gpl.html">GNU General Public
+ License</a> as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later
+ version.</p>
+
+ <p>Mercurial is distributed in the hope that it will be useful,
+ but <b>without any warranty</b>; without even the implied
+ warranty of <b>merchantability</b> or <b>fitness for a
+ particular purpose</b>. See the GNU General Public License for
+ more details.</p>
+ </body>
+</html>
new file mode 100644
--- /dev/null
+++ b/contrib/win32/mercurial.ini
@@ -0,0 +1,36 @@
+; System-wide Mercurial config file. To override these settings on a
+; per-user basis, please edit the following file instead, where
+; USERNAME is your Windows user name:
+; C:\Documents and Settings\USERNAME\Mercurial.ini
+
+; By default, we try to encode and decode all files that do not
+; contain ASCII NUL characters. What this means is that we try to set
+; line endings to Windows style on update, and to Unix style on
+; commit. This lets us cooperate with Linux and Unix users, so
+; everybody sees files with their native line endings.
+
+[extensions]
+; The win32text extension is available and installed by default. It
+; provides built-in Python hooks to perform line ending conversions.
+; This is normally much faster than running an external program.
+hgext.win32text =
+
+
+[encode]
+; Encode files that don't contain NUL characters.
+** = cleverencode:
+
+; Alternatively, you can explicitly specify each file extension that
+; you want encoded (any you omit will be left untouched), like this:
+
+; **.txt = dumbencode:
+
+
+[decode]
+; Decode files that don't contain NUL characters.
+** = cleverdecode:
+
+; Alternatively, you can explicitly specify each file extension that
+; you want decoded (any you omit will be left untouched), like this:
+
+; **.txt = dumbdecode:
new file mode 100644
--- /dev/null
+++ b/contrib/win32/mercurial.iss
@@ -0,0 +1,57 @@
+; Script generated by the Inno Setup Script Wizard.
+; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
+
+[Setup]
+AppCopyright=Copyright 2005, 2006 Matt Mackall and others
+AppName=Mercurial
+AppVerName=Mercurial version 0.9
+InfoAfterFile=contrib/win32/postinstall.txt
+LicenseFile=COPYING
+ShowLanguageDialog=yes
+AppPublisher=Matt Mackall and others
+AppPublisherURL=http://www.selenic.com/mercurial
+AppSupportURL=http://www.selenic.com/mercurial
+AppUpdatesURL=http://www.selenic.com/mercurial
+AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3}
+AppContact=mercurial@selenic.com
+OutputBaseFilename=Mercurial-0.9
+DefaultDirName={sd}\Mercurial
+SourceDir=C:\hg\hg-release
+VersionInfoVersion=0.9
+VersionInfoDescription=Mercurial distributed SCM
+VersionInfoCopyright=Copyright 2005, 2006 Matt Mackall and others
+VersionInfoCompany=Matt Mackall and others
+InternalCompressLevel=max
+SolidCompression=true
+SetupIconFile=contrib\favicon.ico
+AllowNoIcons=true
+DefaultGroupName=Mercurial
+
+[Files]
+Source: ..\..\msys\1.0\bin\patch.exe; DestDir: {app}
+Source: contrib\mercurial.el; DestDir: {app}/Contrib
+Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme
+Source: contrib\win32\mercurial.ini; DestDir: {app}; DestName: Mercurial.ini; Flags: confirmoverwrite
+Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt
+Source: dist\hg.exe; DestDir: {app}
+Source: dist\library.zip; DestDir: {app}
+Source: dist\mfc71.dll; DestDir: {sys}; Flags: sharedfile uninsnosharedfileprompt
+Source: dist\msvcr71.dll; DestDir: {sys}; Flags: sharedfile uninsnosharedfileprompt
+Source: dist\w9xpopen.exe; DestDir: {app}
+Source: doc\*.txt; DestDir: {app}\Docs
+Source: templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs
+Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt
+Source: COPYING; DestDir: {app}; DestName: Copying.txt
+Source: comparison.txt; DestDir: {app}\Docs; DestName: Comparison.txt
+Source: notes.txt; DestDir: {app}\Docs; DestName: DesignNotes.txt
+
+[INI]
+Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: http://www.selenic.com/mercurial/
+
+[UninstallDelete]
+Type: files; Name: {app}\Mercurial.url
+
+[Icons]
+Name: {group}\Uninstall Mercurial; Filename: {uninstallexe}
+Name: {group}\Mercurial Command Reference; Filename: {app}\Docs\hg.1.txt
+Name: {group}\Mercurial Web Site; Filename: {app}\Mercurial.url
new file mode 100644
--- /dev/null
+++ b/contrib/win32/postinstall.txt
@@ -0,0 +1,112 @@
+Welcome to Mercurial for Windows!
+---------------------------------
+
+For configuration and usage directions, please read the ReadMe.html
+file that comes with this package.
+
+Release Notes
+-------------
+
+2006-05-10 v0.9
+
+* Major changes between Mercurial 0.8.1 and 0.9:
+
+ - The repository file format has been improved.
+ - This has resulted in an average 40% reduction in disk space usage.
+ - The new format (called RevlogNG) is now the default.
+ - Mercurial works perfectly with both the old and new repository
+ file formats. It can transfer changes transparently between
+ repositories of either format.
+ - To use the new repository format, simply use `hg clone --pull` to
+ clone an existing repository.
+ - Note: Versions 0.8.1 and earlier of Mercurial cannot read
+ RevlogNG repositories directly, but they can `clone`, `pull`
+ from, and `push` to servers that are serving RevlogNG
+ repositories.
+ - Memory usage has been improved by over 50% for many common operations.
+ - Substantial performance improvements on large repositories.
+ - New commands:
+ - 'archive' - generate a directory tree snapshot, tarball, or zip
+ file of a revision
+ - Deprecated commands:
+ - 'addremove' - replaced by 'add' and 'remove --after'
+ - 'forget' - replaced by 'revert'
+ - 'undo' - replaced by 'rollback'
+ - New extensions:
+ - Bugzilla integration hook
+ - Email notification hook
+ - Nested repositories are now supported. Mercurial will not recurse
+ into a subdirectory that contains a '.hg' directory. It is treated
+ as a separate repository.
+ - The standalone web server, 'hg serve', is now threaded, so it can
+ talk to multiple clients at a time.
+ - The web server can now display a "message of the day".
+ - Support added for hooks written in Python.
+ - Many improvements and clarifications to built-in help.
+
+
+2006-04-07 v0.8.1
+
+* Major changes from 0.8 to 0.8.1:
+
+ - new extensions:
+ mq (manage a queue of patches, like quilt only better)
+ email (send changes as series of email patches)
+ - new command: merge (replaces "update -m")
+ - improved commands: log (--limit option added), pull/push ("-r" works
+ on specific revisions), revert (rewritten, much better)
+ - comprehensive hook support
+ - output templating added, supporting e.g. GNU changelog style
+ - Windows, Mac OS X: prebuilt binary packages, better support
+ - many reliability, performance, and memory usage improvements
+
+
+2006-01-29 v0.8
+
+* Upgrade notes:
+
+ - diff and status command are now repo-wide by default
+ (use 'hg diff .' for the old behavior)
+ - GPG signing is now done with the gpg extension
+ - the --text option for commit, rawcommit, and tag has been removed
+ - the copy/rename --parents option has been removed
+
+* Major changes from 0.7 to 0.8:
+
+ - faster status, diff, and commit
+ - reduced memory usage for push and pull
+ - improved extension API
+ - new bisect, gpg, hgk, and win32text extensions
+ - short URLs, binary file handling, and optional gitweb skin for hgweb
+ - numerous new command options including log --keyword and pull --rev
+ - improved hooks and file filtering
+
+
+2005-09-21 v0.7 with modifications
+
+* New INI files have been added to control Mercurial's behaviour:
+
+ System-wide - C:\Mercurial\Mercurial.ini
+ Per-user - C:\Documents and Settings\USERNAME\Mercurial.ini
+
+ A default version of the system-wide INI file is installed with
+ Mercurial. No per-user INI file is installed, but it will be
+ honoured if you create one.
+
+* Windows line endings are now handled automatically and correctly by
+ the update and commit commands. See the INI file for how to
+ customise this behaviour.
+
+* NOTE: Much of the rest of the Mercurial code does not handle Windows
+ line endings properly. Accordingly, the output of the diff command,
+ for example, will appear huge until I fix this.
+
+* Packaged text files now have correct Windows line endings.
+
+
+2005-09-21 v0.7 with modifications
+
+* This is the first standalone release of Mercurial for Windows.
+
+* I believe it to be mostly functional, with one exception: there is
+ no support yet for DOS <-> Unix line ending conversion.
new file mode 100644
--- /dev/null
+++ b/contrib/win32/win32-build.txt
@@ -0,0 +1,43 @@
+The standalone Windows installer for Mercurial is built in a somewhat
+jury-rigged fashion.
+
+It has the following prerequisites, at least as I build it:
+
+ Python for Windows
+ http://www.python.org/ftp/python/2.4.1/python-2.4.1.msi
+
+ MinGW
+ http://www.mingw.org/
+
+ Python for Windows Extensions
+ http://sourceforge.net/projects/pywin32/
+
+ mfc71.dll (just download, don't install)
+ http://starship.python.net/crew/mhammond/win32/
+
+ The py2exe distutils extension
+ http://sourceforge.net/projects/py2exe/
+
+ Inno Setup
+ http://www.jrsoftware.org/isinfo.php
+
+ ISTool
+ http://www.istool.org/default.aspx/
+
+And, of course, Mercurial itself.
+
+Once you have all this installed and built, clone a copy of the
+Mercurial repository you want to package, and name the repo
+C:\hg\hg-release.
+
+In a shell, build a standalone copy of the hg.exe program:
+
+ python setup.py build -c mingw32 py2exe -b 1
+
+Copy mfc71.dll into the dist directory that just got created.
+
+Run ISTool, and open the C:\hg\hg-release\contrib\win32\mercurial.iss
+file.
+
+In ISTool, type Ctrl-F9 to compile the installer file. The actual
+installer will be in the C:\hg\hg-release\Output directory.
new file mode 100644
--- /dev/null
+++ b/contrib/zsh_completion
@@ -0,0 +1,425 @@
+#compdef hg
+
+# Zsh completion script for mercurial. Rename this file to _hg and copy
+# it into your zsh function path (/usr/share/zsh/site-functions for
+# instance)
+#
+# Copyright (C) 2005 Steve Borho
+#
+# This is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free
+# Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+
+local curcontext="$curcontext" state line
+typeset -A opt_args
+local subcmds repos tags newFiles addedFiles includeExclude
+
+tags=($(hg tags 2> /dev/null | sed -e 's/[0-9]*:[a-f0-9]\{40\}$//; s/ *$//'))
+subcmds=($(hg -v help | sed -e '1,/^list of commands:/d' \
+ -e '/^global options:/,$d' -e '/^ [^ ]/!d; s/[,:].*//g;'))
+
+# A lot of commands have these arguments
+includeExclude=(
+ '*-I-[include names matching the given patterns]:dir:_files -W $(hg root) -/'
+ '*--include-[include names matching the given patterns]:dir:_files -W $(hg root) -/'
+ '*-X-[exclude names matching the given patterns]:dir:_files -W $(hg root) -/'
+ '*--exclude-[exclude names matching the given patterns]:dir:_files -W $(hg root) -/')
+
+if [[ $service == "hg" ]]; then
+ _arguments -C -A "-*" \
+ '(--repository)-R[repository root directory]:root:_files -/' \
+ '(-R)--repository[repository root directory]:root:_files -/' \
+ '--cwd[change working directory]:new working directory:_files -/' \
+ '(--noninteractive)-y[do not prompt, assume yes for any required answers]' \
+ '(-y)--noninteractive[do not prompt, assume yes for any required answers]' \
+ '(--verbose)-v[enable additional output]' \
+ '(-v)--verbose[enable additional output]' \
+ '(--quiet)-q[suppress output]' \
+ '(-q)--quiet[suppress output]' \
+ '(--help)-h[display help and exit]' \
+ '(-h)--help[display help and exit]' \
+ '--debug[debug mode]' \
+ '--debugger[start debugger]' \
+ '--traceback[print traceback on exception]' \
+ '--time[time how long the command takes]' \
+ '--profile[profile]' \
+ '--version[output version information and exit]' \
+ '*::command:->subcmd' && return 0
+
+ if (( CURRENT == 1 )); then
+ _wanted commands expl 'hg command' compadd -a subcmds
+ return
+ fi
+ service="$words[1]"
+ curcontext="${curcontext%:*}=$service:"
+fi
+
+case $service in
+ (add)
+ newFiles=(${(ps:\0:)"$(hg status -0un .)"})
+ _arguments $includeExclude \
+ '*:file:->unknown'
+ _wanted files expl 'unknown files' compadd -a newFiles
+ ;;
+
+ (addremove)
+ _arguments $includeExclude \
+ '*:directories:_files -/' # assume they want to add/remove a dir
+ ;;
+
+ (forget)
+ addedFiles=(${(ps:\0:)"$(hg status -0an .)"})
+ _arguments $includeExclude \
+ '*:file:->added'
+ _wanted files expl 'newly added files' compadd -a addedFiles
+ ;;
+
+ (remove|rm)
+ _arguments $includeExclude \
+ '*:file:_files'
+ ;;
+
+ (copy|cp)
+ _arguments $includeExclude \
+ '(--after)-A[record a copy that has already occurred]' \
+ '(-A)--after[record a copy that has already occurred]' \
+ '(--force)-f[forcibly copy over an existing managed file]' \
+ '(-f)--force[forcibly copy over an existing managed file]' \
+ '(--parents)-p[append source path to dest]' \
+ '(-p)--parents[append source path to dest]' \
+ '*:files:_files'
+ ;;
+
+ (rename|mv)
+ if (( CURRENT == 2 )); then
+ _arguments $includeExclude \
+ '(--after)-A[record a rename that has already occurred]' \
+ '(-A)--after[record a rename that has already occurred]' \
+ '(--force)-f[replace destination if it exists]' \
+ '(-f)--force[replace destination if it exists]' \
+ '(--parents)-p[append source path to dest]' \
+ '(-p)--parents[append source path to dest]' \
+ '*:files:_files'
+ else
+ _arguments '*:destination:_files'
+ fi
+ ;;
+
+ (diff)
+ _arguments $includeExclude \
+ '*-r[revision]:revision:($tags)' \
+ '*--rev[revision]:revision:($tags)' \
+ '(--text)-a[treat all files as text]' \
+ '(-a)--text[treat all files as text]' \
+ '*:file:_files'
+ ;;
+
+ (status|st)
+ _arguments $includeExclude \
+ '(--no-status)-n[hide status prefix]' \
+ '(-n)--no-status[hide status prefix]' \
+ '(--print0)-0[end filenames with NUL, for use with xargs]' \
+ '(-0)--print0[end filenames with NUL, for use with xargs]' \
+ '(--modified)-m[show only modified files]' \
+ '(-m)--modified[show only modified files]' \
+ '(--added)-a[show only added files]' \
+ '(-a)--added[show only added files]' \
+ '(--removed)-r[show only removed files]' \
+ '(-r)--removed[show only removed files]' \
+ '(--unknown)-u[show only unknown files]' \
+ '(-u)--unknown[show only unknown files]' \
+ '*:search pattern, then files:_files'
+ ;;
+
+ (revert)
+ addedFiles=(${(ps:\0:)"$(hg status -0amrn .)"})
+ _arguments \
+ '(--rev)-r[revision to revert to]:revision:($tags)' \
+ '(-r)--rev[revision to revert to]:revision:($tags)' \
+ '(--nonrecursive)-n[do not recurse into subdirectories]' \
+ '(-n)--nonrecursive[do not recurse into subdirectories]' \
+ '*:file:->modified'
+ _wanted files expl 'modified files' compadd -a addedFiles
+ ;;
+
+ (commit|ci)
+ addedFiles=(${(ps:\0:)"$(hg status -0amrn .)"})
+ _arguments $includeExclude \
+ '(--addremove)-A[run addremove during commit]' \
+ '(-A)--addremove[run addremove during commit]' \
+ '(--message)-m[use <txt> as commit message]:string:' \
+ '(-m)--message[use <txt> as commit message]:string:' \
+ '(--logfile)-l[read commit message from <file>]:.log file:_file -g \*.txt' \
+ '(-l)--logfile[read commit message from <file>]:.log file:_file -g \*.txt' \
+ '(--date)-d[record datecode as commit date]:date code:' \
+ '(-d)--date[record datecode as commit date]:date code:' \
+ '(--user)-u[record user as committer]:user:' \
+ '(-u)--user[record user as committer]:user:' \
+ '*:file:->modified'
+ _wanted files expl 'modified files' compadd -a addedFiles
+ ;;
+
+ (cat)
+ _arguments $includeExclude \
+ '(--output)-o[print output to file with formatted name]:filespec:' \
+ '(-o)--output[print output to file with formatted name]:filespec:' \
+ '(--rev)-r[revision]:revision:($tags)' \
+ '(-r)--rev[revision]:revision:($tags)' \
+ '*:file:_files'
+ ;;
+
+ (annotate)
+ _arguments $includeExclude \
+ '(--rev)-r[annotate the specified revision]:revision:($tags)' \
+ '(-r)--rev[annotate the specified revision]:revision:($tags)' \
+ '(--text)-a[treat all files as text]' \
+ '(-a)--text[treat all files as text]' \
+ '(--user)-u[list the author]' \
+ '(-u)--user[list the author]' \
+ '(--changeset)-c[list the changeset]' \
+ '(-c)--changeset[list the changeset]' \
+ '(--number)-n[list the revision number (default)]' \
+ '(-n)--number[list the revision number (default)]' \
+ '*:files:_files'
+ ;;
+
+ (grep)
+ _arguments $includeExclude \
+ '*-r[search in given revision range]:revision:($tags)' \
+ '*--rev[search in given revision range]:revision:($tags)' \
+ '--all[print all revisions with matches]' \
+ '(-print0)-0[end filenames with NUL, for use with xargs]' \
+ '(-0)--print0[end filenames with NUL, for use with xargs]' \
+ '(--ignore-case)-i[ignore case when matching]' \
+ '(-i)--ignore-case[ignore case when matching]' \
+ '(--files-with-matches)-l[print names of files and revs that match]' \
+ '(-l)--files-with-matches[print names of files and revs that match]' \
+ '(--line-number)-n[print matching line numbers]' \
+ '(-n)--line-number[print matching line numbers]' \
+ '(--user)-u[print user who committed change]' \
+ '(-u)--user[print user who committed change]' \
+ '*:search pattern:'
+ ;;
+
+ (locate)
+ _arguments $includeExclude \
+ '(--rev)-r[search repository as it stood at revision]:revision:($tags)' \
+ '(-r)--rev[search repository as it stood at revision]:revision:($tags)' \
+ '(--print0)-0[end filenames with NUL, for use with xargs]' \
+ '(-0)--print0[end filenames with NUL, for use with xargs]' \
+ '(--fullpath)-f[print complete paths]' \
+ '(-f)--fullpath[print complete paths]' \
+ '*:search pattern:'
+ ;;
+
+ (log|history)
+ _arguments $includeExclude \
+ '*-r[show the specified revision or range]:revision:($tags)' \
+ '*--rev[show the specified revision or range]:revision:($tags)' \
+ '(--no-merges -M --only-merges)-m[show only merge revisions]' \
+ '(--no-merges -M -m)--only-merges[show only merge revisions]' \
+ '(--only-merges -m --no-merges)-M[do not show merge revisions]' \
+ '(--only-merges -m -M)--no-merges[do not show merge revisions]' \
+ '(--keyword)-k[search for a keyword]:keyword:' \
+ '(-k)--keyword[search for a keyword]:keyword:' \
+ '(--branch)-b[show branches]' \
+ '(-b)--branch[show branches]' \
+ '(--patch)-p[show patch]' \
+ '(-p)--patch[show patch]' \
+ '*:file:_files'
+ ;;
+
+ (update|checkout|co)
+ _arguments \
+ '(--branch)-b[checkout the head of a specific branch]' \
+ '(-b)--branch[checkout the head of a specific branch]' \
+ '(-C --clean --merge)-m[allow merging of branches]' \
+ '(-C --clean -m)--merge[allow merging of branches]' \
+ '(-m --merge --clean)-C[overwrite locally modified files]' \
+ '(-m --merge -C)--clean[overwrite locally modified files]' \
+ '*:revision or tag:($tags)'
+ ;;
+
+ (tag)
+ _arguments \
+ '(--local)-l[make the tag local]' \
+ '(-l)--local[make the tag local]' \
+ '(--message)-m[message for tag commit log entry]:string:' \
+ '(-m)--message[message for tag commit log entry]:string:' \
+ '(--date)-d[record datecode as commit date]:date code:' \
+ '(-d)--date[record datecode as commit date]:date code:' \
+ '(--user)-u[record user as committer]:user:' \
+ '(-u)--user[record user as committer]:user:' \
+ '*:name, then revision:($tags)'
+ ;;
+
+ (clone)
+ if (( CURRENT == 2 )); then
+ repos=( $(hg paths | sed -e 's/^.*= //') )
+ _arguments \
+ '(--no-update)-U[do not update the new working directory]' \
+ '(-U)--no-update[do not update the new working directory]' \
+ '(--ssh)-e[specify ssh command to use]:string:' \
+ '(-e)--ssh[specify ssh command to use]:string:' \
+ '--pull[use pull protocol to copy metadata]' \
+ '--remotecmd[specify hg command to run on the remote side]:remote hg:' \
+ '*:local repo:_files -/'
+ _wanted source expl 'source repository' compadd -a repos
+ elif (( CURRENT == 3 )); then
+ _arguments '*:dest repo:_files -/'
+ fi
+ ;;
+
+ (rawcommit)
+ _arguments \
+ '(--parent)-p[parent revision]:revision:($tags)' \
+ '(-p)--parent[parent revision]:revision:($tags)' \
+ '(--date)-d[record datecode as commit date]:date code:' \
+ '(-d)--date[record datecode as commit date]:date code:' \
+ '(--user)-u[record user as committer]:user:' \
+ '(-u)--user[record user as committer]:user:' \
+ '(--message)-m[use <txt> as commit message]:string:' \
+ '(-m)--message[use <txt> as commit message]:string:' \
+ '(--logfile)-l[read commit message from <file>]:.log file:_file -g \*.txt' \
+ '(-l)--logfile[read commit message from <file>]:.log file:_file -g \*.txt' \
+ '(--files)-F[file list]:file list:_files' \
+ '(-F)--files[file list]:file list:_files' \
+ '*:files to commit:_files'
+ ;;
+
+ (bundle)
+ if (( CURRENT == 2 )); then
+ _arguments '*:changegroup file:_files -g \*.hg'
+ elif (( CURRENT == 3 )); then
+ _arguments '*:other repo:_files -/'
+ fi
+ ;;
+
+ (unbundle)
+ _arguments '*:changegroup .hg file:_files -g \*.hg'
+ ;;
+
+ (incoming)
+ _arguments \
+ '(--patch)-p[show patch]' \
+ '(-p)--patch[show patch]' \
+ '(--no-merges)-M[do not show merge revisions]' \
+ '(-M)--no-merges[do not show merge revisions]' \
+ '(--newest-first)-n[show newest record first]' \
+ '(-n)--newest-first[show newest record first]' \
+ '*:mercurial repository:_files -/'
+ ;;
+
+ (import|patch)
+ _arguments \
+ '(--strip)-p[directory strip option for patch (default: 1)]:count:' \
+ '(-p)--strip[directory strip option for patch (default: 1)]:count:' \
+ '(--force)-f[skip check for outstanding uncommitted changes]' \
+ '(-f)--force[skip check for outstanding uncommitted changes]' \
+ '(--base)-b[base directory to read patches from]:file:_files -W $(hg root) -/' \
+ '(-b)--base[base directory to read patches from]:file:_files -W $(hg root) -/' \
+ '*:patch file:_files'
+ ;;
+
+ (pull)
+ repos=( $(hg paths | sed -e 's/^.*= //') )
+ _arguments \
+ '(--update)-u[update working directory to tip after pull]' \
+ '(-u)--update[update working directory to tip after pull]' \
+ '(--ssh)-e[specify ssh command to use]:ssh command:' \
+ '(-e)--ssh[specify ssh command to use]:ssh command:' \
+ '--remotecmd[specify hg command to run on the remote side]:remote hg:' \
+ '*:local repo:_files -/'
+ _wanted source expl 'source repository' compadd -a repos
+ ;;
+
+ (outgoing)
+ _arguments \
+ '(--patch)-p[show patch]' \
+ '(-p)--patch[show patch]' \
+ '(--no-merges)-M[do not show merge revisions]' \
+ '(-M)--no-merges[do not show merge revisions]' \
+ '(--newest-first)-n[show newest record first]' \
+ '(-n)--newest-first[show newest record first]' \
+ '*:local repo:_files -/'
+ _wanted source expl 'source repository' compadd -a repos
+ ;;
+
+ (export)
+ _arguments \
+ '(--output)-o[print output to file with formatted name]:filespec:' \
+ '(-o)--output[print output to file with formatted name]:filespec:' \
+ '(--text)-a[treat all files as text]' \
+ '(-a)--text[treat all files as text]' \
+ '*:revision:->revs'
+ _wanted revs expl 'revision or tag' compadd -a tags
+ ;;
+
+ (push)
+ repos=( $(hg paths | sed -e 's/^.*= //') )
+ _arguments \
+ '(--force)-f[force push]' \
+ '(-f)--force[force push]' \
+ '(--ssh)-e[specify ssh command to use]:ssh command:' \
+ '(-e)--ssh[specify ssh command to use]:ssh command:' \
+ '--remotecmd[specify hg command to run on the remote side]:remote hg:' \
+ '*:local repo:_files -/'
+ _wanted source expl 'source repository' compadd -a repos
+ ;;
+
+ (serve)
+ _arguments \
+ '(--accesslog)-A[name of access log file]:log file:_files' \
+ '(-A)--accesslog[name of access log file]:log file:_files' \
+ '(--errorlog)-E[name of error log file]:log file:_files' \
+ '(-E)--errorlog[name of error log file]:log file:_files' \
+ '(--port)-p[listen port]:listen port:' \
+ '(-p)--port[listen port]:listen port:' \
+ '(--address)-a[interface address]:interface address:' \
+ '(-a)--address[interface address]:interface address:' \
+ '(--name)-n[name to show in web pages]:repository name:' \
+ '(-n)--name[name to show in web pages]:repository name:' \
+ '(--templates)-t[web template directory]:template dir:_files -/' \
+ '(-t)--templates[web template directory]:template dir:_files -/' \
+ '--style[web template style]:style' \
+ '--stdio[for remote clients]' \
+ '(--ipv6)-6[use IPv6 in addition to IPv4]' \
+ '(-6)--ipv6[use IPv6 in addition to IPv4]'
+ ;;
+
+ (help)
+ _wanted commands expl 'hg command' compadd -a subcmds
+ ;;
+
+ (heads)
+ _arguments \
+ '(--branches)-b[find branch info]' \
+ '(-b)--branches[find branch info]'
+ ;;
+
+ (paths)
+ _arguments '*:symbolic name:(default default-push)'
+ ;;
+
+ (init)
+ _arguments '*:new repo directory:_files -/'
+ ;;
+
+ (manifest)
+ _arguments '*:revision:($tags)'
+ ;;
+
+ (parents)
+ _arguments '*:revision:($tags)'
+ ;;
+
+ (identify|recover|root|undo|view|verify|version|ct|tags)
+ # no arguments for these commands
+ ;;
+
+ (*)
+ _message "unknown hg command completion: $service"
+ ;;
+esac
new file mode 100644
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,37 @@
+SOURCES=$(wildcard *.[0-9].txt)
+MAN=$(SOURCES:%.txt=%)
+HTML=$(SOURCES:%.txt=%.html)
+PREFIX=/usr/local
+MANDIR=$(PREFIX)/man
+INSTALL=install -c
+
+all: man html
+
+man: $(MAN)
+
+html: $(HTML)
+
+hg.1.txt: hg.1.gendoc.txt
+ touch hg.1.txt
+
+hg.1.gendoc.txt: ../mercurial/commands.py
+ python gendoc.py > $@
+
+%: %.xml
+ xmlto man $*.xml
+
+%.xml: %.txt
+ asciidoc -d manpage -b docbook $*.txt
+
+%.html: %.txt
+ asciidoc -b html4 $*.txt || asciidoc -b html $*.txt
+
+install: man
+ for i in $(MAN) ; do \
+ subdir=`echo $$i | sed -n 's/.\+\(\.[0-9]\)$$/man\1/p'` ; \
+ mkdir -p $(MANDIR)/$$subdir ; \
+ $(INSTALL) $$i $(MANDIR)/$$subdir ; \
+ done
+
+clean:
+ $(RM) $(MAN) $(MAN:%=%.xml) $(MAN:%=%.html) *.[0-9].gendoc.txt
new file mode 100644
--- /dev/null
+++ b/doc/README
@@ -0,0 +1,21 @@
+Mercurial's documentation is currently kept in ASCIIDOC format, which
+is a simple plain text format that's easy to read and edit. It's also
+convertible to a variety of other formats including standard UNIX man
+page format and HTML.
+
+To do this, you'll need to install ASCIIDOC:
+
+ http://www.methods.co.nz/asciidoc/
+
+To generate the man page:
+
+ asciidoc -d manpage -b docbook hg.1.txt
+ xmlto man hg.1.xml
+
+To display:
+
+ groff -mandoc -Tascii hg.1 | more
+
+To create the html page (without stylesheets):
+
+ asciidoc -b html hg.1.txt
new file mode 100644
--- /dev/null
+++ b/doc/gendoc.py
@@ -0,0 +1,92 @@
+import sys, textwrap
+# import from the live mercurial repo
+sys.path.insert(0, "..")
+from mercurial.commands import table, globalopts
+from mercurial.i18n import gettext as _
+
def get_desc(docstr):
    """Split a command docstring into a (short, long) description pair.

    The short description is the stripped first line.  The long
    description is everything past the first line and the blank
    separator line that conventionally follows it; a single-line
    docstring reuses the summary, indented by one space.  A missing
    or empty docstring yields ("", "").
    """
    if not docstr:
        return "", ""
    # Trim surrounding blank lines and trailing whitespace.
    cleaned = docstr.strip("\n").rstrip()
    first_line = cleaned.splitlines()[0].strip()

    newline_pos = cleaned.find("\n")
    if newline_pos == -1:
        # Single-line docstring: fall back to the space-indented summary.
        long_desc = " %s" % first_line
    else:
        # +2 skips the newline and the blank separator line's newline.
        long_desc = cleaned[newline_pos + 2:]
    return (first_line, long_desc)
+
def get_opts(opts):
    """Yield (option-string, description) pairs for an option table.

    Each entry of *opts* is a (shortopt, longopt, default, desc)
    tuple.  The option string joins the "-s" and "--long" spellings
    with a comma; a truthy default value is appended to the
    description via the translated " (default: %s)" suffix.
    """
    for shortopt, longopt, default, desc in opts:
        # Build whichever of the short/long spellings are present.
        spellings = (("-", shortopt), ("--", longopt))
        forms = ["%s%s" % (dash, name) for dash, name in spellings if name]
        if default:
            desc = desc + _(" (default: %s)") % default
        yield ", ".join(forms), desc
+
def get_cmd(cmd):
    """Collect display data for the command-table key *cmd*.

    *cmd* is a key of the global command ``table`` ("name|alias|..."
    optionally prefixed with "^").  Returns a dict with the primary
    name, aliases, synopsis, (short, long) description pair and the
    formatted option list.
    """
    entry = table[cmd]
    names = cmd.lstrip("^").split("|")
    return {
        'cmd': names[0],
        'aliases': cmd.split("|")[1:],
        'synopsis': entry[2],
        'desc': get_desc(entry[0].__doc__),
        'opts': list(get_opts(entry[1])),
    }
+
+
def show_doc(ui):
    """Write asciidoc-formatted documentation for all commands to *ui*.

    *ui* only needs a ``write()`` method (``sys.stdout`` works).
    Emits the global options section, then one entry per non-debug
    command: synopsis, long description, option table and aliases.
    """
    def underlined(s, text=""):
        # asciidoc section title: the text underlined with '-'.
        ui.write("%s\n%s\n%s\n" % (s, "-"*len(s), text))

    # print options
    underlined(_("OPTIONS"))
    for optstr, desc in get_opts(globalopts):
        ui.write("%s::\n %s\n\n" % (optstr, desc))

    # print cmds
    underlined(_("COMMANDS"))
    # Map each command's primary name to its full table key.
    h = {}
    for c, attr in table.items():
        f = c.split("|")[0]
        f = f.lstrip("^")
        h[f] = c

    # sorted() instead of keys()/sort(): works whether keys() returns
    # a list (Python 2) or a view (Python 3).
    for f in sorted(h):
        if f.startswith("debug"):
            continue
        d = get_cmd(h[f])
        # synopsis (drop the leading "hg " once)
        ui.write("%s::\n" % d['synopsis'].replace("hg ","", 1))
        # description
        ui.write("%s\n\n" % d['desc'][1])
        # options, aligned on the widest option string
        opt_output = list(d['opts'])
        if opt_output:
            opts_len = max([len(line[0]) for line in opt_output])
            ui.write(_(" options:\n"))
            for optstr, desc in opt_output:
                if desc:
                    s = "%-*s %s" % (opts_len, optstr, desc)
                else:
                    s = optstr
                s = textwrap.fill(s, initial_indent=4 * " ",
                                  subsequent_indent=(6 + opts_len) * " ")
                ui.write("%s\n" % s)
            ui.write("\n")
        # aliases
        if d['aliases']:
            ui.write(_(" aliases: %s\n\n") % " ".join(d['aliases']))
+
if __name__ == "__main__":
    # sys.stdout duck-types as the "ui" argument: show_doc only calls .write().
    show_doc(sys.stdout)
new file mode 100644
--- /dev/null
+++ b/doc/hg.1.txt
@@ -0,0 +1,221 @@
+HG(1)
+=====
+Matt Mackall <mpm@selenic.com>
+
+NAME
+----
+hg - Mercurial source code management system
+
+SYNOPSIS
+--------
+'hg' [-v -d -q -y] <command> [command options] [files]
+
+DESCRIPTION
+-----------
+The hg(1) command provides a command line interface to the Mercurial system.
+
+COMMAND ELEMENTS
+----------------
+
+files ...::
+ indicates one or more filename or relative path filenames; see
+ "FILE NAME PATTERNS" for information on pattern matching
+
+path::
+ indicates a path on the local machine
+
+revision::
+ indicates a changeset which can be specified as a changeset revision
+ number, a tag, or a unique substring of the changeset hash value
+
+repository path::
+ either the pathname of a local repository or the URI of a remote
+ repository. There are two available URI protocols, http:// which is
+ fast and the old-http:// protocol which is much slower but does not
+ require a special server on the web host.
+
+
+include::hg.1.gendoc.txt[]
+
+FILE NAME PATTERNS
+------------------
+
+ Mercurial accepts several notations for identifying one or more
+ files at a time.
+
+ By default, Mercurial treats filenames as shell-style extended
+ glob patterns.
+
+ Alternate pattern notations must be specified explicitly.
+
+ To use a plain path name without any pattern matching, start a
+ name with "path:". These path names must match completely, from
+ the root of the current repository.
+
+ To use an extended glob, start a name with "glob:". Globs are
+ rooted at the current directory; a glob such as "*.c" will match
+ files ending in ".c" in the current directory only.
+
+ The supported glob syntax extensions are "**" to match any string
+ across path separators, and "{a,b}" to mean "a or b".
+
+ To use a Perl/Python regular expression, start a name with "re:".
+ Regexp pattern matching is anchored at the root of the repository.
+
+ Plain examples:
+
+ path:foo/bar a name bar in a directory named foo in the root of
+ the repository
+ path:path:name a file or directory named "path:name"
+
+ Glob examples:
+
+ glob:*.c any name ending in ".c" in the current directory
+ *.c any name ending in ".c" in the current directory
+ **.c any name ending in ".c" in the current directory, or
+ any subdirectory
+ foo/*.c any name ending in ".c" in the directory foo
+ foo/**.c any name ending in ".c" in the directory foo, or any
+ subdirectory
+
+ Regexp examples:
+
+ re:.*\.c$ any name ending in ".c", anywhere in the repository
+
+
+SPECIFYING SINGLE REVISIONS
+---------------------------
+
+ Mercurial accepts several notations for identifying individual
+ revisions.
+
+ A plain integer is treated as a revision number. Negative
+ integers are treated as offsets from the tip, with -1 denoting the
+ tip.
+
+ A 40-digit hexadecimal string is treated as a unique revision
+ identifier.
+
+ A hexadecimal string less than 40 characters long is treated as a
+ unique revision identifier, and referred to as a short-form
+ identifier. A short-form identifier is only valid if it is the
+ prefix of one full-length identifier.
+
+ Any other string is treated as a tag name, which is a symbolic
+ name associated with a revision identifier. Tag names may not
+ contain the ":" character.
+
+ The reserved name "tip" is a special tag that always identifies
+ the most recent revision.
+
+SPECIFYING MULTIPLE REVISIONS
+-----------------------------
+
+ When Mercurial accepts more than one revision, they may be
+ specified individually, or provided as a continuous range,
+ separated by the ":" character.
+
+ The syntax of range notation is [BEGIN]:[END], where BEGIN and END
+ are revision identifiers. Both BEGIN and END are optional. If
+ BEGIN is not specified, it defaults to revision number 0. If END
+ is not specified, it defaults to the tip. The range ":" thus
+ means "all revisions".
+
+ If BEGIN is greater than END, revisions are treated in reverse
+ order.
+
+ A range acts as a closed interval. This means that a range of 3:5
+ gives 3, 4 and 5. Similarly, a range of 4:2 gives 4, 3, and 2.
+
+ENVIRONMENT VARIABLES
+---------------------
+
+HGEDITOR::
+ This is the name of the editor to use when committing. Defaults to the
+ value of EDITOR.
+
+ (deprecated, use .hgrc)
+
+HGMERGE::
+ An executable to use for resolving merge conflicts. The program
+ will be executed with three arguments: local file, remote file,
+ ancestor file.
+
+ The default program is "hgmerge", which is a shell script provided
+ by Mercurial with some sensible defaults.
+
+ (deprecated, use .hgrc)
+
+HGRCPATH::
+ A list of files or directories to search for hgrc files. Item
+ separator is ":" on Unix, ";" on Windows. If HGRCPATH is not set,
+ platform default search path is used. If empty, only .hg/hgrc of
+ current repository is read.
+
+ For each element in path, if a directory, all entries in directory
+ ending with ".rc" are added to path. Else, element itself is
+ added to path.
+
+HGUSER::
+ This is the string used for the author of a commit.
+
+ (deprecated, use .hgrc)
+
+EMAIL::
+ If HGUSER is not set, this will be used as the author for a commit.
+
+LOGNAME::
+ If neither HGUSER nor EMAIL is set, LOGNAME will be used (with
+ '@hostname' appended) as the author value for a commit.
+
+EDITOR::
+ This is the name of the editor used in the hgmerge script. It will be
+ used for commit messages if HGEDITOR isn't set. Defaults to 'vi'.
+
+PYTHONPATH::
+ This is used by Python to find imported modules and may need to be set
+ appropriately if Mercurial is not installed system-wide.
+
+FILES
+-----
+ .hgignore::
+ This file contains regular expressions (one per line) that describe file
+ names that should be ignored by hg. For details, see hgignore(5).
+
+ .hgtags::
+ This file contains changeset hash values and text tag names (one of each
+ separated by spaces) that correspond to tagged versions of the repository
+ contents.
+
+ /etc/mercurial/hgrc, $HOME/.hgrc, .hg/hgrc::
+ This file contains defaults and configuration. Values in .hg/hgrc
+ override those in $HOME/.hgrc, and these override settings made in the
+ global /etc/mercurial/hgrc configuration. See hgrc(5) for details of
+ the contents and format of these files.
+
+BUGS
+----
+Probably lots, please post them to the mailing list (See Resources below)
+when you find them.
+
+SEE ALSO
+--------
+hgignore(5), hgrc(5)
+
+AUTHOR
+------
+Written by Matt Mackall <mpm@selenic.com>
+
+RESOURCES
+---------
+http://selenic.com/mercurial[Main Web Site]
+
+http://selenic.com/hg[Source code repository]
+
+http://selenic.com/mailman/listinfo/mercurial[Mailing list]
+
+COPYING
+-------
+Copyright \(C) 2005 Matt Mackall.
+Free use of this software is granted under the terms of the GNU General
+Public License (GPL).
new file mode 100644
--- /dev/null
+++ b/doc/hgignore.5.txt
@@ -0,0 +1,92 @@
+HGIGNORE(5)
+===========
+Vadim Gelfer <vadim.gelfer@gmail.com>
+
+NAME
+----
+hgignore - syntax for Mercurial ignore files
+
+SYNOPSIS
+--------
+
+The Mercurial system uses a file called .hgignore in the root
+directory of a repository to control its behavior when it finds files
+that it is not currently managing.
+
+DESCRIPTION
+-----------
+
+Mercurial ignores every unmanaged file that matches any pattern in an
+ignore file. The patterns in an ignore file do not apply to files
+managed by Mercurial. To control Mercurial's handling of files that
+it manages, see the hg(1) man page. Look for the "-I" and "-X"
+options.
+
+In addition, a Mercurial configuration file can point to a set of
+per-user or global ignore files. See the hgrc(5) man page for details
+of how to configure these files. Look for the "ignore" entry in the
+"ui" section.
+
+SYNTAX
+------
+
+An ignore file is a plain text file consisting of a list of patterns,
+with one pattern per line. Empty lines are skipped. The "#"
+character is treated as a comment character, and the "\" character is
+treated as an escape character.
+
+Mercurial supports several pattern syntaxes. The default syntax used
+is Python/Perl-style regular expressions.
+
+To change the syntax used, use a line of the following form:
+
+syntax: NAME
+
+where NAME is one of the following:
+
+regexp::
+ Regular expression, Python/Perl syntax.
+glob::
+ Shell-style glob.
+
+The chosen syntax stays in effect when parsing all patterns that
+follow, until another syntax is selected.
+
+Neither glob nor regexp patterns are rooted. A glob-syntax pattern of
+the form "*.c" will match a file ending in ".c" in any directory, and
+a regexp pattern of the form "\.c$" will do the same. To root a
+regexp pattern, start it with "^".
+
+EXAMPLE
+-------
+
+Here is an example ignore file.
+
+ # use glob syntax.
+ syntax: glob
+
+ *.elc
+ *.pyc
+ *~
+ .*.swp
+
+ # switch to regexp syntax.
+ syntax: regexp
+ ^\.pc/
+
+AUTHOR
+------
+Vadim Gelfer <vadim.gelfer@gmail.com>
+
+Mercurial was written by Matt Mackall <mpm@selenic.com>.
+
+SEE ALSO
+--------
+hg(1), hgrc(5)
+
+COPYING
+-------
+This manual page is copyright 2006 Vadim Gelfer.
+Mercurial is copyright 2005, 2006 Matt Mackall.
+Free use of this software is granted under the terms of the GNU General
+Public License (GPL).
new file mode 100644
--- /dev/null
+++ b/doc/hgmerge.1.txt
@@ -0,0 +1,35 @@
+HGMERGE(1)
+==========
+Matt Mackall <mpm@selenic.com>
+v0.1, 27 May 2005
+
+NAME
+----
+hgmerge - default wrapper to merge files in Mercurial SCM system
+
+SYNOPSIS
+--------
+'hgmerge' local ancestor remote
+
+DESCRIPTION
+-----------
+The hgmerge(1) command provides a graphical interface to merge files in the
+Mercurial system. It is a simple wrapper around kdiff3, merge(1) and tkdiff(1),
+or simply diff(1) and patch(1) depending on what is present on the system.
+
+hgmerge(1) is used by the Mercurial SCM if the environment variable HGMERGE is
+not set.
+
+AUTHOR
+------
+Written by Vincent Danjean <Vincent.Danjean@free.fr>
+
+SEE ALSO
+--------
+hg(1) - the command line interface to Mercurial SCM
+
+COPYING
+-------
+Copyright \(C) 2005 Matt Mackall.
+Free use of this software is granted under the terms of the GNU General
+Public License (GPL).
new file mode 100644
--- /dev/null
+++ b/doc/hgrc.5.txt
@@ -0,0 +1,420 @@
+HGRC(5)
+=======
+Bryan O'Sullivan <bos@serpentine.com>
+
+NAME
+----
+hgrc - configuration files for Mercurial
+
+SYNOPSIS
+--------
+
+The Mercurial system uses a set of configuration files to control
+aspects of its behaviour.
+
+FILES
+-----
+
+Mercurial reads configuration data from several files, if they exist.
+The names of these files depend on the system on which Mercurial is
+installed.
+
+(Unix) <install-root>/etc/mercurial/hgrc.d/*.rc::
+(Unix) <install-root>/etc/mercurial/hgrc::
+ Per-installation configuration files, searched for in the
+ directory where Mercurial is installed. For example, if installed
+ in /shared/tools, Mercurial will look in
+ /shared/tools/etc/mercurial/hgrc. Options in these files apply to
+ all Mercurial commands executed by any user in any directory.
+
+(Unix) /etc/mercurial/hgrc.d/*.rc::
+(Unix) /etc/mercurial/hgrc::
+(Windows) C:\Mercurial\Mercurial.ini::
+ Per-system configuration files, for the system on which Mercurial
+ is running. Options in these files apply to all Mercurial
+ commands executed by any user in any directory. Options in these
+ files override per-installation options.
+
+(Unix) $HOME/.hgrc::
+(Windows) C:\Documents and Settings\USERNAME\Mercurial.ini::
+(Windows) $HOME\Mercurial.ini::
+ Per-user configuration file, for the user running Mercurial.
+ Options in this file apply to all Mercurial commands executed by
+ any user in any directory. Options in this file override
+ per-installation and per-system options.
+ On Windows systems, one of these is chosen exclusively according
+ to whether the HOME environment variable is defined.
+
+(Unix, Windows) <repo>/.hg/hgrc::
+ Per-repository configuration options that only apply in a
+ particular repository. This file is not version-controlled, and
+ will not get transferred during a "clone" operation. Options in
+ this file override options in all other configuration files.
+
+SYNTAX
+------
+
+A configuration file consists of sections, led by a "[section]" header
+and followed by "name: value" entries; "name=value" is also accepted.
+
+ [spam]
+ eggs=ham
+ green=
+ eggs
+
+Each line contains one entry. If the lines that follow are indented,
+they are treated as continuations of that entry.
+
+Leading whitespace is removed from values. Empty lines are skipped.
+
+The optional values can contain format strings which refer to other
+values in the same section, or values in a special DEFAULT section.
+
+Lines beginning with "#" or ";" are ignored and may be used to provide
+comments.
+
+SECTIONS
+--------
+
+This section describes the different sections that may appear in a
+Mercurial "hgrc" file, the purpose of each section, its possible
+keys, and their possible values.
+
+decode/encode::
+ Filters for transforming files on checkout/checkin. This would
+ typically be used for newline processing or other
+ localization/canonicalization of files.
+
+ Filters consist of a filter pattern followed by a filter command.
+ Filter patterns are globs by default, rooted at the repository
+ root. For example, to match any file ending in ".txt" in the root
+ directory only, use the pattern "*.txt". To match any file ending
+ in ".c" anywhere in the repository, use the pattern "**.c".
+
+ The filter command can start with a specifier, either "pipe:" or
+ "tempfile:". If no specifier is given, "pipe:" is used by default.
+
+ A "pipe:" command must accept data on stdin and return the
+ transformed data on stdout.
+
+ Pipe example:
+
+ [encode]
+ # uncompress gzip files on checkin to improve delta compression
+ # note: not necessarily a good idea, just an example
+ *.gz = pipe: gunzip
+
+ [decode]
+ # recompress gzip files when writing them to the working dir (we
+ # can safely omit "pipe:", because it's the default)
+ *.gz = gzip
+
+ A "tempfile:" command is a template. The string INFILE is replaced
+ with the name of a temporary file that contains the data to be
+ filtered by the command. The string OUTFILE is replaced with the
+ name of an empty temporary file, where the filtered data must be
+ written by the command.
+
+ NOTE: the tempfile mechanism is recommended for Windows systems,
+ where the standard shell I/O redirection operators often have
+ strange effects. In particular, if you are doing line ending
+ conversion on Windows using the popular dos2unix and unix2dos
+ programs, you *must* use the tempfile mechanism, as using pipes will
+ corrupt the contents of your files.
+
+ Tempfile example:
+
+ [encode]
+ # convert files to unix line ending conventions on checkin
+ **.txt = tempfile: dos2unix -n INFILE OUTFILE
+
+ [decode]
+ # convert files to windows line ending conventions when writing
+ # them to the working dir
+ **.txt = tempfile: unix2dos -n INFILE OUTFILE
+
+email::
+ Settings for extensions that send email messages.
+ from;;
+ Optional. Email address to use in "From" header and SMTP envelope
+ of outgoing messages.
+ method;;
+ Optional. Method to use to send email messages. If value is
+ "smtp" (default), use SMTP (see section "[mail]" for
+ configuration). Otherwise, use as name of program to run that
+ acts like sendmail (takes "-f" option for sender, list of
+ recipients on command line, message on stdin). Normally, setting
+ this to "sendmail" or "/usr/sbin/sendmail" is enough to use
+ sendmail to send messages.
+
+ Email example:
+
+ [email]
+ from = Joseph User <joe.user@example.com>
+ method = /usr/sbin/sendmail
+
+extensions::
+ Mercurial has an extension mechanism for adding new features. To
+ enable an extension, create an entry for it in this section.
+
+ If you know that the extension is already in Python's search path,
+ you can give the name of the module, followed by "=", with nothing
+ after the "=".
+
+ Otherwise, give a name that you choose, followed by "=", followed by
+ the path to the ".py" file (including the file name extension) that
+ defines the extension.
+
+hooks::
+ Commands or Python functions that get automatically executed by
+ various actions such as starting or finishing a commit. Multiple
+ hooks can be run for the same action by appending a suffix to the
+ action. Overriding a site-wide hook can be done by changing its
+ value or setting it to an empty string.
+
+ Example .hg/hgrc:
+
+ [hooks]
+ # do not use the site-wide hook
+ incoming =
+ incoming.email = /my/email/hook
+ incoming.autobuild = /my/build/hook
+
+ Most hooks are run with environment variables set that give added
+ useful information. For each hook below, the environment variables
+ it is passed are listed with names of the form "$HG_foo".
+
+ changegroup;;
+ Run after a changegroup has been added via push, pull or
+ unbundle. ID of the first new changeset is in $HG_NODE.
+ commit;;
+ Run after a changeset has been created in the local repository.
+ ID of the newly created changeset is in $HG_NODE. Parent
+ changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
+ incoming;;
+ Run after a changeset has been pulled, pushed, or unbundled into
+ the local repository. The ID of the newly arrived changeset is in
+ $HG_NODE.
+ outgoing;;
+ Run after sending changes from local repository to another. ID of
+ first changeset sent is in $HG_NODE. Source of operation is in
+ $HG_SOURCE; see "preoutgoing" hook for description.
+ prechangegroup;;
+ Run before a changegroup is added via push, pull or unbundle.
+ Exit status 0 allows the changegroup to proceed. Non-zero status
+ will cause the push, pull or unbundle to fail.
+ precommit;;
+ Run before starting a local commit. Exit status 0 allows the
+ commit to proceed. Non-zero status will cause the commit to fail.
+ Parent changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
+ preoutgoing;;
+ Run before computing changes to send from the local repository to
+ another. Non-zero status will cause failure. This lets you
+ prevent pull over http or ssh. It also prevents local pull, push
+ (outbound) and bundle commands, though this is not truly effective,
+ since the files can simply be copied instead. Source of operation is in
+ $HG_SOURCE. If "serve", operation is happening on behalf of
+ remote ssh or http repository. If "push", "pull" or "bundle",
+ operation is happening on behalf of repository on same system.
+ pretag;;
+ Run before creating a tag. Exit status 0 allows the tag to be
+ created. Non-zero status will cause the tag to fail. ID of
+ changeset to tag is in $HG_NODE. Name of tag is in $HG_TAG. Tag
+ is local if $HG_LOCAL=1, in repo if $HG_LOCAL=0.
+ pretxnchangegroup;;
+ Run after a changegroup has been added via push, pull or unbundle,
+ but before the transaction has been committed. Changegroup is
+ visible to hook program. This lets you validate incoming changes
+ before accepting them. Passed the ID of the first new changeset
+ in $HG_NODE. Exit status 0 allows the transaction to commit.
+ Non-zero status will cause the transaction to be rolled back and
+ the push, pull or unbundle will fail.
+ pretxncommit;;
+ Run after a changeset has been created but the transaction not yet
+ committed. Changeset is visible to hook program. This lets you
+ validate commit message and changes. Exit status 0 allows the
+ commit to proceed. Non-zero status will cause the transaction to
+ be rolled back. ID of changeset is in $HG_NODE. Parent changeset
+ IDs are in $HG_PARENT1 and $HG_PARENT2.
+ preupdate;;
+ Run before updating the working directory. Exit status 0 allows
+ the update to proceed. Non-zero status will prevent the update.
+ Changeset ID of first new parent is in $HG_PARENT1. If merge, ID
+ of second new parent is in $HG_PARENT2.
+ tag;;
+ Run after a tag is created. ID of tagged changeset is in
+ $HG_NODE. Name of tag is in $HG_TAG. Tag is local if
+ $HG_LOCAL=1, in repo if $HG_LOCAL=0.
+ update;;
+ Run after updating the working directory. Changeset ID of first
+ new parent is in $HG_PARENT1. If merge, ID of second new parent
+ is in $HG_PARENT2. If update succeeded, $HG_ERROR=0. If update
+ failed (e.g. because conflicts not resolved), $HG_ERROR=1.
+
+ Note: In earlier releases, the names of hook environment variables
+ did not have a "HG_" prefix. The old unprefixed names are no longer
+ provided in the environment.
+
+ The syntax for Python hooks is as follows:
+
+ hookname = python:modulename.submodule.callable
+
+ Python hooks are run within the Mercurial process. Each hook is
+ called with at least three keyword arguments: a ui object (keyword
+ "ui"), a repository object (keyword "repo"), and a "hooktype"
+ keyword that tells what kind of hook is used. Arguments listed as
+ environment variables above are passed as keyword arguments, with no
+ "HG_" prefix, and names in lower case.
+
+ A Python hook must return a "true" value to succeed. Returning a
+ "false" value or raising an exception is treated as failure of the
+ hook.
+
+http_proxy::
+ Used to access web-based Mercurial repositories through a HTTP
+ proxy.
+ host;;
+ Host name and (optional) port of the proxy server, for example
+ "myproxy:8000".
+ no;;
+ Optional. Comma-separated list of host names that should bypass
+ the proxy.
+ passwd;;
+ Optional. Password to authenticate with at the proxy server.
+ user;;
+ Optional. User name to authenticate with at the proxy server.
+
+smtp::
+ Configuration for extensions that need to send email messages.
+ host;;
+ Optional. Host name of mail server. Default: "mail".
+ port;;
+ Optional. Port to connect to on mail server. Default: 25.
+ tls;;
+ Optional. Whether to connect to mail server using TLS. True or
+ False. Default: False.
+ username;;
+ Optional. User name to authenticate to SMTP server with.
+ If username is specified, password must also be specified.
+ Default: none.
+ password;;
+ Optional. Password to authenticate to SMTP server with.
+ If username is specified, password must also be specified.
+ Default: none.
+
+paths::
+ Assigns symbolic names to repositories. The left side is the
+ symbolic name, and the right gives the directory or URL that is the
+ location of the repository. Default paths can be declared by
+ setting the following entries.
+ default;;
+ Directory or URL to use when pulling if no source is specified.
+ Default is set to repository from which the current repository
+ was cloned.
+ default-push;;
+ Optional. Directory or URL to use when pushing if no destination
+ is specified.
+
+ui::
+ User interface controls.
+ debug;;
+ Print debugging information. True or False. Default is False.
+ editor;;
+ The editor to use during a commit. Default is $EDITOR or "vi".
+ ignore;;
+ A file to read per-user ignore patterns from. This file should be in
+ the same format as a repository-wide .hgignore file. This option
+ supports hook syntax, so if you want to specify multiple ignore
+ files, you can do so by setting something like
+ "ignore.other = ~/.hgignore2". For details of the ignore file
+ format, see the hgignore(5) man page.
+ interactive;;
+ Whether to allow prompting the user. True or False. Default is True.
+ logtemplate;;
+ Template string for commands that print changesets.
+ style;;
+ Name of style to use for command output.
+ merge;;
+ The conflict resolution program to use during a manual merge.
+ Default is "hgmerge".
+ quiet;;
+ Reduce the amount of output printed. True or False. Default is False.
+ remotecmd;;
+ remote command to use for clone/push/pull operations. Default is 'hg'.
+ ssh;;
+ command to use for SSH connections. Default is 'ssh'.
+ timeout;;
+ The timeout used when a lock is held (in seconds), a negative value
+ means no timeout. Default is 600.
+ username;;
+ The committer of a changeset created when running "commit".
+ Typically a person's name and email address, e.g. "Fred Widget
+ <fred@example.com>". Default is $EMAIL or username@hostname, unless
+ username is set to an empty string, which enforces specifying the
+ username manually.
+ verbose;;
+ Increase the amount of output printed. True or False. Default is False.
+
+
+web::
+ Web interface configuration.
+ accesslog;;
+ Where to output the access log. Default is stdout.
+ address;;
+ Interface address to bind to. Default is all.
+ allow_archive;;
+ List of archive formats (bz2, gz, zip) allowed for downloading.
+ Default is empty.
+ allowbz2;;
+ (DEPRECATED) Whether to allow .tar.bz2 downloading of repo revisions.
+ Default is false.
+ allowgz;;
+ (DEPRECATED) Whether to allow .tar.gz downloading of repo revisions.
+ Default is false.
+ allowpull;;
+ Whether to allow pulling from the repository. Default is true.
+ allowzip;;
+ (DEPRECATED) Whether to allow .zip downloading of repo revisions.
+ Default is false. This feature creates temporary files.
+ baseurl;;
+ Base URL to use when publishing URLs in other locations, so
+ third-party tools like email notification hooks can construct URLs.
+ Example: "http://hgserver/repos/"
+ description;;
+ Textual description of the repository's purpose or contents.
+ Default is "unknown".
+ errorlog;;
+ Where to output the error log. Default is stderr.
+ ipv6;;
+ Whether to use IPv6. Default is false.
+ name;;
+ Repository name to use in the web interface. Default is current
+ working directory.
+ maxchanges;;
+ Maximum number of changes to list on the changelog. Default is 10.
+ maxfiles;;
+ Maximum number of files to list per changeset. Default is 10.
+ port;;
+ Port to listen on. Default is 8000.
+ style;;
+ Which template map style to use.
+ templates;;
+ Where to find the HTML templates. Default is install path.
+
+
+AUTHOR
+------
+Bryan O'Sullivan <bos@serpentine.com>.
+
+Mercurial was written by Matt Mackall <mpm@selenic.com>.
+
+SEE ALSO
+--------
+hg(1), hgignore(5)
+
+COPYING
+-------
+This manual page is copyright 2005 Bryan O'Sullivan.
+Mercurial is copyright 2005, 2006 Matt Mackall.
+Free use of this software is granted under the terms of the GNU General
+Public License (GPL).
new file mode 100644
--- /dev/null
+++ b/doc/ja/Makefile
@@ -0,0 +1,21 @@
+SOURCES=$(wildcard *.[0-9].ja.txt)
+MAN=$(SOURCES:%.txt=%)
+HTML=$(SOURCES:%.txt=%.html)
+
+all: man html
+
+man: $(MAN)
+
+html: $(HTML)
+
+%: %.xml
+ xmlto -x docbook.ja.xsl man $*.xml
+
+%.xml: %.txt
+ -asciidoc -d manpage -b docbook -f docbook.ja.conf $*.txt
+
+%.html: %.txt
+ asciidoc -b html4 $*.txt
+
+clean:
+ $(RM) $(MAN:%.ja=%) $(MAN:%=%.xml) $(MAN:%=%.html)
new file mode 100644
--- /dev/null
+++ b/doc/ja/docbook.ja.conf
@@ -0,0 +1,583 @@
+#
+# docbook.conf
+#
+# Asciidoc configuration file.
+# Modified docbook backend for Japanese.
+#
+
+[miscellaneous]
+outfilesuffix=.xml
+# Printable page width in pts.
+pagewidth=380
+pageunits=pt
+
+[attributes]
+basebackend=docbook
+basebackend-docbook=
+
+[replacements]
+# Line break markup is dropped (there is no DocBook line break tag).
+(?m)^(.*)\s\+$=\1
+# Superscripts.
+\^(.+?)\^=<superscript>\1</superscript>
+# Subscripts.
+~(.+?)~=<subscript>\1</subscript>
+
+[ruler-blockmacro]
+# Only applies to HTML so don't output anything.
+
+[image-inlinemacro]
+<inlinemediaobject>
+ <imageobject>
+ <imagedata fileref="{target}"{width? contentwidth="{width}pt"}{height? contentdepth="{height}pt"}/>
+ </imageobject>
+ <textobject><phrase>{1={target}}</phrase></textobject>
+</inlinemediaobject>
+
+[image-blockmacro]
+<figure{id? id="{id}"}><title>{title}</title>
+{title%}<informalfigure{id? id="{id}"}>
+<mediaobject>
+ <imageobject>
+ <imagedata fileref="{target}"{width? contentwidth="{width}pt"}{height? contentdepth="{height}pt"}/>
+ </imageobject>
+ <textobject><phrase>{1={target}}</phrase></textobject>
+</mediaobject>
+{title#}</figure>
+{title%}</informalfigure>
+
+[indexterm-inlinemacro]
+# Inline index term.
+# Generate separate index entries for primary, secondary and tertiary
+# descriptions.
+# Primary only.
+{2%}<indexterm>
+{2%} <primary>{1}</primary>
+{2%}</indexterm>
+# Primary and secondary.
+{2#}{3%}<indexterm>
+{2#}{3%} <primary>{1}</primary><secondary>{2}</secondary>
+{2#}{3%}</indexterm>
+{2#}{3%}<indexterm>
+{2#}{3%} <primary>{2}</primary>
+{2#}{3%}</indexterm>
+# Primary, secondary and tertiary.
+{3#}<indexterm>
+ <primary>{1}</primary><secondary>{2}</secondary><tertiary>{3}</tertiary>
+{3#}</indexterm>
+{3#}<indexterm>
+ <primary>{2}</primary><secondary>{3}</secondary>
+{3#}</indexterm>
+{3#}<indexterm>
+ <primary>{3}</primary>
+{3#}</indexterm>
+
+[indexterm2-inlinemacro]
+# Inline index term.
+# Single entry index term that is visible in the primary text flow.
+<indexterm>
+ <primary>{1}</primary>
+</indexterm>
+{1}
+
+[footnote-inlinemacro]
+# Inline footnote.
+<footnote><simpara>{0}</simpara></footnote>
+
+[callout-inlinemacro]
+# Inline callout.
+<co id="{coid}"/>
+
+[tags]
+# Bulleted, numbered and labeled list tags.
+ilist=<itemizedlist{id? id="{id}"}>{title?<title>{title}</title>}|</itemizedlist>
+ilistitem=<listitem>|</listitem>
+ilisttext=<simpara>|</simpara>
+olist=<orderedlist{id? id="{id}"}>{title?<title>{title}</title>}|</orderedlist>
+olist2=<orderedlist{id? id="{id}"} numeration="loweralpha">|</orderedlist>
+olistitem=<listitem>|</listitem>
+olisttext=<simpara>|</simpara>
+vlist=<variablelist{id? id="{id}"}>{title?<title>{title}</title>}|</variablelist>
+vlistentry=<varlistentry>|</varlistentry>
+vlistterm=<term>|</term>
+vlisttext=<simpara>|</simpara>
+vlistitem=<listitem>|</listitem>
+# Horizontal labeled list (implemented with two column table).
+# Hardwired column widths to 30%,70% because the current crop of PDF
+# generators do not auto calculate column widths.
+hlist=<{title?table}{title!informaltable}{id? id="{id}"} tabstyle="{style=hlabeledlist}" pgwide="0" frame="none" colsep="0" rowsep="0">{title?<title>{title}</title>}<tgroup cols="2"><colspec colwidth="{1=3}*"/><colspec colwidth="{2=7}*"/><tbody valign="top">|</tbody></tgroup><{title?/table}{title!/informaltable}>
+hlistentry=<row>|</row>
+hlisttext=<simpara>|</simpara>
+hlistterm=<entry><simpara>|</simpara></entry>
+hlistitem=<entry>|</entry>
+
+# Question and Answer list.
+qlist=<qandaset{id? id="{id}"}>{title?<title>{title}</title>}|</qandaset>
+qlistentry=<qandaentry>|</qandaentry>
+qlistterm=<question><simpara>|</simpara></question>
+qlistitem=<answer>|</answer>
+qlisttext=<simpara>|</simpara>
+# Bibliography list.
+blist=|
+blistitem=<bibliomixed>|</bibliomixed>
+blisttext=<bibliomisc>|</bibliomisc>
+# Glossary list.
+glist=|
+glistentry=<glossentry>|</glossentry>
+glistterm=<glossterm>|</glossterm>
+glistitem=<glossdef>|</glossdef>
+glisttext=<simpara>|</simpara>
+# Callout list.
+colist=<calloutlist{id? id="{id}"}>{title?<title>{title}</title>}|</calloutlist>
+colistitem=<callout arearefs="{coids}">|</callout>
+colisttext=<simpara>|</simpara>
+
+# Quoted text
+emphasis=<emphasis>|</emphasis>
+strong=<emphasis role="strong">|</emphasis>
+monospaced=<literal>|</literal>
+quoted={amp}#8220;|{amp}#8221;
+
+# Inline macros
+[http-inlinemacro]
+<ulink url="{name}:{target}">{0={name}:{target}}</ulink>
+[https-inlinemacro]
+<ulink url="{name}:{target}">{0={name}:{target}}</ulink>
+[ftp-inlinemacro]
+<ulink url="{name}:{target}">{0={name}:{target}}</ulink>
+[file-inlinemacro]
+<ulink url="{name}:{target}">{0={name}:{target}}</ulink>
+[mailto-inlinemacro]
+<ulink url="{name}:{target}">{0={target}}</ulink>
+#<email>{target}</email>
+[link-inlinemacro]
+<ulink url="{target}">{0={target}}</ulink>
+# anchor:id[text]
+[anchor-inlinemacro]
+<anchor id="{target}" xreflabel="{0=[{target}]}"/>
+# [[id,text]]
+[anchor2-inlinemacro]
+<anchor id="{1}" xreflabel="{2=[{1}]}"/>
+# [[[id]]]
+[anchor3-inlinemacro]
+<anchor id="{1}" xreflabel="[{1}]"/>[{1}]
+# xref:id[text]
+[xref-inlinemacro]
+<link linkend="{target}">{0}</link>
+{2%}<xref linkend="{target}"/>
+# <<id,text>>
+[xref2-inlinemacro]
+<link linkend="{1}">{2}</link>
+{2%}<xref linkend="{1}"/>
+
+
+# Special word macros
+[emphasizedwords]
+<emphasis>{words}</emphasis>
+[monospacedwords]
+<literal>{words}</literal>
+[strongwords]
+<emphasis role="strong">{words}</emphasis>
+
+# Paragraph substitution.
+[paragraph]
+<formalpara{id? id="{id}"}><title>{title}</title><para>
+{title%}<simpara{id? id="{id}"}>
+|
+{title%}</simpara>
+{title#}</para></formalpara>
+{empty}
+
+[admonitionparagraph]
+<{name}{id? id="{id}"}><simpara>|</simpara></{name}>
+
+[literalparagraph]
+# The literal block employs the same markup.
+template::[literalblock]
+
+[verseparagraph]
+template::[verseblock]
+
+# Delimited blocks.
+[literalblock]
+<example><title>{title}</title>
+<literallayout{id? id="{id}"} class="{font=monospaced}">
+|
+</literallayout>
+{title#}</example>
+
+[listingblock]
+<example><title>{title}</title>
+<screen>
+|
+</screen>
+{title#}</example>
+
+[verseblock]
+<formalpara{id? id="{id}"}><title>{title}</title><para>
+{title%}<literallayout{id? id="{id}"}>
+{title#}<literallayout>
+|
+</literallayout>
+{title#}</para></formalpara>
+
+[sidebarblock]
+<sidebar{id? id="{id}"}>
+<title>{title}</title>
+|
+</sidebar>
+
+[backendblock]
+|
+
+[quoteblock]
+# The epigraph element may be more appropriate than blockquote.
+<blockquote{id? id="{id}"}>
+<title>{title}</title>
+<attribution>
+{attribution}
+<citetitle>{citetitle}</citetitle>
+</attribution>
+|
+</blockquote>
+
+[exampleblock]
+<{title?example}{title!informalexample}{id? id="{id}"}>
+<title>{title}</title>
+|
+</{title?example}{title!informalexample}>
+
+[admonitionblock]
+<{name}{id? id="{id}"}>
+<title>{title}</title>
+|
+</{name}>
+
+# Tables.
+[tabledef-default]
+template=table
+colspec=<colspec colwidth="{colwidth}{pageunits}" align="{colalign}"/>
+bodyrow=<row>|</row>
+bodydata=<entry>|</entry>
+
+[table]
+<{title?table}{title!informaltable}{id? id="{id}"} pgwide="0"
+frame="{frame=topbot}"
+{grid%rowsep="0" colsep="0"}
+{eval:\{"none":"rowsep=\"0\" colsep=\"0\"", "cols":"rowsep=\"0\" colsep=\"1\"", "all":"rowsep=\"1\" colsep=\"1\"", "rows":"rowsep=\"1\" colsep=\"0\"" \}["{grid}"]}
+>
+<title>{title}</title>
+<tgroup cols="{cols}">
+{colspecs}
+{headrows#}<thead>
+{headrows}
+{headrows#}</thead>
+{footrows#}<tfoot>
+{footrows}
+{footrows#}</tfoot>
+<tbody>
+{bodyrows}
+</tbody>
+</tgroup>
+</{title?table}{title!informaltable}>
+
+[specialsections]
+ifdef::doctype-article[]
+^Abstract$=sect-abstract
+endif::doctype-article[]
+
+ifdef::doctype-book[]
+^Colophon$=sect-colophon
+^Dedication$=sect-dedication
+^Preface$=sect-preface
+endif::doctype-book[]
+
+^Index$=sect-index
+^(Bibliography|References)$=sect-bibliography
+^Glossary$=sect-glossary
+^Appendix [A-Z][:.](?P<title>.*)$=sect-appendix
+
+# Special sections.
+[sect-preface]
+<preface{id? id="{id}"}>
+<title>{title}</title>
+|
+</preface>
+
+[sect-index]
+<index{id? id="{id}"}>
+<title>{title}</title>
+|
+</index>
+
+[sect-bibliography]
+<bibliography{id? id="{id}"}>
+<title>{title}</title>
+|
+</bibliography>
+
+[sect-glossary]
+<glossary{id? id="{id}"}>
+<title>{title}</title>
+|
+</glossary>
+
+[sect-appendix]
+<appendix{id? id="{id}"}>
+<title>{title}</title>
+|
+</appendix>
+
+
+[header-declarations]
+<?xml version="1.0" encoding="{encoding}"?>
+<!DOCTYPE {eval:\{"article":"article", "book":"book", "manpage":"refentry"\}["{doctype}"]} PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN" "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+#-------------------------
+# article document type
+#-------------------------
+ifdef::doctype-article[]
+
+[header]
+template::[header-declarations]
+
+<article lang="ja">
+{doctitle#}<articleinfo>
+ <title>{doctitle}</title>
+ <date>{date}</date>
+ {authored#}<author>
+ <firstname>{firstname}</firstname>
+ <othername>{middlename}</othername>
+ <surname>{lastname}</surname>
+ <affiliation><address><email>{email}</email></address></affiliation>
+ {authored#}</author>
+
+# If file named like source document with -revhistory.xml suffix exists
+# include it as the document history, otherwise use current revision.
+{revisionhistory#}{include:{docdir}/{docname}-revhistory.xml}
+{revisionhistory%}<revhistory><revision><revnumber>{revision}</revnumber><date>{date}</date>{revremark?<revremark>{revremark}</revremark>}</revision></revhistory>
+
+ <corpname>{companyname}</corpname>
+{doctitle#}</articleinfo>
+
+[footer]
+</article>
+
+[preamble]
+# Untitled elements between header and first section title.
+|
+
+[sect-abstract]
+<abstract{id? id="{id}"}>
+|
+</abstract>
+
+[sect1]
+<section{id? id="{id}"}>
+<title>{title}</title>
+|
+</section>
+
+[sect2]
+<section{id? id="{id}"}>
+<title>{title}</title>
+|
+</section>
+
+[sect3]
+<section{id? id="{id}"}>
+<title>{title}</title>
+|
+</section>
+
+[sect4]
+<section{id? id="{id}"}>
+<title>{title}</title>
+|
+</section>
+
+endif::doctype-article[]
+
+#-------------------------
+# manpage document type
+#-------------------------
+ifdef::doctype-manpage[]
+
+[replacements]
+# The roff format does not substitute special characters so just print them as
+# text.
+\(C\)=(C)
+\(TM\)=(TM)
+
+[header]
+template::[header-declarations]
+<refentry>
+<refmeta>
+<refentrytitle>{mantitle}</refentrytitle>
+<manvolnum>{manvolnum}</manvolnum>
+</refmeta>
+<refnamediv>
+ <refname>{manname}</refname>
+ <refpurpose>{manpurpose}</refpurpose>
+</refnamediv>
+
+[footer]
+</refentry>
+
+# Section macros
+[sect-synopsis]
+<refsynopsisdiv{id? id="{id}"}>
+|
+</refsynopsisdiv>
+
+[sect1]
+<refsect1{id? id="{id}"}>
+<title>{title}</title>
+|
+</refsect1>
+
+[sect2]
+<refsect2{id? id="{id}"}>
+<title>{title}</title>
+|
+</refsect2>
+
+[sect3]
+<refsect3{id? id="{id}"}>
+<title>{title}</title>
+|
+</refsect3>
+
+endif::doctype-manpage[]
+
+#-------------------------
+# book document type
+#-------------------------
+ifdef::doctype-book[]
+
+[header]
+template::[header-declarations]
+
+<book lang="ja">
+{doctitle#}<bookinfo>
+ <title>{doctitle}</title>
+ <date>{date}</date>
+ {authored#}<author>
+ <firstname>{firstname}</firstname>
+ <othername>{middlename}</othername>
+ <surname>{lastname}</surname>
+ <affiliation><address><email>{email}</email></address></affiliation>
+ {authored#}</author>
+
+# If file named like source document with -revhistory.xml suffix exists
+# include it as the document history, otherwise use current revision.
+{revisionhistory#}{include:{docdir}/{docname}-revhistory.xml}
+{revisionhistory%}<revhistory><revision><revnumber>{revision}</revnumber><date>{date}</date>{revremark?<revremark>{revremark}</revremark>}</revision></revhistory>
+
+ <corpname>{companyname}</corpname>
+{doctitle#}</bookinfo>
+
+[footer]
+</book>
+
+[preamble]
+# Preamble is not allowed in DocBook book so wrap it in a preface.
+<preface{id? id="{id}"}>
+<title>Preface</title>
+|
+</preface>
+
+[sect-dedication]
+<dedication{id? id="{id}"}>
+|
+</dedication>
+
+[sect-colophon]
+<colophon{id? id="{id}"}>
+|
+</colophon>
+
+[sect0]
+<part{id? id="{id}"}>
+<title>{title}</title>
+|
+</part>
+
+[sect1]
+<chapter{id? id="{id}"}>
+<title>{title}</title>
+|
+</chapter>
+
+[sect2]
+<section{id? id="{id}"}>
+<title>{title}</title>
+|
+</section>
+
+[sect3]
+<section{id? id="{id}"}>
+<title>{title}</title>
+|
+</section>
+
+[sect4]
+<section{id? id="{id}"}>
+<title>{title}</title>
+|
+</section>
+
+endif::doctype-book[]
+
+ifdef::sgml[]
+#
+# Optional DocBook SGML.
+#
+# Most of the differences between DocBook XML and DocBook SGML boils
+# down to the empty element syntax: SGML does not like the XML empty
+# element <.../> syntax, use <...> instead.
+#
+[miscellaneous]
+outfilesuffix=.sgml
+
+[header-declarations]
+<!DOCTYPE {eval:\{"article":"article", "book":"book", "manpage":"refentry"\}["{doctype}"]} PUBLIC "-//OASIS//DTD DocBook V4.1//EN">
+
+[tabledef-default]
+colspec=<colspec colwidth="{colwidth}{pageunits}" align="{colalign}">
+
+[image-inlinemacro]
+<inlinemediaobject>
+ <imageobject>
+ <imagedata fileref="{target}"{width? width="{width}pt"}{height? depth="{height}pt"}>
+ </imageobject>
+ <textobject><phrase>{1={target}}</phrase></textobject>
+</inlinemediaobject>
+
+[image-blockmacro]
+<figure><title>{title}</title>
+{title%}<informalfigure>
+<mediaobject>
+ <imageobject>
+ <imagedata fileref="{target}"{width? width="{width}pt"}{height? depth="{height}pt"}>
+ </imageobject>
+ <textobject><phrase>{1={target}}</phrase></textobject>
+</mediaobject>
+{title#}</figure>
+{title%}</informalfigure>
+
+# Inline macros
+[xref-inlinemacro]
+<link linkend="{target}">{0}</link>
+{2%}<xref linkend="{target}">
+[xref2-inlinemacro]
+# <<id,text>>
+<link linkend="{1}">{2}</link>
+{2%}<xref linkend="{1}">
+[anchor-inlinemacro]
+<anchor id="{target}" xreflabel="{0=[{target}]}">
+[anchor2-inlinemacro]
+# [[id,text]]
+<anchor id="{1}" xreflabel="{2=[{1}]}">
+
+endif::sgml[]
new file mode 100644
--- /dev/null
+++ b/doc/ja/docbook.ja.xsl
@@ -0,0 +1,23 @@
+<?xml version='1.0' encoding="UTF-8"?>
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version='1.0'>
+ <xsl:import href="http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl"/>
+ <xsl:output encoding="UTF-8"/>
+
+ <xsl:template match="refnamediv">
+ <xsl:text>.SH 名前 </xsl:text>
+ <xsl:for-each select="refname">
+ <xsl:if test="position()>1">
+ <xsl:text>, </xsl:text>
+ </xsl:if>
+ <xsl:value-of select="."/>
+ </xsl:for-each>
+ <xsl:text> \- </xsl:text>
+ <xsl:value-of select="normalize-space (refpurpose)"/>
+ </xsl:template>
+
+ <xsl:template match="refsynopsisdiv">
+ <xsl:text> .SH "書式" </xsl:text>
+ <xsl:apply-templates/>
+ </xsl:template>
+
+</xsl:stylesheet>
new file mode 100644
--- /dev/null
+++ b/doc/ja/hg.1.ja.txt
@@ -0,0 +1,867 @@
+HG(1)
+=====
+Matt Mackall <mpm@selenic.com>
+
+名前
+--
+hg - Mercurial ソースコード管理システム
+
+書式
+--
+'hg' [-v -d -q -y] <command> [command options] [files]
+
+説明
+--
+hg(1) コマンドは Mercurial システムへのコマンドラインインターフェ
+イスを提供します。
+
+オプション
+----
+
+-R, --repository::
+ リポジトリのルートディレクトリを指定します。
+
+--cwd::
+ 作業ディレクトリを変更します。
+
+-y, --noninteractive::
+ プロンプトを出さずに、要求された答えが全て 'yes' であると仮定
+ します。
+
+-q, --quiet::
+ 出力を抑制します。
+
+-v, --verbose::
+ さらなる出力を可能にします。
+
+--debug::
+ デバッグ出力を可能にします。
+
+--traceback::
+ 例外時にトレースバックを表示します。
+
+--time::
+ コマンドにどのくらい時間がかかるかを表示します。
+
+--profile::
+ コマンドを実行したときのプロファイルを表示します。
+
+--version::
+ バージョン情報を表示して終了します。
+
+-h, --help::
+ ヘルプを表示して終了します。
+
+コマンドの要素
+-------
+
+files ...::
+ 1つ以上のファイル名か相対的なパスを表します; パターンマッチン
+ グの情報は「ファイル名のパターン」を参照してください。
+
+path::
+ ローカルマシン上のパスを表します
+
+revision::
+ チェンジセットのリビジョンナンバー, タグ, チェンジセットのハッ
+ シュ値のユニークな部分文字列により指定できるチェンジセットを表
+ します
+
+repository path::
+ ローカルのリポジトリのパス名かリモートのリポジトリの URI を表
+ します。URI のプロトコルとしては現在 2 つが利用可能です。
+ http:// は高速で、old-http:// は遅いですがウェブのホストに特別
+ なサーバを必要としません。
+
+コマンド
+----
+
+add [options] [files ...]::
+ ファイルをバージョン管理下に置きリポジトリに追加することを予定
+ します。
+
+ ファイルは次にコミット時にリポジトリに追加されます。
+
+ ファイル名が与えられなければ、現在のディレクトリとサブディレク
+ トリの全てのファイルを追加します。
+
+addremove [options] [files ...]::
+ 新しいファイルを全て追加し無くなったファイルを全てリポジトリか
+ ら取り除きます。
+
+ 新しいファイルは .hgignore 中のパターンにマッチした場合無視さ
+ れます。add のようにこの変更は次のコミット時に効果を持ちます。
+
+annotate [-r <rev> -u -n -c] [files ...]::
+ ファイル中の変更を列挙し、各行の原因であるリビジョン id を表示
+ します。
+
+ このコマンドはある変更が生じた際に誰がその変更をしたかを発見する
+ のに役に立ちます。
+
+ -a オプションが無いと、annotate はバイナリとして検出されたファ
+ イルを避けるようになります。-a があると、annotate はとにかく注
+ 釈を生成し、おそらく望ましくない結果になるでしょう。
+
+ オプション:
+ -a, --text 全てのファイルをテキストとして扱います
+ -I, --include <pat> 与えられたパターンにマッチした名前を含め
+ ます
+ -X, --exclude <pat> 与えられたパターンにマッチした名前を除外
+ します
+ -r, --revision <rev> 指定されたリビジョンの注釈を生成します
+ -u, --user 著者を列挙します
+ -c, --changeset チェンジセットを列挙します
+ -n, --number リビジョンナンバーを列挙します
+ (デフォルト)
+
+bundle <file> <other>::
+ (実験的)
+
+ 他のリポジトリには見付からなかった全てのチェンジセットを集めた、
+ 圧縮済みチェンジグループファイルを生成します。
+
+ このファイルは従来の方法で転送することができ、他のリポジトリに
+ unbundle コマンドで適用できます。これは push と pull が使えな
+ いか、リポジトリ全体をエクスポートしてしまうことが望ましくない
+ ときに便利です。標準のファイル拡張子は ".hg" です。
+
+ import/export と違って、これはパーミッション、名前変更のデータ、
+ リビジョンの履歴を含めたチェンジセットの内容全てを保存します。
+
+cat [options] <file ...>::
+ 指定されたファイルを与えられたリビジョンの内容で表示します。リ
+ ビジョンが指定されなかった場合は tip が使われます。
+
+ 出力はファイルに対しても可能です。その場合、ファイル名はフォー
+ マット文字列により指定されます。フォーマット規則は export コマ
+ ンドと同じですが、さらに次のものが追加されます。
+
+ %s 出力されるファイルのベース名
+ %d 出力されるファイルのディレクトリ名か、リポジトリのルート
+ にいる場合は "."
+ %p 出力されるファイルのルートからの相対パス
+
+ オプション:
+ -I, --include <pat> 与えられたパターンにマッチした名前
+ を含めます
+ -X, --exclude <pat> 与えられたパターンにマッチした名前
+ を除外します
+ -o, --output <filespec> 整形された名前でファイルに出力しま
+ す
+ -r, --rev <rev> 与えられたリビジョンを表示します
+
+clone [-U] <source> [dest]::
+ 既存のリポジトリのコピーを新しいディレクトリに作成します
+
+ コピー先のディレクトリ名が指定されなければ、デフォルトでソース
+ のベース名を使用します。
+
+ 今後の pull に使えるように、コピー元が新しいリポジトリの
+ .hg/hgrc に追加されます。
+
+ 効率のために、コピー元とコピー先が同じファイルシステム上にある
+ 場合はハードリンクが使われます。
+
+ オプション:
+ -U, --noupdate 新しい作業ディレクトリで update を行いません
+ -e, --ssh 使用される ssh コマンドを指定します
+ --remotecmd リモート側で実行する hg コマンドを指定します
+
+commit [options] [files...]::
+ 指定されたファイルの変更をリポジトリにコミットします。
+
+ もしファイルのリストが省略されたら、リポジトリのルートから実行
+ した"hg status" で報告される全ての変更がコミットされます。
+
+ HGEDITOR や EDITOR 環境変数はコミット時のコメントを追加するエ
+ ディタを起動するために使われます。
+
+ オプション:
+
+ -A, --addremove コミット中に addremove を実行します
+ -I, --include <pat> 与えられたパターンにマッチした名前を含め
+ ます
+ -X, --exclude <pat> 与えられたパターンにマッチした名前を除外
+ します
+ -m, --message <text> <text> をコミットメッセージとして使いま
+ す
+ -l, --logfile <file> <file> からコミットメッセージを読み込み
+ ます
+ -d, --date <datecode> datecode をコミットした日付として記録し
+ ます
+ -u, --user <user> user をコミッタとして記録します。
+
+ 別名: ci
+
+copy <source ...> <dest>::
+ コピー先がコピー元のファイルのコピーを持っていると印を付けます。
+ もしコピー先がディレクトリなら、コピーはディレクトリ中に置かれ
+ ます。もしコピー先がファイルなら、コピー元は1つのみ指定可能で
+ す。
+
+ デフォルトでは、このコマンドはファイルがその作業ディレクトリに
+ あるものとしてその内容をコピーします。もし --after と一緒に呼
+ び出されれば、操作は記録されますが、コピーは実行されません。
+
+ このコマンドは次のコミット時に効果を持ちます。
+
+ 注意: このコマンドは実験的です。リネームされたファイルを適切に
+ 記録できますが、この情報はマージによってまだ完全には使われませ
+ んし、ログで完全に報告されることもありません。
+
+ オプション:
+ -A, --after 既に発生したコピーを記録します。
+ -I, --include <pat> 与えられたパターンにマッチした名前を含め
+ ます
+ -X, --exclude <pat> 与えられたパターンにマッチした名前を除外
+ します
+ -f, --force 既存の変更されたファイルに無理矢理コピー
+ します
+ -p, --parents コピー先にコピー元のパスを追加します
+
+ 別名: cp
+
+diff [-a] [-r revision] [-r revision] [files ...]::
+ 指定されたファイルのリビジョン間の差分を表示します。
+
+ ファイル間の差分は unified diff 形式で表示されます。
+
+ 2つのリビジョンが引数として指定された場合、それらのリビジョン
+ 間の差分が表示されます。1つのリビジョンしか指定されなければ、
+ そのリビジョンは作業ディレクトリと比較されます。そして リビジョ
+ ンが指定されなければ、作業ディレクトリのファイルがその親と比較
+ されます。
+
+ -a オプション無しでは、diff はバイナリファイルを検出したときに
+ その差分を生成するのを避けます。-a オプションでは、diff はとに
+ かく差分を生成し、恐らく望ましくない結果をもたらすでしょう。
+
+ オプション:
+ -a, --text 全てのファイルをテキストとして扱います
+ -I, --include <pat> 与えられたパターンにマッチした名前を含め
+ ます
+ -X, --exclude <pat> 与えられたパターンにマッチした名前を除外
+ します
+
+export [-o filespec] [revision] ...::
+ 1つ以上のリビジョンのチェンジセットのヘッダと差分を出力します。
+
+ チェンジセットのヘッダに表示される情報は: 著者、チェンジセット
+ のハッシュ、親、コミット時のコメントです。
+
+ 出力はファイルに対しても可能です。その場合、ファイル名はフォー
+ マット文字列により指定されます。フォーマット規則は下記の通りで
+ す:
+
+ %% そのままの "%" 文字
+ %H チェンジセットのハッシュ (40 バイトの 16 進数)
+ %N 生成されているパッチの番号
+ %R チェンジセットのリビジョンナンバー
+ %b エクスポートしているリポジトリのベース名
+ %h 短い形式のチェンジセットのハッシュ (12 バイトの 16 進数)
+ %n 0 で 詰められた 1 から始まる連番
+ %r 0 で 詰められたリビジョンナンバー
+
+ -a オプション無しでは、diff はバイナリファイルを検出したときに
+ その差分を生成するのを避けます。-a オプションでは、diff はとに
+ かく差分を生成し、恐らく望ましくない結果をもたらすでしょう。
+
+ オプション:
+ -a, --text 全てのファイルをテキストとして扱います
+ -o, --output <filespec> 整形された名前でファイルに出力します
+
+forget [options] [files]::
+ 次のコミット時に予定された 'hg add' を取り消します。
+
+ オプション:
+ -I, --include <pat> 与えられたパターンにマッチした名前を含めま
+ す
+ -X, --exclude <pat> 与えられたパターンにマッチした名前を除外します
+
+grep [options] pattern [files]::
+ 正規表現によりファイルのリビジョンを検索します。
+
+ このコマンドは Unix の grep とは違う振舞いをします。これは
+ Python/Perl の正規表現だけを受けつけます。これは作業ディレクト
+ リではなくリポジトリの履歴を検索します。これは常にマッチしたも
+ のが現れたリビジョンナンバーを表示します。
+
+ デフォルトでは、grep はマッチしたものが見つかったファイルの最
+ 初のリビジョンを出力します。マッチ状況の変化("-" はマッチが非
+ マッチに、"+" は非マッチがマッチに)を含んだ各リビジョンを表示
+ するには、--all フラグを使ってください。
+
+ オプション:
+ -0, --print0 ファイル名を NUL で終えます。
+ -I, --include <pat> 与えられたパターンにマッチした名前
+ を含めます
+ -X, --exclude <pat> 与えられたパターンにマッチした名前
+ を除外します
+ --all マッチした全てのリビジョンを表示し
+ ます
+ -i, --ignore-case マッチのときに英大文字と小文字を区
+ 別しないようにします
+ -l, --files-with-matches マッチしたファイル名とリビジョンの
+ みを表示します
+ -n, --line-number マッチした行番号を表示します
+ -r, --rev <rev> 指定されたリビジョンの間で検索しま
+ す
+ -u, --user その変更をコミットしたユーザを表示
+ します
+
+heads::
+ リポジトリの先頭のチェンジセットを全て表示します。
+
+ リポジトリの「先頭」とは子のチェンジセットを持っていないチェン
+ ジセットです。それらは大抵開発が行われる場所で、通常 update と
+ merge 操作の対象となるところです。
+
+identify::
+ レポジトリの現在の状態の短いサマリを表示します。
+
+ このサマリはリポジトリの状態を1つまたは2つの親のハッシュ識別子
+ を使って識別します。親のハッシュ識別子はもし作業ディレクトリに
+ コミットされていない変更があれば後ろに + が付き、更にその後に
+ このリビジョンのタグのリストが付きます。
+
+ 別名: id
+
+import [-p <n> -b <base> -f] <patches>::
+ 一連のパッチをインポートし、それぞれ個別にコミットします。
+
+ 作業ディレクトリに未解決の変更があった場合、import は -f フラ
+ グが指定されてなければ中断します。
+
+ もしパッチがメールのよう(最初の行が "From " か RFC 822 ヘッダ
+ のよう) であれば、-f オプションが使われていない限りそれは適用
+ されません。インポート機構はメールのヘッダをパースもしなければ
+ 破棄もしないので、本物のメールをインポートしないようにする「メー
+ ルのようなものの」健全性チェックを上書きするためだけに -f を使っ
+ てください。
+
+ オプション:
+ -p, --strip <n> patch の ディレクトリ除去オプションです。これ
+ は関連する patch のオプションと同じ意味を持ち
+ ます
+ -b <path> パッチを読み込むベースとなるディレクトリを指
+ 定します
+ -f, --force 未解決でまだコミットされていない変更のチェッ
+ クを省略します
+
+ 別名: patch
+
+incoming [-p] [source]::
+ 指定されたリポジトリか、デフォルトで pull するリポジトリ中に見
+ つかった新しいチェンジセットを表示します。これらは pull が要求
+ されたときにpull されるチェンジセットです。
+
+ 現在はローカルのリポジトリのみがサポートされています。
+
+ オプション:
+ -p, --patch パッチを表示します
+
+ 別名: in
+
+init [dest]::
+ 指定されたディレクトリ中に新しいリポジトリを初期化します。指定
+ されたディレクトリが存在しない場合は作成されます。
+
+ ディレクトリが指定されなければ、現在のディレクトリが使用されま
+ す。
+
+locate [options] [files]::
+ Mercurial の管理下にあるファイルで名前が指定されたパターンにマッ
+ チしたものを全て表示します。
+
+ このコマンドは現在のディレクトリとサブディレクトリを検索します。
+ リポジトリ全体を検索するには、リポジトリのルートに移動してくだ
+ さい。
+
+ もしマッチするためのパターンが与えられなければ、このコマンドは
+ 全てのファイルの名前を表示します。
+
+ もしこのコマンドの出力を "xargs" コマンドに送りたいなら、
+ "-0" オプションをこのコマンドと "xargs" コマンドの両方で使用し
+ てください。これは "xargs" がスペースの入ったファイル名を複数
+ のファイル名として扱わないようにします。
+
+ オプション:
+
+ -0, --print0 xargs と一緒に使うために、ファイル名を
+ NUL で終えます
+ -f, --fullpath ファイルシステムのルートからの完全なパ
+ スを表示します
+ -I, --include <pat> 与えられたパターンにマッチした名前を含
+ めます
+ -r, --rev <rev> rev のときのリポジトリを検索します
+ -X, --exclude <pat> 与えられたパターンにマッチした名前を除外
+ します
+
+log [-r revision ...] [-p] [files]::
+ 指定されたファイルかプロジェクト全体のリビジョンの履歴を表示し
+ ます。
+
+ デフォルトではこのコマンドは次のものを出力します: チェンジセッ
+ トのid とハッシュ、タグ、親、ユーザ、日付、各コミットのサマ
+ リ。-v スイッチは変更されたファイルやマニフェストのハッシュ、
+ メッセージのシグネチャといったより詳しい情報を追加します。
+
+ オプション:
+ -I, --include <pat> 与えられたパターンにマッチした名前を含め
+ ます
+ -X, --exclude <pat> 与えられたパターンにマッチした名前を除外
+ します
+ -r, --rev <A> 指定されたリビジョンまたは範囲を表示しま
+ す
+ -p, --patch パッチを表示します
+
+ 別名: history
+
+manifest [revision]::
+ 指定されたリビジョンでバージョン管理されているファイルのリスト
+ を表示します。
+
+ manifest はバージョン管理されているファイルのリストです。もし
+ リビジョンが指定されなければ、tip が使われます。
+
+outgoing [-p] [dest]::
+ 指定された行き先のリポジトリかデフォルトで push するリポジトリ
+ 中に見付からなかったチェンジセットを表示します。これらは push
+ が要求されたときに push されるであろうチェンジセットです。
+
+ オプション:
+ -p, --patch パッチを表示します
+
+ 別名: out
+
+parents::
+ 作業ディレクトリの親リビジョンを表示します。
+
+paths [NAME]::
+ シンボルのパス名である NAME の行き先を表示します。もしシンボル
+ 名が指定されなければ、利用可能なシンボル名の行き先を表示します。
+
+ パス名は /etc/mercurial/hgrc と $HOME/.hgrc の [paths] セクショ
+ ンで定義されます。もしリポジトリの内部で実行された場
+ 合、.hg/hgrc も使用されます。
+
+pull <repository path>::
+ リモートのリポジトリの変更をローカルのリポジトリに pull します。
+
+ これは指定されたパスや URL にあるリポジトリの全ての変更を見つ
+ けて、それらをローカルのリポジトリに追加します。デフォルトでは、
+ これは作業ディレクトリのプロジェクトのコピーを更新しません。
+
+ 有効な URL の次の形式です:
+
+ local/filesystem/path
+ http://[user@]host[:port][/path]
+ https://[user@]host[:port][/path]
+ ssh://[user@]host[:port][/path]
+
+ SSH は行き先のマシンのシェルアカウントと、リモートのパスにhg
+ のコピーが必要になります。SSH を使用すると、パスはデフォルトで
+ はリモートのユーザのホームディレクトリの相対パスになります; ファ
+ イルシステムのルートからの相対パスであることを指定するには、パ
+ スの最初にスラッシュを 2つ使用してください。
+
+ オプション:
+ -u, --update pull の後に作業ディレクトリを tip に更新します
+ -e, --ssh 使用する ssh コマンドを指定します
+ --remotecmd リモート側で使われる hg コマンドを指定します
+
+push <destination>::
+ ローカルのリポジトリの変更を指定された行き先に push します。
+
+ これは pull と対称的な操作です。これは現在のリポジトリの変更を
+ 他のリポジトリへ移すのに役立ちます。もし行き先がローカルであれ
+ ば、これはそのディレクトリから現在のディレクトリに対して pull
+ するのと同じです。
+
+ デフォルトでは、push は実行した結果リモートのヘッドの数が増え
+ るならば、実行を拒否します。これはたいていクライアントが push
+ する前に sync とmerge を忘れていることを意味します。
+
+ 有効な URL は次の形式です:
+
+ local/filesystem/path
+ ssh://[user@]host[:port][/path]
+
+ SSH は行き先のマシンのシェルアカウントと、リモートのパスに hg
+ のコピーが必要になります。
+
+ オプション:
+
+ -f, --force update を強行します
+ -e, --ssh 使用される ssh コマンドを指定します
+ --remotecmd リモート側で実行される hg コマンドを指定します
+
+rawcommit [-p -d -u -F -m -l]::
+ 低レベルのコミットで、ヘルパースクリプト中で使用されます。
+
+ このコマンドは通常のユーザに使われることは想定していません。こ
+ れは主に他の SCM からインポートするときに便利です。
+
+recover::
+ 中断された commit や pull から復帰します。
+
+ このコマンドは中断された操作からリポジトリの状態を修整しようと
+ 試みます。これは Mercurial がそうするよう提案したときのみ必要
+ でしょう。
+
+remove [options] [files ...]::
+ 指定されたファイルをリポジトリから削除することを予定します。
+
+ このコマンドはファイルを次のコミット時に削除することを予定しま
+ す。このコマンドはファイルを現在の枝から取り除くだけで、プロジェ
+ クトの履歴全体からは削除しません。もしファイルが作業ディレクト
+ リ中にまだ存在していれば、それらは作業ディレクトリから削除され
+ ます。
+
+ 別名: rm
+
+rename <source ...> <dest>::
+ コピー先をコピー元のコピーのコピーであると印をつけます; コピー
+ 元に削除の印をつけます。もしコピー先がディレクトリであれば、コ
+ ピーはそのディレクトリ中に置かれます。もしコピー先がファイルな
+ ら、コピー元は 1 つのみ指定可能です。
+
+ デフォルトでは、このコマンドはファイルがその作業ディレクトリに
+ あるものとしてその内容をコピーします。もし --after と一緒に呼
+ び出されれば、操作は記録されますが、コピーは実行されません。
+
+ このコマンドは次のコミット時に効果を持ちます。
+
+ 注意: このコマンドは実験的です。リネームされたファイルを適切に
+ 記録できますが、この情報はマージによってまだ完全には使われませ
+ んし、ログで完全に報告されることもありません。
+
+ オプション:
+ -A, --after 既に発生したリネームを記録します
+ -f, --force 既存の変更されたファイルに無理矢理コピーし
+ ます
+ -p, --parents コピー先にコピー元のパスを追加します
+
+ 別名: mv
+
+revert [names ...]::
+ 指定されたファイルやディレクトリの未コミットの変更を取り消しま
+ す。これは関連したファイルの内容をコミットされてない状態に戻し
+ ます。
+
+ もしファイルが削除されていれば、再作成されます。もしファイルの
+ 実行モードが変更されていれば、リセットされます。
+
+ ディレクトリが指定された場合、そのディレクトリ中のすべてのファ
+ イルとサブディレクトリが元に戻されます。
+
+ もし引数が指定されなければ、現在のディレクトリ中の全てのファイ
+ ルとサブディレクトリが元に戻されます。
+
+ オプション:
+ -r, --rev <rev> 元に戻す先のリビジョンを指定します
+ -n, --nonrecursive サブディレクトリを再帰的に辿らないように
+ します
+
+root::
+ 現在のリポジトリのルートディレクトリを表示します。
+
+serve [options]::
+ ローカルの HTTP リポジトリと pull サーバを起動します。
+
+ デフォルトでは、サーバはアクセスログを標準出力に、エラーログを
+ 標準エラー出力に出力します。ファイルにログを取るには "-A" と
+ "-E" オプションを使ってください。
+
+ オプション:
+ -A, --accesslog <file> アクセスログが出力されるファイルの名前
+ を指定します
+ -E, --errorlog <file> エラーログが出力されるファイルの名前を
+ 指定します
+ -a, --address <addr> 使用するアドレスを指定します
+ -p, --port <n> 使用するポートを指定します
+ (デフォルト: 8000)
+ -n, --name <name> ウェブページで表示する名前を指定します
+ (デフォルト: working dir)
+ -t, --templatedir <path> 使用するウェブの雛型を指定します
+ -6, --ipv6 IPv4 に加えて IPv6 も使用します
+
+status [options] [files]::
+ 作業ディレクトリ中の変更されたファイルを表示します。名前が指定
+ されなければ、全てのファイルが表示されます。名前が指定されれば、
+ 指定された名前にマッチしたファイルのみが表示されます。
+
+ ファイルの状態を表示するのに使われる記号:
+
+ M = 変更されました
+ A = 追加されました
+ R = 削除されました
+ ? = バージョン管理下にありません
+
+ オプション:
+
+ -m, --modified 変更されたファイルのみを表示します
+ -a, --added 追加されたファイルのみを表示します
+ -r, --removed 削除されたファイルのみを表示します
+ -u, --unknown 不明な(バージョン管理下にない)ファイルのみ
+ を表示します
+ -n, --no-status 状態を示す接頭辞を隠します
+ -0, --print0 xargs と一緒に使うために、ファイル名を NUL
+ で終えます
+ -I, --include <pat> 与えられたパターンにマッチした名前を含めま
+ す
+ -X, --exclude <pat> 与えられたパターンにマッチした名前を除外し
+ ます
+
+tag [-l -m <text> -d <datecode> -u <user>] <name> [revision]::
+ 特定のリビジョンに <name> を使って名前を付けます。
+
+ タグはリポジトリの特定のリビジョンに名前を付けるために使われ、
+ そして異なるリビジョンを比較したり、重要な以前のバージョンに戻っ
+ たり、リリース等の分岐点に印をつけたりするのに便利です。
+
+ もしバージョンが指定されなければ、tip が使われます。
+
+ バージョン管理、配布、タグのマージを楽にするために、それらは
+ ".hgtags" という名前のファイルに格納され、他のプロジェクトのファ
+ イルと同様に扱ったり、必要であれば手で編集できます。
+
+ オプション:
+ -l, --local タグをローカルにします
+ -m, --message <text> タグをコミットしたときのログのエントリの
+ メッセージを指定します
+ -d, --date <datecode> コミットの日付を指定します
+ -u, --user <user> コミットするユーザを指定します
+
+ 注意: ローカルのタグはバージョン管理や配布されることはなく、ま
+ た .hg/localtags ファイルに格納されます。もし同じ名前のローカ
+ ルのタグと公開されたタグがあれば、ローカルのタグが使われます。
+
+tags::
+ リポジトリのタグを列挙します。
+
+ これは通常のタグとローカルのタグを両方列挙します。
+
+tip::
+ tip のリビジョンを表示します。
+
+unbundle <file>::
+ (実験的)
+
+ bundle コマンドで生成された、圧縮済みチェンジグループファイル
+ を適用します。
+
+undo::
+ 最後の commit や pull の処理を取り消します。
+
+ リポジトリの最後の pull や commit 処理を巻戻し、プロジェクトを
+ それより前の状態に戻します。
+
+ このコマンドは注意して使ってください。まだ 1回の undo だけで、
+ redo はありません。
+
+ このコマンドは公開したリポジトリで使われることは想定していませ
+ ん。いったん他のユーザから pull で変更が見えるようになれば、ロー
+ カルでそれを取り消しても意味がありません。
+
+update [-m -C] [revision]::
+ 作業ディレクトリを指定されたリビジョンに更新します。
+
+ デフォルトでは、更新によりローカルの変更をマージしたり破棄した
+ りすることが必要となるとき、update はそれを拒否します。
+
+ -m オプションで、マージが実行されます。
+
+ -C オプションで、ローカルの変更が失われます。
+
+ オプション:
+ -m, --merge 枝のマージを許可します
+ -C, --clean ローカルで変更されたファイルを上書きします
+
+ 別名: up checkout co
+
+verify::
+ 現在のリポジトリの整合性を検証します。
+
+ これはリポジトリの整合性を全面的にチェックし、チェンジログの各
+ エントリ、manifest, 管理下のファイルのハッシュとチェックサムを
+ 検証し、またクロスリンクとインデクスの整合性も検証します。
+
+ファイル名とパターン
+---------
+
+ Mercurial では同時に複数のファイルを識別するのに複数の記法が使
+ えます。
+
+ デフォルトでは、Mercurial はファイル名をシェルのスタイルの拡張
+ glob パターンとして扱います。
+
+ 別のパターン表記は明示的に指定する必要があります。
+
+ パターンマッチングなしの単純なパス名を使うには、パス名を
+ "path:" で始めてください。これらのパス名は、現在のリポジトリの
+ ルートから完全にマッチしている必要があります。
+
+ 拡張 glob を使うには、名前を "glob:" で始めてください。glob は
+ 現在のディレクトリのみに適用されます: "*.c" といった glob は現
+ 在のディレクトリ中の ".c" で終わるファイルのみにマッチします。
+
+ サポートされている glob 文法の拡張はパスの分離記号を越えて全て
+ の文字列にマッチする "**" と、"a または b" を意味する "{a,b}"
+ です。
+
+ Perl/Python の正規表現を使うには、名前を "re:" で始めてくださ
+ い。正規表現によるマッチはリポジトリのルートに固定されています。
+
+ 単純な例:
+
+ path:foo/bar リポジトリのルートにある foo というディレクトリ
+ の bar という名前
+ path:path:name "path:name" という名前のファイルまたはディレク
+ トリ
+
+ Glob の例:
+
+ glob:*.c 現在のディレクトリ中の ".c" で終わる全ての名前
+ *.c 現在のディレクトリ中の ".c" で終わる全ての名前
+ **.c 現在のディレクトリと全てのサブディレクトリ中の
+ ".c" で終わる全ての名前
+ foo/*.c ディレクトリ foo 中の ".c" で終わる全ての名前
+ foo/**.c ディレクトリ foo とその全てのサブディレクトリ中
+ の ".c" で終わる全ての名前
+
+ 正規表現の例:
+
+ re:.*\.c$ リポジトリ全体の中の ".c" で終わる全ての名前
+
+
+単一のリビジョンの指定法
+-----------
+
+ Mercurial では個々のリビジョンを識別するのに複数の記法が使えま
+ す。
+
+ 単純な整数はリビジョンナンバーとして扱われます。負の整数はtip
+ からのオフセットとして扱われ、-1 が tip を表します。
+
+ 40 桁の 16 進数の文字列はユニークなリビジョン識別子として扱わ
+ れます。
+
+ 40 桁より少ない 16 進数の文字列はユニークなリビジョン識別子と
+ して扱われ、短い形式の識別子と呼ばれます。短い形式の識別子はそ
+ れが完全な長さの識別子の接頭語であるときだけ有効です。
+
+ 他の文字列は全てタグ名として扱われます。タグはあるリビジョン識
+ 別子に関連付けられたシンボル名です。タグ名は ":" 文字を含んで
+ はいけません。
+
+ リビジョン名 "tip" は特別なタグで、常に一番最新のリビジョンを
+ 指します。
+
+複数のリビジョンの指定法
+-----------
+
+ Mercurial が 1つより多くのリビジョンを受けいれるとき、それらは
+ 別々に指定されるか、連続した範囲として ":" 文字で区切って与え
+ られるかもしれません。
+
+ 範囲表記の構文は [BEGIN]:[END] で BEGIN と END はリビジョンの
+ 識別子です。BEGIN も END も両方とも任意です。もし BEGIN が指定
+ されなければ、デフォルトでリビジョンナンバー 0 になります。も
+ し END が指定されなければ、デフォルトで tip になります。従って
+ 範囲 ":" は "全てのリビジョン" を意味します。
+
+ もし BEGIN が END より大きければ、リビジョンは逆の順序として扱
+ われます。
+
+ 範囲は閉区間として動作します。これは範囲が 3:5 は 3,4,5 になる
+ ことを意味します。同様に、範囲 4:2 は 4,3,2 になります。
+
+環境変数
+----
+
+HGEDITOR::
+ これはコミット時に使われるエディタの名前です。デフォルトでは
+ EDITOR の値が使われます。
+
+ (廃止予定です, .hgrc を使ってください)
+
+HGMERGE::
+ merge 時の衝突を解決するのに使われる実行ファイルです。プログラ
+ ムは3 つの引数で実行されます: ローカルのファイル、リモートのファ
+ イル、1 世代前のファイルです。
+
+ デフォルトのプログラムは "hgmerge" で、これは Mercurial によっ
+ て提供される常識的な設定のシェルスクリプトです。
+
+ (廃止予定です, .hgrc を使ってください)
+
+HGUSER::
+ これはコミット時の著者として使われる文字列です。
+
+ (廃止予定です, .hgrc を使ってください)
+
+EMAIL::
+ もし HGUSER が設定されていなければ、これがコミット時の著者とし
+ て使われます。
+
+LOGNAME::
+ もし HGUSER も EMAIL も設定されていなければ、コミット時の著者
+ としてLOGNAME が('@hostname' を付けた形で)使われます。
+
+EDITOR::
+ これは hgmerge スクリプト中で使われるエディタの名前です。もし
+ HGEDITOR が設定されていなければ、コミット時のメッセージに使わ
+ れます。デフォルトは 'vi' です。
+
+PYTHONPATH::
+ これはインポートされるモジュールを見つけるために Python によっ
+ て使われ、Mercurial がシステム全体にインストールされていなけれ
+ ば、適切に設定される必要があるでしょう。
+
+ファイル
+----
+ .hgignore::
+ このファイルは(1行ごとに) hg によって無視されるべきファイルを
+ 記述した正規表現を含みます。
+
+ .hgtags::
+ このファイルはリポジトリの内容のタグ付けされたバージョンに一致
+ したハッシュ値とテキストのタグ名(それぞれは空白で区切られます)を
+ 含みます。
+
+ /etc/mercurial/hgrc, $HOME/.hgrc, .hg/hgrc::
+ このファイルはデフォルトの設定を含みます。.hg/hgrc の値は
+ $HOME/.hgrc の設定を上書きし、$HOME/.hgrc の設定はグローバルな
+ /etc/mercurial/hgrc の設定を上書きします。これらのファイルの内
+ 容と書式の詳細については hgrc(5) を参照してください。
+
+バグ
+--
+沢山あるでしょうから、もしバグを見つけたらそれをメーリングリスト
+(下の情報源を参照)に送ってください。
+
+関連項目
+----
+hgrc(5)
+
+著者
+--
+Matt Mackall <mpm@selenic.com> により書かれました。
+
+情報源
+---
+http://selenic.com/mercurial[主なウェブサイト]
+
+http://www.serpentine.com/mercurial[Wiki サイト]
+
+http://selenic.com/hg[ソースコードのリポジトリ]
+
+http://selenic.com/mailman/listinfo/mercurial[メーリングリスト]
+
+著作権情報
+-----
+Copyright (C) 2005 Matt Mackall.
+このソフトウェアの自由な使用は GNU 一般公有使用許諾 (GPL) のもとで
+認められます。
new file mode 100644
--- /dev/null
+++ b/doc/ja/hgmerge.1.ja.txt
@@ -0,0 +1,37 @@
+HGMERGE(1)
+==========
+Matt Mackall <mpm@selenic.com>
+v0.1, 27 May 2005
+
+名前
+--
+hgmerge - Mercurial ソースコード管理システムでファイルをマージする
+のに使われるデフォルトのラッパー
+
+書式
+--
+'hgmerge' local ancestor remote
+
+説明
+--
+hgmerge(1) コマンドは Mercurial システムでファイルをマージするため
+のグラフィカルなインターフェイスを提供します。これは kdiff3,
+merge(1), tkdiff(1), または単純に diff(1) と patch(1) のラッパーで、
+どれがシステム上にあるかに依存します。
+
+hgmerge(1) は Mercurial ソースコード管理システムで環境変数
+HGMERGE が設定されていない場合に使われます。
+
+著者
+--
+Vincent Danjean <Vincent.Danjean@free.fr> によって書かれました。
+
+関連情報
+--
+hg(1) - Mercurial システムへのコマンドラインインターフェイス
+
+著作権情報
+----
+Copyright (C) 2005 Matt Mackall.
+このソフトウェアの自由な使用は GNU 一般公有使用許諾 (GPL) のもとで
+認められます。
new file mode 100644
--- /dev/null
+++ b/doc/ja/hgrc.5.ja.txt
@@ -0,0 +1,204 @@
+HGRC(5)
+=======
+Bryan O'Sullivan <bos@serpentine.com>
+
+名前
+--
+hgrc - Mercurial の設定ファイル
+
+書式
+--
+
+Mercurial システムはその振舞いの様々な側面を制御するのに、一連の設定ファ
+イルを使用します。
+
+ファイル
+----
+
+Mercurial は 3つのファイルから設定を読みます:
+
+/etc/mercurial/hgrc::
+ このグローバルの設定ファイルのオプションは実行したユーザ、ディ
+ レクトリを問わず全ての Mercurial コマンドに適用されます。
+
+$HOME/.hgrc::
+ ユーザ毎の設定オプションで、ディレクトリを問わず全ての
+ Mercurial コマンドに適用されます。このファイルの値はグローバル
+ の設定を上書きします。
+
+<repo>/.hg/hgrc::
+ リポジトリ毎の設定オプションで、そのリポジトリのみに適用されま
+ す。このファイルはバージョン管理されず、 "clone" 操作で転送さ
+ れることもありません。このファイルの値はグローバルの設定とユー
+ ザ毎の設定を上書きします。
+
+構文
+--
+
+設定ファイルは "[セクション]" ヘッダから始まるセクションと、それに
+続く"名前: 値"のエントリから成ります: "名前=値"も認められます。
+
+ [spam]
+ eggs=ham
+ green=
+ eggs
+
+各行は1つのエントリを含みます。もし次の行がインデントされていた場
+合、それは前の行の続きとして扱われます。
+
+先行する空白は値から取り除かれます。空行は読み飛ばされます。
+
+オプションの値は同じセクションや、特別な DEFAULT セクションの別の
+値を参照するフォーマット文字列を含むことができます。
+
+"#" や ";" で始まる行は無視されるので、コメントとして使うことがで
+きます。
+
+セクション
+-----
+
+このセクションは Mercurial の "hgrc" に使うことができる異なったセ
+クションのそれぞれの目的や可能なキー、そして取り得る値について記述
+します。
+
+decode/encode::
+ checkout/checkin でファイルを転送するときのフィルターです。これ
+ は典型的には改行を処理したり、他の地域化/標準化に使われるでしょ
+ う。
+
+ フィルターはフィルターパターンとそれに続くフィルターコマンドから
+ なります。コマンドは標準入力からのデータを受け付け、変換したデー
+ タを標準出力に返す必要があります。
+
+ 例:
+
+ [encode]
+ # delta 圧縮を改善するためにチェックイン時に gzip ファイルを
+ # 伸長します
+ # 注意: 必ずしも良いアイディアではありません。ただの例です
+ *.gz = gunzip
+
+ [decode]
+ # 作業ディレクトリに書き出すときにファイルを gzip で再圧縮します
+ *.gz = gzip
+
+hooks::
+ コミットの開始、終了時など様々なアクションで自動的に実行されるコ
+ マンドです。
+ changegroup;;
+ push や pull でチェンジグループが加えられたあとに起動します。
+ commit;;
+ チェンジセットが作成された後に起動します。新しく作成されたチェ
+ ンジセットの ID が渡されます。
+ precommit;;
+ コミット前に起動します。終了ステータス 0 によりコミットを続行
+ します。非ゼロのステータスでコミットは失敗します。
+
+http_proxy::
+ HTTP プロキシを通してウェブを使った Mercurial のリポジトリにアク
+ セスするのに使われます。
+ host;;
+ プロキシサーバのホスト名と(オプションの)ポートで、例えば
+ "myproxy:8000"などです。
+ no;;
+ オプションです。コンマで区切られたプロキシを通過すべきホスト名
+ のリストです。
+ passwd;;
+ オプションです。プロキシサーバの認証用のパスワードです。
+ user;;
+ オプションです。プロキシサーバの認証用のユーザ名です。
+
+paths::
+ リポジトリにシンボル名を割当てます。左側がシンボル名で、右側がリ
+ ポジトリの場所を示すディレクトリや URL です。
+
+ui::
+ ユーザインターフェースの設定です。
+ debug;;
+ デバッグ情報を表示します。True か False を取ります。デフォルト
+ では False です。
+ editor;;
+ コミット中に使用するエディタです。デフォルトは $EDITOR か
+ "vi" です。
+ interactive;;
+ ユーザに対してプロンプトを出すようにします。True か False を取
+ ります。デフォルトでは True です。
+ merge;;
+ 手動での merge 中に衝突を解決するために使われるプログラムです。
+ デフォルトは "hgmerge" です。
+ quiet;;
+ 表示される出力の量を減らします。True か False を取ります。デフォ
+ ルトは False です。
+ remotecmd;;
+ clone/push/pull 操作で使われるリモートのコマンドです。デフォル
+ トは 'hg' です。
+ ssh;;
+ SSH 接続で使われるコマンドです。デフォルトは 'ssh' です。
+ username;;
+ コミットを実行したときに作成されるチェンジセットのコミッタです。
+ 一般的には人名と電子メールアドレスで、例えば "Fred Widget
+ <fred@example.com>" などです。デフォルトは $EMAIL か
+ username@hostname です。
+ verbose;;
+ 表示される出力の量を増やします。True か False を取ります。デフォ
+ ルトは False です。
+
+web::
+ ウェブインターフェイスの設定です。
+ accesslog;;
+ アクセスログの出力先です。デフォルトでは標準出力です。
+ address;;
+ バインドするインターフェイスアドレスです。デフォルトでは全てで
+ す。
+ allowbz2;;
+ リポジトリのリビジョンから .tar.bz2 をダウンロードさせるかどう
+ かです。デフォルトでは false です。
+ allowgz;;
+ リポジトリのリビジョンから .tar.gz をダウンロードさせるかどう
+ かです。デフォルトでは false です。
+ allowpull;;
+ リポジトリから pull させるかどうかです。デフォルトでは true で
+ す。
+ allowzip;;
+ リポジトリのリビジョンから .zip をダウンロードさせるかどうかで
+ す。デフォルトでは false です。この機能は一時ファイルを作成し
+ ます。
+ description;;
+ リポジトリの目的や内容についてのテキストによる説明です。デフォ
+ ルトでは"unknown" です。
+ errorlog;;
+ エラーログの出力先です。デフォルトでは標準エラー出力です。
+ ipv6;;
+ IPv6 を使うかどうかです。デフォルトでは false です。
+ name;;
+ ウェブインターフェイスを使うときのリポジトリの名前です。デフォ
+ ルトは現在の作業ディレクトリです。
+ maxchanges;;
+ チェンジログに記載する変更の最大数です。デフォルトでは 10 です。
+ maxfiles;;
+ チェンジセットに記載するファイルの最大数です。デフォルトでは
+ 10 です。
+ port;;
+ リスンするポートです。デフォルト は 8000 です。
+ style;;
+ 使用するテンプレートマップのスタイルです。
+ templates;;
+ HTML テンプレートの在処です。デフォルトではインストールしたと
+ きのパスです。
+
+著者
+--
+Bryan O'Sullivan <bos@serpentine.com>.
+
+Mercurial は Matt Mackall <mpm@selenic.com> により書かれました。
+
+関連項目
+----
+hg(1)
+
+COPYING
+-------
+このマニュアルの著作権は 2005 Bryan O'Sullivan です。
+Mercurial の著作権は 2005 Matt Mackall です。
+このソフトウェアの自由な使用は GNU 一般公有使用許諾 (GPL) のもとで
+認められます。
new file mode 100755
--- /dev/null
+++ b/hg
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+#
+# mercurial - scalable distributed SCM
+#
+# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial import commands
+
+commands.run()
new file mode 100755
--- /dev/null
+++ b/hgeditor
@@ -0,0 +1,54 @@
+#!/bin/sh
+#
+# This is an example of using HGEDITOR to create a diff to review the
+# changes while committing.
+
+# If you want to pass your favourite editor some other parameters
+# only for Mercurial, modify this:
+case "${EDITOR}" in
+ "")
+ EDITOR="vi"
+ ;;
+ emacs)
+ EDITOR="$EDITOR -nw"
+ ;;
+ gvim|vim)
+ EDITOR="$EDITOR -f -o"
+ ;;
+esac
+
+
+HGTMP=""
+cleanup_exit() {
+ rm -rf "$HGTMP"
+}
+
+# Remove temporary files even if we get interrupted
+trap "cleanup_exit" 0 # normal exit
+trap "exit 255" 1 2 3 6 15 # HUP INT QUIT ABRT TERM
+
+HGTMP="${TMPDIR-/tmp}/hgeditor.$RANDOM.$RANDOM.$RANDOM.$$"
+(umask 077 && mkdir "$HGTMP") || {
+ echo "Could not create temporary directory! Exiting." 1>&2
+ exit 1
+}
+
+(
+ grep '^HG: changed' "$1" | cut -b 13- | while read changed; do
+ hg diff "$changed" >> "$HGTMP/diff"
+ done
+)
+
+cat "$1" > "$HGTMP/msg"
+
+CHECKSUM=`md5sum "$HGTMP/msg"`
+if [ -s "$HGTMP/diff" ]; then
+ $EDITOR "$HGTMP/msg" "$HGTMP/diff" || exit $?
+else
+ $EDITOR "$HGTMP/msg" || exit $?
+fi
+echo "$CHECKSUM" | md5sum -c >/dev/null 2>&1 && exit 13
+
+mv "$HGTMP/msg" "$1"
+
+exit $?
new file mode 100644
--- /dev/null
+++ b/hgext/__init__.py
@@ -0,0 +1,1 @@
+# placeholder
new file mode 100644
--- /dev/null
+++ b/hgext/acl.py
@@ -0,0 +1,124 @@
+# acl.py - changeset access control for mercurial
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+#
+# this hook allows to allow or deny access to parts of a repo when
+# taking incoming changesets.
+#
+# authorization is against local user name on system where hook is
+# run, not committer of original changeset (since that is easy to
+# spoof).
+#
+# acl hook is best to use if you use hgsh to set up restricted shells
+# for authenticated users to only push to / pull from. not safe if
+# user has interactive shell access, because they can disable hook.
+# also not safe if remote users share one local account, because then
+# no way to tell remote users apart.
+#
+# to use, configure acl extension in hgrc like this:
+#
+# [extensions]
+# hgext.acl =
+#
+# [hooks]
+# pretxnchangegroup.acl = python:hgext.acl.hook
+#
+# [acl]
+# sources = serve # check if source of incoming changes in this list
+# # ("serve" == ssh or http, "push", "pull", "bundle")
+#
+# allow and deny lists have subtree pattern (default syntax is glob)
+# on left, user names on right. deny list checked before allow list.
+#
+# [acl.allow]
+# # if acl.allow not present, all users allowed by default
+# # empty acl.allow = no users allowed
+# docs/** = doc_writer
+# .hgtags = release_engineer
+#
+# [acl.deny]
+# # if acl.deny not present, no users denied by default
+# # empty acl.deny = all users allowed
+# glob pattern = user4, user5
+# ** = user6
+
+from mercurial.demandload import *
+from mercurial.i18n import gettext as _
+from mercurial.node import *
+demandload(globals(), 'getpass mercurial:util')
+
+class checker(object):
+ '''acl checker.'''
+
+ def buildmatch(self, key):
+ '''return tuple of (match function, list enabled).'''
+ if not self.ui.has_config(key):
+ self.ui.debug(_('acl: %s not enabled\n') % key)
+ return None, False
+
+ thisuser = self.getuser()
+ pats = [pat for pat, user in self.ui.configitems(key)
+ if user == thisuser]
+ self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
+ (key, len(pats), thisuser))
+ if pats:
+ match = util.matcher(self.repo.root, names=pats)[1]
+ else:
+ match = util.never
+ return match, True
+
+ def getuser(self):
+ '''return name of authenticated user.'''
+ return self.user
+
+ def __init__(self, ui, repo):
+ self.ui = ui
+ self.repo = repo
+ self.user = getpass.getuser()
+ cfg = self.ui.config('acl', 'config')
+ if cfg:
+ self.ui.readconfig(cfg)
+ self.allow, self.allowable = self.buildmatch('acl.allow')
+ self.deny, self.deniable = self.buildmatch('acl.deny')
+
+ def skipsource(self, source):
+ '''true if incoming changes from this source should be skipped.'''
+ ok_sources = self.ui.config('acl', 'sources', 'serve').split()
+ return source not in ok_sources
+
+ def check(self, node):
+ '''return if access allowed, raise exception if not.'''
+ files = self.repo.changelog.read(node)[3]
+ if self.deniable:
+ for f in files:
+ if self.deny(f):
+ self.ui.debug(_('acl: user %s denied on %s\n') %
+ (self.getuser(), f))
+ raise util.Abort(_('acl: access denied for changeset %s') %
+ short(node))
+ if self.allowable:
+ for f in files:
+ if not self.allow(f):
+ self.ui.debug(_('acl: user %s not allowed on %s\n') %
+ (self.getuser(), f))
+ raise util.Abort(_('acl: access denied for changeset %s') %
+ short(node))
+ self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
+
+def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
+ if hooktype != 'pretxnchangegroup':
+ raise util.Abort(_('config error - hook type "%s" cannot stop '
+ 'incoming changesets') % hooktype)
+
+ c = checker(ui, repo)
+ if c.skipsource(source):
+ ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
+ return
+
+ start = repo.changelog.rev(bin(node))
+ end = repo.changelog.count()
+ for rev in xrange(start, end):
+ c.check(repo.changelog.node(rev))
new file mode 100644
--- /dev/null
+++ b/hgext/bugzilla.py
@@ -0,0 +1,310 @@
+# bugzilla.py - bugzilla integration for mercurial
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+#
+# hook extension to update comments of bugzilla bugs when changesets
+# that refer to bugs by id are seen. this hook does not change bug
+# status, only comments.
+#
+# to configure, add items to '[bugzilla]' section of hgrc.
+#
+# to use, configure bugzilla extension and enable like this:
+#
+# [extensions]
+# hgext.bugzilla =
+#
+# [hooks]
+# # run bugzilla hook on every change pulled or pushed in here
+# incoming.bugzilla = python:hgext.bugzilla.hook
+#
+# config items:
+#
+# section name is 'bugzilla'.
+# [bugzilla]
+#
+# REQUIRED:
+# host = bugzilla # mysql server where bugzilla database lives
+# password = ** # user's password
+# version = 2.16 # version of bugzilla installed
+#
+# OPTIONAL:
+# bzuser = ... # fallback bugzilla user name to record comments with
+# db = bugs # database to connect to
+# notify = ... # command to run to get bugzilla to send mail
+# regexp = ... # regexp to match bug ids (must contain one "()" group)
+# strip = 0 # number of slashes to strip for url paths
+# style = ... # style file to use when formatting comments
+# template = ... # template to use when formatting comments
+# timeout = 5 # database connection timeout (seconds)
+# user = bugs # user to connect to database as
+# [web]
+# baseurl = http://hgserver/... # root of hg web site for browsing commits
+#
+# if hg committer names are not same as bugzilla user names, use
+# "usermap" feature to map from committer email to bugzilla user name.
+# usermap can be in hgrc or separate config file.
+#
+# [bugzilla]
+# usermap = filename # cfg file with "committer"="bugzilla user" info
+# [usermap]
+# committer_email = bugzilla_user_name
+
+from mercurial.demandload import *
+from mercurial.i18n import gettext as _
+from mercurial.node import *
+demandload(globals(), 'mercurial:templater,util os re time')
+
+MySQLdb = None
+
def buglist(ids):
    '''format a list of bug ids for use in an SQL "in" clause,
    e.g. [1, 2, 3] -> "(1,2,3)".'''
    return '(%s)' % ','.join([str(id) for id in ids])
+
class bugzilla_2_16(object):
    '''support for bugzilla version 2.16.'''

    def __init__(self, ui):
        self.ui = ui
        # connection parameters for bugzilla's mysql database.
        host = self.ui.config('bugzilla', 'host', 'localhost')
        user = self.ui.config('bugzilla', 'user', 'bugs')
        passwd = self.ui.config('bugzilla', 'password')
        db = self.ui.config('bugzilla', 'db', 'bugs')
        timeout = int(self.ui.config('bugzilla', 'timeout', 5))
        # optional extra config file providing the [usermap] section.
        usermap = self.ui.config('bugzilla', 'usermap')
        if usermap:
            self.ui.readconfig(usermap)
        self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
                     (host, db, user, '*' * len(passwd)))
        self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
                                    db=db, connect_timeout=timeout)
        self.cursor = self.conn.cursor()
        # cache the field id of the comment ("longdesc") field; used by
        # add_comment when recording activity in bugs_activity.
        self.run('select fieldid from fielddefs where name = "longdesc"')
        ids = self.cursor.fetchall()
        if len(ids) != 1:
            raise util.Abort(_('unknown database schema'))
        self.longdesc_id = ids[0][0]
        # memo of user name -> numeric bugzilla user id lookups.
        self.user_ids = {}

    def run(self, *args, **kwargs):
        '''run a query.'''
        self.ui.note(_('query: %s %s\n') % (args, kwargs))
        try:
            self.cursor.execute(*args, **kwargs)
        except MySQLdb.MySQLError, err:
            # log the failing query, then let the caller see the error.
            self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
            raise

    def filter_real_bug_ids(self, ids):
        '''filter not-existing bug ids from list.'''
        # ids are ints produced by find_bug_ids, so interpolating
        # buglist(ids) into the SQL is injection-safe here.
        self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
        ids = [c[0] for c in self.cursor.fetchall()]
        ids.sort()
        return ids

    def filter_unknown_bug_ids(self, node, ids):
        '''filter bug ids from list that already refer to this changeset.'''

        # a bug already "knows" a changeset when some comment on it
        # mentions the changeset's short hash.
        self.run('''select bug_id from longdescs where
                    bug_id in %s and thetext like "%%%s%%"''' %
                 (buglist(ids), short(node)))
        unknown = dict.fromkeys(ids)
        for (id,) in self.cursor.fetchall():
            self.ui.status(_('bug %d already knows about changeset %s\n') %
                           (id, short(node)))
            unknown.pop(id, None)
        ids = unknown.keys()
        ids.sort()
        return ids

    def notify(self, ids):
        '''tell bugzilla to send mail.'''

        self.ui.status(_('telling bugzilla to send mail:\n'))
        for id in ids:
            self.ui.status(_(' bug %s\n') % id)
            # run the (configurable) processmail command once per bug.
            cmd = self.ui.config('bugzilla', 'notify',
                                 'cd /var/www/html/bugzilla && '
                                 './processmail %s nobody@nowhere.com') % id
            fp = os.popen('(%s) 2>&1' % cmd)
            out = fp.read()
            ret = fp.close()
            if ret:
                self.ui.warn(out)
                raise util.Abort(_('bugzilla notify command %s') %
                                 util.explain_exit(ret)[0])
        self.ui.status(_('done\n'))

    def get_user_id(self, user):
        '''look up numeric bugzilla user id.'''
        try:
            return self.user_ids[user]
        except KeyError:
            try:
                # user may already be a numeric id string.
                userid = int(user)
            except ValueError:
                self.ui.note(_('looking up user %s\n') % user)
                self.run('''select userid from profiles
                            where login_name like %s''', user)
                all = self.cursor.fetchall()
                if len(all) != 1:
                    # raises KeyError so add_comment can fall back.
                    raise KeyError(user)
                userid = int(all[0][0])
            self.user_ids[user] = userid
            return userid

    def map_committer(self, user):
        '''map name of committer to bugzilla user name.'''
        # [usermap] entries win; otherwise the committer name is used as-is.
        for committer, bzuser in self.ui.configitems('usermap'):
            if committer.lower() == user.lower():
                return bzuser
        return user

    def add_comment(self, bugid, text, committer):
        '''add comment to bug. try adding comment as committer of
        changeset, otherwise as default bugzilla user.'''
        user = self.map_committer(committer)
        try:
            userid = self.get_user_id(user)
        except KeyError:
            try:
                # fall back to the configured bzuser, if any.
                defaultuser = self.ui.config('bugzilla', 'bzuser')
                if not defaultuser:
                    raise util.Abort(_('cannot find bugzilla user id for %s') %
                                     user)
                userid = self.get_user_id(defaultuser)
            except KeyError:
                raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
                                 (user, defaultuser))
        now = time.strftime('%Y-%m-%d %H:%M:%S')
        # insert the comment itself, then record the change in the
        # activity table using the cached longdesc field id.
        self.run('''insert into longdescs
                    (bug_id, who, bug_when, thetext)
                    values (%s, %s, %s, %s)''',
                 (bugid, userid, now, text))
        self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
                    values (%s, %s, %s, %s)''',
                 (bugid, userid, now, self.longdesc_id))
+
class bugzilla(object):
    # supported versions of bugzilla. different versions have
    # different schemas.
    _versions = {
        '2.16': bugzilla_2_16,
        }

    # default regexp finding bug references in commit messages,
    # e.g. "bug 1234", "bug #1234", "bugs 1234, 5678 and 9".
    _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
                       r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')

    # lazily created, process-wide database accessor (class attribute,
    # shared across instances).
    _bz = None

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def bz(self):
        '''return object that knows how to talk to bugzilla version in
        use.'''

        if bugzilla._bz is None:
            bzversion = self.ui.config('bugzilla', 'version')
            try:
                bzclass = bugzilla._versions[bzversion]
            except KeyError:
                raise util.Abort(_('bugzilla version %s not supported') %
                                 bzversion)
            bugzilla._bz = bzclass(self.ui)
        return bugzilla._bz

    def __getattr__(self, key):
        # delegate everything else (run, notify, add_comment, ...) to
        # the version-specific accessor.
        return getattr(self.bz(), key)

    # compiled lazily in find_bug_ids from config.
    _bug_re = None
    _split_re = None

    def find_bug_ids(self, node, desc):
        '''find valid bug ids that are referred to in changeset
        comments and that do not already have references to this
        changeset.'''

        if bugzilla._bug_re is None:
            bugzilla._bug_re = re.compile(
                self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
                re.IGNORECASE)
            bugzilla._split_re = re.compile(r'\D+')
        start = 0
        ids = {}
        # collect every number appearing in every bug reference;
        # the dict deduplicates repeated mentions.
        while True:
            m = bugzilla._bug_re.search(desc, start)
            if not m:
                break
            start = m.end()
            for id in bugzilla._split_re.split(m.group(1)):
                if not id: continue
                ids[int(id)] = 1
        ids = ids.keys()
        # drop ids that do not exist, then those that already mention
        # this changeset.
        if ids:
            ids = self.filter_real_bug_ids(ids)
        if ids:
            ids = self.filter_unknown_bug_ids(node, ids)
        return ids

    def update(self, bugid, node, changes):
        '''update bugzilla bug with reference to changeset.'''

        def webroot(root):
            '''strip leading prefix of repo root and turn into
            url-safe path.'''
            count = int(self.ui.config('bugzilla', 'strip', 0))
            root = util.pconvert(root)
            while count > 0:
                c = root.find('/')
                if c == -1:
                    break
                root = root[c+1:]
                count -= 1
            return root

        mapfile = self.ui.config('bugzilla', 'style')
        tmpl = self.ui.config('bugzilla', 'template')
        # render the comment via the changeset templater into a string
        # buffer; style file wins over template, both over the default.
        sio = templater.stringio()
        t = templater.changeset_templater(self.ui, self.repo, mapfile, sio)
        if not mapfile and not tmpl:
            tmpl = _('changeset {node|short} in repo {root} refers '
                     'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
        if tmpl:
            tmpl = templater.parsestring(tmpl, quoted=False)
            t.use_template(tmpl)
        t.show(changenode=node, changes=changes,
               bug=str(bugid),
               hgweb=self.ui.config('web', 'baseurl'),
               root=self.repo.root,
               webroot=webroot(self.repo.root))
        # changes[1] is the changeset's committer string.
        self.add_comment(bugid, sio.getvalue(), templater.email(changes[1]))
+
def hook(ui, repo, hooktype, node=None, **kwargs):
    '''add comment to bugzilla for each changeset that refers to a
    bugzilla bug id. only add a comment once per bug, so same change
    seen multiple times does not fill bug with duplicate data.'''
    # import MySQLdb lazily, and publish it in the module global that
    # bugzilla_2_16 and the except clause below use, so the extension
    # can be enabled without mysql support installed.
    try:
        import MySQLdb as mysql
        global MySQLdb
        MySQLdb = mysql
    except ImportError, err:
        raise util.Abort(_('python mysql support not available: %s') % err)

    if node is None:
        raise util.Abort(_('hook type %s does not pass a changeset id') %
                         hooktype)
    try:
        bz = bugzilla(ui, repo)
        bin_node = bin(node)
        changes = repo.changelog.read(bin_node)
        # changes[4] is the changeset description text.
        ids = bz.find_bug_ids(bin_node, changes[4])
        if ids:
            for id in ids:
                bz.update(id, bin_node, changes)
            bz.notify(ids)
    except MySQLdb.MySQLError, err:
        # err[1] is the server's error message text.
        raise util.Abort(_('database error: %s') % err[1])
+
new file mode 100644
--- /dev/null
+++ b/hgext/extdiff.py
@@ -0,0 +1,150 @@
+# extdiff.py - external diff program support for mercurial
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+#
+# allow to use external programs to compare revisions, or revision
+# with working dir. program is called with two arguments: paths to
+# directories containing snapshots of files to compare.
+#
+# to enable:
+#
+# [extensions]
+# hgext.extdiff =
+#
+# also allows to configure new diff commands, so you do not need to
+# type "hg extdiff -p kdiff3" always.
+#
+# [extdiff]
+# # add new command called vdiff, runs kdiff3
+# cmd.vdiff = kdiff3
+# # add new command called meld, runs meld (no need to name twice)
+# cmd.meld =
+#
+# you can use -I/-X and list of file or directory names like normal
+# "hg diff" command. extdiff makes snapshots of only needed files, so
+# compare program will be fast.
+
+from mercurial.demandload import demandload
+from mercurial.i18n import gettext as _
+from mercurial.node import *
+demandload(globals(), 'mercurial:commands,util os shutil tempfile')
+
def dodiff(ui, repo, diffcmd, pats, opts):
    '''snapshot both sides of the comparison into a temp directory and
    run diffcmd on the two snapshot directories.

    returns 1 if the external program was run, 0 if nothing differed.'''
    def snapshot_node(files, node):
        '''snapshot files as of some revision'''
        changes = repo.changelog.read(node)
        # changes[0] is the manifest node for that revision.
        mf = repo.manifest.read(changes[0])
        dirname = '%s.%s' % (os.path.basename(repo.root), short(node))
        base = os.path.join(tmproot, dirname)
        os.mkdir(base)
        if not ui.quiet:
            ui.write_err(_('making snapshot of %d files from rev %s\n') %
                         (len(files), short(node)))
        for fn in files:
            wfn = util.pconvert(fn)
            ui.note(' %s\n' % wfn)
            dest = os.path.join(base, wfn)
            destdir = os.path.dirname(dest)
            if not os.path.isdir(destdir):
                os.makedirs(destdir)
            # write through wwrite so working-dir filters apply.
            repo.wwrite(wfn, repo.file(fn).read(mf[fn]), open(dest, 'w'))
        return dirname

    def snapshot_wdir(files):
        '''snapshot files from working directory.
        if not using snapshot, -I/-X does not work and recursive diff
        in tools like kdiff3 and meld displays too many files.'''
        dirname = os.path.basename(repo.root)
        base = os.path.join(tmproot, dirname)
        os.mkdir(base)
        if not ui.quiet:
            ui.write_err(_('making snapshot of %d files from working dir\n') %
                         (len(files)))
        for fn in files:
            wfn = util.pconvert(fn)
            ui.note(' %s\n' % wfn)
            dest = os.path.join(base, wfn)
            destdir = os.path.dirname(dest)
            if not os.path.isdir(destdir):
                os.makedirs(destdir)
            # copy in chunks to avoid loading whole files into memory.
            fp = open(dest, 'w')
            for chunk in util.filechunkiter(repo.wopener(wfn)):
                fp.write(chunk)
        return dirname

    node1, node2 = commands.revpair(ui, repo, opts['rev'])
    files, matchfn, anypats = commands.matchpats(repo, pats, opts)
    modified, added, removed, deleted, unknown = repo.changes(
        node1, node2, files, match=matchfn)
    if not (modified or added or removed):
        return 0

    tmproot = tempfile.mkdtemp(prefix='extdiff.')
    try:
        # old side: files that changed or were removed; new side: files
        # that changed or were added (from node2 or the working dir).
        dir1 = snapshot_node(modified + removed, node1)
        if node2:
            dir2 = snapshot_node(modified + added, node2)
        else:
            dir2 = snapshot_wdir(modified + added)
        util.system('%s %s "%s" "%s"' %
                    (diffcmd, ' '.join(opts['option']), dir1, dir2),
                    cwd=tmproot)
        return 1
    finally:
        # always remove the snapshots, even if the diff tool failed.
        ui.note(_('cleaning up temp directory\n'))
        shutil.rmtree(tmproot)
+
def extdiff(ui, repo, *pats, **opts):
    '''use external program to diff repository (or selected files)

    Show differences between revisions for the specified files, using
    an external program. The default program used is "diff -Npru".
    To select a different program, use the -p option. The program
    will be passed the names of two directories to compare. To pass
    additional options to the program, use the -o option. These will
    be passed before the names of the directories to compare.

    When two revision arguments are given, then changes are
    shown between those revisions. If only one revision is
    specified then that revision is compared to the working
    directory, and, when no revisions are specified, the
    working directory files are compared to its parent.'''
    # fall back to plain unified diff when no program was given.
    program = opts['program'] or 'diff -Npru'
    return dodiff(ui, repo, program, pats, opts)
+
# command table hooked into mercurial's dispatcher; uisetup() below
# reuses this option list (minus -p) for the configured cmd.* commands.
cmdtable = {
    "extdiff":
    (extdiff,
     [('p', 'program', '', _('comparison program to run')),
      ('o', 'option', [], _('pass option to comparison program')),
      ('r', 'rev', [], _('revision')),
      ('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     _('hg extdiff [OPT]... [FILE]...')),
    }
+
def uisetup(ui):
    '''register one new command per "cmd.<name>" entry in [extdiff].'''
    for cmd, path in ui.configitems('extdiff'):
        if not cmd.startswith('cmd.'): continue
        cmd = cmd[4:]
        # empty value means the program has the same name as the command.
        if not path: path = cmd
        def save(cmd, path):
            '''use closure to save diff command to use'''
            # the extra function scope binds cmd/path per iteration;
            # without it every generated command would see the loop's
            # final values (late-binding closure pitfall).
            def mydiff(ui, repo, *pats, **opts):
                return dodiff(ui, repo, path, pats, opts)
            mydiff.__doc__ = '''use %s to diff repository (or selected files)

            Show differences between revisions for the specified
            files, using the %s program.

            When two revision arguments are given, then changes are
            shown between those revisions. If only one revision is
            specified then that revision is compared to the working
            directory, and, when no revisions are specified, the
            working directory files are compared to its parent.''' % (cmd, cmd)
            return mydiff
        cmdtable[cmd] = (save(cmd, path),
                         cmdtable['extdiff'][1][1:],
                         _('hg %s [OPT]... [FILE]...') % cmd)
new file mode 100644
--- /dev/null
+++ b/hgext/gpg.py
@@ -0,0 +1,269 @@
+# GnuPG signing extension for Mercurial
+#
+# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os, tempfile, binascii
+from mercurial import util
+from mercurial import node as hgnode
+from mercurial.i18n import gettext as _
+
class gpg:
    '''thin wrapper around the gpg executable for signing and verifying.'''

    def __init__(self, path, key=None):
        # path: gpg binary to run; key: optional key id, passed to gpg
        # as --local-user when signing.
        self.path = path
        self.key = (key and " --local-user \"%s\"" % key) or ""

    def sign(self, data):
        '''return a detached signature for data (gpg's binary output).'''
        gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
        return util.filter(data, gpgcmd)

    def verify(self, data, sig):
        '''check sig against data; return (err, keys).

        err is an error string ("" on success); keys is a list of
        [sigtype, keyid, username, fingerprint] entries parsed from
        gpg's --status-fd output.'''
        sigfile = datafile = None
        try:
            # gpg wants both the signature and the data as files.
            fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
            fp = os.fdopen(fd, 'wb')
            fp.write(sig)
            fp.close()
            fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
            fp = os.fdopen(fd, 'wb')
            fp.write(data)
            fp.close()
            gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
                      "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
            ret = util.filter("", gpgcmd)
        finally:
            # best-effort cleanup; catch only OSError (was a bare
            # "except:" that also swallowed e.g. KeyboardInterrupt).
            for f in (sigfile, datafile):
                try:
                    if f:
                        os.unlink(f)
                except OSError:
                    pass
        keys = []
        key, fingerprint = None, None
        err = ""
        for l in ret.splitlines():
            # see DETAILS in the gnupg documentation
            # filter the logger output
            if not l.startswith("[GNUPG:]"):
                continue
            l = l[9:]
            if l.startswith("ERRSIG"):
                err = _("error while verifying signature")
                break
            elif l.startswith("VALIDSIG"):
                # fingerprint of the primary key
                fingerprint = l.split()[10]
            elif (l.startswith("GOODSIG") or
                  l.startswith("EXPSIG") or
                  l.startswith("EXPKEYSIG") or
                  l.startswith("BADSIG")):
                # a new signature starts; flush the previous one.
                if key is not None:
                    keys.append(key + [fingerprint])
                key = l.split(" ", 2)
                fingerprint = None
        if err:
            return err, []
        if key is not None:
            keys.append(key + [fingerprint])
        return err, keys
+
def newgpg(ui, **opts):
    """create a new gpg instance"""
    # command-line key (opts) takes precedence over the configured one.
    gpgpath = ui.config("gpg", "cmd", "gpg")
    gpgkey = opts.get('key') or ui.config("gpg", "key", None)
    return gpg(gpgpath, gpgkey)
+
def sigwalk(repo):
    """
    walk over every sigs, yields a couple
    ((node, version, sig), (filename, linenumber))
    """
    def parsefile(fileiter, context):
        # yield one parsed "<node> <version> <sig>" entry per non-empty
        # line, tagged with (context, line number) for error reporting.
        # fix: count every line (the old code skipped the increment on
        # empty lines, so reported line numbers drifted after a blank).
        ln = 1
        for l in fileiter:
            if l:
                yield (l.split(" ", 2), (context, ln))
            ln += 1

    fl = repo.file(".hgsigs")
    h = fl.heads()
    h.reverse()
    # read the heads
    for r in h:
        fn = ".hgsigs|%s" % hgnode.short(r)
        for item in parsefile(fl.read(r).splitlines(), fn):
            yield item
    try:
        # read local signatures
        fn = "localsigs"
        for item in parsefile(repo.opener(fn), fn):
            yield item
    except IOError:
        # no localsigs file: nothing local to report.
        pass
+
def getkeys(ui, repo, mygpg, sigdata, context):
    """get the keys who signed a data"""
    fn, ln = context
    node, version, sig = sigdata
    prefix = "%s:%d" % (fn, ln)
    node = hgnode.bin(node)

    # rebuild the signed text for this node/version and verify the
    # base64-decoded detached signature against it.
    data = node2txt(repo, node, version)
    sig = binascii.a2b_base64(sig)
    err, keys = mygpg.verify(data, sig)
    if err:
        ui.warn("%s:%d %s\n" % (fn, ln , err))
        return None

    validkeys = []
    # warn for expired key and/or sigs
    for key in keys:
        # key is [sigtype, keyid, username, fingerprint] (see gpg.verify).
        if key[0] == "BADSIG":
            ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
            continue
        if key[0] == "EXPSIG":
            ui.write(_("%s Note: Signature has expired"
                       " (signed by: \"%s\")\n") % (prefix, key[2]))
        elif key[0] == "EXPKEYSIG":
            ui.write(_("%s Note: This key has expired"
                       " (signed by: \"%s\")\n") % (prefix, key[2]))
        validkeys.append((key[1], key[2], key[3]))
    return validkeys
+
def sigs(ui, repo):
    """list signed changesets"""
    mygpg = newgpg(ui)
    # revision number -> list of (keyid, user, fingerprint) tuples.
    revs = {}

    for data, context in sigwalk(repo):
        node, version, sig = data
        fn, ln = context
        try:
            n = repo.lookup(node)
        except KeyError:
            ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
            continue
        r = repo.changelog.rev(n)
        keys = getkeys(ui, repo, mygpg, data, context)
        if not keys:
            continue
        revs.setdefault(r, [])
        revs[r].extend(keys)
    # fix: dict key order is arbitrary, so sort the revision numbers
    # before reversing (the old code reversed unsorted keys), giving a
    # stable newest-first listing.
    nodes = revs.keys()
    nodes.sort()
    nodes.reverse()
    for rev in nodes:
        for k in revs[rev]:
            r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
            ui.write("%-30s %s\n" % (keystr(ui, k), r))
+
def check(ui, repo, rev):
    """verify all the signatures there may be for a particular revision"""
    mygpg = newgpg(ui)
    rev = repo.lookup(rev)
    hexrev = hgnode.hex(rev)
    keys = []

    # gather the valid keys from every signature that covers this node.
    for data, context in sigwalk(repo):
        if data[0] == hexrev:
            found = getkeys(ui, repo, mygpg, data, context)
            if found:
                keys.extend(found)

    if not keys:
        ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
        return

    # print summary
    ui.write("%s is signed by:\n" % hgnode.short(rev))
    for key in keys:
        ui.write(" %s\n" % keystr(ui, key))
+
def keystr(ui, key):
    """associate a string to a key (username, comment)"""
    # a [gpg] config entry keyed by the fingerprint supplies an
    # optional human-readable comment for the key.
    keyid, user, fingerprint = key
    comment = ui.config("gpg", fingerprint, None)
    if not comment:
        return user
    return "%s (%s)" % (user, comment)
+
def sign(ui, repo, *revs, **opts):
    """add a signature for the current tip or a given revision"""
    mygpg = newgpg(ui, **opts)
    # only signature scheme "0" exists; see node2txt.
    sigver = "0"
    sigmessage = ""
    if revs:
        nodes = [repo.lookup(n) for n in revs]
    else:
        nodes = [repo.changelog.tip()]

    for n in nodes:
        hexnode = hgnode.hex(n)
        ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
                                      hgnode.short(n)))
        # build data
        data = node2txt(repo, n, sigver)
        sig = mygpg.sign(data)
        if not sig:
            raise util.Abort(_("Error while signing"))
        # store the signature as a single base64 line:
        # "<hexnode> <version> <sig>".
        sig = binascii.b2a_base64(sig)
        sig = sig.replace("\n", "")
        sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)

    # write it
    if opts['local']:
        # local signatures live in .hg/localsigs and are never committed.
        repo.opener("localsigs", "ab").write(sigmessage)
        return

    # repo.changes() yields the status lists (modified, added, ...);
    # refuse to touch an already-modified .hgsigs unless forced.
    for x in repo.changes():
        if ".hgsigs" in x and not opts["force"]:
            raise util.Abort(_("working copy of .hgsigs is changed "
                               "(please commit .hgsigs manually "
                               "or use --force)"))

    repo.wfile(".hgsigs", "ab").write(sigmessage)

    # schedule .hgsigs for addition if it is not yet tracked.
    if repo.dirstate.state(".hgsigs") == '?':
        repo.add([".hgsigs"])

    if opts["no_commit"]:
        return

    message = opts['message']
    if not message:
        # default commit message: one line per signed changeset.
        message = "\n".join([_("Added signature for changeset %s")
                             % hgnode.hex(n)
                             for n in nodes])
    try:
        repo.commit([".hgsigs"], message, opts['user'], opts['date'])
    except ValueError, inst:
        raise util.Abort(str(inst))
+
def node2txt(repo, node, ver):
    """return the text that gets signed for node under signature
    scheme ver; only scheme "0" (hex node plus newline) exists."""
    if ver != "0":
        raise util.Abort(_("unknown signature version"))
    return "%s\n" % hgnode.hex(node)
+
# commands contributed to mercurial: "sign" plus the read-only
# "sigcheck" and "sigs" inspection commands.
cmdtable = {
    "sign":
    (sign,
     [('l', 'local', None, _("make the signature local")),
      ('f', 'force', None, _("sign even if the sigfile is modified")),
      ('', 'no-commit', None, _("do not commit the sigfile after signing")),
      ('m', 'message', "", _("commit message")),
      ('d', 'date', "", _("date code")),
      ('u', 'user', "", _("user")),
      ('k', 'key', "", _("the key id to sign with"))],
     _("hg sign [OPTION]... [REVISION]...")),
    "sigcheck": (check, [], _('hg sigcheck REVISION')),
    "sigs": (sigs, [], _('hg sigs')),
}
+
new file mode 100644
--- /dev/null
+++ b/hgext/hbisect.py
@@ -0,0 +1,296 @@
+# bisect extension for mercurial
+#
+# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
+# Inspired by git bisect, extension skeleton taken from mq.py.
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial.i18n import gettext as _
+from mercurial.demandload import demandload
+demandload(globals(), "os sys sets mercurial:hg,util,commands")
+
+versionstr = "0.0.3"
+
def lookup_rev(ui, repo, rev=None):
    """returns rev or the checked-out revision if rev is None

    Aborts when rev is None and the working directory does not have
    exactly one non-null parent (uncommitted merge or similar)."""
    # idiom fix: "rev is not None" instead of "not rev is None".
    if rev is not None:
        return repo.lookup(rev)
    parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
    if len(parents) != 1:
        raise util.Abort(_("unexpected number of parents, "
                           "please commit or revert"))
    return parents.pop()
+
def check_clean(ui, repo):
    # abort unless the working directory has no local modifications;
    # unknown/deleted files are deliberately tolerated.
    # NOTE(review): this exits the process instead of raising
    # util.Abort like the rest of the extension - confirm callers rely
    # on SystemExit before changing it.
    modified, added, removed, deleted, unknown = repo.changes()
    if modified or added or removed:
        ui.warn("Repository is not clean, please commit or revert\n")
        sys.exit(1)
+
class bisect(object):
    """dichotomic search in the DAG of changesets"""
    def __init__(self, ui, repo):
        self.repo = repo
        # persistent state lives in .hg/bisect/{good,bad}.
        self.path = repo.join("bisect")
        self.opener = util.opener(self.path)
        self.ui = ui
        self.goodrevs = []
        self.badrev = None
        # NOTE(review): good_dirty/bad_dirty appear unused in this file.
        self.good_dirty = 0
        self.bad_dirty = 0
        self.good_path = "good"
        self.bad_path = "bad"

        # reload previously recorded good/bad nodes, if any.
        if os.path.exists(os.path.join(self.path, self.good_path)):
            self.goodrevs = self.opener(self.good_path).read().splitlines()
            self.goodrevs = [hg.bin(x) for x in self.goodrevs]
        if os.path.exists(os.path.join(self.path, self.bad_path)):
            r = self.opener(self.bad_path).read().splitlines()
            if r:
                self.badrev = hg.bin(r.pop(0))

    def __del__(self):
        # save state when the object goes away; after reset() the
        # directory is gone and there is nothing to write.
        if not os.path.isdir(self.path):
            return
        f = self.opener(self.good_path, "w")
        f.write("\n".join([hg.hex(r) for r in self.goodrevs]))
        if len(self.goodrevs) > 0:
            f.write("\n")
        f = self.opener(self.bad_path, "w")
        if self.badrev:
            f.write(hg.hex(self.badrev) + "\n")

    def init(self):
        """start a new bisection"""
        if os.path.isdir(self.path):
            raise util.Abort(_("bisect directory already exists\n"))
        os.mkdir(self.path)
        check_clean(self.ui, self.repo)
        return 0

    def reset(self):
        """finish a bisection"""
        if os.path.isdir(self.path):
            sl = [os.path.join(self.path, p)
                  for p in [self.bad_path, self.good_path]]
            for s in sl:
                if os.path.exists(s):
                    os.unlink(s)
            os.rmdir(self.path)
        # Not sure about this
        #self.ui.write("Going back to tip\n")
        #self.repo.update(self.repo.changelog.tip())
        return 1

    def num_ancestors(self, head=None, stop=None):
        """
        returns a dict with the mapping:
        node -> number of ancestors (self included)
        for all nodes who are ancestor of head and
        not in stop.
        """
        if head is None:
            head = self.badrev
        return self.__ancestors_and_nb_ancestors(head, stop)[1]

    def ancestors(self, head=None, stop=None):
        """
        returns the set of the ancestors of head (self included)
        who are not in stop.
        """
        if head is None:
            head = self.badrev
        return self.__ancestors_and_nb_ancestors(head, stop)[0]

    def __ancestors_and_nb_ancestors(self, head, stop=None):
        """
        if stop is None then ancestors of goodrevs are used as
        lower limit.

        returns (anc, n_child) where anc is the set of the ancestors of head
        and n_child is a dictionary with the following mapping:
        node -> number of ancestors (self included)
        """
        cl = self.repo.changelog
        if not stop:
            # everything reachable from a good rev is outside the
            # remaining search space.
            stop = sets.Set([])
            for i in xrange(len(self.goodrevs)-1, -1, -1):
                g = self.goodrevs[i]
                if g in stop:
                    continue
                stop.update(cl.reachable(g))
        def num_children(a):
            """
            returns a dictionnary with the following mapping
            node -> [number of children, empty set]
            """
            d = {a: [0, sets.Set([])]}
            for i in xrange(cl.rev(a)+1):
                n = cl.node(i)
                if not d.has_key(n):
                    d[n] = [0, sets.Set([])]
                parents = [p for p in cl.parents(n) if p != hg.nullid]
                for p in parents:
                    d[p][0] += 1
            return d

        if head in stop:
            raise util.Abort(_("Unconsistent state, %s:%s is good and bad")
                             % (cl.rev(head), hg.short(head)))
        # walk in increasing rev order (parents before children): each
        # node outside stop unions its parents' ancestor sets and adds
        # itself; once all of a node's children have been visited
        # (count hits 0) its set is collapsed into its size, which is
        # safe because nothing reads that set afterwards.
        n_child = num_children(head)
        for i in xrange(cl.rev(head)+1):
            n = cl.node(i)
            parents = [p for p in cl.parents(n) if p != hg.nullid]
            for p in parents:
                n_child[p][0] -= 1
                if not n in stop:
                    n_child[n][1].union_update(n_child[p][1])
                if n_child[p][0] == 0:
                    n_child[p] = len(n_child[p][1])
            if not n in stop:
                n_child[n][1].add(n)
                if n_child[n][0] == 0:
                    if n == head:
                        # head has no children: keep its full set.
                        anc = n_child[n][1]
                    n_child[n] = len(n_child[n][1])
        return anc, n_child

    def next(self):
        # pick the untested ancestor of badrev that best halves the
        # remaining search space; None means the culprit was found.
        if not self.badrev:
            raise util.Abort(_("You should give at least one bad revision"))
        if not self.goodrevs:
            self.ui.warn(_("No good revision given\n"))
            self.ui.warn(_("Marking the first revision as good\n"))
        ancestors, num_ancestors = self.__ancestors_and_nb_ancestors(
            self.badrev)
        tot = len(ancestors)
        if tot == 1:
            # only badrev itself remains: it is the first bad revision.
            if ancestors.pop() != self.badrev:
                raise util.Abort(_("Could not find the first bad revision"))
            self.ui.write(_("The first bad revision is:\n"))
            displayer = commands.show_changeset(self.ui, self.repo, {})
            displayer.show(changenode=self.badrev)
            return None
        best_rev = None
        best_len = -1
        for n in ancestors:
            l = num_ancestors[n]
            # maximizing min(ancestors, non-ancestors) picks the node
            # that splits the search space closest to in half.
            l = min(l, tot - l)
            if l > best_len:
                best_len = l
                best_rev = n
        assert best_rev is not None
        # expected number of remaining tests ~= floor(log2(tot)).
        nb_tests = 0
        q, r = divmod(tot, 2)
        while q:
            nb_tests += 1
            q, r = divmod(q, 2)
        msg = _("Testing changeset %s:%s (%s changesets remaining, "
                "~%s tests)\n") % (self.repo.changelog.rev(best_rev),
                                   hg.short(best_rev), tot, nb_tests)
        self.ui.write(msg)
        return best_rev

    def autonext(self):
        """find and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = self.next()
        if rev is not None:
            return self.repo.update(rev, force=True)

    def good(self, rev):
        # record a known-good node; no working-dir update.
        self.goodrevs.append(rev)

    def autogood(self, rev=None):
        """mark revision as good and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = lookup_rev(self.ui, self.repo, rev)
        self.good(rev)
        if self.badrev:
            return self.autonext()

    def bad(self, rev):
        # record the known-bad node; no working-dir update.
        self.badrev = rev

    def autobad(self, rev=None):
        """mark revision as bad and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = lookup_rev(self.ui, self.repo, rev)
        self.bad(rev)
        if self.goodrevs:
            self.autonext()
+
+# should we put it in the class ?
def test(ui, repo, rev):
    """test the bisection code"""
    # rev plays the role of the (known) first bad changeset; the loop
    # answers good/bad automatically and checks bisection converges.
    b = bisect(ui, repo)
    rev = repo.lookup(rev)
    ui.write("testing with rev %s\n" % hg.hex(rev))
    anc = b.ancestors()
    while len(anc) > 1:
        if not rev in anc:
            ui.warn("failure while bisecting\n")
            sys.exit(1)
        ui.write("it worked :)\n")
        new_rev = b.next()
        ui.write("choosing if good or bad\n")
        # new_rev is bad iff the culprit is among its ancestors.
        if rev in b.ancestors(head=new_rev):
            b.bad(new_rev)
            ui.write("it is bad\n")
        else:
            b.good(new_rev)
            ui.write("it is good\n")
        anc = b.ancestors()
        #repo.update(new_rev, force=True)
    # bisection must have narrowed down to exactly the chosen rev.
    for v in anc:
        if v != rev:
            ui.warn("fail to found cset! :(\n")
            return 1
    ui.write("Found bad cset: %s\n" % hg.hex(b.badrev))
    ui.write("Everything is ok :)\n")
    return 0
+
def bisect_run(ui, repo, cmd=None, *args):
    """bisect extension: dichotomic search in the DAG of changesets
for subcommands see "hg bisect help\"
    """
    def help_(cmd=None, *args):
        """show help for a given bisect subcommand or all subcommands"""
        cmdtable = bisectcmdtable
        if cmd:
            doc = cmdtable[cmd][0].__doc__
            synopsis = cmdtable[cmd][2]
            ui.write(synopsis + "\n")
            ui.write("\n" + doc + "\n")
            return
        ui.write(_("list of subcommands for the bisect extension\n\n"))
        cmds = cmdtable.keys()
        cmds.sort()
        # column width = longest subcommand name.
        m = max([len(c) for c in cmds])
        for cmd in cmds:
            # first line of each subcommand's docstring as summary.
            doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip()
            ui.write(" %-*s %s\n" % (m, cmd, doc))

    b = bisect(ui, repo)
    # subcommand -> (handler, max positional args, synopsis).
    bisectcmdtable = {
        "init": (b.init, 0, _("hg bisect init")),
        "bad": (b.autobad, 1, _("hg bisect bad [<rev>]")),
        "good": (b.autogood, 1, _("hg bisect good [<rev>]")),
        "next": (b.autonext, 0, _("hg bisect next")),
        "reset": (b.reset, 0, _("hg bisect reset")),
        "help": (help_, 1, _("hg bisect help [<subcommand>]")),
    }

    if not bisectcmdtable.has_key(cmd):
        ui.warn(_("bisect: Unknown sub-command\n"))
        return help_()
    if len(args) > bisectcmdtable[cmd][1]:
        ui.warn(_("bisect: Too many arguments\n"))
        return help_()
    return bisectcmdtable[cmd][0](*args)
+
# single user-visible command; subcommand dispatch is in bisect_run.
cmdtable = {
    "bisect": (bisect_run, [], _("hg bisect [help|init|reset|next|good|bad]")),
    #"bisect-test": (test, [], "hg bisect-test rev"),
}
new file mode 100644
--- /dev/null
+++ b/hgext/mq.py
@@ -0,0 +1,1309 @@
+# mq.py - patch queues for mercurial
+#
+# Copyright 2005 Chris Mason <mason@suse.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial.demandload import *
+demandload(globals(), "os sys re struct traceback errno bz2")
+from mercurial.i18n import gettext as _
+from mercurial import ui, hg, revlog, commands, util
+
+versionstr = "0.45"
+
+repomap = {}
+
+commands.norepo += " qversion"
+class queue:
+ def __init__(self, ui, path, patchdir=None):
+ self.basepath = path
+ if patchdir:
+ self.path = patchdir
+ else:
+ self.path = os.path.join(path, "patches")
+ self.opener = util.opener(self.path)
+ self.ui = ui
+ self.applied = []
+ self.full_series = []
+ self.applied_dirty = 0
+ self.series_dirty = 0
+ self.series_path = "series"
+ self.status_path = "status"
+
+ if os.path.exists(os.path.join(self.path, self.series_path)):
+ self.full_series = self.opener(self.series_path).read().splitlines()
+ self.read_series(self.full_series)
+
+ if os.path.exists(os.path.join(self.path, self.status_path)):
+ self.applied = self.opener(self.status_path).read().splitlines()
+
+ def find_series(self, patch):
+ pre = re.compile("(\s*)([^#]+)")
+ index = 0
+ for l in self.full_series:
+ m = pre.match(l)
+ if m:
+ s = m.group(2)
+ s = s.rstrip()
+ if s == patch:
+ return index
+ index += 1
+ return None
+
+ def read_series(self, list):
+ def matcher(list):
+ pre = re.compile("(\s*)([^#]+)")
+ for l in list:
+ m = pre.match(l)
+ if m:
+ s = m.group(2)
+ s = s.rstrip()
+ if len(s) > 0:
+ yield s
+ self.series = []
+ self.series = [ x for x in matcher(list) ]
+
+ def save_dirty(self):
+ if self.applied_dirty:
+ if len(self.applied) > 0:
+ nl = "\n"
+ else:
+ nl = ""
+ f = self.opener(self.status_path, "w")
+ f.write("\n".join(self.applied) + nl)
+ if self.series_dirty:
+ if len(self.full_series) > 0:
+ nl = "\n"
+ else:
+ nl = ""
+ f = self.opener(self.series_path, "w")
+ f.write("\n".join(self.full_series) + nl)
+
+ def readheaders(self, patch):
+ def eatdiff(lines):
+ while lines:
+ l = lines[-1]
+ if (l.startswith("diff -") or
+ l.startswith("Index:") or
+ l.startswith("===========")):
+ del lines[-1]
+ else:
+ break
+ def eatempty(lines):
+ while lines:
+ l = lines[-1]
+ if re.match('\s*$', l):
+ del lines[-1]
+ else:
+ break
+
+ pf = os.path.join(self.path, patch)
+ message = []
+ comments = []
+ user = None
+ date = None
+ format = None
+ subject = None
+ diffstart = 0
+
+ for line in file(pf):
+ line = line.rstrip()
+ if diffstart:
+ if line.startswith('+++ '):
+ diffstart = 2
+ break
+ if line.startswith("--- "):
+ diffstart = 1
+ continue
+ elif format == "hgpatch":
+ # parse values when importing the result of an hg export
+ if line.startswith("# User "):
+ user = line[7:]
+ elif line.startswith("# Date "):
+ date = line[7:]
+ elif not line.startswith("# ") and line:
+ message.append(line)
+ format = None
+ elif line == '# HG changeset patch':
+ format = "hgpatch"
+ elif (format != "tagdone" and (line.startswith("Subject: ") or
+ line.startswith("subject: "))):
+ subject = line[9:]
+ format = "tag"
+ elif (format != "tagdone" and (line.startswith("From: ") or
+ line.startswith("from: "))):
+ user = line[6:]
+ format = "tag"
+ elif format == "tag" and line == "":
+ # when looking for tags (subject: from: etc) they
+ # end once you find a blank line in the source
+ format = "tagdone"
+ elif message or line:
+ message.append(line)
+ comments.append(line)
+
+ eatdiff(message)
+ eatdiff(comments)
+ eatempty(message)
+ eatempty(comments)
+
+ # make sure message isn't empty
+ if format and format.startswith("tag") and subject:
+ message.insert(0, "")
+ message.insert(0, subject)
+ return (message, comments, user, date, diffstart > 1)
+
+ def mergeone(self, repo, mergeq, head, patch, rev, wlock):
+ # first try just applying the patch
+ (err, n) = self.apply(repo, [ patch ], update_status=False,
+ strict=True, merge=rev, wlock=wlock)
+
+ if err == 0:
+ return (err, n)
+
+ if n is None:
+ self.ui.warn("apply failed for patch %s\n" % patch)
+ sys.exit(1)
+
+ self.ui.warn("patch didn't work out, merging %s\n" % patch)
+
+ # apply failed, strip away that rev and merge.
+ repo.update(head, allow=False, force=True, wlock=wlock)
+ self.strip(repo, n, update=False, backup='strip', wlock=wlock)
+
+ c = repo.changelog.read(rev)
+ ret = repo.update(rev, allow=True, wlock=wlock)
+ if ret:
+ self.ui.warn("update returned %d\n" % ret)
+ sys.exit(1)
+ n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
+ if n == None:
+ self.ui.warn("repo commit failed\n")
+ sys.exit(1)
+ try:
+ message, comments, user, date, patchfound = mergeq.readheaders(patch)
+ except:
+ self.ui.warn("Unable to read %s\n" % patch)
+ sys.exit(1)
+
+ patchf = self.opener(patch, "w")
+ if comments:
+ comments = "\n".join(comments) + '\n\n'
+ patchf.write(comments)
+ commands.dodiff(patchf, self.ui, repo, head, n)
+ patchf.close()
+ return (0, n)
+
+ def qparents(self, repo, rev=None):
+ if rev is None:
+ (p1, p2) = repo.dirstate.parents()
+ if p2 == revlog.nullid:
+ return p1
+ if len(self.applied) == 0:
+ return None
+ (top, patch) = self.applied[-1].split(':')
+ top = revlog.bin(top)
+ return top
+ pp = repo.changelog.parents(rev)
+ if pp[1] != revlog.nullid:
+ arevs = [ x.split(':')[0] for x in self.applied ]
+ p0 = revlog.hex(pp[0])
+ p1 = revlog.hex(pp[1])
+ if p0 in arevs:
+ return pp[0]
+ if p1 in arevs:
+ return pp[1]
+ return None
+ return pp[0]
+
+ def mergepatch(self, repo, mergeq, series, wlock):
+ if len(self.applied) == 0:
+ # each of the patches merged in will have two parents. This
+ # can confuse the qrefresh, qdiff, and strip code because it
+ # needs to know which parent is actually in the patch queue.
+ # so, we insert a merge marker with only one parent. This way
+ # the first patch in the queue is never a merge patch
+ #
+ pname = ".hg.patches.merge.marker"
+ n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
+ wlock=wlock)
+ self.applied.append(revlog.hex(n) + ":" + pname)
+ self.applied_dirty = 1
+
+ head = self.qparents(repo)
+
+ for patch in series:
+ patch = mergeq.lookup(patch)
+ if not patch:
+ self.ui.warn("patch %s does not exist\n" % patch)
+ return (1, None)
+
+ info = mergeq.isapplied(patch)
+ if not info:
+ self.ui.warn("patch %s is not applied\n" % patch)
+ return (1, None)
+ rev = revlog.bin(info[1])
+ (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
+ if head:
+ self.applied.append(revlog.hex(head) + ":" + patch)
+ self.applied_dirty = 1
+ if err:
+ return (err, head)
+ return (0, head)
+
+ def apply(self, repo, series, list=False, update_status=True,
+ strict=False, patchdir=None, merge=None, wlock=None):
+ # TODO unify with commands.py
+ if not patchdir:
+ patchdir = self.path
+ pwd = os.getcwd()
+ os.chdir(repo.root)
+ err = 0
+ if not wlock:
+ wlock = repo.wlock()
+ lock = repo.lock()
+ tr = repo.transaction()
+ n = None
+ for patch in series:
+ self.ui.warn("applying %s\n" % patch)
+ pf = os.path.join(patchdir, patch)
+
+ try:
+ message, comments, user, date, patchfound = self.readheaders(patch)
+ except:
+ self.ui.warn("Unable to read %s\n" % pf)
+ err = 1
+ break
+
+ if not message:
+ message = "imported patch %s\n" % patch
+ else:
+ if list:
+ message.append("\nimported patch %s" % patch)
+ message = '\n'.join(message)
+
+ try:
+ pp = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
+ f = os.popen("%s -p1 --no-backup-if-mismatch < '%s'" % (pp, pf))
+ except:
+ self.ui.warn("patch failed, unable to continue (try -v)\n")
+ err = 1
+ break
+ files = []
+ fuzz = False
+ for l in f:
+ l = l.rstrip('\r\n');
+ if self.ui.verbose:
+ self.ui.warn(l + "\n")
+ if l[:14] == 'patching file ':
+ pf = os.path.normpath(l[14:])
+ # when patch finds a space in the file name, it puts
+ # single quotes around the filename. strip them off
+ if pf[0] == "'" and pf[-1] == "'":
+ pf = pf[1:-1]
+ if pf not in files:
+ files.append(pf)
+ printed_file = False
+ file_str = l
+ elif l.find('with fuzz') >= 0:
+ if not printed_file:
+ self.ui.warn(file_str + '\n')
+ printed_file = True
+ self.ui.warn(l + '\n')
+ fuzz = True
+ elif l.find('saving rejects to file') >= 0:
+ self.ui.warn(l + '\n')
+ elif l.find('FAILED') >= 0:
+ if not printed_file:
+ self.ui.warn(file_str + '\n')
+ printed_file = True
+ self.ui.warn(l + '\n')
+ patcherr = f.close()
+
+ if merge and len(files) > 0:
+ # Mark as merged and update dirstate parent info
+ repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
+ p1, p2 = repo.dirstate.parents()
+ repo.dirstate.setparents(p1, merge)
+ if len(files) > 0:
+ commands.addremove_lock(self.ui, repo, files,
+ opts={}, wlock=wlock)
+ n = repo.commit(files, message, user, date, force=1, lock=lock,
+ wlock=wlock)
+
+ if n == None:
+ self.ui.warn("repo commit failed\n")
+ sys.exit(1)
+
+ if update_status:
+ self.applied.append(revlog.hex(n) + ":" + patch)
+
+ if patcherr:
+ if not patchfound:
+ self.ui.warn("patch %s is empty\n" % patch)
+ err = 0
+ else:
+ self.ui.warn("patch failed, rejects left in working dir\n")
+ err = 1
+ break
+
+ if fuzz and strict:
+ self.ui.warn("fuzz found when applying patch, stopping\n")
+ err = 1
+ break
+ tr.close()
+ os.chdir(pwd)
+ return (err, n)
+
+ def delete(self, repo, patch):
+ patch = self.lookup(patch)
+ info = self.isapplied(patch)
+ if info:
+ self.ui.warn("cannot delete applied patch %s\n" % patch)
+ sys.exit(1)
+ if patch not in self.series:
+ self.ui.warn("patch %s not in series file\n" % patch)
+ sys.exit(1)
+ i = self.find_series(patch)
+ del self.full_series[i]
+ self.read_series(self.full_series)
+ self.series_dirty = 1
+
+ def check_toppatch(self, repo):
+ if len(self.applied) > 0:
+ (top, patch) = self.applied[-1].split(':')
+ top = revlog.bin(top)
+ pp = repo.dirstate.parents()
+ if top not in pp:
+ self.ui.warn("queue top not at dirstate parents. top %s dirstate %s %s\n" %( revlog.short(top), revlog.short(pp[0]), revlog.short(pp[1])))
+ sys.exit(1)
+ return top
+ return None
+ def check_localchanges(self, repo):
+ (c, a, r, d, u) = repo.changes(None, None)
+ if c or a or d or r:
+ self.ui.write("Local changes found, refresh first\n")
+ sys.exit(1)
+ def new(self, repo, patch, msg=None, force=None):
+ if not force:
+ self.check_localchanges(repo)
+ self.check_toppatch(repo)
+ wlock = repo.wlock()
+ insert = self.series_end()
+ if msg:
+ n = repo.commit([], "[mq]: %s" % msg, force=True, wlock=wlock)
+ else:
+ n = repo.commit([],
+ "New patch: %s" % patch, force=True, wlock=wlock)
+ if n == None:
+ self.ui.warn("repo commit failed\n")
+ sys.exit(1)
+ self.full_series[insert:insert] = [patch]
+ self.applied.append(revlog.hex(n) + ":" + patch)
+ self.read_series(self.full_series)
+ self.series_dirty = 1
+ self.applied_dirty = 1
+ p = self.opener(patch, "w")
+ if msg:
+ msg = msg + "\n"
+ p.write(msg)
+ p.close()
+ wlock = None
+ r = self.qrepo()
+ if r: r.add([patch])
+
+ def strip(self, repo, rev, update=True, backup="all", wlock=None):
+ def limitheads(chlog, stop):
+ """return the list of all nodes that have no children"""
+ p = {}
+ h = []
+ stoprev = 0
+ if stop in chlog.nodemap:
+ stoprev = chlog.rev(stop)
+
+ for r in range(chlog.count() - 1, -1, -1):
+ n = chlog.node(r)
+ if n not in p:
+ h.append(n)
+ if n == stop:
+ break
+ if r < stoprev:
+ break
+ for pn in chlog.parents(n):
+ p[pn] = 1
+ return h
+
+ def bundle(cg):
+ backupdir = repo.join("strip-backup")
+ if not os.path.isdir(backupdir):
+ os.mkdir(backupdir)
+ name = os.path.join(backupdir, "%s" % revlog.short(rev))
+ name = savename(name)
+ self.ui.warn("saving bundle to %s\n" % name)
+ # TODO, exclusive open
+ f = open(name, "wb")
+ try:
+ f.write("HG10")
+ z = bz2.BZ2Compressor(9)
+ while 1:
+ chunk = cg.read(4096)
+ if not chunk:
+ break
+ f.write(z.compress(chunk))
+ f.write(z.flush())
+ except:
+ os.unlink(name)
+ raise
+ f.close()
+ return name
+
+ def stripall(rev, revnum):
+ cl = repo.changelog
+ c = cl.read(rev)
+ mm = repo.manifest.read(c[0])
+ seen = {}
+
+ for x in xrange(revnum, cl.count()):
+ c = cl.read(cl.node(x))
+ for f in c[3]:
+ if f in seen:
+ continue
+ seen[f] = 1
+ if f in mm:
+ filerev = mm[f]
+ else:
+ filerev = 0
+ seen[f] = filerev
+ # we go in two steps here so the strip loop happens in a
+ # sensible order. When stripping many files, this helps keep
+ # our disk access patterns under control.
+ list = seen.keys()
+ list.sort()
+ for f in list:
+ ff = repo.file(f)
+ filerev = seen[f]
+ if filerev != 0:
+ if filerev in ff.nodemap:
+ filerev = ff.rev(filerev)
+ else:
+ filerev = 0
+ ff.strip(filerev, revnum)
+
+ if not wlock:
+ wlock = repo.wlock()
+ lock = repo.lock()
+ chlog = repo.changelog
+ # TODO delete the undo files, and handle undo of merge sets
+ pp = chlog.parents(rev)
+ revnum = chlog.rev(rev)
+
+ if update:
+ urev = self.qparents(repo, rev)
+ repo.update(urev, allow=False, force=True, wlock=wlock)
+ repo.dirstate.write()
+
+ # save is a list of all the branches we are truncating away
+ # that we actually want to keep. changegroup will be used
+ # to preserve them and add them back after the truncate
+ saveheads = []
+ savebases = {}
+
+ tip = chlog.tip()
+ heads = limitheads(chlog, rev)
+ seen = {}
+
+ # search through all the heads, finding those where the revision
+ # we want to strip away is an ancestor. Also look for merges
+ # that might be turned into new heads by the strip.
+ while heads:
+ h = heads.pop()
+ n = h
+ while True:
+ seen[n] = 1
+ pp = chlog.parents(n)
+ if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
+ if pp[1] not in seen:
+ heads.append(pp[1])
+ if pp[0] == revlog.nullid:
+ break
+ if chlog.rev(pp[0]) < revnum:
+ break
+ n = pp[0]
+ if n == rev:
+ break
+ r = chlog.reachable(h, rev)
+ if rev not in r:
+ saveheads.append(h)
+ for x in r:
+ if chlog.rev(x) > revnum:
+ savebases[x] = 1
+
+ # create a changegroup for all the branches we need to keep
+ if backup is "all":
+ backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
+ bundle(backupch)
+ if saveheads:
+ backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
+ chgrpfile = bundle(backupch)
+
+ stripall(rev, revnum)
+
+ change = chlog.read(rev)
+ repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
+ chlog.strip(revnum, revnum)
+ if saveheads:
+ self.ui.status("adding branch\n")
+ commands.unbundle(self.ui, repo, chgrpfile, update=False)
+ if backup is not "strip":
+ os.unlink(chgrpfile)
+
+ def isapplied(self, patch):
+ """returns (index, rev, patch)"""
+ for i in xrange(len(self.applied)):
+ p = self.applied[i]
+ a = p.split(':')
+ if a[1] == patch:
+ return (i, a[0], a[1])
+ return None
+
+ def lookup(self, patch):
+ if patch == None:
+ return None
+ if patch in self.series:
+ return patch
+ if not os.path.isfile(os.path.join(self.path, patch)):
+ try:
+ sno = int(patch)
+ except(ValueError, OverflowError):
+ self.ui.warn("patch %s not in series\n" % patch)
+ sys.exit(1)
+ if sno >= len(self.series):
+ self.ui.warn("patch number %d is out of range\n" % sno)
+ sys.exit(1)
+ patch = self.series[sno]
+ else:
+ self.ui.warn("patch %s not in series\n" % patch)
+ sys.exit(1)
+ return patch
+
+ def push(self, repo, patch=None, force=False, list=False,
+ mergeq=None, wlock=None):
+ if not wlock:
+ wlock = repo.wlock()
+ patch = self.lookup(patch)
+ if patch and self.isapplied(patch):
+ self.ui.warn("patch %s is already applied\n" % patch)
+ sys.exit(1)
+ if self.series_end() == len(self.series):
+ self.ui.warn("File series fully applied\n")
+ sys.exit(1)
+ if not force:
+ self.check_localchanges(repo)
+
+ self.applied_dirty = 1;
+ start = self.series_end()
+ if start > 0:
+ self.check_toppatch(repo)
+ if not patch:
+ patch = self.series[start]
+ end = start + 1
+ else:
+ end = self.series.index(patch, start) + 1
+ s = self.series[start:end]
+ if mergeq:
+ ret = self.mergepatch(repo, mergeq, s, wlock)
+ else:
+ ret = self.apply(repo, s, list, wlock=wlock)
+ top = self.applied[-1].split(':')[1]
+ if ret[0]:
+ self.ui.write("Errors during apply, please fix and refresh %s\n" %
+ top)
+ else:
+ self.ui.write("Now at: %s\n" % top)
+ return ret[0]
+
+ def pop(self, repo, patch=None, force=False, update=True, wlock=None):
+ def getfile(f, rev):
+ t = repo.file(f).read(rev)
+ try:
+ repo.wfile(f, "w").write(t)
+ except IOError:
+ try:
+ os.makedirs(os.path.dirname(repo.wjoin(f)))
+ except OSError, err:
+ if err.errno != errno.EEXIST: raise
+ repo.wfile(f, "w").write(t)
+
+ if not wlock:
+ wlock = repo.wlock()
+ if patch:
+ # index, rev, patch
+ info = self.isapplied(patch)
+ if not info:
+ patch = self.lookup(patch)
+ info = self.isapplied(patch)
+ if not info:
+ self.ui.warn("patch %s is not applied\n" % patch)
+ sys.exit(1)
+ if len(self.applied) == 0:
+ self.ui.warn("No patches applied\n")
+ sys.exit(1)
+
+ if not update:
+ parents = repo.dirstate.parents()
+ rr = [ revlog.bin(x.split(':')[0]) for x in self.applied ]
+ for p in parents:
+ if p in rr:
+ self.ui.warn("qpop: forcing dirstate update\n")
+ update = True
+
+ if not force and update:
+ self.check_localchanges(repo)
+
+ self.applied_dirty = 1;
+ end = len(self.applied)
+ if not patch:
+ info = [len(self.applied) - 1] + self.applied[-1].split(':')
+ start = info[0]
+ rev = revlog.bin(info[1])
+
+ # we know there are no local changes, so we can make a simplified
+ # form of hg.update.
+ if update:
+ top = self.check_toppatch(repo)
+ qp = self.qparents(repo, rev)
+ changes = repo.changelog.read(qp)
+ mf1 = repo.manifest.readflags(changes[0])
+ mmap = repo.manifest.read(changes[0])
+ (c, a, r, d, u) = repo.changes(qp, top)
+ if d:
+ raise util.Abort("deletions found between repo revs")
+ for f in c:
+ getfile(f, mmap[f])
+ for f in r:
+ getfile(f, mmap[f])
+ util.set_exec(repo.wjoin(f), mf1[f])
+ repo.dirstate.update(c + r, 'n')
+ for f in a:
+ try: os.unlink(repo.wjoin(f))
+ except: raise
+ try: os.removedirs(os.path.dirname(repo.wjoin(f)))
+ except: pass
+ if a:
+ repo.dirstate.forget(a)
+ repo.dirstate.setparents(qp, revlog.nullid)
+ self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
+ del self.applied[start:end]
+ if len(self.applied):
+ self.ui.write("Now at: %s\n" % self.applied[-1].split(':')[1])
+ else:
+ self.ui.write("Patch queue now empty\n")
+
+ def diff(self, repo, files):
+ top = self.check_toppatch(repo)
+ if not top:
+ self.ui.write("No patches applied\n")
+ return
+ qp = self.qparents(repo, top)
+ commands.dodiff(sys.stdout, self.ui, repo, qp, None, files)
+
+ def refresh(self, repo, short=False):
+ if len(self.applied) == 0:
+ self.ui.write("No patches applied\n")
+ return
+ wlock = repo.wlock()
+ self.check_toppatch(repo)
+ qp = self.qparents(repo)
+ (top, patch) = self.applied[-1].split(':')
+ top = revlog.bin(top)
+ cparents = repo.changelog.parents(top)
+ patchparent = self.qparents(repo, top)
+ message, comments, user, date, patchfound = self.readheaders(patch)
+
+ patchf = self.opener(patch, "w")
+ if comments:
+ comments = "\n".join(comments) + '\n\n'
+ patchf.write(comments)
+
+ tip = repo.changelog.tip()
+ if top == tip:
+ # if the top of our patch queue is also the tip, there is an
+ # optimization here. We update the dirstate in place and strip
+ # off the tip commit. Then just commit the current directory
+ # tree. We can also send repo.commit the list of files
+ # changed to speed up the diff
+ #
+ # in short mode, we only diff the files included in the
+ # patch already
+ #
+ # this should really read:
+ #(cc, dd, aa, aa2, uu) = repo.changes(tip, patchparent)
+ # but we do it backwards to take advantage of manifest/chlog
+ # caching against the next repo.changes call
+ #
+ (cc, aa, dd, aa2, uu) = repo.changes(patchparent, tip)
+ if short:
+ filelist = cc + aa + dd
+ else:
+ filelist = None
+ (c, a, r, d, u) = repo.changes(None, None, filelist)
+
+ # we might end up with files that were added between tip and
+ # the dirstate parent, but then changed in the local dirstate.
+ # in this case, we want them to only show up in the added section
+ for x in c:
+ if x not in aa:
+ cc.append(x)
+ # we might end up with files added by the local dirstate that
+ # were deleted by the patch. In this case, they should only
+ # show up in the changed section.
+ for x in a:
+ if x in dd:
+ del dd[dd.index(x)]
+ cc.append(x)
+ else:
+ aa.append(x)
+ # make sure any files deleted in the local dirstate
+ # are not in the add or change column of the patch
+ forget = []
+ for x in d + r:
+ if x in aa:
+ del aa[aa.index(x)]
+ forget.append(x)
+ continue
+ elif x in cc:
+ del cc[cc.index(x)]
+ dd.append(x)
+
+ c = list(util.unique(cc))
+ r = list(util.unique(dd))
+ a = list(util.unique(aa))
+ filelist = list(util.unique(c + r + a ))
+ commands.dodiff(patchf, self.ui, repo, patchparent, None,
+ filelist, changes=(c, a, r, [], u))
+ patchf.close()
+
+ changes = repo.changelog.read(tip)
+ repo.dirstate.setparents(*cparents)
+ repo.dirstate.update(a, 'a')
+ repo.dirstate.update(r, 'r')
+ repo.dirstate.update(c, 'n')
+ repo.dirstate.forget(forget)
+
+ if not message:
+ message = "patch queue: %s\n" % patch
+ else:
+ message = "\n".join(message)
+ self.strip(repo, top, update=False, backup='strip', wlock=wlock)
+ n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
+ self.applied[-1] = revlog.hex(n) + ':' + patch
+ self.applied_dirty = 1
+ else:
+ commands.dodiff(patchf, self.ui, repo, patchparent, None)
+ patchf.close()
+ self.pop(repo, force=True, wlock=wlock)
+ self.push(repo, force=True, wlock=wlock)
+
+ def init(self, repo, create=False):
+ if os.path.isdir(self.path):
+ raise util.Abort("patch queue directory already exists")
+ os.mkdir(self.path)
+ if create:
+ return self.qrepo(create=True)
+
+ def unapplied(self, repo, patch=None):
+ if patch and patch not in self.series:
+ self.ui.warn("%s not in the series file\n" % patch)
+ sys.exit(1)
+ if not patch:
+ start = self.series_end()
+ else:
+ start = self.series.index(patch) + 1
+ for p in self.series[start:]:
+ self.ui.write("%s\n" % p)
+
+ def qseries(self, repo, missing=None):
+ start = self.series_end()
+ if not missing:
+ for p in self.series[:start]:
+ if self.ui.verbose:
+ self.ui.write("%d A " % self.series.index(p))
+ self.ui.write("%s\n" % p)
+ for p in self.series[start:]:
+ if self.ui.verbose:
+ self.ui.write("%d U " % self.series.index(p))
+ self.ui.write("%s\n" % p)
+ else:
+ list = []
+ for root, dirs, files in os.walk(self.path):
+ d = root[len(self.path) + 1:]
+ for f in files:
+ fl = os.path.join(d, f)
+ if (fl not in self.series and
+ fl not in (self.status_path, self.series_path)
+ and not fl.startswith('.')):
+ list.append(fl)
+ list.sort()
+ if list:
+ for x in list:
+ if self.ui.verbose:
+ self.ui.write("D ")
+ self.ui.write("%s\n" % x)
+
+ def issaveline(self, l):
+ name = l.split(':')[1]
+ if name == '.hg.patches.save.line':
+ return True
+
+ def qrepo(self, create=False):
+ if create or os.path.isdir(os.path.join(self.path, ".hg")):
+ return hg.repository(self.ui, path=self.path, create=create)
+
+ def restore(self, repo, rev, delete=None, qupdate=None):
+ c = repo.changelog.read(rev)
+ desc = c[4].strip()
+ lines = desc.splitlines()
+ i = 0
+ datastart = None
+ series = []
+ applied = []
+ qpp = None
+ for i in xrange(0, len(lines)):
+ if lines[i] == 'Patch Data:':
+ datastart = i + 1
+ elif lines[i].startswith('Dirstate:'):
+ l = lines[i].rstrip()
+ l = l[10:].split(' ')
+ qpp = [ hg.bin(x) for x in l ]
+ elif datastart != None:
+ l = lines[i].rstrip()
+ index = l.index(':')
+ id = l[:index]
+ file = l[index + 1:]
+ if id:
+ applied.append(l)
+ series.append(file)
+ if datastart == None:
+ self.ui.warn("No saved patch data found\n")
+ return 1
+ self.ui.warn("restoring status: %s\n" % lines[0])
+ self.full_series = series
+ self.applied = applied
+ self.read_series(self.full_series)
+ self.series_dirty = 1
+ self.applied_dirty = 1
+ heads = repo.changelog.heads()
+ if delete:
+ if rev not in heads:
+ self.ui.warn("save entry has children, leaving it alone\n")
+ else:
+ self.ui.warn("removing save entry %s\n" % hg.short(rev))
+ pp = repo.dirstate.parents()
+ if rev in pp:
+ update = True
+ else:
+ update = False
+ self.strip(repo, rev, update=update, backup='strip')
+ if qpp:
+ self.ui.warn("saved queue repository parents: %s %s\n" %
+ (hg.short(qpp[0]), hg.short(qpp[1])))
+ if qupdate:
+ print "queue directory updating"
+ r = self.qrepo()
+ if not r:
+ self.ui.warn("Unable to load queue repository\n")
+ return 1
+ r.update(qpp[0], allow=False, force=True)
+
+ def save(self, repo, msg=None):
+ if len(self.applied) == 0:
+ self.ui.warn("save: no patches applied, exiting\n")
+ return 1
+ if self.issaveline(self.applied[-1]):
+ self.ui.warn("status is already saved\n")
+ return 1
+
+ ar = [ ':' + x for x in self.full_series ]
+ if not msg:
+ msg = "hg patches saved state"
+ else:
+ msg = "hg patches: " + msg.rstrip('\r\n')
+ r = self.qrepo()
+ if r:
+ pp = r.dirstate.parents()
+ msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
+ msg += "\n\nPatch Data:\n"
+ text = msg + "\n".join(self.applied) + '\n' + (ar and "\n".join(ar)
+ + '\n' or "")
+ n = repo.commit(None, text, user=None, force=1)
+ if not n:
+ self.ui.warn("repo commit failed\n")
+ return 1
+ self.applied.append(revlog.hex(n) + ":" + '.hg.patches.save.line')
+ self.applied_dirty = 1
+
+ def series_end(self):
+ end = 0
+ if len(self.applied) > 0:
+ (top, p) = self.applied[-1].split(':')
+ try:
+ end = self.series.index(p)
+ except ValueError:
+ return 0
+ return end + 1
+ return end
+
+ def qapplied(self, repo, patch=None):
+ if patch and patch not in self.series:
+ self.ui.warn("%s not in the series file\n" % patch)
+ sys.exit(1)
+ if not patch:
+ end = len(self.applied)
+ else:
+ end = self.series.index(patch) + 1
+ for x in xrange(end):
+ p = self.appliedname(x)
+ self.ui.write("%s\n" % p)
+
+ def appliedname(self, index):
+ p = self.applied[index]
+ if not self.ui.verbose:
+ p = p.split(':')[1]
+ return p
+
+ def top(self, repo):
+ if len(self.applied):
+ p = self.appliedname(-1)
+ self.ui.write(p + '\n')
+ else:
+ self.ui.write("No patches applied\n")
+
+ def next(self, repo):
+ end = self.series_end()
+ if end == len(self.series):
+ self.ui.write("All patches applied\n")
+ else:
+ self.ui.write(self.series[end] + '\n')
+
+ def prev(self, repo):
+ if len(self.applied) > 1:
+ p = self.appliedname(-2)
+ self.ui.write(p + '\n')
+ elif len(self.applied) == 1:
+ self.ui.write("Only one patch applied\n")
+ else:
+ self.ui.write("No patches applied\n")
+
+ def qimport(self, repo, files, patch=None, existing=None, force=None):
+ if len(files) > 1 and patch:
+ self.ui.warn("-n option not valid when importing multiple files\n")
+ sys.exit(1)
+ i = 0
+ for filename in files:
+ if existing:
+ if not patch:
+ patch = filename
+ if not os.path.isfile(os.path.join(self.path, patch)):
+ self.ui.warn("patch %s does not exist\n" % patch)
+ sys.exit(1)
+ else:
+ try:
+ text = file(filename).read()
+ except IOError:
+ self.ui.warn("Unable to read %s\n" % patch)
+ sys.exit(1)
+ if not patch:
+ patch = os.path.split(filename)[1]
+ if not force and os.path.isfile(os.path.join(self.path, patch)):
+ self.ui.warn("patch %s already exists\n" % patch)
+ sys.exit(1)
+ patchf = self.opener(patch, "w")
+ patchf.write(text)
+ if patch in self.series:
+ self.ui.warn("patch %s is already in the series file\n" % patch)
+ sys.exit(1)
+ index = self.series_end() + i
+ self.full_series[index:index] = [patch]
+ self.read_series(self.full_series)
+ self.ui.warn("adding %s to series file\n" % patch)
+ i += 1
+ patch = None
+ self.series_dirty = 1
+
+def delete(ui, repo, patch, **opts):
+ """remove a patch from the series file"""
+ q = repomap[repo]
+ q.delete(repo, patch)
+ q.save_dirty()
+ return 0
+
+def applied(ui, repo, patch=None, **opts):
+ """print the patches already applied"""
+ repomap[repo].qapplied(repo, patch)
+ return 0
+
+def unapplied(ui, repo, patch=None, **opts):
+ """print the patches not yet applied"""
+ repomap[repo].unapplied(repo, patch)
+ return 0
+
+def qimport(ui, repo, *filename, **opts):
+ """import a patch"""
+ q = repomap[repo]
+ q.qimport(repo, filename, patch=opts['name'],
+ existing=opts['existing'], force=opts['force'])
+ q.save_dirty()
+ return 0
+
+def init(ui, repo, **opts):
+ """init a new queue repository"""
+ q = repomap[repo]
+ r = q.init(repo, create=opts['create_repo'])
+ q.save_dirty()
+ if r:
+ fp = r.wopener('.hgignore', 'w')
+ print >> fp, 'syntax: glob'
+ print >> fp, 'status'
+ fp.close()
+ r.wopener('series', 'w').close()
+ r.add(['.hgignore', 'series'])
+ return 0
+
+def commit(ui, repo, *pats, **opts):
+ q = repomap[repo]
+ r = q.qrepo()
+ if not r: raise util.Abort('no queue repository')
+ commands.commit(r.ui, r, *pats, **opts)
+
+def series(ui, repo, **opts):
+ """print the entire series file"""
+ repomap[repo].qseries(repo, missing=opts['missing'])
+ return 0
+
+def top(ui, repo, **opts):
+ """print the name of the current patch"""
+ repomap[repo].top(repo)
+ return 0
+
+def next(ui, repo, **opts):
+ """print the name of the next patch"""
+ repomap[repo].next(repo)
+ return 0
+
+def prev(ui, repo, **opts):
+ """print the name of the previous patch"""
+ repomap[repo].prev(repo)
+ return 0
+
+def new(ui, repo, patch, **opts):
+ """create a new patch"""
+ q = repomap[repo]
+ q.new(repo, patch, msg=opts['message'], force=opts['force'])
+ q.save_dirty()
+ return 0
+
+def refresh(ui, repo, **opts):
+ """update the current patch"""
+ q = repomap[repo]
+ q.refresh(repo, short=opts['short'])
+ q.save_dirty()
+ return 0
+
+def diff(ui, repo, *files, **opts):
+ """diff of the current patch"""
+ # deep in the dirstate code, the walkhelper method wants a list, not a tuple
+ repomap[repo].diff(repo, list(files))
+ return 0
+
+def lastsavename(path):
+ (dir, base) = os.path.split(path)
+ names = os.listdir(dir)
+ namere = re.compile("%s.([0-9]+)" % base)
+ max = None
+ maxname = None
+ for f in names:
+ m = namere.match(f)
+ if m:
+ index = int(m.group(1))
+ if max == None or index > max:
+ max = index
+ maxname = f
+ if maxname:
+ return (os.path.join(dir, maxname), max)
+ return (None, None)
+
+def savename(path):
+ (last, index) = lastsavename(path)
+ if last is None:
+ index = 0
+ newpath = path + ".%d" % (index + 1)
+ return newpath
+
+def push(ui, repo, patch=None, **opts):
+ """push the next patch onto the stack"""
+ q = repomap[repo]
+ mergeq = None
+
+ if opts['all']:
+ patch = q.series[-1]
+ if opts['merge']:
+ if opts['name']:
+ newpath = opts['name']
+ else:
+ newpath, i = lastsavename(q.path)
+ if not newpath:
+ ui.warn("no saved queues found, please use -n\n")
+ return 1
+ mergeq = queue(ui, repo.join(""), newpath)
+ ui.warn("merging with queue at: %s\n" % mergeq.path)
+ ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
+ mergeq=mergeq)
+ q.save_dirty()
+ return ret
+
+def pop(ui, repo, patch=None, **opts):
+ """pop the current patch off the stack"""
+ localupdate = True
+ if opts['name']:
+ q = queue(ui, repo.join(""), repo.join(opts['name']))
+ ui.warn('using patch queue: %s\n' % q.path)
+ localupdate = False
+ else:
+ q = repomap[repo]
+ if opts['all'] and len(q.applied) > 0:
+ patch = q.applied[0].split(':')[1]
+ q.pop(repo, patch, force=opts['force'], update=localupdate)
+ q.save_dirty()
+ return 0
+
+def restore(ui, repo, rev, **opts):
+ """restore the queue state saved by a rev"""
+ rev = repo.lookup(rev)
+ q = repomap[repo]
+ q.restore(repo, rev, delete=opts['delete'],
+ qupdate=opts['update'])
+ q.save_dirty()
+ return 0
+
+def save(ui, repo, **opts):
+ """save current queue state"""
+ q = repomap[repo]
+ ret = q.save(repo, msg=opts['message'])
+ if ret:
+ return ret
+ q.save_dirty()
+ if opts['copy']:
+ path = q.path
+ if opts['name']:
+ newpath = os.path.join(q.basepath, opts['name'])
+ if os.path.exists(newpath):
+ if not os.path.isdir(newpath):
+ ui.warn("destination %s exists and is not a directory\n" %
+ newpath)
+ sys.exit(1)
+ if not opts['force']:
+ ui.warn("destination %s exists, use -f to force\n" %
+ newpath)
+ sys.exit(1)
+ else:
+ newpath = savename(path)
+ ui.warn("copy %s to %s\n" % (path, newpath))
+ util.copyfiles(path, newpath)
+ if opts['empty']:
+ try:
+ os.unlink(os.path.join(q.path, q.status_path))
+ except:
+ pass
+ return 0
+
+def strip(ui, repo, rev, **opts):
+ """strip a revision and all later revs on the same branch"""
+ rev = repo.lookup(rev)
+ backup = 'all'
+ if opts['backup']:
+ backup = 'strip'
+ elif opts['nobackup']:
+ backup = 'none'
+ repomap[repo].strip(repo, rev, backup=backup)
+ return 0
+
+def version(ui, q=None):
+ """print the version number"""
+ ui.write("mq version %s\n" % versionstr)
+ return 0
+
+def reposetup(ui, repo):
+ repomap[repo] = queue(ui, repo.join(""))
+
+cmdtable = {
+ "qapplied": (applied, [], 'hg qapplied [PATCH]'),
+ "qcommit|qci":
+ (commit,
+ commands.table["^commit|ci"][1],
+ 'hg qcommit [OPTION]... [FILE]...'),
+ "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
+ "qdelete": (delete, [], 'hg qdelete PATCH'),
+ "^qimport":
+ (qimport,
+ [('e', 'existing', None, 'import file in patch dir'),
+ ('n', 'name', '', 'patch file name'),
+ ('f', 'force', None, 'overwrite existing files')],
+ 'hg qimport [-e] [-n NAME] [-f] FILE...'),
+ "^qinit":
+ (init,
+ [('c', 'create-repo', None, 'create patch repository')],
+ 'hg qinit [-c]'),
+ "qnew":
+ (new,
+ [('m', 'message', '', 'commit message'),
+ ('f', 'force', None, 'force')],
+ 'hg qnew [-m TEXT] [-f] PATCH'),
+ "qnext": (next, [], 'hg qnext'),
+ "qprev": (prev, [], 'hg qprev'),
+ "^qpop":
+ (pop,
+ [('a', 'all', None, 'pop all patches'),
+ ('n', 'name', '', 'queue name to pop'),
+ ('f', 'force', None, 'forget any local changes')],
+ 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
+ "^qpush":
+ (push,
+ [('f', 'force', None, 'apply if the patch has rejects'),
+ ('l', 'list', None, 'list patch name in commit text'),
+ ('a', 'all', None, 'apply all patches'),
+ ('m', 'merge', None, 'merge from another queue'),
+ ('n', 'name', '', 'merge queue name')],
+ 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
+ "^qrefresh":
+ (refresh,
+ [('s', 'short', None, 'short refresh')],
+ 'hg qrefresh [-s]'),
+ "qrestore":
+ (restore,
+ [('d', 'delete', None, 'delete save entry'),
+ ('u', 'update', None, 'update queue working dir')],
+ 'hg qrestore [-d] [-u] REV'),
+ "qsave":
+ (save,
+ [('m', 'message', '', 'commit message'),
+ ('c', 'copy', None, 'copy patch directory'),
+ ('n', 'name', '', 'copy directory name'),
+ ('e', 'empty', None, 'clear queue status file'),
+ ('f', 'force', None, 'force copy')],
+ 'hg qsave [-m TEXT] [-c] [-n NAME] [-e] [-f]'),
+ "qseries":
+ (series,
+ [('m', 'missing', None, 'print patches not in series')],
+ 'hg qseries [-m]'),
+ "^strip":
+ (strip,
+ [('f', 'force', None, 'force multi-head removal'),
+ ('b', 'backup', None, 'bundle unrelated changesets'),
+ ('n', 'nobackup', None, 'no backups')],
+ 'hg strip [-f] [-b] [-n] REV'),
+ "qtop": (top, [], 'hg qtop'),
+ "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
+ "qversion": (version, [], 'hg qversion')
+}
+
new file mode 100644
--- /dev/null
+++ b/hgext/notify.py
@@ -0,0 +1,276 @@
+# notify.py - email notifications for mercurial
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+#
+# hook extension to email notifications to people when changesets are
+# committed to a repo they subscribe to.
+#
+# default mode is to print messages to stdout, for testing and
+# configuring.
+#
+# to use, configure notify extension and enable in hgrc like this:
+#
+# [extensions]
+# hgext.notify =
+#
+# [hooks]
+# # one email for each incoming changeset
+# incoming.notify = python:hgext.notify.hook
+# # batch emails when many changesets incoming at one time
+# changegroup.notify = python:hgext.notify.hook
+#
+# [notify]
+# # config items go in here
+#
+# config items:
+#
+# REQUIRED:
+# config = /path/to/file # file containing subscriptions
+#
+# OPTIONAL:
+# test = True # print messages to stdout for testing
+# strip = 3 # number of slashes to strip for url paths
+# domain = example.com # domain to use if committer missing domain
+# style = ... # style file to use when formatting email
+# template = ... # template to use when formatting email
+# incoming = ... # template to use when run as incoming hook
+# changegroup = ... # template when run as changegroup hook
+# maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
+# maxsubject = 67 # truncate subject line longer than this
+# sources = serve # notify if source of incoming changes in this list
+# # (serve == ssh or http, push, pull, bundle)
+# [email]
+# from = user@host.com # email address to send as if none given
+# [web]
+# baseurl = http://hgserver/... # root of hg web site for browsing commits
+#
+# notify config file has same format as regular hgrc. it has two
+# sections so you can express subscriptions in whatever way is handier
+# for you.
+#
+# [usersubs]
+# # key is subscriber email, value is ","-separated list of glob patterns
+# user@host = pattern
+#
+# [reposubs]
+# # key is glob pattern, value is ","-separated list of subscriber emails
+# pattern = user@host
+#
+# glob patterns are matched against path to repo root.
+#
# if you like, you can put the notify config file in a repo that users
# can push changes to; that way they can manage their own subscriptions.
+
+from mercurial.demandload import *
+from mercurial.i18n import gettext as _
+from mercurial.node import *
+demandload(globals(), 'email.Parser mercurial:commands,templater,util')
+demandload(globals(), 'fnmatch socket time')
+
+# template for single changeset can include email headers.
single_template = '''
Subject: changeset in {webroot}: {desc|firstline|strip}
From: {author}

changeset {node|short} in {root}
details: {baseurl}{webroot}?cmd=changeset;node={node|short}
description:
\t{desc|tabindent|strip}
'''.lstrip()

# template for multiple changesets should not contain email headers,
# because only first set of headers will be used and result will look
# strange.
multiple_template = '''
changeset {node|short} in {root}
details: {baseurl}{webroot}?cmd=changeset;node={node|short}
summary: {desc|firstline}
'''

# map hook type to the default template for it; hook types with no
# entry here fall back to single_template (see notifier.__init__).
deftemplates = {
    'changegroup': multiple_template,
    }
+
class notifier(object):
    '''email notification class.

    reads subscription config, formats changesets with a templater,
    and mails (or, in test mode, prints) the result to subscribers.'''

    def __init__(self, ui, repo, hooktype):
        self.ui = ui
        cfg = self.ui.config('notify', 'config')
        if cfg:
            # pull subscription sections into our ui config space
            self.ui.readconfig(cfg)
        self.repo = repo
        self.stripcount = int(self.ui.config('notify', 'strip', 0))
        self.root = self.strip(self.repo.root)
        self.domain = self.ui.config('notify', 'domain')
        # all formatted output accumulates here until send()
        self.sio = templater.stringio()
        self.subs = self.subscribers()

        # per-hook-type template overrides the generic template; a
        # style map file, if configured, overrides both.
        mapfile = self.ui.config('notify', 'style')
        template = (self.ui.config('notify', hooktype) or
                    self.ui.config('notify', 'template'))
        self.t = templater.changeset_templater(self.ui, self.repo, mapfile,
                                               self.sio)
        if not mapfile and not template:
            template = deftemplates.get(hooktype) or single_template
        if template:
            template = templater.parsestring(template, quoted=False)
            self.t.use_template(template)

    def strip(self, path):
        '''strip leading slashes from local path, turn into web-safe path.'''

        path = util.pconvert(path)
        count = self.stripcount
        while count > 0:
            c = path.find('/')
            if c == -1:
                break
            path = path[c+1:]
            count -= 1
        return path

    def fixmail(self, addr):
        '''try to clean up email addresses.'''

        addr = templater.email(addr.strip())
        a = addr.find('@localhost')
        if a != -1:
            addr = addr[:a]
        if '@' not in addr:
            # NOTE(review): if the "domain" option is unset this
            # concatenation raises TypeError - confirm config always
            # supplies a domain when bare local names are used.
            return addr + '@' + self.domain
        return addr

    def subscribers(self):
        '''return list of email addresses of subscribers to this repo.'''

        # use a dict to de-duplicate addresses found in both sections
        subs = {}
        for user, pats in self.ui.configitems('usersubs'):
            for pat in pats.split(','):
                if fnmatch.fnmatch(self.repo.root, pat.strip()):
                    subs[self.fixmail(user)] = 1
        for pat, users in self.ui.configitems('reposubs'):
            if fnmatch.fnmatch(self.repo.root, pat):
                for user in users.split(','):
                    subs[self.fixmail(user)] = 1
        subs = subs.keys()
        subs.sort()
        return subs

    def url(self, path=None):
        '''return web url for path, or for repo root if path omitted.'''
        return self.ui.config('web', 'baseurl') + (path or self.root)

    def node(self, node):
        '''format one changeset.'''

        self.t.show(changenode=node, changes=self.repo.changelog.read(node),
                    baseurl=self.ui.config('web', 'baseurl'),
                    root=self.repo.root,
                    webroot=self.root)

    def skipsource(self, source):
        '''true if incoming changes from this source should be skipped.'''
        ok_sources = self.ui.config('notify', 'sources', 'serve').split()
        return source not in ok_sources

    def send(self, node, count):
        '''send message.'''

        # parse the template output back into a message so that any
        # email headers emitted by the template are honored.
        p = email.Parser.Parser()
        self.sio.seek(0)
        msg = p.parse(self.sio)

        def fix_subject():
            '''try to make subject line exist and be useful.'''

            subject = msg['Subject']
            if not subject:
                if count > 1:
                    subject = _('%s: %d new changesets') % (self.root, count)
                else:
                    # changes[4] is the changeset description; use its
                    # first line.
                    changes = self.repo.changelog.read(node)
                    s = changes[4].lstrip().split('\n', 1)[0].rstrip()
                    subject = '%s: %s' % (self.root, s)
            maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
            if maxsubject and len(subject) > maxsubject:
                subject = subject[:maxsubject-3] + '...'
            del msg['Subject']
            msg['Subject'] = subject

        def fix_sender():
            '''try to make message have proper sender.'''

            sender = msg['From']
            if not sender:
                sender = self.ui.config('email', 'from') or self.ui.username()
            if '@' not in sender or '@localhost' in sender:
                sender = self.fixmail(sender)
            del msg['From']
            msg['From'] = sender

        fix_subject()
        fix_sender()

        msg['X-Hg-Notification'] = 'changeset ' + short(node)
        if not msg['Message-Id']:
            msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
                                 (short(node), int(time.time()),
                                  hash(self.repo.root), socket.getfqdn()))
        msg['To'] = ', '.join(self.subs)

        msgtext = msg.as_string(0)
        if self.ui.configbool('notify', 'test', True):
            # test mode (the default): print instead of mailing
            self.ui.write(msgtext)
            if not msgtext.endswith('\n'):
                self.ui.write('\n')
        else:
            self.ui.status(_('notify: sending %d subscribers %d changes\n') %
                           (len(self.subs), count))
            mail = self.ui.sendmail()
            mail.sendmail(templater.email(msg['From']), self.subs, msgtext)

    def diff(self, node, ref):
        '''append diff of changes between node's parent and ref to the
        message body, honoring the maxdiff option.'''
        maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
        if maxdiff == 0:
            return
        fp = templater.stringio()
        prev = self.repo.changelog.parents(node)[0]
        commands.dodiff(fp, self.ui, self.repo, prev, ref)
        difflines = fp.getvalue().splitlines(1)
        if maxdiff > 0 and len(difflines) > maxdiff:
            self.sio.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
                           (len(difflines), maxdiff))
            difflines = difflines[:maxdiff]
        elif difflines:
            self.sio.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
        # fix: stringio.write takes a single string argument; the old
        # "write(*difflines)" raised TypeError whenever the diff had
        # more than one line.
        self.sio.write(''.join(difflines))
+
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    '''send email notifications to interested subscribers.

    as a changegroup hook, one batched email covers every changeset in
    the group; otherwise one email is sent per changeset.'''
    mailer = notifier(ui, repo, hooktype)
    if not mailer.subs:
        ui.debug(_('notify: no subscribers to this repo\n'))
        return
    if mailer.skipsource(source):
        ui.debug(_('notify: changes have source "%s" - skipping\n') %
                 source)
        return
    first = bin(node)
    if hooktype != 'changegroup':
        # single changeset: format it, attach its diff, send
        mailer.node(first)
        mailer.diff(first, first)
        mailer.send(first, 1)
        return
    # changegroup: format every changeset from the first new one to
    # tip, attach one combined diff, send a single message
    start = repo.changelog.rev(first)
    end = repo.changelog.count()
    for rev in xrange(start, end):
        mailer.node(repo.changelog.node(rev))
    mailer.diff(first, repo.changelog.tip())
    mailer.send(first, end - start)
new file mode 100644
--- /dev/null
+++ b/hgext/patchbomb.py
@@ -0,0 +1,270 @@
+# Command for sending a collection of Mercurial changesets as a series
+# of patch emails.
+#
+# The series is started off with a "[PATCH 0 of N]" introduction,
+# which describes the series as a whole.
+#
+# Each patch email has a Subject line of "[PATCH M of N] ...", using
+# the first line of the changeset description as the subject text.
+# The message contains two or three body parts:
+#
+# The remainder of the changeset description.
+#
+# [Optional] If the diffstat program is installed, the result of
+# running diffstat on the patch.
+#
+# The patch itself, as generated by "hg export".
+#
+# Each message refers to all of its predecessors using the In-Reply-To
+# and References headers, so they will show up as a sequence in
+# threaded mail and news readers, and in mail archives.
+#
+# For each changeset, you will be prompted with a diffstat summary and
+# the changeset summary, so you can be sure you are sending the right
+# changes.
+#
+# It is best to run this script with the "-n" (test only) flag before
+# firing it up "for real", in which case it will use your pager to
+# display each of the messages that it would send.
+#
+# The "-m" (mbox) option will create an mbox file instead of sending
+# the messages directly. This can be reviewed e.g. with "mutt -R -f mbox",
+# and finally sent with "formail -s sendmail -bm -t < mbox".
+#
+# To configure other defaults, add a section like this to your hgrc
+# file:
+#
+# [email]
+# from = My Name <my@email>
+# to = recipient1, recipient2, ...
+# cc = cc1, cc2, ...
+
+from mercurial.demandload import *
+demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
+ mercurial:commands,hg,ui
+ os errno popen2 socket sys tempfile time''')
+from mercurial.i18n import gettext as _
+
+try:
+ # readline gives raw_input editing capabilities, but is not
+ # present on windows
+ import readline
+except ImportError: pass
+
def diffstat(patch):
    '''return diffstat(1) summary of patch, or None if unavailable.

    patch is a sequence of text lines.  returns None when the diffstat
    program is missing or fails; raises ValueError when the stat says
    the patch touches no files.'''
    fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
    try:
        p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
        for line in patch:
            p.tochild.write(line + '\n')
        p.tochild.close()
        if p.wait():
            # diffstat failed or is not installed.  fix: the old code
            # returned here without closing fd, leaking the descriptor.
            os.close(fd)
            return
        fp = os.fdopen(fd, 'r')
        stat = [line.lstrip() for line in fp]
        # move the trailing "N files changed ..." summary line to the top
        last = stat.pop()
        stat.insert(0, last)
        stat = ''.join(stat)
        if stat.startswith('0 files'):
            raise ValueError
        return stat
    finally:
        # the pointless inner "except: raise" wrapper is gone; a plain
        # try/finally is all that is needed to remove the temp file.
        try:
            os.unlink(name)
        except OSError:
            pass
+
def patchbomb(ui, repo, *revs, **opts):
    '''send changesets as a series of patch emails

    The series starts with a "[PATCH 0 of N]" introduction, which
    describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three body parts. First, the rest of
    the changeset description. Next, (optionally) if the diffstat
    program is installed, the result of running diffstat on the patch.
    Finally, the patch itself, as generated by "hg export".'''
    def prompt(prompt, default = None, rest = ': ', empty_ok = False):
        # interactive prompt; loops until a usable answer is given
        if default: prompt += ' [%s]' % default
        prompt += rest
        while True:
            r = raw_input(prompt)
            if r: return r
            if default is not None: return default
            if empty_ok: return r
            ui.warn(_('Please enter a valid value.\n'))

    def confirm(s):
        # raise ValueError unless the user answers yes
        if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
            raise ValueError

    def cdiffstat(summary, patch):
        # show diffstat of patch and ask the user to confirm it
        s = diffstat(patch)
        if s:
            if summary:
                ui.write(summary, '\n')
            ui.write(s, '\n')
            confirm(_('Does the diffstat above look okay'))
        return s

    def makepatch(patch, idx, total):
        # build one MIME message from the export of a single changeset
        desc = []
        node = None
        body = ''
        for line in patch:
            if line.startswith('#'):
                if line.startswith('# Node ID'): node = line.split()[-1]
                continue
            if line.startswith('diff -r'): break
            desc.append(line)
        if not node: raise ValueError

        #body = ('\n'.join(desc[1:]).strip() or
        #   'Patch subject is complete summary.')
        #body += '\n\n\n'

        if opts['plain']:
            # strip the "# ..." hg patch header lines
            while patch and patch[0].startswith('# '): patch.pop(0)
            if patch: patch.pop(0)
            while patch and not patch[0].strip(): patch.pop(0)
        if opts['diffstat']:
            body += cdiffstat('\n'.join(desc), patch) + '\n\n'
        body += '\n'.join(patch)
        msg = email.MIMEText.MIMEText(body)
        if total == 1:
            subj = '[PATCH] ' + desc[0].strip()
        else:
            subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
        if subj.endswith('.'): subj = subj[:-1]
        msg['Subject'] = subj
        msg['X-Mercurial-Node'] = node
        return msg

    start_time = int(time.time())

    def genmsgid(id):
        # unique-enough message id from node hash, time and hostname
        return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())

    patches = []

    class exportee:
        # file-like sink handed to commands.export; collects each
        # exported changeset as a list of lines in self.container
        def __init__(self, container):
            self.lines = []
            self.container = container
            self.name = 'email'

        def write(self, data):
            self.lines.append(data)

        def close(self):
            self.container.append(''.join(self.lines).split('\n'))
            self.lines = []

    commands.export(ui, repo, *revs, **{'output': exportee(patches),
                                        'switch_parent': False,
                                        'text': None})

    # jumbo holds all patch lines concatenated, for the final diffstat
    jumbo = []
    msgs = []

    ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))

    for p, i in zip(patches, range(len(patches))):
        jumbo.extend(p)
        msgs.append(makepatch(p, i + 1, len(patches)))

    sender = (opts['from'] or ui.config('email', 'from') or
              ui.config('patchbomb', 'from') or
              prompt('From', ui.username()))

    def getaddrs(opt, prpt, default = None):
        # resolve recipient list from options, config, or a prompt
        addrs = opts[opt] or (ui.config('email', opt) or
                              ui.config('patchbomb', opt) or
                              prompt(prpt, default = default)).split(',')
        return [a.strip() for a in addrs if a.strip()]
    to = getaddrs('to', 'To')
    cc = getaddrs('cc', 'Cc', '')

    if len(patches) > 1:
        # compose the "[PATCH 0 of N]" introductory message
        ui.write(_('\nWrite the introductory message for the patch series.\n\n'))

        msg = email.MIMEMultipart.MIMEMultipart()
        msg['Subject'] = '[PATCH 0 of %d] %s' % (
            len(patches),
            opts['subject'] or
            prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))

        ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))

        body = []

        while True:
            try: l = raw_input()
            except EOFError: break
            if l == '.': break
            body.append(l)

        msg.attach(email.MIMEText.MIMEText('\n'.join(body) + '\n'))

        if opts['diffstat']:
            d = cdiffstat(_('Final summary:\n'), jumbo)
            if d: msg.attach(email.MIMEText.MIMEText(d))

        msgs.insert(0, msg)

    ui.write('\n')

    if not opts['test'] and not opts['mbox']:
        mail = ui.sendmail()
    parent = None
    tz = time.strftime('%z')
    sender_addr = email.Utils.parseaddr(sender)[1]
    for m in msgs:
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
        except TypeError:
            # intro message has no X-Mercurial-Node header
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
        else:
            parent = m['Message-Id']
        # bump start_time so each message gets a distinct Date header
        m['Date'] = time.strftime('%a, %e %b %Y %T ', time.localtime(start_time)) + tz
        start_time += 1
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc: m['Cc'] = ', '.join(cc)
        if opts['test']:
            # -n: page each message instead of sending it
            ui.status('Displaying ', m['Subject'], ' ...\n')
            fp = os.popen(os.getenv('PAGER', 'more'), 'w')
            try:
                fp.write(m.as_string(0))
                fp.write('\n')
            except IOError, inst:
                if inst.errno != errno.EPIPE:
                    raise
            fp.close()
        elif opts['mbox']:
            # -m: append to (or create) an mbox file
            ui.status('Writing ', m['Subject'], ' ...\n')
            fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
            date = time.asctime(time.localtime(start_time))
            fp.write('From %s %s\n' % (sender_addr, date))
            fp.write(m.as_string(0))
            fp.write('\n\n')
            fp.close()
        else:
            ui.status('Sending ', m['Subject'], ' ...\n')
            mail.sendmail(sender, to + cc, m.as_string(0))
+
# table of commands exported to hg's dispatcher; each entry maps a
# command name to (function, option list, synopsis).
cmdtable = {
    'email':
    (patchbomb,
     [('c', 'cc', [], 'email addresses of copy recipients'),
      ('d', 'diffstat', None, 'add diffstat output to messages'),
      ('f', 'from', '', 'email address of sender'),
      ('', 'plain', None, 'omit hg patch header'),
      ('n', 'test', None, 'print messages that would be sent'),
      ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
      ('s', 'subject', '', 'subject of introductory message'),
      ('t', 'to', [], 'email addresses of recipients')],
     "hg email [OPTION]... [REV]...")
    }
new file mode 100644
--- /dev/null
+++ b/hgext/win32text.py
@@ -0,0 +1,28 @@
+import mercurial.util
+
def dumbdecode(s, cmd):
    '''unconditionally convert LF line endings to CRLF.'''
    return '\r\n'.join(s.split('\n'))
+
def dumbencode(s, cmd):
    '''unconditionally convert CRLF line endings to LF.'''
    return '\n'.join(s.split('\r\n'))
+
def clevertest(s, cmd):
    '''heuristic: treat s as text unless it contains a NUL byte.'''
    return '\0' not in s
+
def cleverdecode(s, cmd):
    '''LF -> CRLF, but leave content that looks binary untouched.'''
    if not clevertest(s, cmd):
        return s
    return dumbdecode(s, cmd)
+
def cleverencode(s, cmd):
    '''CRLF -> LF, but leave content that looks binary untouched.'''
    if not clevertest(s, cmd):
        return s
    return dumbencode(s, cmd)
+
# register the filters in mercurial's encode/decode filter table so
# they can be referenced from the [encode]/[decode] sections of hgrc.
mercurial.util.filtertable.update({
    'dumbdecode:': dumbdecode,
    'dumbencode:': dumbencode,
    'cleverdecode:': cleverdecode,
    'cleverencode:': cleverencode,
    })
new file mode 100755
--- /dev/null
+++ b/hgmerge
@@ -0,0 +1,188 @@
+#!/bin/sh
+#
+# hgmerge - default merge helper for Mercurial
+#
+# This tries to find a way to do three-way merge on the current system.
+# The result ought to end up in $1. Script is run in root directory of
+# repository.
+#
+# Environment variables set by Mercurial:
+# HG_FILE name of file within repo
+# HG_MY_NODE revision being merged
+# HG_OTHER_NODE revision being merged
+
set -e # bail out quickly on failure

LOCAL="$1"   # file to merge into; the result must end up here
BASE="$2"    # common ancestor version
OTHER="$3"   # the version being merged in

if [ -z "$EDITOR" ]; then
    EDITOR="vi"
fi

# find decent versions of our utilities, insisting on the GNU versions where we
# need to
MERGE="merge"
DIFF3="gdiff3"
DIFF="gdiff"
PATCH="gpatch"

# prefer the g-prefixed GNU tool, fall back to the plain name, and
# clear the variable entirely if nothing usable is found
type "$MERGE" >/dev/null 2>&1 || MERGE=
type "$DIFF3" >/dev/null 2>&1 || DIFF3="diff3"
$DIFF3 --version >/dev/null 2>&1 || DIFF3=
type "$DIFF" >/dev/null 2>&1 || DIFF="diff"
type "$DIFF" >/dev/null 2>&1 || DIFF=
type "$PATCH" >/dev/null 2>&1 || PATCH="patch"
type "$PATCH" >/dev/null 2>&1 || PATCH=

# find optional visual utilities
FILEMERGE="/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge"
KDIFF3="kdiff3"
TKDIFF="tkdiff"
MELD="meld"

type "$FILEMERGE" >/dev/null 2>&1 || FILEMERGE=
type "$KDIFF3" >/dev/null 2>&1 || KDIFF3=
type "$TKDIFF" >/dev/null 2>&1 || TKDIFF=
type "$MELD" >/dev/null 2>&1 || MELD=

# Hack for Solaris: the shell builtin test may lack -nt, so locate a
# standalone test binary if one exists
TEST="/usr/bin/test"
type "$TEST" >/dev/null 2>&1 || TEST="/bin/test"
type "$TEST" >/dev/null 2>&1 || TEST="test"

# random part of names
RAND="$RANDOM$RANDOM"

# temporary directory for diff+patch merge
HGTMP="${TMPDIR-/tmp}/hgmerge.$RAND"

# backup file
BACKUP="$LOCAL.orig.$RAND"

# file used to test for file change
CHGTEST="$LOCAL.chg.$RAND"
+# put all your required cleanup here
cleanup() {
    # remove backup, change-test file and the diff+patch temp dir
    rm -f "$BACKUP" "$CHGTEST"
    rm -rf "$HGTMP"
}
+
+# functions concerning program exit
success() {
    # merge result accepted: clean up and report success to hg
    cleanup
    exit 0
}
+
failure() {
    echo "merge failed" 1>&2
    # restore the untouched original before giving up
    mv "$BACKUP" "$LOCAL"
    cleanup
    exit 1
}
+
+# Ask if the merge was successful
ask_if_merged() {
    # the file's mtime did not change, so we cannot tell whether the
    # merge happened; loop until the user gives a definite answer
    while :; do
        echo "$LOCAL seems unchanged."
        echo "Was the merge successful? [y/n]"
        read answer
        case "$answer" in
        [yY]*) success;;
        [nN]*) failure;;
        esac
    done
}
+
# Clean up when interrupted
trap "failure" 1 2 3 6 15 # HUP INT QUIT ABRT TERM

# Back up our file (and try hard to keep the mtime unchanged)
mv "$LOCAL" "$BACKUP"
cp "$BACKUP" "$LOCAL"

# Attempt to do a non-interactive merge
if [ -n "$MERGE" -o -n "$DIFF3" ]; then
    if [ -n "$MERGE" ]; then
        $MERGE "$LOCAL" "$BASE" "$OTHER" 2> /dev/null && success
    elif [ -n "$DIFF3" ]; then
        $DIFF3 -m "$BACKUP" "$BASE" "$OTHER" > "$LOCAL" && success
    fi
    # exit status 1 just means conflicts; fall through to the
    # interactive tools.  anything greater is a hard error.
    if [ $? -gt 1 ]; then
        echo "automatic merge failed! Exiting." 1>&2
        failure
    fi
fi

# on MacOS X try FileMerge.app, shipped with Apple's developer tools
if [ -n "$FILEMERGE" ]; then
    cp "$BACKUP" "$LOCAL"
    cp "$BACKUP" "$CHGTEST"
    # filemerge prefers the right by default
    $FILEMERGE -left "$OTHER" -right "$LOCAL" -ancestor "$BASE" -merge "$LOCAL"
    [ $? -ne 0 ] && echo "FileMerge failed to launch" && failure
    # FileMerge's exit code is not informative; trust the mtime instead
    $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
fi

if [ -n "$DISPLAY" ]; then
    # try using kdiff3, which is fairly nice
    if [ -n "$KDIFF3" ]; then
        $KDIFF3 --auto "$BASE" "$BACKUP" "$OTHER" -o "$LOCAL" || failure
        success
    fi

    # try using tkdiff, which is a bit less sophisticated
    if [ -n "$TKDIFF" ]; then
        $TKDIFF "$BACKUP" "$OTHER" -a "$BASE" -o "$LOCAL" || failure
        success
    fi

    if [ -n "$MELD" ]; then
        cp "$BACKUP" "$CHGTEST"
        # protect our feet - meld allows us to save to the left file
        cp "$BACKUP" "$LOCAL.tmp.$RAND"
        # Meld doesn't have automatic merging, so to reduce intervention
        # use the file with conflicts
        $MELD "$LOCAL.tmp.$RAND" "$LOCAL" "$OTHER" || failure
        # Also it doesn't return good error code
        $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
    fi
fi

# Attempt to do a merge with $EDITOR
if [ -n "$MERGE" -o -n "$DIFF3" ]; then
    echo "conflicts detected in $LOCAL"
    cp "$BACKUP" "$CHGTEST"
    $EDITOR "$LOCAL" || failure
    # Some editors do not return meaningful error codes
    # Do not take any chances
    $TEST "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged
fi

# attempt to manually merge with diff and patch
if [ -n "$DIFF" -a -n "$PATCH" ]; then

    # 077 umask keeps the temp diff private to the current user
    (umask 077 && mkdir "$HGTMP") || {
        echo "Could not create temporary directory $HGTMP" 1>&2
        failure
    }

    $DIFF -u "$BASE" "$OTHER" > "$HGTMP/diff" || :
    if $PATCH "$LOCAL" < "$HGTMP/diff"; then
        success
    else
        # If rejects are empty after using the editor, merge was ok
        $EDITOR "$LOCAL" "$LOCAL.rej" || failure
        $TEST -s "$LOCAL.rej" || success
    fi
    failure
fi

echo
echo "hgmerge: unable to find any merge utility!"
echo "supported programs:"
echo "merge, FileMerge, tkdiff, kdiff3, meld, diff+patch"
echo
failure
new file mode 100644
--- /dev/null
+++ b/hgweb.cgi
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+#
+# An example CGI script to use hgweb, edit as necessary
+
import cgitb, os, sys
cgitb.enable()

# sys.path.insert(0, "/path/to/python/lib") # if not a system-wide install
from mercurial import hgweb

# edit these two values: the filesystem path of the repository and the
# name shown in the web interface
h = hgweb.hgweb("/path/to/repo", "repository name")
h.run()
new file mode 100644
--- /dev/null
+++ b/hgwebdir.cgi
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+#
+# An example CGI script to export multiple hgweb repos, edit as necessary
+
import cgitb, sys
cgitb.enable()

# sys.path.insert(0, "/path/to/python/lib") # if not a system-wide install
from mercurial import hgweb

# The config file looks like this. You can have paths to individual
# repos, collections of repos in a directory tree, or both.
#
# [paths]
# virtual/path = /real/path
# virtual/path = /real/path
#
# [collections]
# /prefix/to/strip/off = /root/of/tree/full/of/repos
#
# collections example: say directory tree /foo contains repos /foo/bar,
# /foo/quux/baz. Give this config section:
#   [collections]
#   /foo = /foo
# Then repos will list as bar and quux/baz.

# Alternatively you can pass a list of ('virtual/path', '/real/path') tuples
# or use a dictionary with entries like 'virtual/path': '/real/path'

# hgweb.config is looked up relative to the CGI script's working directory
h = hgweb.hgwebdir("hgweb.config")
h.run()
new file mode 100644
--- /dev/null
+++ b/mercurial/appendfile.py
@@ -0,0 +1,162 @@
+# appendfile.py - special classes to make repo updates atomic
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from demandload import *
+demandload(globals(), "cStringIO changelog errno manifest os tempfile util")
+
+# writes to metadata files are ordered. reads: changelog, manifest,
+# normal files. writes: normal files, manifest, changelog.
+
+# manifest contains pointers to offsets in normal files. changelog
+# contains pointers to offsets in manifest. if reader reads old
+# changelog while manifest or normal files are written, it has no
+# pointers into new parts of those files that are maybe not consistent
+# yet, so will not read them.
+
+# localrepo.addchangegroup thinks it writes changelog first, then
+# manifest, then normal files (this is order they are available, and
+# needed for computing linkrev fields), but uses appendfile to hide
+# updates from readers. data not written to manifest or changelog
+# until all normal files updated. write manifest first, then
+# changelog.
+
+# with this write ordering, readers cannot see inconsistent view of
+# repo during update.
+
class appendfile(object):
    '''implement enough of file protocol to append to revlog file.
    appended data is written to temp file. reads and seeks span real
    file and temp file. readers cannot see appended data until
    writedata called.'''

    def __init__(self, fp, tmpname):
        # fp: the real (already existing) file; tmpname: reuse an
        # existing temp file, or None to create a fresh one.
        if tmpname:
            self.tmpname = tmpname
            self.tmpfp = util.posixfile(self.tmpname, 'ab+')
        else:
            fd, self.tmpname = tempfile.mkstemp(prefix="hg-appendfile-")
            os.close(fd)
            self.tmpfp = util.posixfile(self.tmpname, 'ab+')
        self.realfp = fp
        # virtual offset: positions < realsize address the real file,
        # positions >= realsize address the temp file.
        self.offset = fp.tell()
        # real file is not written by anyone else. cache its size so
        # seek and read can be fast.
        self.realsize = util.fstat(fp).st_size
        self.name = fp.name

    def end(self):
        '''return combined size of real file plus appended data.'''
        self.tmpfp.flush() # make sure the stat is correct
        return self.realsize + util.fstat(self.tmpfp).st_size

    def tell(self):
        # current virtual offset
        return self.offset

    def flush(self):
        self.tmpfp.flush()

    def close(self):
        self.realfp.close()
        self.tmpfp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and temp file.'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset

        # position whichever underlying file the virtual offset lands in
        if self.offset < self.realsize:
            self.realfp.seek(self.offset)
        else:
            self.tmpfp.seek(self.offset - self.realsize)

    def read(self, count=-1):
        '''only trick here is reads that span real file and temp file.'''
        fp = cStringIO.StringIO()
        old_offset = self.offset
        if self.offset < self.realsize:
            # first take what the real file can supply
            s = self.realfp.read(count)
            fp.write(s)
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # continue in the temp file; reposition it only if the
            # real-file read above moved the virtual offset
            if old_offset != self.offset:
                self.tmpfp.seek(self.offset - self.realsize)
            s = self.tmpfp.read(count)
            fp.write(s)
            self.offset += len(s)
        return fp.getvalue()

    def write(self, s):
        '''append to temp file.'''
        self.tmpfp.seek(0, 2)
        self.tmpfp.write(s)
        # all writes are appends, so offset must go to end of file.
        self.offset = self.realsize + self.tmpfp.tell()
+
+class appendopener(object):
+ '''special opener for files that only read or append.'''
+
+ def __init__(self, opener):
+ self.realopener = opener
+ # key: file name, value: appendfile name
+ self.tmpnames = {}
+
+ def __call__(self, name, mode='r'):
+ '''open file.'''
+
+ assert mode in 'ra+'
+ try:
+ realfp = self.realopener(name, 'r')
+ except IOError, err:
+ if err.errno != errno.ENOENT: raise
+ realfp = self.realopener(name, 'w+')
+ tmpname = self.tmpnames.get(name)
+ fp = appendfile(realfp, tmpname)
+ if tmpname is None:
+ self.tmpnames[name] = fp.tmpname
+ return fp
+
+ def writedata(self):
+ '''copy data from temp files to real files.'''
+ # write .d file before .i file.
+ tmpnames = self.tmpnames.items()
+ tmpnames.sort()
+ for name, tmpname in tmpnames:
+ ifp = open(tmpname, 'rb')
+ ofp = self.realopener(name, 'a')
+ for chunk in util.filechunkiter(ifp):
+ ofp.write(chunk)
+ ifp.close()
+ os.unlink(tmpname)
+ del self.tmpnames[name]
+ ofp.close()
+
+ def cleanup(self):
+ '''delete temp files (this discards unwritten data!)'''
+ for tmpname in self.tmpnames.values():
+ os.unlink(tmpname)
+
+# files for changelog and manifest are in different appendopeners, so
+# not mixed up together.
+
class appendchangelog(changelog.changelog, appendopener):
    '''changelog whose writes go through appendfile temp files.

    passes itself as the opener so revlog i/o is routed through
    appendopener.__call__.'''
    def __init__(self, opener, version):
        appendopener.__init__(self, opener)
        changelog.changelog.__init__(self, self, version)
    def checkinlinesize(self, fp, tr):
        # appendfile cannot migrate inline data; disable the check.
        return
+
class appendmanifest(manifest.manifest, appendopener):
    '''manifest whose writes go through appendfile temp files.

    passes itself as the opener so revlog i/o is routed through
    appendopener.__call__.'''
    def __init__(self, opener, version):
        appendopener.__init__(self, opener)
        manifest.manifest.__init__(self, self, version)
    def checkinlinesize(self, fp, tr):
        # appendfile cannot migrate inline data; disable the check.
        return
new file mode 100644
--- /dev/null
+++ b/mercurial/archival.py
@@ -0,0 +1,173 @@
+# archival.py - revision archival for mercurial
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms of
+# the GNU General Public License, incorporated herein by reference.
+
+from demandload import *
+from i18n import gettext as _
+from node import *
+demandload(globals(), 'cStringIO os stat tarfile time util zipfile')
+
def tidyprefix(dest, prefix, suffixes):
    '''choose prefix to use for names in archive. make sure prefix is
    safe for consumers.

    when no prefix is given, derive one from the basename of dest with
    any of the given archive suffixes stripped.  raises util.Abort if
    the resulting prefix could escape the archive root.'''

    if prefix:
        prefix = prefix.replace('\\', '/')
    else:
        if not isinstance(dest, str):
            raise ValueError('dest must be string if no prefix')
        prefix = os.path.basename(dest)
        lower = prefix.lower()
        for sfx in suffixes:
            if lower.endswith(sfx):
                prefix = prefix[:-len(sfx)]
                break
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix += '/'
    # refuse prefixes that would place members outside the extraction
    # directory
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise util.Abort(_('archive prefix contains illegal components'))
    return prefix
+
class tarit:
    '''write archive to tar file or stream. can write uncompressed,
    or compress with gzip or bzip2.'''

    def __init__(self, dest, prefix, kind=''):
        # fix: the tbz2 suffix was listed without its leading dot, so
        # an archive named "foo.tbz2" produced member prefix "foo./"
        # instead of "foo/".
        self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
                                                '.tgz', '.tbz2'])
        # one timestamp, taken at archive creation, for all members
        self.mtime = int(time.time())
        if isinstance(dest, str):
            self.z = tarfile.open(dest, mode='w:'+kind)
        else:
            # 'w|' stream mode: dest is a file object that may not be
            # seekable (e.g. an http response)
            self.z = tarfile.open(mode='w|'+kind, fileobj=dest)

    def addfile(self, name, mode, data):
        '''add one member with the given permission bits and contents.'''
        i = tarfile.TarInfo(self.prefix + name)
        i.mtime = self.mtime
        i.size = len(data)
        i.mode = mode
        self.z.addfile(i, cStringIO.StringIO(data))

    def done(self):
        self.z.close()
+
class tellable:
    '''add a tell() method to a write-only stream.

    zipfile.ZipFile needs tell() on its output file; http response
    objects do not provide one, so count written bytes ourselves.'''

    def __init__(self, fp):
        self.fp = fp          # underlying stream
        self.offset = 0       # bytes written so far

    def tell(self):
        return self.offset

    def write(self, s):
        self.fp.write(s)
        self.offset += len(s)

    def __getattr__(self, key):
        # everything we do not track (flush, close, ...) passes through
        return getattr(self.fp, key)
+
class zipit:
    '''write archive to zip file or stream. can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, prefix, compress=True):
        self.prefix = tidyprefix(dest, prefix, ('.zip',))
        if not isinstance(dest, str):
            try:
                dest.tell()
            except (AttributeError, IOError):
                # stream without tell() (e.g. http response): wrap it
                # so ZipFile can track the output offset
                dest = tellable(dest)
        self.z = zipfile.ZipFile(dest, 'w',
                                 compress and zipfile.ZIP_DEFLATED or
                                 zipfile.ZIP_STORED)
        # one timestamp for all members: (year, month, day, h, m, s)
        self.date_time = time.gmtime(time.time())[:6]

    def addfile(self, name, mode, data):
        i = zipfile.ZipInfo(self.prefix + name, self.date_time)
        i.compress_type = self.z.compression
        # general-purpose flag bit 3: sizes/crc follow the data, which
        # allows writing to a non-seekable stream
        i.flag_bits = 0x08
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        # unix permission bits live in the high 16 bits of the
        # external attributes field (16L: py2 long literal)
        i.external_attr = (mode | stat.S_IFREG) << 16L
        self.z.writestr(i, data)

    def done(self):
        self.z.close()
+
class fileit:
    '''write archive as a tree of plain files in a directory.'''

    def __init__(self, name, prefix):
        if prefix:
            raise util.Abort(_('cannot give prefix when archiving to files'))
        self.basedir = name
        self.dirs = {}   # cache of directories already created
        # O_EXCL: never overwrite an existing file; O_BINARY/O_NOFOLLOW
        # used where the platform provides them
        self.oflags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY |
                       getattr(os, 'O_BINARY', 0) |
                       getattr(os, 'O_NOFOLLOW', 0))

    def addfile(self, name, mode, data):
        '''create file name under basedir with given mode and contents.'''
        destfile = os.path.join(self.basedir, name)
        destdir = os.path.dirname(destfile)
        if destdir not in self.dirs:
            if not os.path.isdir(destdir):
                os.makedirs(destdir)
            self.dirs[destdir] = 1
        # fix: close the file explicitly instead of relying on
        # refcounting to flush and close it
        f = os.fdopen(os.open(destfile, self.oflags, mode), 'wb')
        try:
            f.write(data)
        finally:
            f.close()

    def done(self):
        pass
+
# map archive kind name to a factory taking (dest, prefix)
archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, prefix: tarit(name, prefix, 'bz2'),
    'tgz': lambda name, prefix: tarit(name, prefix, 'gz'),
    'uzip': lambda name, prefix: zipit(name, prefix, False),
    'zip': zipit,
    }
+
def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix=None):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.'''

    def write(name, mode, data):
        # helper: apply filters, then hand one member to the archiver
        if matchfn and not matchfn(name): return
        if decode:
            fp = cStringIO.StringIO()
            repo.wwrite(name, data, fp)
            data = fp.getvalue()
        archiver.addfile(name, mode, data)

    archiver = archivers[kind](dest, prefix)
    mn = repo.changelog.read(node)[0]
    mf = repo.manifest.read(mn).items()
    mff = repo.manifest.readflags(mn)
    mf.sort()
    # metadata member recording which repo/revision the archive holds
    write('.hg_archival.txt', 0644,
          'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
    for filename, filenode in mf:
        # mff flags executable files: 0755 for those, 0644 otherwise
        write(filename, mff[filename] and 0755 or 0644,
              repo.file(filename).read(filenode))
    archiver.done()
new file mode 100644
--- /dev/null
+++ b/mercurial/bdiff.c
@@ -0,0 +1,361 @@
+/*
+ bdiff.c - efficient binary diff extension for Mercurial
+
+ Copyright 2005 Matt Mackall <mpm@selenic.com>
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+
+ Based roughly on Python difflib
+*/
+
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef __hpux
+#define inline
+#endif
+
+#ifdef __SUNPRO_C
+# define inline
+#endif
+
+#ifdef _WIN32
+#ifdef _MSC_VER
+#define inline __inline
+typedef unsigned long uint32_t;
+#else
+#include <stdint.h>
+#endif
/* byte-swap a 32-bit value; Windows has no arpa/inet.h to supply htonl() */
static uint32_t htonl(uint32_t x)
{
	return ((x & 0x000000ffUL) << 24) |
		((x & 0x0000ff00UL) << 8) |
		((x & 0x00ff0000UL) >> 8) |
		((x & 0xff000000UL) >> 24);
}
+#else
+#include <sys/types.h>
+#include <arpa/inet.h>
+#endif
+
/* one input line: hash, length (including newline), equivalence-chain
   link, equivalence-class slot, and a pointer into the original buffer
   (lines are not NUL-terminated) */
struct line {
	int h, len, n, e;
	const char *l;
};

/* per-b-line scratch: also doubles as a hash-table slot, where pos is
   the chain head and len the chain's popularity count */
struct pos {
	int pos, len;
};

/* a matching block: lines [a1,a2) of a correspond to lines [b1,b2) of b */
struct hunk {
	int a1, a2, b1, b2;
};

/* growable result list of hunks; head points one past the last entry */
struct hunklist {
	struct hunk *base, *head;
};
+
/* rotate a 32-bit word left by `shift` bits (used by the line hash) */
static inline uint32_t rol32(uint32_t word, unsigned int shift)
{
	return (word << shift) | (word >> (32 - shift));
}
+
/*
 * Split buffer a (len bytes) into an array of hashed lines stored at
 * *lr.  A trailing fragment without a newline counts as a line, and a
 * sentinel entry (len 0, pointing just past the buffer) is appended.
 * Returns the number of real lines, or -1 on allocation failure.
 */
int splitlines(const char *a, int len, struct line **lr)
{
	int h, i;
	const char *p, *b = a;
	struct line *l;

	/* count the lines */
	i = 1; /* extra line for sentinel */
	for (p = a; p < a + len; p++)
		if (*p == '\n' || p == a + len - 1)
			i++;

	*lr = l = (struct line *)malloc(sizeof(struct line) * i);
	if (!l)
		return -1;

	/* build the line array and calculate hashes */
	h = 0;
	for (p = a; p < a + len; p++) {
		h = *p + rol32(h, 7); /* a simple hash from GNU diff */
		if (*p == '\n' || p == a + len - 1) {
			l->len = p - b + 1;
			l->h = h * l->len; /* mix in length to cut collisions */
			l->l = b;
			l->n = -1; /* not on an equivalence chain yet */
			l++;
			b = p + 1;
			h = 0;
		}
	}

	/* set up a sentinel */
	l->h = l->len = 0;
	l->l = a + len;
	return i - 1;
}
+
+int inline cmp(struct line *a, struct line *b)
+{
+ return a->h != b->h || a->len != b->len || memcmp(a->l, b->l, a->len);
+}
+
/*
 * Link the lines of a to equivalence classes built from the lines of b.
 *
 * Builds an open-addressed hash table over b's lines, chains equal
 * lines through line.n, and records each line's table slot in line.e.
 * Each line of a is then pointed at the head of its matching chain in
 * b, or marked -1 when the line is too popular to anchor a match.
 * Returns 1 on success, 0 on allocation failure.
 */
static int equatelines(struct line *a, int an, struct line *b, int bn)
{
	int i, j, buckets = 1, t;
	struct pos *h;

	/* build a hash table of the next highest power of 2 */
	while (buckets < bn + 1)
		buckets *= 2;

	h = (struct pos *)malloc(buckets * sizeof(struct pos));
	buckets = buckets - 1; /* now a bit mask for slot indexing */
	if (!h)
		return 0;

	/* clear the hash table */
	for (i = 0; i <= buckets; i++) {
		h[i].pos = -1;
		h[i].len = 0;
	}

	/* add lines to the hash table chains */
	for (i = bn - 1; i >= 0; i--) {
		/* find the equivalence class */
		for (j = b[i].h & buckets; h[j].pos != -1;
		     j = (j + 1) & buckets)
			if (!cmp(b + i, b + h[j].pos))
				break;

		/* add to the head of the equivalence class */
		b[i].n = h[j].pos;
		b[i].e = j;
		h[j].pos = i;
		h[j].len++; /* keep track of popularity */
	}

	/* compute popularity threshold: ~1% of lines once past 200 */
	t = (bn >= 200) ? bn / 100 : bn + 1;

	/* match items in a to their equivalence class in b */
	for (i = 0; i < an; i++) {
		/* find the equivalence class */
		for (j = a[i].h & buckets; h[j].pos != -1;
		     j = (j + 1) & buckets)
			if (!cmp(a + i, b + h[j].pos))
				break;

		a[i].e = j; /* use equivalence class for quick compare */
		if (h[j].len <= t)
			a[i].n = h[j].pos; /* point to head of match list */
		else
			a[i].n = -1; /* too popular */
	}

	/* discard hash tables */
	free(h);
	return 1;
}
+
/*
 * Find the longest run of matching lines between a[a1,a2) and b[b1,b2).
 * pos[] is per-b-line scratch holding incremental run lengths.  The
 * best run is then widened over neighboring "popular" lines that
 * compare equal by equivalence class.  Start indices are written to
 * *omi and *omj; returns the match length (0 when nothing matches).
 */
static int longest_match(struct line *a, struct line *b, struct pos *pos,
			 int a1, int a2, int b1, int b2, int *omi, int *omj)
{
	int mi = a1, mj = b1, mk = 0, mb = 0, i, j, k;

	for (i = a1; i < a2; i++) {
		/* skip things before the current block */
		for (j = a[i].n; j != -1 && j < b1; j = b[j].n)
			;

		/* loop through all lines match a[i] in b */
		for (; j != -1 && j < b2; j = b[j].n) {
			/* does this extend an earlier match? */
			if (i > a1 && j > b1 && pos[j - 1].pos == i - 1)
				k = pos[j - 1].len + 1;
			else
				k = 1;
			pos[j].pos = i;
			pos[j].len = k;

			/* best match so far? */
			if (k > mk) {
				mi = i;
				mj = j;
				mk = k;
			}
		}
	}

	if (mk) {
		/* convert end positions to start positions */
		mi = mi - mk + 1;
		mj = mj - mk + 1;
	}

	/* expand match to include neighboring popular lines */
	while (mi - mb > a1 && mj - mb > b1 &&
	       a[mi - mb - 1].e == b[mj - mb - 1].e)
		mb++;
	while (mi + mk < a2 && mj + mk < b2 &&
	       a[mi + mk].e == b[mj + mk].e)
		mk++;

	*omi = mi - mb;
	*omj = mj - mb;
	return mk + mb;
}
+
/*
 * Recursively emit the matching blocks between a[a1,a2) and b[b1,b2)
 * into l, in order: recurse left of the longest match, emit it, then
 * recurse right of it.  Emits nothing when the ranges share no lines.
 */
static void recurse(struct line *a, struct line *b, struct pos *pos,
		    int a1, int a2, int b1, int b2, struct hunklist *l)
{
	int i, j, k;

	/* find the longest match in this chunk */
	k = longest_match(a, b, pos, a1, a2, b1, b2, &i, &j);
	if (!k)
		return;

	/* and recurse on the remaining chunks on either side */
	recurse(a, b, pos, a1, i, b1, j, l);
	l->head->a1 = i;
	l->head->a2 = i + k;
	l->head->b1 = j;
	l->head->b2 = j + k;
	l->head++;
	recurse(a, b, pos, i + k, a2, j + k, b2, l);
}
+
/*
 * Compute the list of matching hunks between line arrays a and b.
 * The list ends with a sentinel hunk whose a1/b1 are an/bn; the caller
 * owns and must free l.base.  On allocation failure head stays NULL
 * (or equal to base).
 *
 * NOTE(review): when equatelines() fails but the hunk malloc succeeds,
 * head == base and callers see an empty match list rather than an
 * error — confirm that degradation is intended.
 */
static struct hunklist diff(struct line *a, int an, struct line *b, int bn)
{
	struct hunklist l;
	struct pos *pos;
	int t;

	/* allocate and fill arrays */
	t = equatelines(a, an, b, bn);
	pos = (struct pos *)calloc(bn, sizeof(struct pos));
	/* we can't have more matches than lines in the shorter file */
	l.head = l.base = (struct hunk *)malloc(sizeof(struct hunk) *
	                                        ((an<bn ? an:bn) + 1));

	if (pos && l.base && t) {
		/* generate the matching block list */
		recurse(a, b, pos, 0, an, 0, bn, &l);
		l.head->a1 = an;
		l.head->b1 = bn;
		l.head++;
	}

	free(pos);
	return l;
}
+
/*
 * Python entry point: blocks(a, b) -> list of (a1, a2, b1, b2) tuples,
 * one per matching block, terminated by the sentinel hunk from diff().
 *
 * NOTE(review): the sentinel hunk only has a1/b1 assigned in diff(),
 * so its a2/b2 appear to be read uninitialized here — confirm.
 * NOTE(review): Py_BuildValue's result is not NULL-checked before
 * PyList_SetItem.
 */
static PyObject *blocks(PyObject *self, PyObject *args)
{
	PyObject *sa, *sb, *rl = NULL, *m;
	struct line *a, *b;
	struct hunklist l = {NULL, NULL};
	struct hunk *h;
	int an, bn, pos = 0;

	if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
		return NULL;

	an = splitlines(PyString_AsString(sa), PyString_Size(sa), &a);
	bn = splitlines(PyString_AsString(sb), PyString_Size(sb), &b);
	if (!a || !b)
		goto nomem;

	l = diff(a, an, b, bn);
	rl = PyList_New(l.head - l.base);
	if (!l.head || !rl)
		goto nomem;

	for (h = l.base; h != l.head; h++) {
		m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2);
		PyList_SetItem(rl, pos, m); /* steals the reference to m */
		pos++;
	}

nomem:
	free(a);
	free(b);
	free(l.base);
	return rl ? rl : PyErr_NoMemory();
}
+
/*
 * Python entry point: bdiff(a, b) -> binary patch transforming a into b.
 * The patch is a sequence of chunks, each a 12-byte header of three
 * big-endian uint32s (start and end byte offsets of the replaced span
 * in a, length of the replacement) followed by the replacement bytes
 * taken from b.  Unchanged matching blocks produce no chunk.
 */
static PyObject *bdiff(PyObject *self, PyObject *args)
{
	PyObject *sa, *sb, *result = NULL;
	struct line *al, *bl;
	struct hunklist l = {NULL, NULL};
	struct hunk *h;
	char encode[12], *rb;
	int an, bn, len = 0, la = 0, lb = 0;

	if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
		return NULL;

	an = splitlines(PyString_AsString(sa), PyString_Size(sa), &al);
	bn = splitlines(PyString_AsString(sb), PyString_Size(sb), &bl);
	if (!al || !bl)
		goto nomem;

	l = diff(al, an, bl, bn);
	if (!l.head)
		goto nomem;

	/* calculate length of output */
	for (h = l.base; h != l.head; h++) {
		/* la/lb track the end of the previous matching block; a
		   gap before this block means a replacement chunk */
		if (h->a1 != la || h->b1 != lb)
			len += 12 + bl[h->b1].l - bl[lb].l;
		la = h->a2;
		lb = h->b2;
	}

	result = PyString_FromStringAndSize(NULL, len);
	if (!result)
		goto nomem;

	/* build binary patch */
	rb = PyString_AsString(result);
	la = lb = 0;

	for (h = l.base; h != l.head; h++) {
		if (h->a1 != la || h->b1 != lb) {
			len = bl[h->b1].l - bl[lb].l;
			/* byte offsets are derived from line pointers
			   relative to the start of each buffer */
			*(uint32_t *)(encode)     = htonl(al[la].l - al->l);
			*(uint32_t *)(encode + 4) = htonl(al[h->a1].l - al->l);
			*(uint32_t *)(encode + 8) = htonl(len);
			memcpy(rb, encode, 12);
			memcpy(rb + 12, bl[lb].l, len);
			rb += 12 + len;
		}
		la = h->a2;
		lb = h->b2;
	}

nomem:
	free(al);
	free(bl);
	free(l.base);
	return result ? result : PyErr_NoMemory();
}
+
static char mdiff_doc[] = "Efficient binary diff.";

/* module method table */
static PyMethodDef methods[] = {
	{"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"},
	{"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"},
	{NULL, NULL}
};

/* Python 2 extension-module entry point */
PyMODINIT_FUNC initbdiff(void)
{
	Py_InitModule3("bdiff", methods, mdiff_doc);
}
+
new file mode 100644
--- /dev/null
+++ b/mercurial/bundlerepo.py
@@ -0,0 +1,232 @@
+"""
+bundlerepo.py - repository class for viewing uncompressed bundles
+
+This provides a read-only repository interface to bundles as if
+they were part of the actual repository.
+
+Copyright 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
+
+This software may be used and distributed according to the terms
+of the GNU General Public License, incorporated herein by reference.
+"""
+
+from node import *
+from i18n import gettext as _
+from demandload import demandload
+demandload(globals(), "changegroup util os struct bz2 tempfile")
+
+import localrepo, changelog, manifest, filelog, revlog
+
class bundlerevlog(revlog.revlog):
    """A revlog overlaid with the revisions stored in a changegroup bundle.

    How it works: to retrieve a revision we need the offset of the
    revision inside the bundle file (an opened file object); that offset
    is kept in the index entry's start field.  To tell a bundle rev from
    a regular revlog rev we keep basemap, which maps each bundle rev to
    the node its stored delta is based against; chunk data for bundle
    revs is read straight from the bundle file.
    """
    def __init__(self, opener, indexfile, datafile, bundlefile,
                 linkmapper=None):
        revlog.revlog.__init__(self, opener, indexfile, datafile)
        self.bundlefile = bundlefile
        self.basemap = {}       # bundle rev -> node its delta is against
        def chunkpositer():
            # yield each chunk together with its start offset in the file
            for chunk in changegroup.chunkiter(bundlefile):
                pos = bundlefile.tell()
                yield chunk, pos - len(chunk)
        n = self.count()
        prev = None
        for chunk, start in chunkpositer():
            size = len(chunk)
            if size < 80:
                raise util.Abort("invalid changegroup")
            # each chunk starts with an 80-byte header: node, p1, p2, cs
            start += 80
            size -= 80
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            if node in self.nodemap:
                # revision already present; it becomes the next delta base
                prev = node
                continue
            for p in (p1, p2):
                if not p in self.nodemap:
                    # fix: report the parent that is actually missing,
                    # not unconditionally p1
                    raise revlog.RevlogError(_("unknown parent %s")
                                             % short(p))
            if linkmapper is None:
                link = n
            else:
                link = linkmapper(cs)

            if not prev:
                prev = p1
            # start, size, base is not used, link, p1, p2, delta ref
            if self.version == 0:
                e = (start, size, None, link, p1, p2, node)
            else:
                e = (self.offset_type(start, 0), size, -1, None, link,
                     self.rev(p1), self.rev(p2), node)
            self.basemap[n] = prev
            self.index.append(e)
            self.nodemap[node] = n
            prev = node
            n += 1

    def bundle(self, rev):
        """is rev from the bundle"""
        if rev < 0:
            return False
        return rev in self.basemap

    def bundlebase(self, rev):
        """return the node this bundle rev's delta is stored against"""
        return self.basemap[rev]

    def chunk(self, rev, df=None, cachelen=4096):
        # Warning: in case of bundle, the diff is against bundlebase,
        # not against rev - 1
        # XXX: could use some caching
        if not self.bundle(rev):
            return revlog.revlog.chunk(self, rev, df, cachelen)
        self.bundlefile.seek(self.start(rev))
        return self.bundlefile.read(self.length(rev))

    def revdiff(self, rev1, rev2):
        """return or calculate a delta between two revisions"""
        if self.bundle(rev1) and self.bundle(rev2):
            # hot path for bundle: the stored delta already is rev1->rev2
            revb = self.rev(self.bundlebase(rev2))
            if revb == rev1:
                return self.chunk(rev2)
        elif not self.bundle(rev1) and not self.bundle(rev2):
            # fix: delegate to the parent's delta computation; the old
            # call revlog.revlog.chunk(self, rev1, rev2) passed rev2 as
            # chunk()'s `df` file-handle parameter
            return revlog.revlog.revdiff(self, rev1, rev2)

        # slow path: materialize both texts and diff them
        return self.diff(self.revision(self.node(rev1)),
                         self.revision(self.node(rev2)))

    def revision(self, node):
        """return the uncompressed text of the given node"""
        if node == nullid: return ""

        text = None
        chain = []
        iter_node = node
        rev = self.rev(iter_node)
        # walk delta bases back until we reach a rev stored in the real
        # revlog (or a cached text), collecting the bundle revs to apply
        while self.bundle(rev):
            if self.cache and self.cache[0] == iter_node:
                text = self.cache[2]
                break
            chain.append(rev)
            iter_node = self.bundlebase(rev)
            rev = self.rev(iter_node)
        if text is None:
            text = revlog.revlog.revision(self, iter_node)

        # apply the deltas innermost-first to rebuild the text
        while chain:
            delta = self.chunk(chain.pop())
            text = self.patches(text, [delta])

        p1, p2 = self.parents(node)
        if node != revlog.hash(text, p1, p2):
            raise revlog.RevlogError(_("integrity check failed on %s:%d")
                                     % (self.datafile, self.rev(node)))

        self.cache = (node, self.rev(node), text)
        return text

    # bundles are read-only: all mutating operations are disabled
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        raise NotImplementedError
    def addgroup(self, revs, linkmapper, transaction, unique=0):
        raise NotImplementedError
    def strip(self, rev, minlink):
        raise NotImplementedError
    def checksize(self):
        raise NotImplementedError
+
class bundlechangelog(bundlerevlog, changelog.changelog):
    """Changelog view overlaid with the changesets in a bundle."""
    def __init__(self, opener, bundlefile):
        changelog.changelog.__init__(self, opener)
        bundlerevlog.__init__(self, opener, "00changelog.i", "00changelog.d",
                              bundlefile)
+
class bundlemanifest(bundlerevlog, manifest.manifest):
    """Manifest view overlaid with the manifest revisions in a bundle."""
    def __init__(self, opener, bundlefile, linkmapper):
        manifest.manifest.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
                              bundlefile, linkmapper)
+
class bundlefilelog(bundlerevlog, filelog.filelog):
    """Filelog view overlaid with the file revisions in a bundle."""
    def __init__(self, opener, path, bundlefile, linkmapper):
        filelog.filelog.__init__(self, opener, path)
        bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
                              bundlefile, linkmapper)
+
class bundlerepository(localrepo.localrepository):
    """Read-only repository overlaying a bundle file on a local repo.

    A compressed (HG10BZ) bundle is first decompressed into a temporary
    uncompressed (HG10UN) file; the temp file is removed in __del__.
    """
    def __init__(self, ui, path, bundlename):
        localrepo.localrepository.__init__(self, ui, path)
        self.tempfile = None
        self.bundlefile = open(bundlename, "rb")
        # the header is "HG10" plus a two-byte compression tag
        header = self.bundlefile.read(6)
        if not header.startswith("HG"):
            raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
        elif not header.startswith("HG10"):
            raise util.Abort(_("%s: unknown bundle version") % bundlename)
        elif header == "HG10BZ":
            # decompress into a temporary HG10UN file under the repo path
            fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
                                            suffix=".hg10un", dir=self.path)
            self.tempfile = temp
            fptemp = os.fdopen(fdtemp, 'wb')
            def generator(f):
                zd = bz2.BZ2Decompressor()
                # the stored stream omits the leading "BZ" magic; feed it
                zd.decompress("BZ")
                for chunk in f:
                    yield zd.decompress(chunk)
            gen = generator(util.filechunkiter(self.bundlefile, 4096))

            try:
                fptemp.write("HG10UN")
                for chunk in gen:
                    fptemp.write(chunk)
            finally:
                fptemp.close()
                self.bundlefile.close()

            self.bundlefile = open(self.tempfile, "rb")
            # seek right after the header
            self.bundlefile.seek(6)
        elif header == "HG10UN":
            # nothing to do
            pass
        else:
            raise util.Abort(_("%s: unknown bundle compression type")
                             % bundlename)
        self.changelog = bundlechangelog(self.opener, self.bundlefile)
        self.manifest = bundlemanifest(self.opener, self.bundlefile,
                                       self.changelog.rev)
        # dict with the mapping 'filename' -> position in the bundle
        self.bundlefilespos = {}
        while 1:
            # each file group begins with a chunk holding the file name
            f = changegroup.getchunk(self.bundlefile)
            if not f:
                break
            self.bundlefilespos[f] = self.bundlefile.tell()
            # skip over this file's delta chunks to reach the next group
            for c in changegroup.chunkiter(self.bundlefile):
                pass

    def dev(self):
        # bundle repos have no backing device number
        return -1

    def file(self, f):
        """Return a filelog, overlaid with bundle revisions when present."""
        if f[0] == '/':
            f = f[1:]
        if f in self.bundlefilespos:
            self.bundlefile.seek(self.bundlefilespos[f])
            return bundlefilelog(self.opener, f, self.bundlefile,
                                 self.changelog.rev)
        else:
            return filelog.filelog(self.opener, f)

    def close(self):
        """Close assigned bundle file immediately."""
        self.bundlefile.close()

    def __del__(self):
        # NOTE(review): assumes __init__ got far enough to set
        # self.bundlefile/self.tempfile; an open() failure would make
        # this raise during finalization — confirm.
        if not self.bundlefile.closed:
            self.bundlefile.close()
        if self.tempfile is not None:
            os.unlink(self.tempfile)
new file mode 100644
--- /dev/null
+++ b/mercurial/byterange.py
@@ -0,0 +1,450 @@
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the
+# Free Software Foundation, Inc.,
+# 59 Temple Place, Suite 330,
+# Boston, MA 02111-1307 USA
+
+# This file is part of urlgrabber, a high-level cross-protocol url-grabber
+# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
+
+# $Id: byterange.py,v 1.9 2005/02/14 21:55:07 mstenner Exp $
+
+import os
+import stat
+import urllib
+import urllib2
+import rfc822
+
+try:
+ from cStringIO import StringIO
+except ImportError, msg:
+ from StringIO import StringIO
+
# derives from IOError so callers catching IO failures also see it
class RangeError(IOError):
    """Error raised when an unsatisfiable range is requested."""
    pass
+
class HTTPRangeHandler(urllib2.BaseHandler):
    """urllib2 handler that makes HTTP Range requests usable.

    The Range header is a plain HTTP feature; all this handler does is
    teach urllib2 that a "206 Partial Content" response from the server
    is the expected success case, and turn "416 Requested Range Not
    Satisfiable" into a RangeError.

    Example:
        import urllib2
        import byterange

        range_handler = range.HTTPRangeHandler()
        opener = urllib2.build_opener(range_handler)

        # install it
        urllib2.install_opener(opener)

        # create Request and set Range header
        req = urllib2.Request('http://www.python.org/')
        req.header['Range'] = 'bytes=30-50'
        f = urllib2.urlopen(req)
    """

    def http_error_206(self, req, fp, code, msg, hdrs):
        """Accept a 206 Partial Content response as success."""
        resp = urllib.addinfourl(fp, hdrs, req.get_full_url())
        resp.code = code
        resp.msg = msg
        return resp

    def http_error_416(self, req, fp, code, msg, hdrs):
        """Translate HTTP's Range Not Satisfiable error into RangeError."""
        raise RangeError('Requested Range Not Satisfiable')
+
class RangeableFileObject:
    """File object wrapper to enable raw range handling.
    This was implemented primarily for handling range
    specifications for file:// urls. This object effectively makes
    a file object look like it consists only of a range of bytes in
    the stream.

    Examples:
        # expose 10 bytes, starting at byte position 20, from
        # /etc/aliases.
        >>> fo = RangeableFileObject(file('/etc/passwd', 'r'), (20,30))
        # seek seeks within the range (to position 23 in this case)
        >>> fo.seek(3)
        # tell tells where you are _within the range_ (position 3 in
        # this case)
        >>> fo.tell()
        # read EOFs if an attempt is made to read past the last
        # byte in the range. the following will return only 7 bytes.
        >>> fo.read(30)
    """

    def __init__(self, fo, rangetup):
        """Create a RangeableFileObject.
        fo       -- a file like object. only the read() method need be
                    supported but supporting an optimized seek() is
                    preferable.
        rangetup -- a (firstbyte,lastbyte) tuple specifying the range
                    to work over.
        The file object provided is assumed to be at byte offset 0.
        """
        self.fo = fo
        (self.firstbyte, self.lastbyte) = range_tuple_normalize(rangetup)
        # realpos tracks the underlying file's absolute position
        self.realpos = 0
        self._do_seek(self.firstbyte)

    def __getattr__(self, name):
        """This effectively allows us to wrap at the instance level.
        Any attribute not found in _this_ object will be searched for
        in self.fo. This includes methods."""
        if hasattr(self.fo, name):
            return getattr(self.fo, name)
        raise AttributeError, name

    def tell(self):
        """Return the position within the range.
        This is different from fo.seek in that position 0 is the
        first byte position of the range tuple. For example, if
        this object was created with a range tuple of (500,899),
        tell() will return 0 when at byte position 500 of the file.
        """
        return (self.realpos - self.firstbyte)

    def seek(self,offset,whence=0):
        """Seek within the byte range.
        Positioning is identical to that described under tell().
        """
        assert whence in (0, 1, 2)
        if whence == 0:   # absolute seek
            realoffset = self.firstbyte + offset
        elif whence == 1: # relative seek
            realoffset = self.realpos + offset
        elif whence == 2: # absolute from end of file
            # XXX: are we raising the right Error here?
            raise IOError('seek from end of file not supported.')

        # do not allow seek past lastbyte in range
        if self.lastbyte and (realoffset >= self.lastbyte):
            realoffset = self.lastbyte

        self._do_seek(realoffset - self.realpos)

    def read(self, size=-1):
        """Read within the range.
        This method will limit the size read based on the range.
        """
        size = self._calc_read_size(size)
        rslt = self.fo.read(size)
        self.realpos += len(rslt)
        return rslt

    def readline(self, size=-1):
        """Read lines within the range.
        This method will limit the size read based on the range.
        """
        size = self._calc_read_size(size)
        rslt = self.fo.readline(size)
        self.realpos += len(rslt)
        return rslt

    def _calc_read_size(self, size):
        """Handles calculating the amount of data to read based on
        the range.
        """
        if self.lastbyte:
            if size > -1:
                # clamp the request so it cannot cross lastbyte
                if ((self.realpos + size) >= self.lastbyte):
                    size = (self.lastbyte - self.realpos)
            else:
                size = (self.lastbyte - self.realpos)
        return size

    def _do_seek(self,offset):
        """Seek based on whether wrapped object supports seek().
        offset is relative to the current position (self.realpos).
        """
        assert offset >= 0
        if not hasattr(self.fo, 'seek'):
            self._poor_mans_seek(offset)
        else:
            self.fo.seek(self.realpos + offset)
        self.realpos+= offset

    def _poor_mans_seek(self,offset):
        """Seek by calling the wrapped file objects read() method.
        This is used for file like objects that do not have native
        seek support. The wrapped objects read() method is called
        to manually seek to the desired position.
        offset -- read this number of bytes from the wrapped
                  file object.
        raise RangeError if we encounter EOF before reaching the
        specified offset.
        """
        pos = 0
        bufsize = 1024
        while pos < offset:
            if (pos + bufsize) > offset:
                bufsize = offset - pos
            buf = self.fo.read(bufsize)
            if len(buf) != bufsize:
                raise RangeError('Requested Range Not Satisfiable')
            pos+= bufsize
+
class FileRangeHandler(urllib2.FileHandler):
    """FileHandler subclass that adds Range support.
    This class handles Range headers exactly like an HTTP
    server would.
    """
    def open_local_file(self, req):
        import mimetypes
        import mimetools
        host = req.get_host()
        file = req.get_selector()
        localfile = urllib.url2pathname(file)
        stats = os.stat(localfile)
        size = stats[stat.ST_SIZE]
        modified = rfc822.formatdate(stats[stat.ST_MTIME])
        mtype = mimetypes.guess_type(file)[0]
        if host:
            host, port = urllib.splitport(host)
            # NOTE(review): `socket` is imported further down this
            # module, so it resolves at call time; get_names() comes
            # from the urllib2.FileHandler base class — confirm both.
            if port or socket.gethostbyname(host) not in self.get_names():
                raise urllib2.URLError('file not on local host')
        fo = open(localfile,'rb')
        brange = req.headers.get('Range',None)
        brange = range_header_to_tuple(brange)
        assert brange != ()
        if brange:
            (fb,lb) = brange
            if lb == '': lb = size
            if fb < 0 or fb > size or lb > size:
                raise RangeError('Requested Range Not Satisfiable')
            # expose only the requested slice of the file
            size = (lb - fb)
            fo = RangeableFileObject(fo, (fb,lb))
        headers = mimetools.Message(StringIO(
            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified)))
        return urllib.addinfourl(fo, headers, 'file:'+file)
+
+
+# FTP Range Support
+# Unfortunately, a large amount of base FTP code had to be copied
+# from urllib and urllib2 in order to insert the FTP REST command.
+# Code modifications for range support have been commented as
+# follows:
+# -- range support modifications start/end here
+
+from urllib import splitport, splituser, splitpasswd, splitattr, \
+ unquote, addclosehook, addinfourl
+import ftplib
+import socket
+import sys
+import mimetypes
+import mimetools
+
class FTPRangeHandler(urllib2.FTPHandler):
    """FTPHandler subclass that honors Range headers via FTP REST."""
    def ftp_open(self, req):
        host = req.get_host()
        if not host:
            raise IOError, ('ftp error', 'no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')

        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise urllib2.URLError(msg)
        # split the selector into directory path, file name and attributes
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(unquote, dirs)
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs)
            # 'I' = binary transfer for files, 'D' = directory listing
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitattr(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()

            # -- range support modifications start here
            rest = None
            range_tup = range_header_to_tuple(req.headers.get('Range',None))
            assert range_tup != ()
            if range_tup:
                (fb,lb) = range_tup
                if fb > 0: rest = fb
            # -- range support modifications end here

            fp, retrlen = fw.retrfile(file, type, rest)

            # -- range support modifications start here
            if range_tup:
                (fb,lb) = range_tup
                if lb == '':
                    if retrlen is None or retrlen == 0:
                        raise RangeError('Requested Range Not Satisfiable due to unobtainable file length.')
                    lb = retrlen
                    retrlen = lb - fb
                    if retrlen < 0:
                        # beginning of range is larger than file
                        raise RangeError('Requested Range Not Satisfiable')
                else:
                    retrlen = lb - fb
                    # the server already seeked to fb via REST, so the
                    # remaining stream starts at offset 0
                    fp = RangeableFileObject(fp, (0,retrlen))
            # -- range support modifications end here

            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-Type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-Length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors, msg:
            raise IOError, ('ftp error', msg), sys.exc_info()[2]

    def connect_ftp(self, user, passwd, host, port, dirs):
        """Open a REST-capable ftpwrapper connection."""
        fw = ftpwrapper(user, passwd, host, port, dirs)
        return fw
+
class ftpwrapper(urllib.ftpwrapper):
    # range support note:
    # this ftpwrapper code is copied directly from
    # urllib. The only enhancement is to add the rest
    # argument and pass it on to ftp.ntransfercmd
    def retrfile(self, file, type, rest=None):
        """Retrieve `file` (or a directory listing), optionally resuming
        at byte offset `rest` via the FTP REST command.
        Returns (fp, length)."""
        self.endtransfer()
        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
        else: cmd = 'TYPE ' + type; isdir = 0
        try:
            self.ftp.voidcmd(cmd)
        except ftplib.all_errors:
            # control connection may be gone; reconnect and retry once
            self.init()
            self.ftp.voidcmd(cmd)
        conn = None
        if file and not isdir:
            # Use nlst to see if the file exists at all
            try:
                self.ftp.nlst(file)
            except ftplib.error_perm, reason:
                raise IOError, ('ftp error', reason), sys.exc_info()[2]
            # Restore the transfer mode!
            self.ftp.voidcmd(cmd)
            # Try to retrieve as a file
            try:
                cmd = 'RETR ' + file
                conn = self.ftp.ntransfercmd(cmd, rest)
            except ftplib.error_perm, reason:
                if str(reason).startswith('501'):
                    # workaround for REST not supported error
                    fp, retrlen = self.retrfile(file, type)
                    fp = RangeableFileObject(fp, (rest,''))
                    return (fp, retrlen)
                elif not str(reason).startswith('550'):
                    raise IOError, ('ftp error', reason), sys.exc_info()[2]
        if not conn:
            # Set transfer mode to ASCII!
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing
            if file: cmd = 'LIST ' + file
            else: cmd = 'LIST'
            conn = self.ftp.ntransfercmd(cmd)
        self.busy = 1
        # Pass back both a suitably decorated object and a retrieval length
        return (addclosehook(conn[0].makefile('rb'),
                             self.endtransfer), conn[1])
+
+
+####################################################################
+# Range Tuple Functions
+# XXX: These range tuple functions might go better in a class.
+
# compiled lazily on first use
_rangere = None
def range_header_to_tuple(range_header):
    """Parse a Range header value into a (firstbyte, lastbyte) tuple.

    Range headers have the form "bytes=<firstbyte>-<lastbyte>".  The
    returned lastbyte is exclusive; when it is missing from the header
    it comes back as '' in the tuple.

    Returns None when range_header is None, and () when the value does
    not conform to the range spec pattern.
    """
    global _rangere
    if range_header is None:
        return None
    if _rangere is None:
        import re
        _rangere = re.compile(r'^bytes=(\d{1,})-(\d*)')
    match = _rangere.match(range_header)
    if not match:
        return ()
    tup = range_tuple_normalize(match.group(1, 2))
    if tup and tup[1]:
        # header ranges are inclusive; make the upper bound exclusive
        tup = (tup[0], tup[1] + 1)
    return tup
+
def range_tuple_to_header(range_tup):
    """Render a (firstbyte, lastbyte) tuple as a Range header value.

    Returns "bytes=<firstbyte>-<lastbyte>" (with an inclusive, possibly
    empty, last byte), or None when no Range header is needed.
    """
    if range_tup is None:
        return None
    range_tup = range_tuple_normalize(range_tup)
    if not range_tup:
        return None
    first, last = range_tup
    if last:
        # our tuples use an exclusive upper bound; headers are inclusive
        last = last - 1
    return 'bytes=%s-%s' % (first, last)
+
def range_tuple_normalize(range_tup):
    """Normalize a (first_byte, last_byte) range tuple.

    The first element of the result is always an int; the second is ''
    (meaning: the last byte) or an int.  Returns None when range_tup is
    None or when the normalized tuple would be (0, ''), which is
    equivalent to retrieving the entire file.  Raises RangeError when
    the last byte precedes the first.
    """
    if range_tup is None:
        return None
    # first byte: missing or empty means start of file
    first = range_tup[0]
    if first in (None, ''):
        first = 0
    else:
        first = int(first)
    # last byte: a missing or None entry means "through the last byte"
    if len(range_tup) > 1 and range_tup[1] is not None:
        last = range_tup[1]
    else:
        last = ''
    if last != '':
        last = int(last)
    # a range covering the whole file needs no range at all
    if (first, last) == (0, ''):
        return None
    # check that the range is valid
    if last < first:
        raise RangeError('Invalid byte range: %s-%s' % (first, last))
    return (first, last)
new file mode 100644
--- /dev/null
+++ b/mercurial/changegroup.py
@@ -0,0 +1,44 @@
+"""
+changegroup.py - Mercurial changegroup manipulation functions
+
+ Copyright 2006 Matt Mackall <mpm@selenic.com>
+
+This software may be used and distributed according to the terms
+of the GNU General Public License, incorporated herein by reference.
+"""
+import struct
+from i18n import gettext as _
+from demandload import *
+demandload(globals(), "util")
+
def getchunk(source):
    """Read one length-prefixed chunk from source.

    A chunk is a 4-byte big-endian length (which counts the 4 header
    bytes themselves) followed by the payload.  Returns "" at end of
    stream or on the zero-length terminator chunk; aborts when the
    stream ends mid-payload.
    """
    header = source.read(4)
    if not header:
        return ""
    length = struct.unpack(">l", header)[0]
    if length <= 4:
        # terminator chunk (or a nonsensical length)
        return ""
    payload = source.read(length - 4)
    if len(payload) < length - 4:
        raise util.Abort(_("premature EOF reading chunk"
                           " (got %d bytes, expected %d)")
                         % (len(payload), length - 4))
    return payload
+
def chunkiter(source):
    """Yield each chunk of source until the terminator chunk is seen."""
    chunk = getchunk(source)
    while chunk:
        yield chunk
        chunk = getchunk(source)
+
def genchunk(data):
    """Prefix data with its chunk header: a 4-byte big-endian length
    that includes the header itself."""
    length = len(data) + 4
    header = struct.pack(">l", length)
    return "%s%s" % (header, data)
+
def closechunk():
    """Return the zero-length chunk that terminates a changegroup."""
    terminator = struct.pack(">l", 0)
    return terminator
+
new file mode 100644
--- /dev/null
+++ b/mercurial/changelog.py
@@ -0,0 +1,59 @@
+# changelog.py - changelog class for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from revlog import *
+from i18n import gettext as _
+from demandload import demandload
+demandload(globals(), "os time util")
+
class changelog(revlog):
    """Revlog storing one changeset description per revision."""
    def __init__(self, opener, defversion=REVLOGV0):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d",
                        defversion)

    def extract(self, text):
        """Parse changeset text into (manifest, user, (time, tz), files, desc).

        Layout: manifest hex node, user, "time tz", zero or more file
        names, blank line, free-form description.
        """
        if not text:
            return (nullid, "", (0, 0), [], "")
        last = text.index("\n\n")
        desc = text[last + 2:]
        l = text[:last].splitlines()
        manifest = bin(l[0])
        user = l[1]
        date = l[2].split(' ')
        time = float(date.pop(0))
        try:
            # various tools did silly things with the time zone field.
            # NOTE(review): this bare except also hides unrelated errors
            # (e.g. IndexError, KeyboardInterrupt) — consider narrowing
            # to ValueError.
            timezone = int(date[0])
        except:
            timezone = 0
        files = l[3:]
        return (manifest, user, (time, timezone), files, desc)

    def read(self, node):
        """Return the parsed changeset for node (see extract)."""
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None):
        """Add a changeset and return its node.

        date, when given, must be a "seconds offset" string and is
        validated; otherwise the current time is used.
        """
        if date:
            # validate explicit (probably user-specified) date and
            # time zone offset. values must fit in signed 32 bits for
            # current 32-bit linux runtimes. timezones go from UTC-12
            # to UTC+14
            try:
                when, offset = map(int, date.split(' '))
            except ValueError:
                raise ValueError(_('invalid date: %r') % date)
            if abs(when) > 0x7fffffff:
                raise ValueError(_('date exceeds 32 bits: %d') % when)
            if offset < -50400 or offset > 43200:
                raise ValueError(_('impossible time zone offset: %d') % offset)
        else:
            date = "%d %d" % util.makedate()
        list.sort()
        l = [hex(manifest), user, date] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)
new file mode 100644
--- /dev/null
+++ b/mercurial/commands.py
@@ -0,0 +1,3481 @@
+# commands.py - command processing for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from demandload import demandload
+from node import *
+from i18n import gettext as _
+demandload(globals(), "os re sys signal shutil imp urllib pdb")
+demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
+demandload(globals(), "fnmatch mdiff random signal tempfile time")
+demandload(globals(), "traceback errno socket version struct atexit sets bz2")
+demandload(globals(), "archival changegroup")
+demandload(globals(), "mercurial.hgweb.server:create_server")
+demandload(globals(), "mercurial.hgweb:hgweb,hgwebdir")
+
+class UnknownCommand(Exception):
+ """Exception raised if command is not in the command table."""
+class AmbiguousCommand(Exception):
+ """Exception raised if command shortcut matches more than one command."""
+
+def bail_if_changed(repo):
+ modified, added, removed, deleted, unknown = repo.changes()
+ if modified or added or removed or deleted:
+ raise util.Abort(_("outstanding uncommitted changes"))
+
+def filterfiles(filters, files):
+ l = [x for x in files if x in filters]
+
+ for t in filters:
+ if t and t[-1] != "/":
+ t += "/"
+ l += [x for x in files if x.startswith(t)]
+ return l
+
+def relpath(repo, args):
+ cwd = repo.getcwd()
+ if cwd:
+ return [util.normpath(os.path.join(cwd, x)) for x in args]
+ return args
+
+def matchpats(repo, pats=[], opts={}, head=''):
+ cwd = repo.getcwd()
+ if not pats and cwd:
+ opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
+ opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
+ cwd = ''
+ return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
+ opts.get('exclude'), head)
+
+def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
+ files, matchfn, anypats = matchpats(repo, pats, opts, head)
+ exact = dict(zip(files, files))
+ def walk():
+ for src, fn in repo.walk(node=node, files=files, match=matchfn,
+ badmatch=badmatch):
+ yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
+ return files, matchfn, walk()
+
+def walk(repo, pats, opts, node=None, head='', badmatch=None):
+ files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
+ for r in results:
+ yield r
+
+def walkchangerevs(ui, repo, pats, opts):
+ '''Iterate over files and the revs they changed in.
+
+ Callers most commonly need to iterate backwards over the history
+ they are interested in. Doing so has awful (quadratic-looking)
+ performance, so we use iterators in a "windowed" way.
+
+ We walk a window of revisions in the desired order. Within the
+ window, we first walk forwards to gather data, then in the desired
+ order (usually backwards) to display it.
+
+ This function returns an (iterator, getchange, matchfn) tuple. The
+ getchange function returns the changelog entry for a numeric
+ revision. The iterator yields 3-tuples. They will be of one of
+ the following forms:
+
+ "window", incrementing, lastrev: stepping through a window,
+ positive if walking forwards through revs, last rev in the
+ sequence iterated over - use to reset state for the current window
+
+ "add", rev, fns: out-of-order traversal of the given file names
+ fns, which changed during revision rev - use to gather data for
+ possible display
+
+ "iter", rev, None: in-order traversal of the revs earlier iterated
+ over with "add" - use to display data'''
+
+ def increasing_windows(start, end, windowsize=8, sizelimit=512):
+ if start < end:
+ while start < end:
+ yield start, min(windowsize, end-start)
+ start += windowsize
+ if windowsize < sizelimit:
+ windowsize *= 2
+ else:
+ while start > end:
+ yield start, min(windowsize, start-end-1)
+ start -= windowsize
+ if windowsize < sizelimit:
+ windowsize *= 2
+
+
+ files, matchfn, anypats = matchpats(repo, pats, opts)
+
+ if repo.changelog.count() == 0:
+ return [], False, matchfn
+
+ revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
+ wanted = {}
+ slowpath = anypats
+ fncache = {}
+
+ chcache = {}
+ def getchange(rev):
+ ch = chcache.get(rev)
+ if ch is None:
+ chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
+ return ch
+
+ if not slowpath and not files:
+ # No files, no patterns. Display all revs.
+ wanted = dict(zip(revs, revs))
+ if not slowpath:
+ # Only files, no patterns. Check the history of each file.
+ def filerevgen(filelog):
+ for i, window in increasing_windows(filelog.count()-1, -1):
+ revs = []
+ for j in xrange(i - window, i + 1):
+ revs.append(filelog.linkrev(filelog.node(j)))
+ revs.reverse()
+ for rev in revs:
+ yield rev
+
+ minrev, maxrev = min(revs), max(revs)
+ for file_ in files:
+ filelog = repo.file(file_)
+ # A zero count may be a directory or deleted file, so
+ # try to find matching entries on the slow path.
+ if filelog.count() == 0:
+ slowpath = True
+ break
+ for rev in filerevgen(filelog):
+ if rev <= maxrev:
+ if rev < minrev:
+ break
+ fncache.setdefault(rev, [])
+ fncache[rev].append(file_)
+ wanted[rev] = 1
+ if slowpath:
+ # The slow path checks files modified in every changeset.
+ def changerevgen():
+ for i, window in increasing_windows(repo.changelog.count()-1, -1):
+ for j in xrange(i - window, i + 1):
+ yield j, getchange(j)[3]
+
+ for rev, changefiles in changerevgen():
+ matches = filter(matchfn, changefiles)
+ if matches:
+ fncache[rev] = matches
+ wanted[rev] = 1
+
+ def iterate():
+ for i, window in increasing_windows(0, len(revs)):
+ yield 'window', revs[0] < revs[-1], revs[-1]
+ nrevs = [rev for rev in revs[i:i+window]
+ if rev in wanted]
+ srevs = list(nrevs)
+ srevs.sort()
+ for rev in srevs:
+ fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
+ yield 'add', rev, fns
+ for rev in nrevs:
+ yield 'iter', rev, None
+ return iterate(), getchange, matchfn
+
+revrangesep = ':'
+
+def revfix(repo, val, defval):
+ '''turn user-level id of changeset into rev number.
+ user-level id can be tag, changeset, rev number, or negative rev
+ number relative to number of revs (-1 is tip, etc).'''
+ if not val:
+ return defval
+ try:
+ num = int(val)
+ if str(num) != val:
+ raise ValueError
+ if num < 0:
+ num += repo.changelog.count()
+ if num < 0:
+ num = 0
+ elif num >= repo.changelog.count():
+ raise ValueError
+ except ValueError:
+ try:
+ num = repo.changelog.rev(repo.lookup(val))
+ except KeyError:
+ raise util.Abort(_('invalid revision identifier %s'), val)
+ return num
+
+def revpair(ui, repo, revs):
+ '''return pair of nodes, given list of revisions. second item can
+ be None, meaning use working dir.'''
+ if not revs:
+ return repo.dirstate.parents()[0], None
+ end = None
+ if len(revs) == 1:
+ start = revs[0]
+ if revrangesep in start:
+ start, end = start.split(revrangesep, 1)
+ start = revfix(repo, start, 0)
+ end = revfix(repo, end, repo.changelog.count() - 1)
+ else:
+ start = revfix(repo, start, None)
+ elif len(revs) == 2:
+ if revrangesep in revs[0] or revrangesep in revs[1]:
+ raise util.Abort(_('too many revisions specified'))
+ start = revfix(repo, revs[0], None)
+ end = revfix(repo, revs[1], None)
+ else:
+ raise util.Abort(_('too many revisions specified'))
+ if end is not None: end = repo.lookup(str(end))
+ return repo.lookup(str(start)), end
+
+def revrange(ui, repo, revs):
+ """Yield revisions as strings from a list of revision specifications."""
+ seen = {}
+ for spec in revs:
+ if spec.find(revrangesep) >= 0:
+ start, end = spec.split(revrangesep, 1)
+ start = revfix(repo, start, 0)
+ end = revfix(repo, end, repo.changelog.count() - 1)
+ step = start > end and -1 or 1
+ for rev in xrange(start, end+step, step):
+ if rev in seen:
+ continue
+ seen[rev] = 1
+ yield str(rev)
+ else:
+ rev = revfix(repo, spec, None)
+ if rev in seen:
+ continue
+ seen[rev] = 1
+ yield str(rev)
+
+def make_filename(repo, r, pat, node=None,
+ total=None, seqno=None, revwidth=None, pathname=None):
+ node_expander = {
+ 'H': lambda: hex(node),
+ 'R': lambda: str(r.rev(node)),
+ 'h': lambda: short(node),
+ }
+ expander = {
+ '%': lambda: '%',
+ 'b': lambda: os.path.basename(repo.root),
+ }
+
+ try:
+ if node:
+ expander.update(node_expander)
+ if node and revwidth is not None:
+ expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
+ if total is not None:
+ expander['N'] = lambda: str(total)
+ if seqno is not None:
+ expander['n'] = lambda: str(seqno)
+ if total is not None and seqno is not None:
+ expander['n'] = lambda:str(seqno).zfill(len(str(total)))
+ if pathname is not None:
+ expander['s'] = lambda: os.path.basename(pathname)
+ expander['d'] = lambda: os.path.dirname(pathname) or '.'
+ expander['p'] = lambda: pathname
+
+ newname = []
+ patlen = len(pat)
+ i = 0
+ while i < patlen:
+ c = pat[i]
+ if c == '%':
+ i += 1
+ c = pat[i]
+ c = expander[c]()
+ newname.append(c)
+ i += 1
+ return ''.join(newname)
+ except KeyError, inst:
+ raise util.Abort(_("invalid format spec '%%%s' in output file name"),
+ inst.args[0])
+
+def make_file(repo, r, pat, node=None,
+ total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
+ if not pat or pat == '-':
+ return 'w' in mode and sys.stdout or sys.stdin
+ if hasattr(pat, 'write') and 'w' in mode:
+ return pat
+ if hasattr(pat, 'read') and 'r' in mode:
+ return pat
+ return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
+ pathname),
+ mode)
+
+def write_bundle(cg, filename=None, compress=True):
+ """Write a bundle file and return its filename.
+
+ Existing files will not be overwritten.
+ If no filename is specified, a temporary file is created.
+ bz2 compression can be turned off.
+ The bundle file will be deleted in case of errors.
+ """
+ class nocompress(object):
+ def compress(self, x):
+ return x
+ def flush(self):
+ return ""
+
+ fh = None
+ cleanup = None
+ try:
+ if filename:
+ if os.path.exists(filename):
+ raise util.Abort(_("file '%s' already exists"), filename)
+ fh = open(filename, "wb")
+ else:
+ fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
+ fh = os.fdopen(fd, "wb")
+ cleanup = filename
+
+ if compress:
+ fh.write("HG10")
+ z = bz2.BZ2Compressor(9)
+ else:
+ fh.write("HG10UN")
+ z = nocompress()
+ # parse the changegroup data as we go; otherwise we could block,
+ # e.g. with an sshrepo, because we don't know where the stream ends
+
+ # an empty chunkiter is the end of the changegroup
+ empty = False
+ while not empty:
+ empty = True
+ for chunk in changegroup.chunkiter(cg):
+ empty = False
+ fh.write(z.compress(changegroup.genchunk(chunk)))
+ fh.write(z.compress(changegroup.closechunk()))
+ fh.write(z.flush())
+ cleanup = None
+ return filename
+ finally:
+ if fh is not None:
+ fh.close()
+ if cleanup is not None:
+ os.unlink(cleanup)
+
+def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
+ changes=None, text=False, opts={}):
+ if not node1:
+ node1 = repo.dirstate.parents()[0]
+ # reading the data for node1 early allows it to play nicely
+ # with repo.changes and the revlog cache.
+ change = repo.changelog.read(node1)
+ mmap = repo.manifest.read(change[0])
+ date1 = util.datestr(change[2])
+
+ if not changes:
+ changes = repo.changes(node1, node2, files, match=match)
+ modified, added, removed, deleted, unknown = changes
+ if files:
+ modified, added, removed = map(lambda x: filterfiles(files, x),
+ (modified, added, removed))
+
+ if not modified and not added and not removed:
+ return
+
+ if node2:
+ change = repo.changelog.read(node2)
+ mmap2 = repo.manifest.read(change[0])
+ date2 = util.datestr(change[2])
+ def read(f):
+ return repo.file(f).read(mmap2[f])
+ else:
+ date2 = util.datestr()
+ def read(f):
+ return repo.wread(f)
+
+ if ui.quiet:
+ r = None
+ else:
+ hexfunc = ui.verbose and hex or short
+ r = [hexfunc(node) for node in [node1, node2] if node]
+
+ diffopts = ui.diffopts()
+ showfunc = opts.get('show_function') or diffopts['showfunc']
+ ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
+ for f in modified:
+ to = None
+ if f in mmap:
+ to = repo.file(f).read(mmap[f])
+ tn = read(f)
+ fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
+ showfunc=showfunc, ignorews=ignorews))
+ for f in added:
+ to = None
+ tn = read(f)
+ fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
+ showfunc=showfunc, ignorews=ignorews))
+ for f in removed:
+ to = repo.file(f).read(mmap[f])
+ tn = None
+ fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
+ showfunc=showfunc, ignorews=ignorews))
+
+def trimuser(ui, name, rev, revcache):
+ """trim the name of the user who committed a change"""
+ user = revcache.get(rev)
+ if user is None:
+ user = revcache[rev] = ui.shortuser(name)
+ return user
+
+class changeset_printer(object):
+ '''show changeset information when templating not requested.'''
+
+ def __init__(self, ui, repo):
+ self.ui = ui
+ self.repo = repo
+
+ def show(self, rev=0, changenode=None, brinfo=None):
+ '''show a single changeset or file revision'''
+ log = self.repo.changelog
+ if changenode is None:
+ changenode = log.node(rev)
+ elif not rev:
+ rev = log.rev(changenode)
+
+ if self.ui.quiet:
+ self.ui.write("%d:%s\n" % (rev, short(changenode)))
+ return
+
+ changes = log.read(changenode)
+ date = util.datestr(changes[2])
+
+ parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
+ for p in log.parents(changenode)
+ if self.ui.debugflag or p != nullid]
+ if (not self.ui.debugflag and len(parents) == 1 and
+ parents[0][0] == rev-1):
+ parents = []
+
+ if self.ui.verbose:
+ self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
+ else:
+ self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
+
+ for tag in self.repo.nodetags(changenode):
+ self.ui.status(_("tag: %s\n") % tag)
+ for parent in parents:
+ self.ui.write(_("parent: %d:%s\n") % parent)
+
+ if brinfo and changenode in brinfo:
+ br = brinfo[changenode]
+ self.ui.write(_("branch: %s\n") % " ".join(br))
+
+ self.ui.debug(_("manifest: %d:%s\n") %
+ (self.repo.manifest.rev(changes[0]), hex(changes[0])))
+ self.ui.status(_("user: %s\n") % changes[1])
+ self.ui.status(_("date: %s\n") % date)
+
+ if self.ui.debugflag:
+ files = self.repo.changes(log.parents(changenode)[0], changenode)
+ for key, value in zip([_("files:"), _("files+:"), _("files-:")],
+ files):
+ if value:
+ self.ui.note("%-12s %s\n" % (key, " ".join(value)))
+ else:
+ self.ui.note(_("files: %s\n") % " ".join(changes[3]))
+
+ description = changes[4].strip()
+ if description:
+ if self.ui.verbose:
+ self.ui.status(_("description:\n"))
+ self.ui.status(description)
+ self.ui.status("\n\n")
+ else:
+ self.ui.status(_("summary: %s\n") %
+ description.splitlines()[0])
+ self.ui.status("\n")
+
+def show_changeset(ui, repo, opts):
+ '''show one changeset. uses template or regular display. caller
+ can pass in 'style' and 'template' options in opts.'''
+
+ tmpl = opts.get('template')
+ if tmpl:
+ tmpl = templater.parsestring(tmpl, quoted=False)
+ else:
+ tmpl = ui.config('ui', 'logtemplate')
+ if tmpl: tmpl = templater.parsestring(tmpl)
+ mapfile = opts.get('style') or ui.config('ui', 'style')
+ if tmpl or mapfile:
+ if mapfile:
+ if not os.path.isfile(mapfile):
+ mapname = templater.templatepath('map-cmdline.' + mapfile)
+ if not mapname: mapname = templater.templatepath(mapfile)
+ if mapname: mapfile = mapname
+ try:
+ t = templater.changeset_templater(ui, repo, mapfile)
+ except SyntaxError, inst:
+ raise util.Abort(inst.args[0])
+ if tmpl: t.use_template(tmpl)
+ return t
+ return changeset_printer(ui, repo)
+
+def show_version(ui):
+ """output version and copyright information"""
+ ui.write(_("Mercurial Distributed SCM (version %s)\n")
+ % version.get_version())
+ ui.status(_(
+ "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
+ "This is free software; see the source for copying conditions. "
+ "There is NO\nwarranty; "
+ "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
+ ))
+
+def help_(ui, cmd=None, with_version=False):
+ """show help for a given command or all commands"""
+ option_lists = []
+ if cmd and cmd != 'shortlist':
+ if with_version:
+ show_version(ui)
+ ui.write('\n')
+ aliases, i = find(cmd)
+ # synopsis
+ ui.write("%s\n\n" % i[2])
+
+ # description
+ doc = i[0].__doc__
+ if not doc:
+ doc = _("(No help text available)")
+ if ui.quiet:
+ doc = doc.splitlines(0)[0]
+ ui.write("%s\n" % doc.rstrip())
+
+ if not ui.quiet:
+ # aliases
+ if len(aliases) > 1:
+ ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
+
+ # options
+ if i[1]:
+ option_lists.append(("options", i[1]))
+
+ else:
+ # program name
+ if ui.verbose or with_version:
+ show_version(ui)
+ else:
+ ui.status(_("Mercurial Distributed SCM\n"))
+ ui.status('\n')
+
+ # list of commands
+ if cmd == "shortlist":
+ ui.status(_('basic commands (use "hg help" '
+ 'for the full list or option "-v" for details):\n\n'))
+ elif ui.verbose:
+ ui.status(_('list of commands:\n\n'))
+ else:
+ ui.status(_('list of commands (use "hg help -v" '
+ 'to show aliases and global options):\n\n'))
+
+ h = {}
+ cmds = {}
+ for c, e in table.items():
+ f = c.split("|")[0]
+ if cmd == "shortlist" and not f.startswith("^"):
+ continue
+ f = f.lstrip("^")
+ if not ui.debugflag and f.startswith("debug"):
+ continue
+ doc = e[0].__doc__
+ if not doc:
+ doc = _("(No help text available)")
+ h[f] = doc.splitlines(0)[0].rstrip()
+ cmds[f] = c.lstrip("^")
+
+ fns = h.keys()
+ fns.sort()
+ m = max(map(len, fns))
+ for f in fns:
+ if ui.verbose:
+ commands = cmds[f].replace("|",", ")
+ ui.write(" %s:\n %s\n"%(commands, h[f]))
+ else:
+ ui.write(' %-*s %s\n' % (m, f, h[f]))
+
+ # global options
+ if ui.verbose:
+ option_lists.append(("global options", globalopts))
+
+ # list all option lists
+ opt_output = []
+ for title, options in option_lists:
+ opt_output.append(("\n%s:\n" % title, None))
+ for shortopt, longopt, default, desc in options:
+ opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
+ longopt and " --%s" % longopt),
+ "%s%s" % (desc,
+ default
+ and _(" (default: %s)") % default
+ or "")))
+
+ if opt_output:
+ opts_len = max([len(line[0]) for line in opt_output if line[1]])
+ for first, second in opt_output:
+ if second:
+ ui.write(" %-*s %s\n" % (opts_len, first, second))
+ else:
+ ui.write("%s\n" % first)
+
+# Commands start here, listed alphabetically
+
+def add(ui, repo, *pats, **opts):
+ """add the specified files on the next commit
+
+ Schedule files to be version controlled and added to the repository.
+
+ The files will be added to the repository at the next commit.
+
+ If no names are given, add all files in the repository.
+ """
+
+ names = []
+ for src, abs, rel, exact in walk(repo, pats, opts):
+ if exact:
+ if ui.verbose:
+ ui.status(_('adding %s\n') % rel)
+ names.append(abs)
+ elif repo.dirstate.state(abs) == '?':
+ ui.status(_('adding %s\n') % rel)
+ names.append(abs)
+ repo.add(names)
+
+def addremove(ui, repo, *pats, **opts):
+ """add all new files, delete all missing files (DEPRECATED)
+
+ (DEPRECATED)
+ Add all new files and remove all missing files from the repository.
+
+ New files are ignored if they match any of the patterns in .hgignore. As
+ with add, these changes take effect at the next commit.
+
+ This command is now deprecated and will be removed in a future
+ release. Please use add and remove --after instead.
+ """
+ ui.warn(_('(the addremove command is deprecated; use add and remove '
+ '--after instead)\n'))
+ return addremove_lock(ui, repo, pats, opts)
+
+def addremove_lock(ui, repo, pats, opts, wlock=None):
+ add, remove = [], []
+ for src, abs, rel, exact in walk(repo, pats, opts):
+ if src == 'f' and repo.dirstate.state(abs) == '?':
+ add.append(abs)
+ if ui.verbose or not exact:
+ ui.status(_('adding %s\n') % ((pats and rel) or abs))
+ if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
+ remove.append(abs)
+ if ui.verbose or not exact:
+ ui.status(_('removing %s\n') % ((pats and rel) or abs))
+ repo.add(add, wlock=wlock)
+ repo.remove(remove, wlock=wlock)
+
+def annotate(ui, repo, *pats, **opts):
+ """show changeset information per file line
+
+ List changes in files, showing the revision id responsible for each line
+
+ This command is useful to discover who did a change or when a change took
+ place.
+
+ Without the -a option, annotate will avoid processing files it
+ detects as binary. With -a, annotate will generate an annotation
+ anyway, probably with undesirable results.
+ """
+ def getnode(rev):
+ return short(repo.changelog.node(rev))
+
+ ucache = {}
+ def getname(rev):
+ cl = repo.changelog.read(repo.changelog.node(rev))
+ return trimuser(ui, cl[1], rev, ucache)
+
+ dcache = {}
+ def getdate(rev):
+ datestr = dcache.get(rev)
+ if datestr is None:
+ cl = repo.changelog.read(repo.changelog.node(rev))
+ datestr = dcache[rev] = util.datestr(cl[2])
+ return datestr
+
+ if not pats:
+ raise util.Abort(_('at least one file name or pattern required'))
+
+ opmap = [['user', getname], ['number', str], ['changeset', getnode],
+ ['date', getdate]]
+ if not opts['user'] and not opts['changeset'] and not opts['date']:
+ opts['number'] = 1
+
+ if opts['rev']:
+ node = repo.changelog.lookup(opts['rev'])
+ else:
+ node = repo.dirstate.parents()[0]
+ change = repo.changelog.read(node)
+ mmap = repo.manifest.read(change[0])
+
+ for src, abs, rel, exact in walk(repo, pats, opts, node=node):
+ f = repo.file(abs)
+ if not opts['text'] and util.binary(f.read(mmap[abs])):
+ ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
+ continue
+
+ lines = f.annotate(mmap[abs])
+ pieces = []
+
+ for o, f in opmap:
+ if opts[o]:
+ l = [f(n) for n, dummy in lines]
+ if l:
+ m = max(map(len, l))
+ pieces.append(["%*s" % (m, x) for x in l])
+
+ if pieces:
+ for p, l in zip(zip(*pieces), lines):
+ ui.write("%s: %s" % (" ".join(p), l[1]))
+
+def archive(ui, repo, dest, **opts):
+ '''create unversioned archive of a repository revision
+
+ By default, the revision used is the parent of the working
+ directory; use "-r" to specify a different revision.
+
+ To specify the type of archive to create, use "-t". Valid
+ types are:
+
+ "files" (default): a directory full of files
+ "tar": tar archive, uncompressed
+ "tbz2": tar archive, compressed using bzip2
+ "tgz": tar archive, compressed using gzip
+ "uzip": zip archive, uncompressed
+ "zip": zip archive, compressed using deflate
+
+ The exact name of the destination archive or directory is given
+ using a format string; see "hg help export" for details.
+
+ Each member added to an archive file has a directory prefix
+ prepended. Use "-p" to specify a format string for the prefix.
+ The default is the basename of the archive, with suffixes removed.
+ '''
+
+ if opts['rev']:
+ node = repo.lookup(opts['rev'])
+ else:
+ node, p2 = repo.dirstate.parents()
+ if p2 != nullid:
+ raise util.Abort(_('uncommitted merge - please provide a '
+ 'specific revision'))
+
+ dest = make_filename(repo, repo.changelog, dest, node)
+ prefix = make_filename(repo, repo.changelog, opts['prefix'], node)
+ if os.path.realpath(dest) == repo.root:
+ raise util.Abort(_('repository root cannot be destination'))
+ dummy, matchfn, dummy = matchpats(repo, [], opts)
+ archival.archive(repo, dest, node, opts.get('type') or 'files',
+ not opts['no_decode'], matchfn, prefix)
+
+def backout(ui, repo, rev, **opts):
+ '''reverse effect of earlier changeset
+
+ Commit the backed out changes as a new changeset. The new
+ changeset is a child of the backed out changeset.
+
+ If you back out a changeset other than the tip, a new head is
+ created. This head is the parent of the working directory. If
+ you back out an old changeset, your working directory will appear
+ old after the backout. You should merge the backout changeset
+ with another head.
+
+ The --merge option remembers the parent of the working directory
+ before starting the backout, then merges the new head with that
+ changeset afterwards. This saves you from doing the merge by
+ hand. The result of this merge is not committed, as for a normal
+ merge.'''
+
+ bail_if_changed(repo)
+ op1, op2 = repo.dirstate.parents()
+ if op2 != nullid:
+ raise util.Abort(_('outstanding uncommitted merge'))
+ node = repo.lookup(rev)
+ parent, p2 = repo.changelog.parents(node)
+ if parent == nullid:
+ raise util.Abort(_('cannot back out a change with no parents'))
+ if p2 != nullid:
+ raise util.Abort(_('cannot back out a merge'))
+ repo.update(node, force=True, show_stats=False)
+ revert_opts = opts.copy()
+ revert_opts['rev'] = hex(parent)
+ revert(ui, repo, **revert_opts)
+ commit_opts = opts.copy()
+ commit_opts['addremove'] = False
+ if not commit_opts['message'] and not commit_opts['logfile']:
+ commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
+ commit_opts['force_editor'] = True
+ commit(ui, repo, **commit_opts)
+ def nice(node):
+ return '%d:%s' % (repo.changelog.rev(node), short(node))
+ ui.status(_('changeset %s backs out changeset %s\n') %
+ (nice(repo.changelog.tip()), nice(node)))
+ if opts['merge'] and op1 != node:
+ ui.status(_('merging with changeset %s\n') % nice(op1))
+ doupdate(ui, repo, hex(op1), **opts)
+
+def bundle(ui, repo, fname, dest="default-push", **opts):
+ """create a changegroup file
+
+ Generate a compressed changegroup file collecting all changesets
+ not found in the other repository.
+
+ This file can then be transferred using conventional means and
+ applied to another repository with the unbundle command. This is
+ useful when native push and pull are not available or when
+ exporting an entire repository is undesirable. The standard file
+ extension is ".hg".
+
+ Unlike import/export, this exactly preserves all changeset
+ contents including permissions, rename data, and revision history.
+ """
+ dest = ui.expandpath(dest)
+ other = hg.repository(ui, dest)
+ o = repo.findoutgoing(other, force=opts['force'])
+ cg = repo.changegroup(o, 'bundle')
+ write_bundle(cg, fname)
+
+def cat(ui, repo, file1, *pats, **opts):
+ """output the latest or given revisions of files
+
+ Print the specified files as they were at the given revision.
+ If no revision is given then the tip is used.
+
+ Output may be to a file, in which case the name of the file is
+ given using a format string. The formatting rules are the same as
+ for the export command, with the following additions:
+
+ %s basename of file being printed
+ %d dirname of file being printed, or '.' if in repo root
+ %p root-relative path name of file being printed
+ """
+ mf = {}
+ rev = opts['rev']
+ if rev:
+ node = repo.lookup(rev)
+ else:
+ node = repo.changelog.tip()
+ change = repo.changelog.read(node)
+ mf = repo.manifest.read(change[0])
+ for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
+ r = repo.file(abs)
+ n = mf[abs]
+ fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
+ fp.write(r.read(n))
+
+def clone(ui, source, dest=None, **opts):
+ """make a copy of an existing repository
+
+ Create a copy of an existing repository in a new directory.
+
+ If no destination directory name is specified, it defaults to the
+ basename of the source.
+
+ The location of the source is added to the new repository's
+ .hg/hgrc file, as the default to be used for future pulls.
+
+ For efficiency, hardlinks are used for cloning whenever the source
+ and destination are on the same filesystem. Some filesystems,
+ such as AFS, implement hardlinking incorrectly, but do not report
+ errors. In these cases, use the --pull option to avoid
+ hardlinking.
+
+ See pull for valid source format details.
+ """
+ if dest is None:
+ dest = os.path.basename(os.path.normpath(source))
+
+ if os.path.exists(dest):
+ raise util.Abort(_("destination '%s' already exists"), dest)
+
+ dest = os.path.realpath(dest)
+
+ class Dircleanup(object):
+ def __init__(self, dir_):
+ self.rmtree = shutil.rmtree
+ self.dir_ = dir_
+ os.mkdir(dir_)
+ def close(self):
+ self.dir_ = None
+ def __del__(self):
+ if self.dir_:
+ self.rmtree(self.dir_, True)
+
+ if opts['ssh']:
+ ui.setconfig("ui", "ssh", opts['ssh'])
+ if opts['remotecmd']:
+ ui.setconfig("ui", "remotecmd", opts['remotecmd'])
+
+ source = ui.expandpath(source)
+
+ d = Dircleanup(dest)
+ abspath = source
+ other = hg.repository(ui, source)
+
+ copy = False
+ if other.dev() != -1:
+ abspath = os.path.abspath(source)
+ if not opts['pull'] and not opts['rev']:
+ copy = True
+
+ if copy:
+ try:
+ # we use a lock here because if we race with commit, we
+ # can end up with extra data in the cloned revlogs that's
+ # not pointed to by changesets, thus causing verify to
+ # fail
+ l1 = other.lock()
+ except lock.LockException:
+ copy = False
+
+ if copy:
+ # we lock here to avoid premature writing to the target
+ os.mkdir(os.path.join(dest, ".hg"))
+ l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
+
+ files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
+ for f in files.split():
+ src = os.path.join(source, ".hg", f)
+ dst = os.path.join(dest, ".hg", f)
+ try:
+ util.copyfiles(src, dst)
+ except OSError, inst:
+ if inst.errno != errno.ENOENT:
+ raise
+
+ repo = hg.repository(ui, dest)
+
+ else:
+ revs = None
+ if opts['rev']:
+ if not other.local():
+ error = _("clone -r not supported yet for remote repositories.")
+ raise util.Abort(error)
+ else:
+ revs = [other.lookup(rev) for rev in opts['rev']]
+ repo = hg.repository(ui, dest, create=1)
+ repo.pull(other, heads = revs)
+
+ f = repo.opener("hgrc", "w", text=True)
+ f.write("[paths]\n")
+ f.write("default = %s\n" % abspath)
+ f.close()
+
+ if not opts['noupdate']:
+ doupdate(repo.ui, repo)
+
+ d.close()
+
+def commit(ui, repo, *pats, **opts):
+ """commit the specified files or all outstanding changes
+
+ Commit changes to the given files into the repository.
+
+ If a list of files is omitted, all changes reported by "hg status"
+ will be committed.
+
+ If no commit message is specified, the editor configured in your hgrc
+ or in the EDITOR environment variable is started to enter a message.
+ """
+ message = opts['message']
+ logfile = opts['logfile']
+
+ if message and logfile:
+ raise util.Abort(_('options --message and --logfile are mutually '
+ 'exclusive'))
+ if not message and logfile:
+ try:
+ if logfile == '-':
+ message = sys.stdin.read()
+ else:
+ message = open(logfile).read()
+ except IOError, inst:
+ raise util.Abort(_("can't read commit message '%s': %s") %
+ (logfile, inst.strerror))
+
+ if opts['addremove']:
+ addremove_lock(ui, repo, pats, opts)
+ fns, match, anypats = matchpats(repo, pats, opts)
+ if pats:
+ modified, added, removed, deleted, unknown = (
+ repo.changes(files=fns, match=match))
+ files = modified + added + removed
+ else:
+ files = []
+ try:
+ repo.commit(files, message, opts['user'], opts['date'], match,
+ force_editor=opts.get('force_editor'))
+ except ValueError, inst:
+ raise util.Abort(str(inst))
+
+def docopy(ui, repo, pats, opts, wlock):
+ # called with the repo lock held
+ cwd = repo.getcwd()
+ errors = 0
+ copied = []
+ targets = {}
+
+ def okaytocopy(abs, rel, exact):
+ reasons = {'?': _('is not managed'),
+ 'a': _('has been marked for add'),
+ 'r': _('has been marked for remove')}
+ state = repo.dirstate.state(abs)
+ reason = reasons.get(state)
+ if reason:
+ if state == 'a':
+ origsrc = repo.dirstate.copied(abs)
+ if origsrc is not None:
+ return origsrc
+ if exact:
+ ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
+ else:
+ return abs
+
+ def copy(origsrc, abssrc, relsrc, target, exact):
+ abstarget = util.canonpath(repo.root, cwd, target)
+ reltarget = util.pathto(cwd, abstarget)
+ prevsrc = targets.get(abstarget)
+ if prevsrc is not None:
+ ui.warn(_('%s: not overwriting - %s collides with %s\n') %
+ (reltarget, abssrc, prevsrc))
+ return
+ if (not opts['after'] and os.path.exists(reltarget) or
+ opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
+ if not opts['force']:
+ ui.warn(_('%s: not overwriting - file exists\n') %
+ reltarget)
+ return
+ if not opts['after']:
+ os.unlink(reltarget)
+ if opts['after']:
+ if not os.path.exists(reltarget):
+ return
+ else:
+ targetdir = os.path.dirname(reltarget) or '.'
+ if not os.path.isdir(targetdir):
+ os.makedirs(targetdir)
+ try:
+ restore = repo.dirstate.state(abstarget) == 'r'
+ if restore:
+ repo.undelete([abstarget], wlock)
+ try:
+ shutil.copyfile(relsrc, reltarget)
+ shutil.copymode(relsrc, reltarget)
+ restore = False
+ finally:
+ if restore:
+ repo.remove([abstarget], wlock)
+ except shutil.Error, inst:
+ raise util.Abort(str(inst))
+ except IOError, inst:
+ if inst.errno == errno.ENOENT:
+ ui.warn(_('%s: deleted in working copy\n') % relsrc)
+ else:
+ ui.warn(_('%s: cannot copy - %s\n') %
+ (relsrc, inst.strerror))
+ errors += 1
+ return
+ if ui.verbose or not exact:
+ ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
+ targets[abstarget] = abssrc
+ if abstarget != origsrc:
+ repo.copy(origsrc, abstarget, wlock)
+ copied.append((abssrc, relsrc, exact))
+
+ def targetpathfn(pat, dest, srcs):
+ if os.path.isdir(pat):
+ abspfx = util.canonpath(repo.root, cwd, pat)
+ if destdirexists:
+ striplen = len(os.path.split(abspfx)[0])
+ else:
+ striplen = len(abspfx)
+ if striplen:
+ striplen += len(os.sep)
+ res = lambda p: os.path.join(dest, p[striplen:])
+ elif destdirexists:
+ res = lambda p: os.path.join(dest, os.path.basename(p))
+ else:
+ res = lambda p: dest
+ return res
+
+ def targetpathafterfn(pat, dest, srcs):
+ if util.patkind(pat, None)[0]:
+ # a mercurial pattern
+ res = lambda p: os.path.join(dest, os.path.basename(p))
+ else:
+ abspfx = util.canonpath(repo.root, cwd, pat)
+ if len(abspfx) < len(srcs[0][0]):
+ # A directory. Either the target path contains the last
+ # component of the source path or it does not.
+ def evalpath(striplen):
+ score = 0
+ for s in srcs:
+ t = os.path.join(dest, s[0][striplen:])
+ if os.path.exists(t):
+ score += 1
+ return score
+
+ striplen = len(abspfx)
+ if striplen:
+ striplen += len(os.sep)
+ if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
+ score = evalpath(striplen)
+ striplen1 = len(os.path.split(abspfx)[0])
+ if striplen1:
+ striplen1 += len(os.sep)
+ if evalpath(striplen1) > score:
+ striplen = striplen1
+ res = lambda p: os.path.join(dest, p[striplen:])
+ else:
+ # a file
+ if destdirexists:
+ res = lambda p: os.path.join(dest, os.path.basename(p))
+ else:
+ res = lambda p: dest
+ return res
+
+
+ pats = list(pats)
+ if not pats:
+ raise util.Abort(_('no source or destination specified'))
+ if len(pats) == 1:
+ raise util.Abort(_('no destination specified'))
+ dest = pats.pop()
+ destdirexists = os.path.isdir(dest)
+ if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
+ raise util.Abort(_('with multiple sources, destination must be an '
+ 'existing directory'))
+ if opts['after']:
+ tfn = targetpathafterfn
+ else:
+ tfn = targetpathfn
+ copylist = []
+ for pat in pats:
+ srcs = []
+ for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
+ origsrc = okaytocopy(abssrc, relsrc, exact)
+ if origsrc:
+ srcs.append((origsrc, abssrc, relsrc, exact))
+ if not srcs:
+ continue
+ copylist.append((tfn(pat, dest, srcs), srcs))
+ if not copylist:
+ raise util.Abort(_('no files to copy'))
+
+ for targetpath, srcs in copylist:
+ for origsrc, abssrc, relsrc, exact in srcs:
+ copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
+
+ if errors:
+ ui.warn(_('(consider using --after)\n'))
+ return errors, copied
+
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record copied files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    # all the work happens in docopy; return only its error count
    wlock = repo.wlock(0)
    return docopy(ui, repo, pats, opts, wlock)[0]
+
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    rl = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
    anc = rl.ancestor(rl.lookup(rev1), rl.lookup(rev2))
    ui.write("%d:%s\n" % (rl.rev(anc), hex(anc)))
+
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""
    if opts['options']:
        # complete option names (global, plus the command's own options)
        tables = [globalopts]
        if cmd:
            aliases, entry = find(cmd)
            tables.append(entry[1])
        flags = []
        for table in tables:
            for opt in table:
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    # otherwise complete command names
    names = findpossible(cmd).keys()
    names.sort()
    ui.write("%s\n" % "\n".join(names))
+
def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look like for the given revision"""
    # default to the tip when no revision is given
    if rev:
        node = repo.lookup(rev)
    else:
        node = repo.changelog.tip()
    manifest_node = repo.changelog.read(node)[0]
    flags = repo.manifest.readflags(manifest_node)
    wlock = repo.wlock()
    repo.dirstate.rebuild(node, flags.iteritems())
+
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks the dirstate against the first parent's manifest and
    raises util.Abort if any inconsistency is found.
    """
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    # (a sorted key list used to be built here but was never used; removed)
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    # normal/removed entries must be in manifest1; added entries must not;
    # merged entries must be in at least one parent manifest
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # conversely, everything in manifest1 must be tracked by the dirstate
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
+
def debugconfig(ui, repo):
    """show combined config settings from all hgrc files"""
    # walkconfig yields (section, name, value) triples
    for item in ui.walkconfig():
        ui.write('%s.%s=%s\n' % item)
+
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # a missing second parent defaults to the null revision
    second = rev2 or hex(nullid)
    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(second))
+
+def debugstate(ui, repo):
+ """show the contents of the current dirstate"""
+ repo.dirstate.read()
+ dc = repo.dirstate.map
+ keys = dc.keys()
+ keys.sort()
+ for file_ in keys:
+ ui.write("%c %3o %10d %s %s\n"
+ % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
+ time.strftime("%x %X",
+ time.localtime(dc[file_][3])), file_))
+ for f in repo.dirstate.copies:
+ ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
+
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # derive the index file name from the data file name ("xx.d" -> "xx.i")
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i", file_, 0)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        # interpolate the revision into the message; it was previously
        # passed as a stray second argument to Abort and never formatted
        raise util.Abort(_('invalid revision identifier %s') % rev)
+
def debugindex(ui, file_):
    """dump the contents of an index file"""
    rl = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    # column headers
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in range(rl.count()):
        node = rl.node(i)
        p1, p2 = rl.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            i, rl.start(i), rl.length(i), rl.base(i), rl.linkrev(node),
            short(node), short(p1), short(p2)))
+
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rl = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write("digraph G {\n")
    for rev in range(rl.count()):
        p1, p2 = rl.parents(rl.node(rev))
        # the first parent edge is always emitted; the second only if real
        ui.write("\t%d -> %d\n" % (rl.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rl.rev(p2), rev))
    ui.write("}\n")
+
def debugrename(ui, repo, file, rev=None):
    """dump rename information"""
    flog = repo.file(relpath(repo, [file])[0])
    if rev:
        try:
            # assume all revision numbers are for changesets
            cnode = repo.lookup(rev)
            change = repo.changelog.read(cnode)
            manifest = repo.manifest.read(change[0])
            fnode = manifest[relpath(repo, [file])[0]]
        except (hg.RepoError, KeyError):
            # fall back to treating rev as a filelog revision
            fnode = flog.lookup(rev)
    else:
        fnode = flog.tip()
    meta = flog.renamed(fnode)
    if meta:
        ui.write(_("renamed from %s:%s\n") % (meta[0], hex(meta[1])))
    else:
        ui.write(_("not renamed\n"))
+
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    entries = list(walk(repo, pats, opts))
    if not entries:
        return
    # size the columns to the widest absolute and relative names
    abswidth = max([len(abs) for (src, abs, rel, exact) in entries])
    relwidth = max([len(rel) for (src, abs, rel, exact) in entries])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in entries:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
+
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    node1, node2 = revpair(ui, repo, opts['rev'])
    files, matcher, anypats = matchpats(repo, pats, opts)
    dodiff(sys.stdout, ui, repo, node1, node2, files, match=matcher,
           text=opts['text'], opts=opts)
+
def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
    # Write one changeset as an "hg export" style patch to the output
    # target selected by opts['output'] (or stdout).
    node = repo.lookup(changeset)
    parents = [p for p in repo.changelog.parents(node) if p != nullid]
    if opts['switch_parent']:
        # diff against the second parent instead of the first
        parents.reverse()
    prev = parents and parents[0] or nullid
    change = repo.changelog.read(node)

    fp = make_file(repo, repo.changelog, opts['output'],
                   node=node, total=total, seqno=seqno,
                   revwidth=revwidth)
    if fp != sys.stdout:
        ui.note("%s\n" % fp.name)

    # assemble the patch header, then the description and the diff
    header = ["# HG changeset patch\n",
              "# User %s\n" % change[1],
              "# Date %d %d\n" % change[2],
              "# Node ID %s\n" % hex(node),
              "# Parent %s\n" % hex(prev)]
    if len(parents) > 1:
        header.append("# Parent %s\n" % hex(parents[1]))
    for line in header:
        fp.write(line)
    fp.write(change[4].rstrip())
    fp.write("\n\n")

    dodiff(fp, ui, repo, prev, node, text=opts['text'])
    if fp != sys.stdout:
        fp.close()
+
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent and commit comment.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %% literal "%" character
    %H changeset hash (40 bytes of hexadecimal)
    %N number of patches being generated
    %R changeset revision number
    %b basename of the exporting repository
    %h short-form changeset hash (12 bytes of hexadecimal)
    %n zero-padded sequence number, starting at 1
    %r zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = list(revrange(ui, repo, changesets))
    total = len(revs)
    revwidth = max(map(len, revs))
    if total > 1:
        ui.note(_("Exporting patches:\n"))
    else:
        ui.note(_("Exporting patch:\n"))
    # sequence numbers start at 1
    for seqno, cset in enumerate(revs):
        doexport(ui, repo, cset, seqno + 1, total, revwidth, opts)
+
def forget(ui, repo, *pats, **opts):
    """don't add the specified files on the next commit (DEPRECATED)

    (DEPRECATED)
    Undo an 'hg add' scheduled for the next commit.

    This command is now deprecated and will be removed in a future
    release. Please use revert instead.
    """
    ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
    forget = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # only files in state 'a' (scheduled for add) can be forgotten
        if repo.dirstate.state(abs) != 'a':
            continue
        forget.append(abs)
        if ui.verbose or not exact:
            ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
    repo.forget(forget)
+
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    # with --print0, both the field separator and line terminator are NUL
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    fcache = {}
    def getfile(fn):
        # cache filelog objects so each file's revlog is opened only once
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # Yield (linenum, colstart, colend, linetext) for every regexp
        # match in body; linenum is 1-based.
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # One matched line.  Equality and hashing consider only the line
        # text, so sets of linestates can be diffed across revisions.
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend
        def __eq__(self, other):
            return self.line == other.line
        def __hash__(self):
            return hash(self.line)

    # matches[rev][fn] maps linestate -> linestate for each matched line
    matches = {}
    def grepbody(fn, rev, body):
        # record all matching lines of fn at rev
        matches[rev].setdefault(fn, {})
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m[s] = s

    # FIXME: prev isn't used, why ?  (prev[fn] is read in display(), but
    # the resulting 'r' is itself never used)
    prev = {}
    ucache = {}
    def display(fn, rev, states, prevstates):
        # Print the lines whose match status changed between prevstates
        # and states; return the (added, removed) counts.
        diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
        diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
        counts = {'-': 0, '+': 0}
        filerevmatches = {}
        for l in diff:
            if incrementing or not opts['all']:
                change = ((l in prevstates) and '-') or '+'
                r = rev
            else:
                change = ((l in states) and '-') or '+'
                r = prev[fn]
            cols = [fn, str(rev)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(trimuser(ui, getchange(rev)[1], rev,
                                     ucache))
            if opts['files_with_matches']:
                # emit only one output line per (file, rev) pair
                c = (fn, rev)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            counts[change] += 1
        return counts['+'], counts['-']

    fstate = {}
    skip = {}
    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
    count = 0
    incrementing = False
    for st, rev, fns in changeiter:
        if st == 'window':
            # window boundary: note the iteration direction, reset matches
            incrementing = rev
            matches.clear()
        elif st == 'add':
            # collect matches for every interesting file at this revision
            change = repo.changelog.read(repo.lookup(str(rev)))
            mf = repo.manifest.read(change[0])
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                except KeyError:
                    # file not present in this revision's manifest
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                if fn in skip:
                    continue
                if incrementing or not opts['all'] or fstate[fn]:
                    pos, neg = display(fn, rev, m, fstate[fn])
                    count += pos + neg
                    if pos and not opts['all']:
                        # first match shown; skip this file from now on
                        skip[fn] = True
                fstate[fn] = m
                prev[fn] = rev

    if not incrementing:
        # flush the remaining per-file state after a backwards walk
        fstate = fstate.items()
        fstate.sort()
        for fn, state in fstate:
            if fn in skip:
                continue
            display(fn, rev, {}, state)
    return (count == 0 and 1) or 0
+
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    # with --rev, restrict to heads reachable from that revision
    if opts['rev']:
        nodes = repo.heads(repo.lookup(opts['rev']))
    else:
        nodes = repo.heads()
    br = None
    if opts['branches']:
        br = repo.branchlookup(nodes)
    displayer = show_changeset(ui, repo, opts)
    for node in nodes:
        displayer.show(changenode=node, brinfo=br)
+
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    hexfunc = ui.verbose and hex or short
    modified, added, removed, deleted, unknown = repo.changes()
    # a trailing "+" marks uncommitted changes
    dirty = (modified or added or removed or deleted) and "+" or ""
    ids = '+'.join([hexfunc(p) for p in parents])
    output = ["%s%s" % (ids, dirty)]

    if not ui.quiet:
        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
+
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    If a patch looks like a mail message (its first line starts with
    "From " or looks like an RFC822 header), it will not be applied
    unless the -f option is used. The importer neither parses nor
    discards mail headers, so use -f only to override the "mailness"
    safety check, not to import a real mail message.
    """
    patches = (patch1,) + patches

    if not opts['force']:
        # refuse to apply over local modifications unless forced
        bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]

    # matches an mbox "From " line or an RFC822-style "Header:" line
    mailre = re.compile(r'(?:From |[\w-]+:)')

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])')

    for patch in patches:
        ui.status(_("applying %s\n") % patch)
        pf = os.path.join(d, patch)

        message = []
        user = None
        date = None
        hgpatch = False
        # Scan the header of the patch file up to the first diff hunk,
        # collecting the commit message and (for hg export output) the
        # original user and date.
        for line in file(pf):
            line = line.rstrip()
            if (not message and not hgpatch and
                   mailre.match(line) and not opts['force']):
                if len(line) > 35:
                    line = line[:32] + '...'
                raise util.Abort(_('first line looks like a '
                                   'mail header: ') + line)
            if diffre.match(line):
                # reached the diff itself; stop collecting the message
                break
            elif hgpatch:
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                    ui.debug(_('User: %s\n') % user)
                elif line.startswith("# Date "):
                    date = line[7:]
                elif not line.startswith("# ") and line:
                    message.append(line)
                    hgpatch = False
            elif line == '# HG changeset patch':
                hgpatch = True
                message = []    # We may have collected garbage
            elif message or line:
                message.append(line)

        # make sure message isn't empty
        if not message:
            message = _("imported patch %s\n") % patch
        else:
            message = '\n'.join(message).rstrip()
        ui.debug(_('message:\n%s\n') % message)

        files = util.patch(strip, pf, ui)

        if len(files) > 0:
            addremove_lock(ui, repo, files, {})
        repo.commit(files, message, user, date)
+
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source = ui.expandpath(source)
    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, source)
    incoming = repo.findincoming(other, force=opts["force"])
    if not incoming:
        ui.status(_("no changes found\n"))
        return

    # cleanup holds the name of a temporary bundle file to delete on exit
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            cg = other.changegroup(incoming, "incoming")
            fname = cleanup = write_bundle(cg, fname, compress=other.local())
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                # NOTE(review): ui is passed in dodiff's output-file slot;
                # it presumably provides write() — confirm against dodiff.
                dodiff(ui, ui, other, prev, n)
                ui.write("\n")
    finally:
        # close the (possibly bundle-backed) repo and remove any
        # temporary bundle that --bundle did not ask us to keep
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
+
def init(ui, dest="."):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.
    """
    # create the target directory first if it is missing
    if not os.path.exists(dest):
        os.mkdir(dest)
    hg.repository(ui, dest, create=1)
+
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the current directory and its
    subdirectories. To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # with --print0, terminate each name with NUL instead of newline
    if opts['print0']:
        terminator = '\0'
    else:
        terminator = '\n'
    node = None
    if opts['rev']:
        node = repo.lookup(opts['rev'])

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     head='(?:.*/|)'):
        # when walking the working directory, skip unknown files
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), terminator)
        else:
            ui.write(((pats and rel) or abs), terminator)
+
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire project.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.
    """
    class dui(object):
        # Implement and delegate some ui protocol. Save hunks of
        # output for later display in the desired order.
        def __init__(self, ui):
            self.ui = ui
            self.hunk = {}
            self.header = {}
        def bump(self, rev):
            # start buffering output for a new revision
            self.rev = rev
            self.hunk[rev] = []
            self.header[rev] = []
        def note(self, *args):
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            self.hunk[self.rev].append(args)
        def write_header(self, *args):
            self.header[self.rev].append(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            # anything else falls through to the real ui
            return getattr(self.ui, key)

    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    displayer = show_changeset(ui, repo, opts)
    for st, rev, fns in changeiter:
        if st == 'window':
            # new window: buffer subsequent output in a fresh dui
            du = dui(ui)
            displayer.ui = du
        elif st == 'add':
            du.bump(rev)
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parents(changenode)
                       if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if opts['keyword']:
                # every keyword must appear in the user name, description,
                # or the first 20 changed file names
                changes = getchange(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            br = None
            if opts['branches']:
                br = repo.branchlookup([repo.changelog.node(rev)])

            displayer.show(rev, brinfo=br)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                dodiff(du, du, repo, prev, changenode, match=matchfn)
                du.write("\n\n")
        elif st == 'iter':
            # replay the buffered hunks for this revision in order
            if count == limit: break
            if du.header[rev]:
                for args in du.header[rev]:
                    # NOTE(review): replayed against the real ui object,
                    # which must itself provide write_header() — confirm.
                    ui.write_header(*args)
            if du.hunk[rev]:
                count += 1
                for args in du.hunk[rev]:
                    ui.write(*args)
+
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print a list of version controlled files for the given revision.

    The manifest is the list of files being version controlled. If no revision
    is given then the tip is used.
    """
    if rev:
        try:
            # assume all revision numbers are for changesets
            node = repo.changelog.read(repo.lookup(rev))[0]
        except hg.RepoError:
            # fall back to a manifest revision
            node = repo.manifest.lookup(rev)
    else:
        node = repo.manifest.tip()
    m = repo.manifest.read(node)
    mflags = repo.manifest.readflags(node)
    names = m.keys()
    names.sort()

    for name in names:
        ui.write("%40s %3s %s\n" % (hex(m[name]),
                                    mflags[name] and "755" or "644", name))
+
def merge(ui, repo, node=None, **opts):
    """Merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.
    """
    # merge is simply update with merging enabled
    return doupdate(ui, repo, node=node, merge=True, **opts)
+
def outgoing(ui, repo, dest="default-push", **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest)
    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, dest)
    out = repo.findoutgoing(other, force=opts['force'])
    if not out:
        ui.status(_("no changes found\n"))
        return
    nodes = repo.changelog.nodesbetween(out)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = show_changeset(ui, repo, opts)
    for node in nodes:
        parents = [p for p in repo.changelog.parents(node) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=node)
        if opts['patch']:
            prev = parents and parents[0] or nullid
            dodiff(ui, ui, repo, prev, node)
            ui.write("\n")
+
def parents(ui, repo, rev=None, branches=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    # with a revision, show that changeset's parents instead
    if rev:
        nodes = repo.changelog.parents(repo.lookup(rev))
    else:
        nodes = repo.dirstate.parents()

    br = None
    if branches is not None:
        br = repo.branchlookup(nodes)
    displayer = show_changeset(ui, repo, opts)
    for node in nodes:
        if node != nullid:
            displayer.show(changenode=node, brinfo=br)
+
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if not search:
        # no name given: list all configured paths
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
        return
    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
+
def postincoming(ui, repo, modheads, optupdate):
    # After a pull/unbundle added changesets, either update the working
    # directory (when safe and requested) or print a hint.
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            # only one head: safe to update straight away
            return doupdate(ui, repo)
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
+
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

    local/filesystem/path
    http://[user@]host[:port][/path]
    https://[user@]host[:port][/path]
    ssh://[user@]host[:port][/path]

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
    and a copy of hg in the remote path or specified with as remotecmd.
    - /path is relative to the remote user's home directory by default.
    Use two slashes at the start of a path to specify an absolute path.
    - Mercurial doesn't use its own compression via SSH; the right thing
    to do is to configure it in your ~/.ssh/ssh_config, e.g.:
    Host *.mylocalnetwork.example.com
    Compression off
    Host *
    Compression on
    Alternatively specify "ssh -C" as your ssh command in your hgrc or
    with the --ssh command line option.
    """
    source = ui.expandpath(source)
    ui.status(_('pulling from %s\n') % (source))

    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, source)
    revs = None
    if opts['rev']:
        # -r only works when the peer repository is local
        if not other.local():
            raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
        revs = [other.lookup(rev) for rev in opts['rev']]
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])
+
def push(ui, repo, dest="default-push", **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

    local/filesystem/path
    ssh://[user@]host[:port][/path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    dest = ui.expandpath(dest)
    # wrap the status message in _() so it is translatable, matching pull
    ui.status(_('pushing to %s\n') % (dest))

    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, dest)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    # repo.push returns 0 on success; invert for the command's exit status
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0
+
+def rawcommit(ui, repo, *flist, **rc):
+ """raw commit interface (DEPRECATED)
+
+ (DEPRECATED)
+ Lowlevel commit, for use in helper scripts.
+
+ This command is not intended to be used by normal users, as it is
+ primarily useful for importing from other SCMs.
+
+ This command is now deprecated and will be removed in a future
+ release, please use debugsetparents and commit instead.
+ """
+
+ ui.warn(_("(the rawcommit command is deprecated)\n"))
+
+ message = rc['message']
+ if not message and rc['logfile']:
+ try:
+ message = open(rc['logfile']).read()
+ except IOError:
+ pass
+ if not message and not rc['logfile']:
+ raise util.Abort(_("missing commit message"))
+
+ files = relpath(repo, list(flist))
+ if rc['files']:
+ files += open(rc['files']).read().splitlines()
+
+ rc['parent'] = map(repo.lookup, rc['parent'])
+
+ try:
+ repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
+ except ValueError, inst:
+ raise util.Abort(str(inst))
+
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # if nothing needed recovering, report failure; otherwise verify
    # the repository and propagate its result as our exit status
    if not repo.recover():
        return 1
    return repo.verify()
+
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    # cleanup: dropped the unused 'names = []' local and the dead
    # 'exact = dict.fromkeys(files)' assignment (it was shadowed by the
    # walk() loop variable before ever being read)
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = matchpats(repo, pats, opts)
    # per-state membership dicts for O(1) lookups in the loop below
    modified, added, removed, deleted, unknown = map(
        dict.fromkeys, repo.changes(files=files, match=matchfn))
    remove, forget = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                # forced removal of an added file just un-adds it
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            # already scheduled for removal; nothing to do
            continue
        if reason:
            # only complain about files the user named explicitly
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    # --after: the files are already gone, so do not unlink again
    repo.remove(remove, unlink=not opts['after'])
+
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record rename files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    # a rename is a copy followed by removal of every source
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    removals = []
    for source, relsource, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % relsource)
        removals.append(source)
    repo.remove(removals, True, wlock)
    return errs
+
def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state. If the working directory has two parents, you must
    explicitly specify the revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to
    their contents as of a specific revision. This can be helpful to
    "roll back" some or all of a change that should not have been
    committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, all files in the repository are reverted.
    """
    parent, p2 = repo.dirstate.parents()
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    elif p2 != nullid:
        raise util.Abort(_('working dir has two parents; '
                           'you must specify the revision to revert to'))
    else:
        node = parent
    pmf = None
    mf = repo.manifest.read(repo.changelog.read(node)[0])

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}          # abs path -> (rel path, named-exactly-by-user)
    target_only = {}    # files present only in the target revision

    # walk dirstate.

    for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.changes(match=names.has_key, wlock=wlock)
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    # each action is (list of files to act on, status message template)
    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        in_mf = abs in mf
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                shutil.copyfile(rel, bakname)
                shutil.copymode(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if in_mf:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                # i18n fix: interpolate AFTER translating, so the msgid
                # 'file not managed: %s\n' can be found in the catalog
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                # i18n fix: interpolate after translation (see above)
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if not in_mf:
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo.manifest.read(repo.changelog.read(parent)[0])
                if abs in pmf:
                    handle(remove, False)
            update[abs] = True

    repo.dirstate.forget(forget[0])
    r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
                    show_stats=False)
    repo.dirstate.update(add[0], 'a')
    repo.dirstate.update(undelete[0], 'n')
    repo.dirstate.update(remove[0], 'r')
    return r
+
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the work (and the single-level undo bookkeeping) lives in
    # localrepository.rollback()
    repo.rollback()
+
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # emit the repository root followed by a newline
    ui.write("%s\n" % repo.root)
+
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: speak the hg wire protocol over stdin/stdout instead of
    # serving HTTP (used by remote clients over ssh)
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_('no repo found'))
        fin, fout = sys.stdin, sys.stdout
        # redirect stray prints to stderr so they cannot corrupt the
        # protocol stream on fout
        sys.stdout = sys.stderr

        # Prevent insertion/deletion of CRs
        util.set_binary(fin)
        util.set_binary(fout)

        def getarg():
            # arguments arrive as "name length\n" followed by 'length'
            # bytes of raw value
            argline = fin.readline()[:-1]
            arg, l = argline.split()
            val = fin.read(int(l))
            return arg, val
        def respond(v):
            # responses are length-prefixed: "len\n" then the payload
            fout.write("%d\n" % len(v))
            fout.write(v)
            fout.flush()

        # repo lock held between "lock" and "unlock" protocol commands
        lock = None

        while 1:
            cmd = fin.readline()[:-1]
            if cmd == '':
                # EOF: client hung up, we're done
                return
            # NOTE(review): the first three tests are plain 'if' while the
            # rest are 'elif'; harmless since command names are mutually
            # exclusive, but inconsistent — verify before restructuring
            if cmd == "heads":
                h = repo.heads()
                respond(" ".join(map(hex, h)) + "\n")
            if cmd == "lock":
                lock = repo.lock()
                respond("")
            if cmd == "unlock":
                if lock:
                    lock.release()
                lock = None
                respond("")
            elif cmd == "branches":
                arg, nodes = getarg()
                nodes = map(bin, nodes.split(" "))
                r = []
                for b in repo.branches(nodes):
                    r.append(" ".join(map(hex, b)) + "\n")
                respond("".join(r))
            elif cmd == "between":
                arg, pairs = getarg()
                pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
                r = []
                for b in repo.between(pairs):
                    r.append(" ".join(map(hex, b)) + "\n")
                respond("".join(r))
            elif cmd == "changegroup":
                nodes = []
                arg, roots = getarg()
                nodes = map(bin, roots.split(" "))

                # changegroup data is streamed raw (not length-prefixed)
                cg = repo.changegroup(nodes, 'serve')
                while 1:
                    d = cg.read(4096)
                    if not d:
                        break
                    fout.write(d)

                fout.flush()

            elif cmd == "addchangegroup":
                # client must hold the lock before pushing data
                if not lock:
                    respond("not locked")
                    continue
                respond("")

                r = repo.addchangegroup(fin, 'serve')
                respond(str(r))

    # HTTP mode: copy relevant command-line options into [web] config
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_('no repo found'))

    if opts['daemon'] and not opts['daemon_pipefds']:
        # daemonize by re-spawning ourselves with --daemon-pipefds; the
        # pipe lets the child signal readiness before the parent exits
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        # block until the child writes its 'y' handshake byte
        os.read(rfd, 1)
        os._exit(0)

    try:
        httpd = create_server(ui, repo, hgwebdir, hgweb)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: ') + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            # bound to all interfaces; show the hostname instead
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()))
        fp.close()

    if opts['daemon_pipefds']:
        # child side of the daemon handshake: tell the parent we are up,
        # then detach stdio onto the null device
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()
+
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show changed files in the repository. If names are
    given, only files that match are shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
    """

    show_ignored = bool(opts['ignored'])
    files, matchfn, anypats = matchpats(repo, pats, opts)
    # with explicit patterns, print paths relative to the cwd
    cwd = (pats and repo.getcwd()) or ''
    states = repo.changes(files=files, match=matchfn,
                          show_ignored=show_ignored)
    states = [[util.pathto(cwd, x) for x in n] for n in states]
    modified, added, removed, deleted, unknown, ignored = states

    changetypes = [('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored)]

    end = opts['print0'] and '\0' or '\n'

    # honour explicit state filters (-m, -a, ...); default to all states
    selected = [ct for ct in changetypes if opts[ct[0]]] or changetypes
    for opt, char, changes in selected:
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)
        for f in changes:
            ui.write(format % f)
+
+def tag(ui, repo, name, rev_=None, **opts):
+ """add a tag for the current tip or a given revision
+
+ Name a particular revision using <name>.
+
+ Tags are used to name particular revisions of the repository and are
+ very useful to compare different revision, to go back to significant
+ earlier versions or to mark branch points as releases, etc.
+
+ If no revision is given, the tip is used.
+
+ To facilitate version control, distribution, and merging of tags,
+ they are stored as a file named ".hgtags" which is managed
+ similarly to other project files and can be hand-edited if
+ necessary. The file '.hg/localtags' is used for local tags (not
+ shared among repositories).
+ """
+ if name == "tip":
+ raise util.Abort(_("the name 'tip' is reserved"))
+ if rev_ is not None:
+ ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
+ "please use 'hg tag [-r REV] NAME' instead\n"))
+ if opts['rev']:
+ raise util.Abort(_("use only one form to specify the revision"))
+ if opts['rev']:
+ rev_ = opts['rev']
+ if rev_:
+ r = hex(repo.lookup(rev_))
+ else:
+ r = hex(repo.changelog.tip())
+
+ disallowed = (revrangesep, '\r', '\n')
+ for c in disallowed:
+ if name.find(c) >= 0:
+ raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
+
+ repo.hook('pretag', throw=True, node=r, tag=name,
+ local=int(not not opts['local']))
+
+ if opts['local']:
+ repo.opener("localtags", "a").write("%s %s\n" % (r, name))
+ repo.hook('tag', node=r, tag=name, local=1)
+ return
+
+ for x in repo.changes():
+ if ".hgtags" in x:
+ raise util.Abort(_("working copy of .hgtags is changed "
+ "(please commit .hgtags manually)"))
+
+ repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
+ if repo.dirstate.state(".hgtags") == '?':
+ repo.add([".hgtags"])
+
+ message = (opts['message'] or
+ _("Added tag %s for changeset %s") % (name, r))
+ try:
+ repo.commit([".hgtags"], message, opts['user'], opts['date'])
+ repo.hook('tag', node=r, tag=name, local=0)
+ except ValueError, inst:
+ raise util.Abort(str(inst))
+
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    taglist = repo.tagslist()
    taglist.reverse()
    for tagname, node in taglist:
        # the node may not resolve to a revision (e.g. a tag pointing at
        # a changeset this repo does not have); show placeholders then
        try:
            revline = "%5d:%s" % (repo.changelog.rev(node), hex(node))
        except KeyError:
            revline = "    ?:?"
        if ui.quiet:
            ui.write("%s\n" % tagname)
        else:
            ui.write("%-30s %s\n" % (tagname, revline))
+
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    tipnode = repo.changelog.tip()
    brinfo = None
    if opts['branches']:
        brinfo = repo.branchlookup([tipnode])
    show_changeset(ui, repo, opts).show(changenode=tipnode, brinfo=brinfo)
    # -p: also show the diff against the tip's first parent
    if opts['patch']:
        dodiff(ui, ui, repo, repo.changelog.parents(tipnode)[0], tipnode)
+
def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    # the six-byte magic selects the format and compression scheme
    header = f.read(6)
    if not header.startswith("HG"):
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
    if not header.startswith("HG10"):
        raise util.Abort(_("%s: unknown bundle version") % fname)
    if header == "HG10BZ":
        def generator(f):
            # the stream was compressed without its "BZ" magic; feed it
            # back to the decompressor before the real data
            zd = bz2.BZ2Decompressor()
            zd.decompress("BZ")
            for chunk in f:
                yield zd.decompress(chunk)
    elif header == "HG10UN":
        def generator(f):
            # uncompressed bundle: pass chunks straight through
            for chunk in f:
                yield chunk
    else:
        raise util.Abort(_("%s: unknown bundle compression type")
                         % fname)
    gen = generator(util.filechunkiter(f, 4096))
    modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
    return postincoming(ui, repo, modheads, opts['update'])
+
def undo(ui, repo):
    """undo the last commit or pull (DEPRECATED)

    (DEPRECATED)
    This command is now deprecated and will be removed in a future
    release. Please use the rollback command instead. For usage
    instructions, see the rollback command.
    """
    # deprecated alias: warn, then behave exactly like rollback
    ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
    repo.rollback()
+
def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None, **opts):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    # -m/--merge is deprecated in favour of the merge command
    if merge:
        ui.warn(_('(the -m/--merge option is deprecated; '
                  'use the merge command instead)\n'))
    # thin wrapper: the actual work happens in doupdate()
    return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
+
def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
             branch=None, **opts):
    # worker for update(): resolve the target node, then hand off to
    # repo.update()
    if branch:
        # map the branch name to the set of heads carrying that name
        br = repo.branchlookup(branch=branch)
        heads = [h for h in br if branch in br[h]]
        if len(heads) > 1:
            # ambiguous: show the candidates and bail out
            ui.warn(_("Found multiple heads for %s\n") % branch)
            for h in heads:
                show_changeset(ui, repo, opts).show(changenode=h, brinfo=br)
            return 1
        elif not heads:
            ui.warn(_("branch %s not found\n") % (branch))
            return 1
        node = heads[0]
        ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
    else:
        # no branch given: resolve the revision argument, default to tip
        node = node and repo.lookup(node) or repo.changelog.tip()
    return repo.update(node, allow=merge, force=clean, forcemerge=force)
+
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # delegate to localrepository.verify(); its return value is our
    # exit status
    return repo.verify()
+
+# Command options and aliases are listed here, alphabetically
+
+table = {
+ "^add":
+ (add,
+ [('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg add [OPTION]... [FILE]...')),
+ "debugaddremove|addremove":
+ (addremove,
+ [('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg addremove [OPTION]... [FILE]...')),
+ "^annotate":
+ (annotate,
+ [('r', 'rev', '', _('annotate the specified revision')),
+ ('a', 'text', None, _('treat all files as text')),
+ ('u', 'user', None, _('list the author')),
+ ('d', 'date', None, _('list the date')),
+ ('n', 'number', None, _('list the revision number (default)')),
+ ('c', 'changeset', None, _('list the changeset')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
+ "archive":
+ (archive,
+ [('', 'no-decode', None, _('do not pass files through decoders')),
+ ('p', 'prefix', '', _('directory prefix for files in archive')),
+ ('r', 'rev', '', _('revision to distribute')),
+ ('t', 'type', '', _('type of distribution to create')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg archive [OPTION]... DEST')),
+ "backout":
+ (backout,
+ [('', 'merge', None,
+ _('merge with old dirstate parent after backout')),
+ ('m', 'message', '', _('use <text> as commit message')),
+ ('l', 'logfile', '', _('read commit message from <file>')),
+ ('d', 'date', '', _('record datecode as commit date')),
+ ('u', 'user', '', _('record user as committer')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg backout [OPTION]... REV')),
+ "bundle":
+ (bundle,
+ [('f', 'force', None,
+ _('run even when remote repository is unrelated'))],
+ _('hg bundle FILE DEST')),
+ "cat":
+ (cat,
+ [('o', 'output', '', _('print output to file with formatted name')),
+ ('r', 'rev', '', _('print the given revision')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg cat [OPTION]... FILE...')),
+ "^clone":
+ (clone,
+ [('U', 'noupdate', None, _('do not update the new working directory')),
+ ('r', 'rev', [],
+ _('a changeset you would like to have after cloning')),
+ ('', 'pull', None, _('use pull protocol to copy metadata')),
+ ('e', 'ssh', '', _('specify ssh command to use')),
+ ('', 'remotecmd', '',
+ _('specify hg command to run on the remote side'))],
+ _('hg clone [OPTION]... SOURCE [DEST]')),
+ "^commit|ci":
+ (commit,
+ [('A', 'addremove', None,
+ _('mark new/missing files as added/removed before committing')),
+ ('m', 'message', '', _('use <text> as commit message')),
+ ('l', 'logfile', '', _('read the commit message from <file>')),
+ ('d', 'date', '', _('record datecode as commit date')),
+ ('u', 'user', '', _('record user as commiter')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg commit [OPTION]... [FILE]...')),
+ "copy|cp":
+ (copy,
+ [('A', 'after', None, _('record a copy that has already occurred')),
+ ('f', 'force', None,
+ _('forcibly copy over an existing managed file')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg copy [OPTION]... [SOURCE]... DEST')),
+ "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
+ "debugcomplete":
+ (debugcomplete,
+ [('o', 'options', None, _('show the command options'))],
+ _('debugcomplete [-o] CMD')),
+ "debugrebuildstate":
+ (debugrebuildstate,
+ [('r', 'rev', '', _('revision to rebuild to'))],
+ _('debugrebuildstate [-r REV] [REV]')),
+ "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
+ "debugconfig": (debugconfig, [], _('debugconfig')),
+ "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
+ "debugstate": (debugstate, [], _('debugstate')),
+ "debugdata": (debugdata, [], _('debugdata FILE REV')),
+ "debugindex": (debugindex, [], _('debugindex FILE')),
+ "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
+ "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
+ "debugwalk":
+ (debugwalk,
+ [('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('debugwalk [OPTION]... [FILE]...')),
+ "^diff":
+ (diff,
+ [('r', 'rev', [], _('revision')),
+ ('a', 'text', None, _('treat all files as text')),
+ ('p', 'show-function', None,
+ _('show which function each change is in')),
+ ('w', 'ignore-all-space', None,
+ _('ignore white space when comparing lines')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
+ "^export":
+ (export,
+ [('o', 'output', '', _('print output to file with formatted name')),
+ ('a', 'text', None, _('treat all files as text')),
+ ('', 'switch-parent', None, _('diff against the second parent'))],
+ _('hg export [-a] [-o OUTFILESPEC] REV...')),
+ "debugforget|forget":
+ (forget,
+ [('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg forget [OPTION]... FILE...')),
+ "grep":
+ (grep,
+ [('0', 'print0', None, _('end fields with NUL')),
+ ('', 'all', None, _('print all revisions that match')),
+ ('i', 'ignore-case', None, _('ignore case when matching')),
+ ('l', 'files-with-matches', None,
+ _('print only filenames and revs that match')),
+ ('n', 'line-number', None, _('print matching line numbers')),
+ ('r', 'rev', [], _('search in given revision range')),
+ ('u', 'user', None, _('print user who committed change')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg grep [OPTION]... PATTERN [FILE]...')),
+ "heads":
+ (heads,
+ [('b', 'branches', None, _('show branches')),
+ ('', 'style', '', _('display using template map file')),
+ ('r', 'rev', '', _('show only heads which are descendants of rev')),
+ ('', 'template', '', _('display with template'))],
+ _('hg heads [-b] [-r <rev>]')),
+ "help": (help_, [], _('hg help [COMMAND]')),
+ "identify|id": (identify, [], _('hg identify')),
+ "import|patch":
+ (import_,
+ [('p', 'strip', 1,
+ _('directory strip option for patch. This has the same\n'
+ 'meaning as the corresponding patch option')),
+ ('b', 'base', '', _('base path')),
+ ('f', 'force', None,
+ _('skip check for outstanding uncommitted changes'))],
+ _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
+ "incoming|in": (incoming,
+ [('M', 'no-merges', None, _('do not show merges')),
+ ('f', 'force', None,
+ _('run even when remote repository is unrelated')),
+ ('', 'style', '', _('display using template map file')),
+ ('n', 'newest-first', None, _('show newest record first')),
+ ('', 'bundle', '', _('file to store the bundles into')),
+ ('p', 'patch', None, _('show patch')),
+ ('', 'template', '', _('display with template')),
+ ('e', 'ssh', '', _('specify ssh command to use')),
+ ('', 'remotecmd', '',
+ _('specify hg command to run on the remote side'))],
+ _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
+ "^init": (init, [], _('hg init [DEST]')),
+ "locate":
+ (locate,
+ [('r', 'rev', '', _('search the repository as it stood at rev')),
+ ('0', 'print0', None,
+ _('end filenames with NUL, for use with xargs')),
+ ('f', 'fullpath', None,
+ _('print complete paths from the filesystem root')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg locate [OPTION]... [PATTERN]...')),
+ "^log|history":
+ (log,
+ [('b', 'branches', None, _('show branches')),
+ ('k', 'keyword', [], _('search for a keyword')),
+ ('l', 'limit', '', _('limit number of changes displayed')),
+ ('r', 'rev', [], _('show the specified revision or range')),
+ ('M', 'no-merges', None, _('do not show merges')),
+ ('', 'style', '', _('display using template map file')),
+ ('m', 'only-merges', None, _('show only merges')),
+ ('p', 'patch', None, _('show patch')),
+ ('', 'template', '', _('display with template')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg log [OPTION]... [FILE]')),
+ "manifest": (manifest, [], _('hg manifest [REV]')),
+ "merge":
+ (merge,
+ [('b', 'branch', '', _('merge with head of a specific branch')),
+ ('f', 'force', None, _('force a merge with outstanding changes'))],
+ _('hg merge [-b TAG] [-f] [REV]')),
+ "outgoing|out": (outgoing,
+ [('M', 'no-merges', None, _('do not show merges')),
+ ('f', 'force', None,
+ _('run even when remote repository is unrelated')),
+ ('p', 'patch', None, _('show patch')),
+ ('', 'style', '', _('display using template map file')),
+ ('n', 'newest-first', None, _('show newest record first')),
+ ('', 'template', '', _('display with template')),
+ ('e', 'ssh', '', _('specify ssh command to use')),
+ ('', 'remotecmd', '',
+ _('specify hg command to run on the remote side'))],
+ _('hg outgoing [-M] [-p] [-n] [DEST]')),
+ "^parents":
+ (parents,
+ [('b', 'branches', None, _('show branches')),
+ ('', 'style', '', _('display using template map file')),
+ ('', 'template', '', _('display with template'))],
+ _('hg parents [-b] [REV]')),
+ "paths": (paths, [], _('hg paths [NAME]')),
+ "^pull":
+ (pull,
+ [('u', 'update', None,
+ _('update the working directory to tip after pull')),
+ ('e', 'ssh', '', _('specify ssh command to use')),
+ ('f', 'force', None,
+ _('run even when remote repository is unrelated')),
+ ('r', 'rev', [], _('a specific revision you would like to pull')),
+ ('', 'remotecmd', '',
+ _('specify hg command to run on the remote side'))],
+ _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
+ "^push":
+ (push,
+ [('f', 'force', None, _('force push')),
+ ('e', 'ssh', '', _('specify ssh command to use')),
+ ('r', 'rev', [], _('a specific revision you would like to push')),
+ ('', 'remotecmd', '',
+ _('specify hg command to run on the remote side'))],
+ _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
+ "debugrawcommit|rawcommit":
+ (rawcommit,
+ [('p', 'parent', [], _('parent')),
+ ('d', 'date', '', _('date code')),
+ ('u', 'user', '', _('user')),
+ ('F', 'files', '', _('file list')),
+ ('m', 'message', '', _('commit message')),
+ ('l', 'logfile', '', _('commit message file'))],
+ _('hg debugrawcommit [OPTION]... [FILE]...')),
+ "recover": (recover, [], _('hg recover')),
+ "^remove|rm":
+ (remove,
+ [('A', 'after', None, _('record remove that has already occurred')),
+ ('f', 'force', None, _('remove file even if modified')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg remove [OPTION]... FILE...')),
+ "rename|mv":
+ (rename,
+ [('A', 'after', None, _('record a rename that has already occurred')),
+ ('f', 'force', None,
+ _('forcibly copy over an existing managed file')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg rename [OPTION]... SOURCE... DEST')),
+ "^revert":
+ (revert,
+ [('r', 'rev', '', _('revision to revert to')),
+ ('', 'no-backup', None, _('do not save backup copies of files')),
+ ('I', 'include', [], _('include names matching given patterns')),
+ ('X', 'exclude', [], _('exclude names matching given patterns'))],
+ _('hg revert [-r REV] [NAME]...')),
+ "rollback": (rollback, [], _('hg rollback')),
+ "root": (root, [], _('hg root')),
+ "^serve":
+ (serve,
+ [('A', 'accesslog', '', _('name of access log file to write to')),
+ ('d', 'daemon', None, _('run server in background')),
+ ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
+ ('E', 'errorlog', '', _('name of error log file to write to')),
+ ('p', 'port', 0, _('port to use (default: 8000)')),
+ ('a', 'address', '', _('address to use')),
+ ('n', 'name', '',
+ _('name to show in web pages (default: working dir)')),
+ ('', 'webdir-conf', '', _('name of the webdir config file'
+ ' (serve more than one repo)')),
+ ('', 'pid-file', '', _('name of file to write process ID to')),
+ ('', 'stdio', None, _('for remote clients')),
+ ('t', 'templates', '', _('web templates to use')),
+ ('', 'style', '', _('template style to use')),
+ ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
+ _('hg serve [OPTION]...')),
+ "^status|st":
+ (status,
+ [('m', 'modified', None, _('show only modified files')),
+ ('a', 'added', None, _('show only added files')),
+ ('r', 'removed', None, _('show only removed files')),
+ ('d', 'deleted', None, _('show only deleted (but tracked) files')),
+ ('u', 'unknown', None, _('show only unknown (not tracked) files')),
+ ('i', 'ignored', None, _('show ignored files')),
+ ('n', 'no-status', None, _('hide status prefix')),
+ ('0', 'print0', None,
+ _('end filenames with NUL, for use with xargs')),
+ ('I', 'include', [], _('include names matching the given patterns')),
+ ('X', 'exclude', [], _('exclude names matching the given patterns'))],
+ _('hg status [OPTION]... [FILE]...')),
+ "tag":
+ (tag,
+ [('l', 'local', None, _('make the tag local')),
+ ('m', 'message', '', _('message for tag commit log entry')),
+ ('d', 'date', '', _('record datecode as commit date')),
+ ('u', 'user', '', _('record user as commiter')),
+ ('r', 'rev', '', _('revision to tag'))],
+ _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
+ "tags": (tags, [], _('hg tags')),
+ "tip":
+ (tip,
+ [('b', 'branches', None, _('show branches')),
+ ('', 'style', '', _('display using template map file')),
+ ('p', 'patch', None, _('show patch')),
+ ('', 'template', '', _('display with template'))],
+ _('hg tip [-b] [-p]')),
+ "unbundle":
+ (unbundle,
+ [('u', 'update', None,
+ _('update the working directory to tip after unbundle'))],
+ _('hg unbundle [-u] FILE')),
+ "debugundo|undo": (undo, [], _('hg undo')),
+ "^update|up|checkout|co":
+ (update,
+ [('b', 'branch', '', _('checkout the head of a specific branch')),
+ ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
+ ('C', 'clean', None, _('overwrite locally modified files')),
+ ('f', 'force', None, _('force a merge with outstanding changes'))],
+ _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
+ "verify": (verify, [], _('hg verify')),
+ "version": (show_version, [], _('hg version')),
+}
+
+globalopts = [
+ ('R', 'repository', '',
+ _('repository root directory or symbolic path name')),
+ ('', 'cwd', '', _('change working directory')),
+ ('y', 'noninteractive', None,
+ _('do not prompt, assume \'yes\' for any required answers')),
+ ('q', 'quiet', None, _('suppress output')),
+ ('v', 'verbose', None, _('enable additional output')),
+ ('', 'config', [], _('set/override config option')),
+ ('', 'debug', None, _('enable debugging output')),
+ ('', 'debugger', None, _('start debugger')),
+ ('', 'traceback', None, _('print traceback on exception')),
+ ('', 'time', None, _('time how long the command takes')),
+ ('', 'profile', None, _('print command execution profile')),
+ ('', 'version', None, _('output version information and exit')),
+ ('h', 'help', None, _('display help and exit')),
+]
+
+norepo = ("clone init version help debugancestor debugcomplete debugdata"
+ " debugindex debugindexdot")
+optionalrepo = ("paths serve debugconfig")
+
+def findpossible(cmd):
+ """
+ Return cmd -> (aliases, command table entry)
+ for each matching command.
+ Return debug commands (or their aliases) only if no normal command matches.
+ """
+ choice = {}
+ debugchoice = {}
+ for e in table.keys():
+ aliases = e.lstrip("^").split("|")
+ found = None
+ if cmd in aliases:
+ found = cmd
+ else:
+ for a in aliases:
+ if a.startswith(cmd):
+ found = a
+ break
+ if found is not None:
+ if aliases[0].startswith("debug"):
+ debugchoice[found] = (aliases, table[e])
+ else:
+ choice[found] = (aliases, table[e])
+
+ if not choice and debugchoice:
+ choice = debugchoice
+
+ return choice
+
+def find(cmd):
+ """Return (aliases, command table entry) for command string."""
+ choice = findpossible(cmd)
+
+ if choice.has_key(cmd):
+ return choice[cmd]
+
+ if len(choice) > 1:
+ clist = choice.keys()
+ clist.sort()
+ raise AmbiguousCommand(cmd, clist)
+
+ if choice:
+ return choice.values()[0]
+
+ raise UnknownCommand(cmd)
+
+def catchterm(*args):
+ raise util.SignalInterrupt
+
+def run():
+ sys.exit(dispatch(sys.argv[1:]))
+
+class ParseError(Exception):
+ """Exception raised on errors in parsing the command line."""
+
+def parse(ui, args):
+ options = {}
+ cmdoptions = {}
+
+ try:
+ args = fancyopts.fancyopts(args, globalopts, options)
+ except fancyopts.getopt.GetoptError, inst:
+ raise ParseError(None, inst)
+
+ if args:
+ cmd, args = args[0], args[1:]
+ aliases, i = find(cmd)
+ cmd = aliases[0]
+ defaults = ui.config("defaults", cmd)
+ if defaults:
+ args = defaults.split() + args
+ c = list(i[1])
+ else:
+ cmd = None
+ c = []
+
+ # combine global options into local
+ for o in globalopts:
+ c.append((o[0], o[1], options[o[1]], o[3]))
+
+ try:
+ args = fancyopts.fancyopts(args, c, cmdoptions)
+ except fancyopts.getopt.GetoptError, inst:
+ raise ParseError(cmd, inst)
+
+ # separate global options back out
+ for o in globalopts:
+ n = o[1]
+ options[n] = cmdoptions[n]
+ del cmdoptions[n]
+
+ return (cmd, cmd and i[0] or None, args, options, cmdoptions)
+
+def dispatch(args):
+ for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
+ num = getattr(signal, name, None)
+ if num: signal.signal(num, catchterm)
+
+ try:
+ u = ui.ui(traceback='--traceback' in sys.argv[1:])
+ except util.Abort, inst:
+ sys.stderr.write(_("abort: %s\n") % inst)
+ return -1
+
+ external = []
+ for x in u.extensions():
+ try:
+ if x[1]:
+ mod = imp.load_source(x[0], x[1])
+ else:
+ def importh(name):
+ mod = __import__(name)
+ components = name.split('.')
+ for comp in components[1:]:
+ mod = getattr(mod, comp)
+ return mod
+ try:
+ mod = importh("hgext." + x[0])
+ except ImportError:
+ mod = importh(x[0])
+ external.append(mod)
+ except Exception, inst:
+ u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
+ if u.print_exc():
+ return 1
+
+ for x in external:
+ uisetup = getattr(x, 'uisetup', None)
+ if uisetup:
+ uisetup(u)
+ cmdtable = getattr(x, 'cmdtable', {})
+ for t in cmdtable:
+ if t in table:
+ u.warn(_("module %s overrides %s\n") % (x.__name__, t))
+ table.update(cmdtable)
+
+ try:
+ cmd, func, args, options, cmdoptions = parse(u, args)
+ if options["time"]:
+ def get_times():
+ t = os.times()
+ if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
+ t = (t[0], t[1], t[2], t[3], time.clock())
+ return t
+ s = get_times()
+ def print_time():
+ t = get_times()
+ u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
+ (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
+ atexit.register(print_time)
+
+ u.updateopts(options["verbose"], options["debug"], options["quiet"],
+ not options["noninteractive"], options["traceback"],
+ options["config"])
+
+ # enter the debugger before command execution
+ if options['debugger']:
+ pdb.set_trace()
+
+ try:
+ if options['cwd']:
+ try:
+ os.chdir(options['cwd'])
+ except OSError, inst:
+ raise util.Abort('%s: %s' %
+ (options['cwd'], inst.strerror))
+
+ path = u.expandpath(options["repository"]) or ""
+ repo = path and hg.repository(u, path=path) or None
+
+ if options['help']:
+ return help_(u, cmd, options['version'])
+ elif options['version']:
+ return show_version(u)
+ elif not cmd:
+ return help_(u, 'shortlist')
+
+ if cmd not in norepo.split():
+ try:
+ if not repo:
+ repo = hg.repository(u, path=path)
+ u = repo.ui
+ for x in external:
+ if hasattr(x, 'reposetup'):
+ x.reposetup(u, repo)
+ except hg.RepoError:
+ if cmd not in optionalrepo.split():
+ raise
+ d = lambda: func(u, repo, *args, **cmdoptions)
+ else:
+ d = lambda: func(u, *args, **cmdoptions)
+
+ try:
+ if options['profile']:
+ import hotshot, hotshot.stats
+ prof = hotshot.Profile("hg.prof")
+ try:
+ try:
+ return prof.runcall(d)
+ except:
+ try:
+ u.warn(_('exception raised - generating '
+ 'profile anyway\n'))
+ except:
+ pass
+ raise
+ finally:
+ prof.close()
+ stats = hotshot.stats.load("hg.prof")
+ stats.strip_dirs()
+ stats.sort_stats('time', 'calls')
+ stats.print_stats(40)
+ else:
+ return d()
+ finally:
+ u.flush()
+ except:
+ # enter the debugger when we hit an exception
+ if options['debugger']:
+ pdb.post_mortem(sys.exc_info()[2])
+ u.print_exc()
+ raise
+ except ParseError, inst:
+ if inst.args[0]:
+ u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
+ help_(u, inst.args[0])
+ else:
+ u.warn(_("hg: %s\n") % inst.args[1])
+ help_(u, 'shortlist')
+ except AmbiguousCommand, inst:
+ u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
+ (inst.args[0], " ".join(inst.args[1])))
+ except UnknownCommand, inst:
+ u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
+ help_(u, 'shortlist')
+ except hg.RepoError, inst:
+ u.warn(_("abort: %s!\n") % inst)
+ except lock.LockHeld, inst:
+ if inst.errno == errno.ETIMEDOUT:
+ reason = _('timed out waiting for lock held by %s') % inst.locker
+ else:
+ reason = _('lock held by %s') % inst.locker
+ u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
+ except lock.LockUnavailable, inst:
+ u.warn(_("abort: could not lock %s: %s\n") %
+ (inst.desc or inst.filename, inst.strerror))
+ except revlog.RevlogError, inst:
+ u.warn(_("abort: "), inst, "!\n")
+ except util.SignalInterrupt:
+ u.warn(_("killed!\n"))
+ except KeyboardInterrupt:
+ try:
+ u.warn(_("interrupted!\n"))
+ except IOError, inst:
+ if inst.errno == errno.EPIPE:
+ if u.debugflag:
+ u.warn(_("\nbroken pipe\n"))
+ else:
+ raise
+ except IOError, inst:
+ if hasattr(inst, "code"):
+ u.warn(_("abort: %s\n") % inst)
+ elif hasattr(inst, "reason"):
+ u.warn(_("abort: error: %s\n") % inst.reason[1])
+ elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
+ if u.debugflag:
+ u.warn(_("broken pipe\n"))
+ elif getattr(inst, "strerror", None):
+ if getattr(inst, "filename", None):
+ u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
+ else:
+ u.warn(_("abort: %s\n") % inst.strerror)
+ else:
+ raise
+ except OSError, inst:
+ if hasattr(inst, "filename"):
+ u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
+ else:
+ u.warn(_("abort: %s\n") % inst.strerror)
+ except util.Abort, inst:
+ u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
+ except TypeError, inst:
+ # was this an argument error?
+ tb = traceback.extract_tb(sys.exc_info()[2])
+ if len(tb) > 2: # no
+ raise
+ u.debug(inst, "\n")
+ u.warn(_("%s: invalid arguments\n") % cmd)
+ help_(u, cmd)
+ except SystemExit, inst:
+ # Commands shouldn't sys.exit directly, but give a return code.
+ # Just in case catch this and pass the exit code to caller.
+ return inst.code
+ except:
+ u.warn(_("** unknown exception encountered, details follow\n"))
+ u.warn(_("** report bug details to mercurial@selenic.com\n"))
+ u.warn(_("** Mercurial Distributed SCM (version %s)\n")
+ % version.get_version())
+ raise
+
+ return -1
new file mode 100644
--- /dev/null
+++ b/mercurial/demandload.py
@@ -0,0 +1,129 @@
+'''Demand load modules when used, not when imported.'''
+
+__author__ = '''Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>.
+This software may be used and distributed according to the terms
+of the GNU General Public License, incorporated herein by reference.'''
+
+# this is based on matt's original demandload module. it is a
+# complete rewrite. some time, we may need to support syntax of
+# "import foo as bar".
+
+class _importer(object):
+ '''import a module. it is not imported until needed, and is
+ imported at most once per scope.'''
+
+ def __init__(self, scope, modname, fromlist):
+ '''scope is context (globals() or locals()) in which import
+ should be made. modname is name of module to import.
+ fromlist is list of modules for "from foo import ..."
+ emulation.'''
+
+ self.scope = scope
+ self.modname = modname
+ self.fromlist = fromlist
+ self.mod = None
+
+ def module(self):
+ '''import the module if needed, and return.'''
+ if self.mod is None:
+ self.mod = __import__(self.modname, self.scope, self.scope,
+ self.fromlist)
+ del self.modname, self.fromlist
+ return self.mod
+
+class _replacer(object):
+ '''placeholder for a demand loaded module. demandload puts this in
+ a target scope. when an attribute of this object is looked up,
+ this object is replaced in the target scope with the actual
+ module.
+
+ we use __getattribute__ to avoid namespace clashes between
+ placeholder object and real module.'''
+
+ def __init__(self, importer, target):
+ self.importer = importer
+ self.target = target
+ # consider case where we do this:
+ # demandload(globals(), 'foo.bar foo.quux')
+ # foo will already exist in target scope when we get to
+ # foo.quux. so we remember that we will need to demandload
+ # quux into foo's scope when we really load it.
+ self.later = []
+
+ def module(self):
+ return object.__getattribute__(self, 'importer').module()
+
+ def __getattribute__(self, key):
+ '''look up an attribute in a module and return it. replace the
+ name of the module in the caller\'s dict with the actual
+ module.'''
+
+ module = object.__getattribute__(self, 'module')()
+ target = object.__getattribute__(self, 'target')
+ importer = object.__getattribute__(self, 'importer')
+ later = object.__getattribute__(self, 'later')
+
+ if later:
+ demandload(module.__dict__, ' '.join(later))
+
+ importer.scope[target] = module
+
+ return getattr(module, key)
+
+class _replacer_from(_replacer):
+ '''placeholder for a demand loaded module. used for "from foo
+ import ..." emulation. semantics of this are different than
+ regular import, so different implementation needed.'''
+
+ def module(self):
+ importer = object.__getattribute__(self, 'importer')
+ target = object.__getattribute__(self, 'target')
+
+ return getattr(importer.module(), target)
+
+ def __call__(self, *args, **kwargs):
+ target = object.__getattribute__(self, 'module')()
+ return target(*args, **kwargs)
+
+def demandload(scope, modules):
+ '''import modules into scope when each is first used.
+
+ scope should be the value of globals() in the module calling this
+ function, or locals() in the calling function.
+
+ modules is a string listing module names, separated by white
+ space. names are handled like this:
+
+ foo import foo
+ foo bar import foo, bar
+ foo.bar import foo.bar
+ foo:bar from foo import bar
+ foo:bar,quux from foo import bar, quux
+ foo.bar:quux from foo.bar import quux'''
+
+ for mod in modules.split():
+ col = mod.find(':')
+ if col >= 0:
+ fromlist = mod[col+1:].split(',')
+ mod = mod[:col]
+ else:
+ fromlist = []
+ importer = _importer(scope, mod, fromlist)
+ if fromlist:
+ for name in fromlist:
+ scope[name] = _replacer_from(importer, name)
+ else:
+ dot = mod.find('.')
+ if dot >= 0:
+ basemod = mod[:dot]
+ val = scope.get(basemod)
+ # if base module has already been demandload()ed,
+ # remember to load this submodule into its namespace
+ # when needed.
+ if isinstance(val, _replacer):
+ later = object.__getattribute__(val, 'later')
+ later.append(mod[dot+1:])
+ continue
+ else:
+ basemod = mod
+ scope[basemod] = _replacer(importer, basemod)
new file mode 100644
--- /dev/null
+++ b/mercurial/dirstate.py
@@ -0,0 +1,473 @@
+"""
+dirstate.py - working directory tracking for mercurial
+
+Copyright 2005 Matt Mackall <mpm@selenic.com>
+
+This software may be used and distributed according to the terms
+of the GNU General Public License, incorporated herein by reference.
+"""
+
+import struct, os
+from node import *
+from i18n import gettext as _
+from demandload import *
+demandload(globals(), "time bisect stat util re errno")
+
+class dirstate(object):
+ def __init__(self, opener, ui, root):
+ self.opener = opener
+ self.root = root
+ self.dirty = 0
+ self.ui = ui
+ self.map = None
+ self.pl = None
+ self.copies = {}
+ self.ignorefunc = None
+ self.blockignore = False
+
+ def wjoin(self, f):
+ return os.path.join(self.root, f)
+
+ def getcwd(self):
+ cwd = os.getcwd()
+ if cwd == self.root: return ''
+ return cwd[len(self.root) + 1:]
+
+ def hgignore(self):
+ '''return patterns from .hgignore files as a dict keyed by file name.
+
+ the files parsed for patterns include:
+ .hgignore in the repository root
+ any additional files specified in the [ui] section of ~/.hgrc
+
+ trailing white space is dropped.
+ the escape character is backslash.
+ comments start with #.
+ empty lines are skipped.
+
+ lines can be of the following formats:
+
+ syntax: regexp # defaults following lines to non-rooted regexps
+ syntax: glob # defaults following lines to non-rooted globs
+ re:pattern # non-rooted regular expression
+ glob:pattern # non-rooted glob
+ pattern # pattern of the current default type'''
+ syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
+ def parselines(fp):
+ for line in fp:
+ escape = False
+ for i in xrange(len(line)):
+ if escape: escape = False
+ elif line[i] == '\\': escape = True
+ elif line[i] == '#': break
+ line = line[:i].rstrip()
+ if line: yield line
+ repoignore = self.wjoin('.hgignore')
+ files = [repoignore]
+ files.extend(self.ui.hgignorefiles())
+ pats = {}
+ for f in files:
+ try:
+ pats[f] = []
+ fp = open(f)
+ syntax = 'relre:'
+ for line in parselines(fp):
+ if line.startswith('syntax:'):
+ s = line[7:].strip()
+ try:
+ syntax = syntaxes[s]
+ except KeyError:
+ self.ui.warn(_("%s: ignoring invalid "
+ "syntax '%s'\n") % (f, s))
+ continue
+ pat = syntax + line
+ for s in syntaxes.values():
+ if line.startswith(s):
+ pat = line
+ break
+ pats[f].append(pat)
+ except IOError, inst:
+ if f != repoignore:
+ self.ui.warn(_("skipping unreadable ignore file"
+ " '%s': %s\n") % (f, inst.strerror))
+ return pats
+
+ def ignore(self, fn):
+ '''default match function used by dirstate and
+ localrepository. this honours the repository .hgignore file
+ and any other files specified in the [ui] section of .hgrc.'''
+ if self.blockignore:
+ return False
+ if not self.ignorefunc:
+ ignore = self.hgignore()
+ allpats = []
+ [allpats.extend(patlist) for patlist in ignore.values()]
+ if allpats:
+ try:
+ files, self.ignorefunc, anypats = (
+ util.matcher(self.root, inc=allpats, src='.hgignore'))
+ except util.Abort:
+ # Re-raise an exception where the src is the right file
+ for f, patlist in ignore.items():
+ files, self.ignorefunc, anypats = (
+ util.matcher(self.root, inc=patlist, src=f))
+ else:
+ self.ignorefunc = util.never
+ return self.ignorefunc(fn)
+
+ def __del__(self):
+ if self.dirty:
+ self.write()
+
+ def __getitem__(self, key):
+ try:
+ return self.map[key]
+ except TypeError:
+ self.lazyread()
+ return self[key]
+
+ def __contains__(self, key):
+ self.lazyread()
+ return key in self.map
+
+ def parents(self):
+ self.lazyread()
+ return self.pl
+
+ def markdirty(self):
+ if not self.dirty:
+ self.dirty = 1
+
+ def setparents(self, p1, p2=nullid):
+ self.lazyread()
+ self.markdirty()
+ self.pl = p1, p2
+
+ def state(self, key):
+ try:
+ return self[key][0]
+ except KeyError:
+ return "?"
+
+ def lazyread(self):
+ if self.map is None:
+ self.read()
+
+ def read(self):
+ self.map = {}
+ self.pl = [nullid, nullid]
+ try:
+ st = self.opener("dirstate").read()
+ if not st: return
+ except: return
+
+ self.pl = [st[:20], st[20: 40]]
+
+ pos = 40
+ while pos < len(st):
+ e = struct.unpack(">cllll", st[pos:pos+17])
+ l = e[4]
+ pos += 17
+ f = st[pos:pos + l]
+ if '\0' in f:
+ f, c = f.split('\0')
+ self.copies[f] = c
+ self.map[f] = e[:4]
+ pos += l
+
+ def copy(self, source, dest):
+ self.lazyread()
+ self.markdirty()
+ self.copies[dest] = source
+
+ def copied(self, file):
+ return self.copies.get(file, None)
+
+ def update(self, files, state, **kw):
+ ''' current states:
+ n normal
+ m needs merging
+ r marked for removal
+ a marked for addition'''
+
+ if not files: return
+ self.lazyread()
+ self.markdirty()
+ for f in files:
+ if state == "r":
+ self.map[f] = ('r', 0, 0, 0)
+ else:
+ s = os.lstat(self.wjoin(f))
+ st_size = kw.get('st_size', s.st_size)
+ st_mtime = kw.get('st_mtime', s.st_mtime)
+ self.map[f] = (state, s.st_mode, st_size, st_mtime)
+ if self.copies.has_key(f):
+ del self.copies[f]
+
+ def forget(self, files):
+ if not files: return
+ self.lazyread()
+ self.markdirty()
+ for f in files:
+ try:
+ del self.map[f]
+ except KeyError:
+ self.ui.warn(_("not in dirstate: %s!\n") % f)
+ pass
+
+ def clear(self):
+ self.map = {}
+ self.copies = {}
+ self.markdirty()
+
+ def rebuild(self, parent, files):
+ self.clear()
+ umask = os.umask(0)
+ os.umask(umask)
+ for f, mode in files:
+ if mode:
+ self.map[f] = ('n', ~umask, -1, 0)
+ else:
+ self.map[f] = ('n', ~umask & 0666, -1, 0)
+ self.pl = (parent, nullid)
+ self.markdirty()
+
+ def write(self):
+ if not self.dirty:
+ return
+ st = self.opener("dirstate", "w", atomic=True)
+ st.write("".join(self.pl))
+ for f, e in self.map.items():
+ c = self.copied(f)
+ if c:
+ f = f + "\0" + c
+ e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
+ st.write(e + f)
+ self.dirty = 0
+
+ def filterfiles(self, files):
+ ret = {}
+ unknown = []
+
+ for x in files:
+ if x == '.':
+ return self.map.copy()
+ if x not in self.map:
+ unknown.append(x)
+ else:
+ ret[x] = self.map[x]
+
+ if not unknown:
+ return ret
+
+ b = self.map.keys()
+ b.sort()
+ blen = len(b)
+
+ for x in unknown:
+ bs = bisect.bisect(b, x)
+ if bs != 0 and b[bs-1] == x:
+ ret[x] = self.map[x]
+ continue
+ while bs < blen:
+ s = b[bs]
+ if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
+ ret[s] = self.map[s]
+ else:
+ break
+ bs += 1
+ return ret
+
+ def supported_type(self, f, st, verbose=False):
+ if stat.S_ISREG(st.st_mode):
+ return True
+ if verbose:
+ kind = 'unknown'
+ if stat.S_ISCHR(st.st_mode): kind = _('character device')
+ elif stat.S_ISBLK(st.st_mode): kind = _('block device')
+ elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
+ elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
+ elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
+ elif stat.S_ISDIR(st.st_mode): kind = _('directory')
+ self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
+ util.pathto(self.getcwd(), f),
+ kind))
+ return False
+
+ def statwalk(self, files=None, match=util.always, dc=None, ignored=False,
+ badmatch=None):
+ self.lazyread()
+
+ # walk all files by default
+ if not files:
+ files = [self.root]
+ if not dc:
+ dc = self.map.copy()
+ elif not dc:
+ dc = self.filterfiles(files)
+
+ def statmatch(file_, stat):
+ file_ = util.pconvert(file_)
+ if not ignored and file_ not in dc and self.ignore(file_):
+ return False
+ return match(file_)
+
+ return self.walkhelper(files=files, statmatch=statmatch, dc=dc,
+ badmatch=badmatch)
+
+ def walk(self, files=None, match=util.always, dc=None, badmatch=None):
+ # filter out the stat
+ for src, f, st in self.statwalk(files, match, dc, badmatch=badmatch):
+ yield src, f
+
+ # walk recursively through the directory tree, finding all files
+ # matched by the statmatch function
+ #
+ # results are yielded in a tuple (src, filename, st), where src
+ # is one of:
+ # 'f' the file was found in the directory tree
+ # 'm' the file was only in the dirstate and not in the tree
+ # and st is the stat result if the file was found in the directory.
+ #
+ # dc is an optional arg for the current dirstate. dc is not modified
+ # directly by this function, but might be modified by your statmatch call.
+ #
+ def walkhelper(self, files, statmatch, dc, badmatch=None):
+ # recursion free walker, faster than os.walk.
+ def findfiles(s):
+ work = [s]
+ while work:
+ top = work.pop()
+ names = os.listdir(top)
+ names.sort()
+ # nd is the top of the repository dir tree
+ nd = util.normpath(top[len(self.root) + 1:])
+ if nd == '.':
+ nd = ''
+ else:
+ # do not recurse into a repo contained in this
+ # one. use bisect to find .hg directory so speed
+ # is good on big directories.
+ hg = bisect.bisect_left(names, '.hg')
+ if hg < len(names) and names[hg] == '.hg':
+ if os.path.isdir(os.path.join(top, '.hg')):
+ continue
+ for f in names:
+ np = util.pconvert(os.path.join(nd, f))
+ if seen(np):
+ continue
+ p = os.path.join(top, f)
+ # don't trip over symlinks
+ st = os.lstat(p)
+ if stat.S_ISDIR(st.st_mode):
+ ds = os.path.join(nd, f +'/')
+ if statmatch(ds, st):
+ work.append(p)
+ if statmatch(np, st) and np in dc:
+ yield 'm', np, st
+ elif statmatch(np, st):
+ if self.supported_type(np, st):
+ yield 'f', np, st
+ elif np in dc:
+ yield 'm', np, st
+
+ known = {'.hg': 1}
+ def seen(fn):
+ if fn in known: return True
+ known[fn] = 1
+
+ # step one, find all files that match our criteria
+ files.sort()
+ for ff in util.unique(files):
+ f = self.wjoin(ff)
+ try:
+ st = os.lstat(f)
+ except OSError, inst:
+ nf = util.normpath(ff)
+ found = False
+ for fn in dc:
+ if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
+ found = True
+ break
+ if not found:
+ if inst.errno != errno.ENOENT or not badmatch:
+ self.ui.warn('%s: %s\n' % (
+ util.pathto(self.getcwd(), ff),
+ inst.strerror))
+ elif badmatch and badmatch(ff) and statmatch(ff, None):
+ yield 'b', ff, None
+ continue
+ if stat.S_ISDIR(st.st_mode):
+ cmp1 = (lambda x, y: cmp(x[1], y[1]))
+ sorted_ = [ x for x in findfiles(f) ]
+ sorted_.sort(cmp1)
+ for e in sorted_:
+ yield e
+ else:
+ ff = util.normpath(ff)
+ if seen(ff):
+ continue
+ self.blockignore = True
+ if statmatch(ff, st):
+ if self.supported_type(ff, st, verbose=True):
+ yield 'f', ff, st
+ elif ff in dc:
+ yield 'm', ff, st
+ self.blockignore = False
+
+ # step two run through anything left in the dc hash and yield
+ # if we haven't already seen it
+ ks = dc.keys()
+ ks.sort()
+ for k in ks:
+ if not seen(k) and (statmatch(k, None)):
+ yield 'm', k, None
+
+ def changes(self, files=None, match=util.always, show_ignored=None):
+ lookup, modified, added, unknown, ignored = [], [], [], [], []
+ removed, deleted = [], []
+
+ for src, fn, st in self.statwalk(files, match, ignored=show_ignored):
+ try:
+ type_, mode, size, time = self[fn]
+ except KeyError:
+ if show_ignored and self.ignore(fn):
+ ignored.append(fn)
+ else:
+ unknown.append(fn)
+ continue
+ if src == 'm':
+ nonexistent = True
+ if not st:
+ try:
+ f = self.wjoin(fn)
+ st = os.lstat(f)
+ except OSError, inst:
+ if inst.errno != errno.ENOENT:
+ raise
+ st = None
+ # We need to re-check that it is a valid file
+ if st and self.supported_type(fn, st):
+ nonexistent = False
+ # XXX: what to do with files no longer present in the fs
+ # that are not removed in the dirstate ?
+ if nonexistent and type_ in "nm":
+ deleted.append(fn)
+ continue
+ # check the common case first
+ if type_ == 'n':
+ if not st:
+ st = os.stat(fn)
+ if size >= 0 and (size != st.st_size
+ or (mode ^ st.st_mode) & 0100):
+ modified.append(fn)
+ elif time != st.st_mtime:
+ lookup.append(fn)
+ elif type_ == 'm':
+ modified.append(fn)
+ elif type_ == 'a':
+ added.append(fn)
+ elif type_ == 'r':
+ removed.append(fn)
+
+ return (lookup, modified, added, removed, deleted, unknown, ignored)
new file mode 100644
--- /dev/null
+++ b/mercurial/fancyopts.py
@@ -0,0 +1,30 @@
+import getopt
+
+def fancyopts(args, options, state):
+ long = []
+ short = ''
+ map = {}
+ dt = {}
+
+ for s, l, d, c in options:
+ pl = l.replace('-', '_')
+ map['-'+s] = map['--'+l] = pl
+ state[pl] = d
+ dt[pl] = type(d)
+ if not d is None and not callable(d):
+ if s: s += ':'
+ if l: l += '='
+ if s: short = short + s
+ if l: long.append(l)
+
+ opts, args = getopt.getopt(args, short, long)
+
+ for opt, arg in opts:
+ if dt[map[opt]] is type(fancyopts): state[map[opt]](state,map[opt],arg)
+ elif dt[map[opt]] is type(1): state[map[opt]] = int(arg)
+ elif dt[map[opt]] is type(''): state[map[opt]] = arg
+ elif dt[map[opt]] is type([]): state[map[opt]].append(arg)
+ elif dt[map[opt]] is type(None): state[map[opt]] = 1
+
+ return args
+
new file mode 100644
--- /dev/null
+++ b/mercurial/filelog.py
@@ -0,0 +1,108 @@
+# filelog.py - file history class for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os
+from revlog import *
+from demandload import *
+demandload(globals(), "bdiff")
+
+class filelog(revlog):
+ def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
+ revlog.__init__(self, opener,
+ os.path.join("data", self.encodedir(path + ".i")),
+ os.path.join("data", self.encodedir(path + ".d")),
+ defversion)
+
+ # This avoids a collision between a file named foo and a dir named
+ # foo.i or foo.d
+ def encodedir(self, path):
+ return (path
+ .replace(".hg/", ".hg.hg/")
+ .replace(".i/", ".i.hg/")
+ .replace(".d/", ".d.hg/"))
+
+ def decodedir(self, path):
+ return (path
+ .replace(".d.hg/", ".d/")
+ .replace(".i.hg/", ".i/")
+ .replace(".hg.hg/", ".hg/"))
+
+ def read(self, node):
+ t = self.revision(node)
+ if not t.startswith('\1\n'):
+ return t
+ s = t.find('\1\n', 2)
+ return t[s+2:]
+
+ def readmeta(self, node):
+ t = self.revision(node)
+ if not t.startswith('\1\n'):
+ return {}
+ s = t.find('\1\n', 2)
+ mt = t[2:s]
+ m = {}
+ for l in mt.splitlines():
+ k, v = l.split(": ", 1)
+ m[k] = v
+ return m
+
+ def add(self, text, meta, transaction, link, p1=None, p2=None):
+ if meta or text.startswith('\1\n'):
+ mt = ""
+ if meta:
+ mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
+ text = "\1\n%s\1\n%s" % ("".join(mt), text)
+ return self.addrevision(text, transaction, link, p1, p2)
+
+ def renamed(self, node):
+ if self.parents(node)[0] != nullid:
+ return False
+ m = self.readmeta(node)
+ if m and m.has_key("copy"):
+ return (m["copy"], bin(m["copyrev"]))
+ return False
+
+ def annotate(self, node):
+
+ def decorate(text, rev):
+ return ([rev] * len(text.splitlines()), text)
+
+ def pair(parent, child):
+ for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
+ child[0][b1:b2] = parent[0][a1:a2]
+ return child
+
+ # find all ancestors
+ needed = {node:1}
+ visit = [node]
+ while visit:
+ n = visit.pop(0)
+ for p in self.parents(n):
+ if p not in needed:
+ needed[p] = 1
+ visit.append(p)
+ else:
+ # count how many times we'll use this
+ needed[p] += 1
+
+ # sort by revision which is a topological order
+ visit = [ (self.rev(n), n) for n in needed.keys() ]
+ visit.sort()
+ hist = {}
+
+ for r,n in visit:
+ curr = decorate(self.read(n), self.linkrev(n))
+ for p in self.parents(n):
+ if p != nullid:
+ curr = pair(hist[p], curr)
+ # trim the history of unneeded revs
+ needed[p] -= 1
+ if not needed[p]:
+ del hist[p]
+ hist[n] = curr
+
+ return zip(hist[n][0], hist[n][1].splitlines(1))
new file mode 100644
--- /dev/null
+++ b/mercurial/hg.py
@@ -0,0 +1,36 @@
+# hg.py - repository classes for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from node import *
+from repo import *
+from demandload import *
+demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
+
+def repository(ui, path=None, create=0):
+    """Return a repository object for the given path or URL.
+
+    The URL scheme selects the repository class: http(s):// and hg://
+    (rewritten to http) use httprepo, old-http:// serves plain static
+    files via statichttprepo, ssh:// uses sshrepo, and
+    bundle://[repopath+]bundlefile overlays a bundle on a local repo.
+    Anything else is treated as a local repository path (created when
+    create is true).
+    """
+    if path:
+        if path.startswith("http://"):
+            return httprepo.httprepository(ui, path)
+        if path.startswith("https://"):
+            return httprepo.httpsrepository(ui, path)
+        if path.startswith("hg://"):
+            return httprepo.httprepository(
+                ui, path.replace("hg://", "http://"))
+        if path.startswith("old-http://"):
+            return statichttprepo.statichttprepository(
+                ui, path.replace("old-http://", "http://"))
+        if path.startswith("ssh://"):
+            return sshrepo.sshrepository(ui, path)
+        if path.startswith("bundle://"):
+            # strip the scheme; the remainder is [path-to-repo+]bundle
+            path = path[9:]
+            s = path.split("+", 1)
+            if len(s) == 1:
+                repopath, bundlename = "", s[0]
+            else:
+                repopath, bundlename = s
+            return bundlerepo.bundlerepository(ui, repopath, bundlename)
+
+    return localrepo.localrepository(ui, path, create)
new file mode 100644
--- /dev/null
+++ b/mercurial/hgweb/__init__.py
@@ -0,0 +1,11 @@
+# hgweb.py - web interface to a mercurial repository
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial.demandload import demandload
+demandload(globals(), "mercurial.hgweb.hgweb_mod:hgweb")
+demandload(globals(), "mercurial.hgweb.hgwebdir_mod:hgwebdir")
new file mode 100644
--- /dev/null
+++ b/mercurial/hgweb/common.py
@@ -0,0 +1,42 @@
+# hgweb.py - web interface to a mercurial repository
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os, mimetypes
+import os.path
+
+def get_mtime(repo_path):
+ hg_path = os.path.join(repo_path, ".hg")
+ cl_path = os.path.join(hg_path, "00changelog.i")
+ if os.path.exists(os.path.join(cl_path)):
+ return os.stat(cl_path).st_mtime
+ else:
+ return os.stat(hg_path).st_mtime
+
+def staticfile(directory, fname):
+ """return a file inside directory with guessed content-type header
+
+ fname always uses '/' as directory separator and isn't allowed to
+ contain unusual path components.
+ Content-type is guessed using the mimetypes module.
+ Return an empty string if fname is illegal or file not found.
+
+ """
+ parts = fname.split('/')
+ path = directory
+ for part in parts:
+ if (part in ('', os.curdir, os.pardir) or
+ os.sep in part or os.altsep is not None and os.altsep in part):
+ return ""
+ path = os.path.join(path, part)
+ try:
+ os.stat(path)
+ ct = mimetypes.guess_type(path)[0] or "text/plain"
+ return "Content-type: %s\n\n%s" % (ct, file(path).read())
+ except (TypeError, OSError):
+ # illegal fname or unreadable file
+ return ""
new file mode 100644
--- /dev/null
+++ b/mercurial/hgweb/hgweb_mod.py
@@ -0,0 +1,822 @@
+# hgweb.py - web interface to a mercurial repository
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os
+import os.path
+import mimetypes
+from mercurial.demandload import demandload
+demandload(globals(), "re zlib ConfigParser")
+demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,templater")
+demandload(globals(), "mercurial.hgweb.request:hgrequest")
+demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile")
+from mercurial.node import *
+from mercurial.i18n import gettext as _
+
+def _up(p):
+    # Return the parent directory of path p for "up" navigation links:
+    # always begins with "/" and ends with "/" ("/" itself for a
+    # top-level path).
+    if p[0] != "/":
+        p = "/" + p
+    if p[-1] == "/":
+        p = p[:-1]
+    up = os.path.dirname(p)
+    if up == "/":
+        return "/"
+    return up + "/"
+
+class hgweb(object):
+ def __init__(self, repo, name=None):
+ if type(repo) == type(""):
+ self.repo = hg.repository(ui.ui(), repo)
+ else:
+ self.repo = repo
+
+ self.mtime = -1
+ self.reponame = name
+ self.archives = 'zip', 'gz', 'bz2'
+
+ def refresh(self):
+ mtime = get_mtime(self.repo.root)
+ if mtime != self.mtime:
+ self.mtime = mtime
+ self.repo = hg.repository(self.repo.ui, self.repo.root)
+ self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
+ self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
+ self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
+
+ def archivelist(self, nodeid):
+ allowed = (self.repo.ui.config("web", "allow_archive", "")
+ .replace(",", " ").split())
+ for i in self.archives:
+ if i in allowed or self.repo.ui.configbool("web", "allow" + i):
+ yield {"type" : i, "node" : nodeid, "url": ""}
+
+ def listfiles(self, files, mf):
+ for f in files[:self.maxfiles]:
+ yield self.t("filenodelink", node=hex(mf[f]), file=f)
+ if len(files) > self.maxfiles:
+ yield self.t("fileellipses")
+
+ def listfilediffs(self, files, changeset):
+ for f in files[:self.maxfiles]:
+ yield self.t("filedifflink", node=hex(changeset), file=f)
+ if len(files) > self.maxfiles:
+ yield self.t("fileellipses")
+
+ def siblings(self, siblings=[], rev=None, hiderev=None, **args):
+ if not rev:
+ rev = lambda x: ""
+ siblings = [s for s in siblings if s != nullid]
+ if len(siblings) == 1 and rev(siblings[0]) == hiderev:
+ return
+ for s in siblings:
+ yield dict(node=hex(s), rev=rev(s), **args)
+
+ def renamelink(self, fl, node):
+ r = fl.renamed(node)
+ if r:
+ return [dict(file=r[0], node=hex(r[1]))]
+ return []
+
+ def showtag(self, t1, node=nullid, **args):
+ for t in self.repo.nodetags(node):
+ yield self.t(t1, tag=t, **args)
+
+ def diff(self, node1, node2, files):
+ def filterfiles(filters, files):
+ l = [x for x in files if x in filters]
+
+ for t in filters:
+ if t and t[-1] != os.sep:
+ t += os.sep
+ l += [x for x in files if x.startswith(t)]
+ return l
+
+ parity = [0]
+ def diffblock(diff, f, fn):
+ yield self.t("diffblock",
+ lines=prettyprintlines(diff),
+ parity=parity[0],
+ file=f,
+ filenode=hex(fn or nullid))
+ parity[0] = 1 - parity[0]
+
+ def prettyprintlines(diff):
+ for l in diff.splitlines(1):
+ if l.startswith('+'):
+ yield self.t("difflineplus", line=l)
+ elif l.startswith('-'):
+ yield self.t("difflineminus", line=l)
+ elif l.startswith('@'):
+ yield self.t("difflineat", line=l)
+ else:
+ yield self.t("diffline", line=l)
+
+ r = self.repo
+ cl = r.changelog
+ mf = r.manifest
+ change1 = cl.read(node1)
+ change2 = cl.read(node2)
+ mmap1 = mf.read(change1[0])
+ mmap2 = mf.read(change2[0])
+ date1 = util.datestr(change1[2])
+ date2 = util.datestr(change2[2])
+
+ modified, added, removed, deleted, unknown = r.changes(node1, node2)
+ if files:
+ modified, added, removed = map(lambda x: filterfiles(files, x),
+ (modified, added, removed))
+
+ diffopts = self.repo.ui.diffopts()
+ showfunc = diffopts['showfunc']
+ ignorews = diffopts['ignorews']
+ for f in modified:
+ to = r.file(f).read(mmap1[f])
+ tn = r.file(f).read(mmap2[f])
+ yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
+ showfunc=showfunc, ignorews=ignorews), f, tn)
+ for f in added:
+ to = None
+ tn = r.file(f).read(mmap2[f])
+ yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
+ showfunc=showfunc, ignorews=ignorews), f, tn)
+ for f in removed:
+ to = r.file(f).read(mmap1[f])
+ tn = None
+ yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
+ showfunc=showfunc, ignorews=ignorews), f, tn)
+
+ def changelog(self, pos):
+ def changenav(**map):
+ def seq(factor, maxchanges=None):
+ if maxchanges:
+ yield maxchanges
+ if maxchanges >= 20 and maxchanges <= 40:
+ yield 50
+ else:
+ yield 1 * factor
+ yield 3 * factor
+ for f in seq(factor * 10):
+ yield f
+
+ l = []
+ last = 0
+ for f in seq(1, self.maxchanges):
+ if f < self.maxchanges or f <= last:
+ continue
+ if f > count:
+ break
+ last = f
+ r = "%d" % f
+ if pos + f < count:
+ l.append(("+" + r, pos + f))
+ if pos - f >= 0:
+ l.insert(0, ("-" + r, pos - f))
+
+ yield {"rev": 0, "label": "(0)"}
+
+ for label, rev in l:
+ yield {"label": label, "rev": rev}
+
+ yield {"label": "tip", "rev": "tip"}
+
+ def changelist(**map):
+ parity = (start - end) & 1
+ cl = self.repo.changelog
+ l = [] # build a list in forward order for efficiency
+ for i in range(start, end):
+ n = cl.node(i)
+ changes = cl.read(n)
+ hn = hex(n)
+
+ l.insert(0, {"parity": parity,
+ "author": changes[1],
+ "parent": self.siblings(cl.parents(n), cl.rev,
+ cl.rev(n) - 1),
+ "child": self.siblings(cl.children(n), cl.rev,
+ cl.rev(n) + 1),
+ "changelogtag": self.showtag("changelogtag",n),
+ "manifest": hex(changes[0]),
+ "desc": changes[4],
+ "date": changes[2],
+ "files": self.listfilediffs(changes[3], n),
+ "rev": i,
+ "node": hn})
+ parity = 1 - parity
+
+ for e in l:
+ yield e
+
+ cl = self.repo.changelog
+ mf = cl.read(cl.tip())[0]
+ count = cl.count()
+ start = max(0, pos - self.maxchanges + 1)
+ end = min(count, start + self.maxchanges)
+ pos = end - 1
+
+ yield self.t('changelog',
+ changenav=changenav,
+ manifest=hex(mf),
+ rev=pos, changesets=count, entries=changelist,
+ archives=self.archivelist("tip"))
+
+ def search(self, query):
+
+ def changelist(**map):
+ cl = self.repo.changelog
+ count = 0
+ qw = query.lower().split()
+
+ def revgen():
+ for i in range(cl.count() - 1, 0, -100):
+ l = []
+ for j in range(max(0, i - 100), i):
+ n = cl.node(j)
+ changes = cl.read(n)
+ l.append((n, j, changes))
+ l.reverse()
+ for e in l:
+ yield e
+
+ for n, i, changes in revgen():
+ miss = 0
+ for q in qw:
+ if not (q in changes[1].lower() or
+ q in changes[4].lower() or
+ q in " ".join(changes[3][:20]).lower()):
+ miss = 1
+ break
+ if miss:
+ continue
+
+ count += 1
+ hn = hex(n)
+
+ yield self.t('searchentry',
+ parity=count & 1,
+ author=changes[1],
+ parent=self.siblings(cl.parents(n), cl.rev),
+ child=self.siblings(cl.children(n), cl.rev),
+ changelogtag=self.showtag("changelogtag",n),
+ manifest=hex(changes[0]),
+ desc=changes[4],
+ date=changes[2],
+ files=self.listfilediffs(changes[3], n),
+ rev=i,
+ node=hn)
+
+ if count >= self.maxchanges:
+ break
+
+ cl = self.repo.changelog
+ mf = cl.read(cl.tip())[0]
+
+ yield self.t('search',
+ query=query,
+ manifest=hex(mf),
+ entries=changelist)
+
+ def changeset(self, nodeid):
+ cl = self.repo.changelog
+ n = self.repo.lookup(nodeid)
+ nodeid = hex(n)
+ changes = cl.read(n)
+ p1 = cl.parents(n)[0]
+
+ files = []
+ mf = self.repo.manifest.read(changes[0])
+ for f in changes[3]:
+ files.append(self.t("filenodelink",
+ filenode=hex(mf.get(f, nullid)), file=f))
+
+ def diff(**map):
+ yield self.diff(p1, n, None)
+
+ yield self.t('changeset',
+ diff=diff,
+ rev=cl.rev(n),
+ node=nodeid,
+ parent=self.siblings(cl.parents(n), cl.rev),
+ child=self.siblings(cl.children(n), cl.rev),
+ changesettag=self.showtag("changesettag",n),
+ manifest=hex(changes[0]),
+ author=changes[1],
+ desc=changes[4],
+ date=changes[2],
+ files=files,
+ archives=self.archivelist(nodeid))
+
+ def filelog(self, f, filenode):
+ cl = self.repo.changelog
+ fl = self.repo.file(f)
+ filenode = hex(fl.lookup(filenode))
+ count = fl.count()
+
+ def entries(**map):
+ l = []
+ parity = (count - 1) & 1
+
+ for i in range(count):
+ n = fl.node(i)
+ lr = fl.linkrev(n)
+ cn = cl.node(lr)
+ cs = cl.read(cl.node(lr))
+
+ l.insert(0, {"parity": parity,
+ "filenode": hex(n),
+ "filerev": i,
+ "file": f,
+ "node": hex(cn),
+ "author": cs[1],
+ "date": cs[2],
+ "rename": self.renamelink(fl, n),
+ "parent": self.siblings(fl.parents(n),
+ fl.rev, file=f),
+ "child": self.siblings(fl.children(n),
+ fl.rev, file=f),
+ "desc": cs[4]})
+ parity = 1 - parity
+
+ for e in l:
+ yield e
+
+ yield self.t("filelog", file=f, filenode=filenode, entries=entries)
+
+ def filerevision(self, f, node):
+ fl = self.repo.file(f)
+ n = fl.lookup(node)
+ node = hex(n)
+ text = fl.read(n)
+ changerev = fl.linkrev(n)
+ cl = self.repo.changelog
+ cn = cl.node(changerev)
+ cs = cl.read(cn)
+ mfn = cs[0]
+
+ mt = mimetypes.guess_type(f)[0]
+ rawtext = text
+ if util.binary(text):
+ mt = mt or 'application/octet-stream'
+ text = "(binary:%s)" % mt
+ mt = mt or 'text/plain'
+
+ def lines():
+ for l, t in enumerate(text.splitlines(1)):
+ yield {"line": t,
+ "linenumber": "% 6d" % (l + 1),
+ "parity": l & 1}
+
+ yield self.t("filerevision",
+ file=f,
+ filenode=node,
+ path=_up(f),
+ text=lines(),
+ raw=rawtext,
+ mimetype=mt,
+ rev=changerev,
+ node=hex(cn),
+ manifest=hex(mfn),
+ author=cs[1],
+ date=cs[2],
+ parent=self.siblings(fl.parents(n), fl.rev, file=f),
+ child=self.siblings(fl.children(n), fl.rev, file=f),
+ rename=self.renamelink(fl, n),
+ permissions=self.repo.manifest.readflags(mfn)[f])
+
+ def fileannotate(self, f, node):
+ bcache = {}
+ ncache = {}
+ fl = self.repo.file(f)
+ n = fl.lookup(node)
+ node = hex(n)
+ changerev = fl.linkrev(n)
+
+ cl = self.repo.changelog
+ cn = cl.node(changerev)
+ cs = cl.read(cn)
+ mfn = cs[0]
+
+ def annotate(**map):
+ parity = 1
+ last = None
+ for r, l in fl.annotate(n):
+ try:
+ cnode = ncache[r]
+ except KeyError:
+ cnode = ncache[r] = self.repo.changelog.node(r)
+
+ try:
+ name = bcache[r]
+ except KeyError:
+ cl = self.repo.changelog.read(cnode)
+ bcache[r] = name = self.repo.ui.shortuser(cl[1])
+
+ if last != cnode:
+ parity = 1 - parity
+ last = cnode
+
+ yield {"parity": parity,
+ "node": hex(cnode),
+ "rev": r,
+ "author": name,
+ "file": f,
+ "line": l}
+
+ yield self.t("fileannotate",
+ file=f,
+ filenode=node,
+ annotate=annotate,
+ path=_up(f),
+ rev=changerev,
+ node=hex(cn),
+ manifest=hex(mfn),
+ author=cs[1],
+ date=cs[2],
+ rename=self.renamelink(fl, n),
+ parent=self.siblings(fl.parents(n), fl.rev, file=f),
+ child=self.siblings(fl.children(n), fl.rev, file=f),
+ permissions=self.repo.manifest.readflags(mfn)[f])
+
+ def manifest(self, mnode, path):
+ man = self.repo.manifest
+ mn = man.lookup(mnode)
+ mnode = hex(mn)
+ mf = man.read(mn)
+ rev = man.rev(mn)
+ changerev = man.linkrev(mn)
+ node = self.repo.changelog.node(changerev)
+ mff = man.readflags(mn)
+
+ files = {}
+
+ p = path[1:]
+ if p and p[-1] != "/":
+ p += "/"
+ l = len(p)
+
+ for f,n in mf.items():
+ if f[:l] != p:
+ continue
+ remain = f[l:]
+ if "/" in remain:
+ short = remain[:remain.find("/") + 1] # bleah
+ files[short] = (f, None)
+ else:
+ short = os.path.basename(remain)
+ files[short] = (f, n)
+
+ def filelist(**map):
+ parity = 0
+ fl = files.keys()
+ fl.sort()
+ for f in fl:
+ full, fnode = files[f]
+ if not fnode:
+ continue
+
+ yield {"file": full,
+ "manifest": mnode,
+ "filenode": hex(fnode),
+ "parity": parity,
+ "basename": f,
+ "permissions": mff[full]}
+ parity = 1 - parity
+
+ def dirlist(**map):
+ parity = 0
+ fl = files.keys()
+ fl.sort()
+ for f in fl:
+ full, fnode = files[f]
+ if fnode:
+ continue
+
+ yield {"parity": parity,
+ "path": os.path.join(path, f),
+ "manifest": mnode,
+ "basename": f[:-1]}
+ parity = 1 - parity
+
+ yield self.t("manifest",
+ manifest=mnode,
+ rev=rev,
+ node=hex(node),
+ path=path,
+ up=_up(path),
+ fentries=filelist,
+ dentries=dirlist,
+ archives=self.archivelist(hex(node)))
+
+ def tags(self):
+ cl = self.repo.changelog
+ mf = cl.read(cl.tip())[0]
+
+ i = self.repo.tagslist()
+ i.reverse()
+
+ def entries(notip=False, **map):
+ parity = 0
+ for k,n in i:
+ if notip and k == "tip": continue
+ yield {"parity": parity,
+ "tag": k,
+ "tagmanifest": hex(cl.read(n)[0]),
+ "date": cl.read(n)[2],
+ "node": hex(n)}
+ parity = 1 - parity
+
+ yield self.t("tags",
+ manifest=hex(mf),
+ entries=lambda **x: entries(False, **x),
+ entriesnotip=lambda **x: entries(True, **x))
+
+ def summary(self):
+ cl = self.repo.changelog
+ mf = cl.read(cl.tip())[0]
+
+ i = self.repo.tagslist()
+ i.reverse()
+
+ def tagentries(**map):
+ parity = 0
+ count = 0
+ for k,n in i:
+ if k == "tip": # skip tip
+ continue;
+
+ count += 1
+ if count > 10: # limit to 10 tags
+ break;
+
+ c = cl.read(n)
+ m = c[0]
+ t = c[2]
+
+ yield self.t("tagentry",
+ parity = parity,
+ tag = k,
+ node = hex(n),
+ date = t,
+ tagmanifest = hex(m))
+ parity = 1 - parity
+
+ def changelist(**map):
+ parity = 0
+ cl = self.repo.changelog
+ l = [] # build a list in forward order for efficiency
+ for i in range(start, end):
+ n = cl.node(i)
+ changes = cl.read(n)
+ hn = hex(n)
+ t = changes[2]
+
+ l.insert(0, self.t(
+ 'shortlogentry',
+ parity = parity,
+ author = changes[1],
+ manifest = hex(changes[0]),
+ desc = changes[4],
+ date = t,
+ rev = i,
+ node = hn))
+ parity = 1 - parity
+
+ yield l
+
+ cl = self.repo.changelog
+ mf = cl.read(cl.tip())[0]
+ count = cl.count()
+ start = max(0, count - self.maxchanges)
+ end = min(count, start + self.maxchanges)
+ pos = end - 1
+
+ yield self.t("summary",
+ desc = self.repo.ui.config("web", "description", "unknown"),
+ owner = (self.repo.ui.config("ui", "username") or # preferred
+ self.repo.ui.config("web", "contact") or # deprecated
+ self.repo.ui.config("web", "author", "unknown")), # also
+ lastchange = (0, 0), # FIXME
+ manifest = hex(mf),
+ tags = tagentries,
+ shortlog = changelist)
+
+ def filediff(self, file, changeset):
+ cl = self.repo.changelog
+ n = self.repo.lookup(changeset)
+ changeset = hex(n)
+ p1 = cl.parents(n)[0]
+ cs = cl.read(n)
+ mf = self.repo.manifest.read(cs[0])
+
+ def diff(**map):
+ yield self.diff(p1, n, [file])
+
+ yield self.t("filediff",
+ file=file,
+ filenode=hex(mf.get(file, nullid)),
+ node=changeset,
+ rev=self.repo.changelog.rev(n),
+ parent=self.siblings(cl.parents(n), cl.rev),
+ child=self.siblings(cl.children(n), cl.rev),
+ diff=diff)
+
+ archive_specs = {
+ 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
+ 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
+ 'zip': ('application/zip', 'zip', '.zip', None),
+ }
+
+ def archive(self, req, cnode, type):
+ reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
+ name = "%s-%s" % (reponame, short(cnode))
+ mimetype, artype, extension, encoding = self.archive_specs[type]
+ headers = [('Content-type', mimetype),
+ ('Content-disposition', 'attachment; filename=%s%s' %
+ (name, extension))]
+ if encoding:
+ headers.append(('Content-encoding', encoding))
+ req.header(headers)
+ archival.archive(self.repo, req.out, cnode, artype, prefix=name)
+
+ # add tags to things
+ # tags -> list of changesets corresponding to tags
+ # find tag, changeset, file
+
+ def run(self, req=hgrequest()):
+ def clean(path):
+ p = util.normpath(path)
+ if p[:2] == "..":
+ raise "suspicious path"
+ return p
+
+ def header(**map):
+ yield self.t("header", **map)
+
+ def footer(**map):
+ yield self.t("footer",
+ motd=self.repo.ui.config("web", "motd", ""),
+ **map)
+
+ def expand_form(form):
+ shortcuts = {
+ 'cl': [('cmd', ['changelog']), ('rev', None)],
+ 'cs': [('cmd', ['changeset']), ('node', None)],
+ 'f': [('cmd', ['file']), ('filenode', None)],
+ 'fl': [('cmd', ['filelog']), ('filenode', None)],
+ 'fd': [('cmd', ['filediff']), ('node', None)],
+ 'fa': [('cmd', ['annotate']), ('filenode', None)],
+ 'mf': [('cmd', ['manifest']), ('manifest', None)],
+ 'ca': [('cmd', ['archive']), ('node', None)],
+ 'tags': [('cmd', ['tags'])],
+ 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
+ 'static': [('cmd', ['static']), ('file', None)]
+ }
+
+ for k in shortcuts.iterkeys():
+ if form.has_key(k):
+ for name, value in shortcuts[k]:
+ if value is None:
+ value = form[k]
+ form[name] = value
+ del form[k]
+
+ self.refresh()
+
+ expand_form(req.form)
+
+ t = self.repo.ui.config("web", "templates", templater.templatepath())
+ static = self.repo.ui.config("web", "static", os.path.join(t,"static"))
+ m = os.path.join(t, "map")
+ style = self.repo.ui.config("web", "style", "")
+ if req.form.has_key('style'):
+ style = req.form['style'][0]
+ if style:
+ b = os.path.basename("map-" + style)
+ p = os.path.join(t, b)
+ if os.path.isfile(p):
+ m = p
+
+ port = req.env["SERVER_PORT"]
+ port = port != "80" and (":" + port) or ""
+ uri = req.env["REQUEST_URI"]
+ if "?" in uri:
+ uri = uri.split("?")[0]
+ url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
+ if not self.reponame:
+ self.reponame = (self.repo.ui.config("web", "name")
+ or uri.strip('/') or self.repo.root)
+
+ self.t = templater.templater(m, templater.common_filters,
+ defaults={"url": url,
+ "repo": self.reponame,
+ "header": header,
+ "footer": footer,
+ })
+
+ if not req.form.has_key('cmd'):
+ req.form['cmd'] = [self.t.cache['default'],]
+
+ cmd = req.form['cmd'][0]
+ if cmd == 'changelog':
+ hi = self.repo.changelog.count() - 1
+ if req.form.has_key('rev'):
+ hi = req.form['rev'][0]
+ try:
+ hi = self.repo.changelog.rev(self.repo.lookup(hi))
+ except hg.RepoError:
+ req.write(self.search(hi)) # XXX redirect to 404 page?
+ return
+
+ req.write(self.changelog(hi))
+
+ elif cmd == 'changeset':
+ req.write(self.changeset(req.form['node'][0]))
+
+ elif cmd == 'manifest':
+ req.write(self.manifest(req.form['manifest'][0],
+ clean(req.form['path'][0])))
+
+ elif cmd == 'tags':
+ req.write(self.tags())
+
+ elif cmd == 'summary':
+ req.write(self.summary())
+
+ elif cmd == 'filediff':
+ req.write(self.filediff(clean(req.form['file'][0]),
+ req.form['node'][0]))
+
+ elif cmd == 'file':
+ req.write(self.filerevision(clean(req.form['file'][0]),
+ req.form['filenode'][0]))
+
+ elif cmd == 'annotate':
+ req.write(self.fileannotate(clean(req.form['file'][0]),
+ req.form['filenode'][0]))
+
+ elif cmd == 'filelog':
+ req.write(self.filelog(clean(req.form['file'][0]),
+ req.form['filenode'][0]))
+
+ elif cmd == 'heads':
+ req.httphdr("application/mercurial-0.1")
+ h = self.repo.heads()
+ req.write(" ".join(map(hex, h)) + "\n")
+
+ elif cmd == 'branches':
+ req.httphdr("application/mercurial-0.1")
+ nodes = []
+ if req.form.has_key('nodes'):
+ nodes = map(bin, req.form['nodes'][0].split(" "))
+ for b in self.repo.branches(nodes):
+ req.write(" ".join(map(hex, b)) + "\n")
+
+ elif cmd == 'between':
+ req.httphdr("application/mercurial-0.1")
+ nodes = []
+ if req.form.has_key('pairs'):
+ pairs = [map(bin, p.split("-"))
+ for p in req.form['pairs'][0].split(" ")]
+ for b in self.repo.between(pairs):
+ req.write(" ".join(map(hex, b)) + "\n")
+
+ elif cmd == 'changegroup':
+ req.httphdr("application/mercurial-0.1")
+ nodes = []
+ if not self.allowpull:
+ return
+
+ if req.form.has_key('roots'):
+ nodes = map(bin, req.form['roots'][0].split(" "))
+
+ z = zlib.compressobj()
+ f = self.repo.changegroup(nodes, 'serve')
+ while 1:
+ chunk = f.read(4096)
+ if not chunk:
+ break
+ req.write(z.compress(chunk))
+
+ req.write(z.flush())
+
+ elif cmd == 'archive':
+ changeset = self.repo.lookup(req.form['node'][0])
+ type = req.form['type'][0]
+ allowed = self.repo.ui.config("web", "allow_archive", "").split()
+ if (type in self.archives and (type in allowed or
+ self.repo.ui.configbool("web", "allow" + type, False))):
+ self.archive(req, changeset, type)
+ return
+
+ req.write(self.t("error"))
+
+ elif cmd == 'static':
+ fname = req.form['file'][0]
+ req.write(staticfile(static, fname)
+ or self.t("error", error="%r not found" % fname))
+
+ else:
+ req.write(self.t("error"))
new file mode 100644
--- /dev/null
+++ b/mercurial/hgweb/hgwebdir_mod.py
@@ -0,0 +1,156 @@
+# hgweb.py - web interface to a mercurial repository
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os
+from mercurial.demandload import demandload
+demandload(globals(), "ConfigParser")
+demandload(globals(), "mercurial:ui,hg,util,templater")
+demandload(globals(), "mercurial.hgweb.hgweb_mod:hgweb")
+demandload(globals(), "mercurial.hgweb.request:hgrequest")
+demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile")
+from mercurial.i18n import gettext as _
+
+# This is a stopgap
+class hgwebdir(object):
+ def __init__(self, config):
+ def cleannames(items):
+ return [(name.strip(os.sep), path) for name, path in items]
+
+ self.motd = ""
+ self.repos_sorted = ('name', False)
+ if isinstance(config, (list, tuple)):
+ self.repos = cleannames(config)
+ self.repos_sorted = ('', False)
+ elif isinstance(config, dict):
+ self.repos = cleannames(config.items())
+ self.repos.sort()
+ else:
+ cp = ConfigParser.SafeConfigParser()
+ cp.read(config)
+ self.repos = []
+ if cp.has_section('web') and cp.has_option('web', 'motd'):
+ self.motd = cp.get('web', 'motd')
+ if cp.has_section('paths'):
+ self.repos.extend(cleannames(cp.items('paths')))
+ if cp.has_section('collections'):
+ for prefix, root in cp.items('collections'):
+ for path in util.walkrepos(root):
+ repo = os.path.normpath(path)
+ name = repo
+ if name.startswith(prefix):
+ name = name[len(prefix):]
+ self.repos.append((name.lstrip(os.sep), repo))
+ self.repos.sort()
+
+ def run(self, req=hgrequest()):
+ def header(**map):
+ yield tmpl("header", **map)
+
+ def footer(**map):
+ yield tmpl("footer", motd=self.motd, **map)
+
+ m = os.path.join(templater.templatepath(), "map")
+ tmpl = templater.templater(m, templater.common_filters,
+ defaults={"header": header,
+ "footer": footer})
+
+ def archivelist(ui, nodeid, url):
+ allowed = (ui.config("web", "allow_archive", "")
+ .replace(",", " ").split())
+ for i in ['zip', 'gz', 'bz2']:
+ if i in allowed or ui.configbool("web", "allow" + i):
+ yield {"type" : i, "node": nodeid, "url": url}
+
+ def entries(sortcolumn="", descending=False, **map):
+ rows = []
+ parity = 0
+ for name, path in self.repos:
+ u = ui.ui()
+ try:
+ u.readconfig(os.path.join(path, '.hg', 'hgrc'))
+ except IOError:
+ pass
+ get = u.config
+
+ url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
+ .replace("//", "/"))
+
+ # update time with local timezone
+ try:
+ d = (get_mtime(path), util.makedate()[1])
+ except OSError:
+ continue
+
+ contact = (get("ui", "username") or # preferred
+ get("web", "contact") or # deprecated
+ get("web", "author", "")) # also
+ description = get("web", "description", "")
+ name = get("web", "name", name)
+ row = dict(contact=contact or "unknown",
+ contact_sort=contact.upper() or "unknown",
+ name=name,
+ name_sort=name,
+ url=url,
+ description=description or "unknown",
+ description_sort=description.upper() or "unknown",
+ lastchange=d,
+ lastchange_sort=d[1]-d[0],
+ archives=archivelist(u, "tip", url))
+ if (not sortcolumn
+ or (sortcolumn, descending) == self.repos_sorted):
+ # fast path for unsorted output
+ row['parity'] = parity
+ parity = 1 - parity
+ yield row
+ else:
+ rows.append((row["%s_sort" % sortcolumn], row))
+ if rows:
+ rows.sort()
+ if descending:
+ rows.reverse()
+ for key, row in rows:
+ row['parity'] = parity
+ parity = 1 - parity
+ yield row
+
+ virtual = req.env.get("PATH_INFO", "").strip('/')
+ if virtual:
+ real = dict(self.repos).get(virtual)
+ if real:
+ try:
+ hgweb(real).run(req)
+ except IOError, inst:
+ req.write(tmpl("error", error=inst.strerror))
+ except hg.RepoError, inst:
+ req.write(tmpl("error", error=str(inst)))
+ else:
+ req.write(tmpl("notfound", repo=virtual))
+ else:
+ if req.form.has_key('static'):
+ static = os.path.join(templater.templatepath(), "static")
+ fname = req.form['static'][0]
+ req.write(staticfile(static, fname)
+ or tmpl("error", error="%r not found" % fname))
+ else:
+ sortable = ["name", "description", "contact", "lastchange"]
+ sortcolumn, descending = self.repos_sorted
+ if req.form.has_key('sort'):
+ sortcolumn = req.form['sort'][0]
+ descending = sortcolumn.startswith('-')
+ if descending:
+ sortcolumn = sortcolumn[1:]
+ if sortcolumn not in sortable:
+ sortcolumn = ""
+
+ sort = [("sort_%s" % column,
+ "%s%s" % ((not descending and column == sortcolumn)
+ and "-" or "", column))
+ for column in sortable]
+ req.write(tmpl("index", entries=entries,
+ sortcolumn=sortcolumn, descending=descending,
+ **dict(sort)))
new file mode 100644
--- /dev/null
+++ b/mercurial/hgweb/request.py
@@ -0,0 +1,44 @@
+# hgweb.py - web interface to a mercurial repository
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial.demandload import demandload
+demandload(globals(), "socket sys cgi os")
+from mercurial.i18n import gettext as _
+
+class hgrequest(object):
+ def __init__(self, inp=None, out=None, env=None):
+ self.inp = inp or sys.stdin
+ self.out = out or sys.stdout
+ self.env = env or os.environ
+ self.form = cgi.parse(self.inp, self.env, keep_blank_values=1)
+
+ def write(self, *things):
+ for thing in things:
+ if hasattr(thing, "__iter__"):
+ for part in thing:
+ self.write(part)
+ else:
+ try:
+ self.out.write(str(thing))
+ except socket.error, inst:
+ if inst[0] != errno.ECONNRESET:
+ raise
+
+ def header(self, headers=[('Content-type','text/html')]):
+ for header in headers:
+ self.out.write("%s: %s\r\n" % header)
+ self.out.write("\r\n")
+
+ def httphdr(self, type, file="", size=0):
+
+ headers = [('Content-type', type)]
+ if file:
+ headers.append(('Content-disposition', 'attachment; filename=%s' % file))
+ if size > 0:
+ headers.append(('Content-length', str(size)))
+ self.header(headers)
new file mode 100644
--- /dev/null
+++ b/mercurial/hgweb/server.py
@@ -0,0 +1,150 @@
+# hgweb.py - web interface to a mercurial repository
+#
+# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from mercurial.demandload import demandload
+import os, sys, errno
+demandload(globals(), "urllib BaseHTTPServer socket SocketServer")
+demandload(globals(), "mercurial:ui,hg,util,templater")
+demandload(globals(), "mercurial.hgweb.request:hgrequest")
+from mercurial.i18n import gettext as _
+
+def _splitURI(uri):
+ """ Return path and query splited from uri
+
+ Just like CGI environment, the path is unquoted, the query is
+ not.
+ """
+ if '?' in uri:
+ path, query = uri.split('?', 1)
+ else:
+ path, query = uri, ''
+ return urllib.unquote(path), query
+
+class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
+ def __init__(self, *args, **kargs):
+ BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
+
+ def log_error(self, format, *args):
+ errorlog = self.server.errorlog
+ errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
+ self.log_date_time_string(),
+ format % args))
+
+ def log_message(self, format, *args):
+ accesslog = self.server.accesslog
+ accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
+ self.log_date_time_string(),
+ format % args))
+
+ def do_POST(self):
+ try:
+ self.do_hgweb()
+ except socket.error, inst:
+ if inst[0] != errno.EPIPE:
+ raise
+
+ def do_GET(self):
+ self.do_POST()
+
+ def do_hgweb(self):
+ path_info, query = _splitURI(self.path)
+
+ env = {}
+ env['GATEWAY_INTERFACE'] = 'CGI/1.1'
+ env['REQUEST_METHOD'] = self.command
+ env['SERVER_NAME'] = self.server.server_name
+ env['SERVER_PORT'] = str(self.server.server_port)
+ env['REQUEST_URI'] = "/"
+ env['PATH_INFO'] = path_info
+ if query:
+ env['QUERY_STRING'] = query
+ host = self.address_string()
+ if host != self.client_address[0]:
+ env['REMOTE_HOST'] = host
+ env['REMOTE_ADDR'] = self.client_address[0]
+
+ if self.headers.typeheader is None:
+ env['CONTENT_TYPE'] = self.headers.type
+ else:
+ env['CONTENT_TYPE'] = self.headers.typeheader
+ length = self.headers.getheader('content-length')
+ if length:
+ env['CONTENT_LENGTH'] = length
+ accept = []
+ for line in self.headers.getallmatchingheaders('accept'):
+ if line[:1] in "\t\n\r ":
+ accept.append(line.strip())
+ else:
+ accept = accept + line[7:].split(',')
+ env['HTTP_ACCEPT'] = ','.join(accept)
+
+ req = hgrequest(self.rfile, self.wfile, env)
+ self.send_response(200, "Script output follows")
+ self.server.make_and_run_handler(req)
+
+def create_server(ui, repo, webdirmaker, repoviewmaker):
+ use_threads = True
+
+ def openlog(opt, default):
+ if opt and opt != '-':
+ return open(opt, 'w')
+ return default
+
+ address = ui.config("web", "address", "")
+ port = int(ui.config("web", "port", 8000))
+ use_ipv6 = ui.configbool("web", "ipv6")
+ webdir_conf = ui.config("web", "webdir_conf")
+ accesslog = openlog(ui.config("web", "accesslog", "-"), sys.stdout)
+ errorlog = openlog(ui.config("web", "errorlog", "-"), sys.stderr)
+
+ if use_threads:
+ try:
+ from threading import activeCount
+ except ImportError:
+ use_threads = False
+
+ if use_threads:
+ _mixin = SocketServer.ThreadingMixIn
+ else:
+ if hasattr(os, "fork"):
+ _mixin = SocketServer.ForkingMixIn
+ else:
+ class _mixin: pass
+
+ class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
+ def __init__(self, *args, **kargs):
+ BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
+ self.accesslog = accesslog
+ self.errorlog = errorlog
+ self.repo = repo
+ self.webdir_conf = webdir_conf
+ self.webdirmaker = webdirmaker
+ self.repoviewmaker = repoviewmaker
+
+ def make_and_run_handler(self, req):
+ if self.webdir_conf:
+ hgwebobj = self.webdirmaker(self.server.webdir_conf)
+ elif self.repo is not None:
+ hgwebobj = self.repoviewmaker(repo.__class__(repo.ui,
+ repo.origroot))
+ else:
+ raise hg.RepoError(_('no repo found'))
+ hgwebobj.run(req)
+
+ class IPv6HTTPServer(MercurialHTTPServer):
+ address_family = getattr(socket, 'AF_INET6', None)
+
+ def __init__(self, *args, **kwargs):
+ if self.address_family is None:
+ raise hg.RepoError(_('IPv6 not available on this system'))
+ super(IPv6HTTPServer, self).__init__(*args, **kargs)
+
+ if use_ipv6:
+ return IPv6HTTPServer((address, port), _hgwebhandler)
+ else:
+ return MercurialHTTPServer((address, port), _hgwebhandler)
new file mode 100644
--- /dev/null
+++ b/mercurial/httprangereader.py
@@ -0,0 +1,28 @@
+# httprangereader.py - just what it says
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import byterange, urllib2
+
+class httprangereader(object):
+ # Minimal file-like object reading a remote URL via HTTP Range
+ # requests. seek() only records the offset used by the next read().
+ def __init__(self, url):
+ self.url = url
+ self.pos = 0
+ def seek(self, pos):
+ self.pos = pos
+ def read(self, bytes=None):
+ # NOTE(review): installs a process-global opener on every read;
+ # assumes no concurrent code needs a different opener -- confirm.
+ opener = urllib2.build_opener(byterange.HTTPRangeHandler())
+ urllib2.install_opener(opener)
+ req = urllib2.Request(self.url)
+ end = ''
+ if bytes:
+ end = self.pos + bytes - 1
+ req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
+ f = urllib2.urlopen(req)
+ data = f.read()
+ # a server that ignores Range returns the whole file; trim it
+ if bytes:
+ data = data[:bytes]
+ return data
new file mode 100644
--- /dev/null
+++ b/mercurial/httprepo.py
@@ -0,0 +1,226 @@
+# httprepo.py - HTTP repository proxy classes for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from node import *
+from remoterepo import *
+from i18n import gettext as _
+from demandload import *
+demandload(globals(), "hg os urllib urllib2 urlparse zlib util httplib")
+
+class passwordmgr(urllib2.HTTPPasswordMgr):
+ def __init__(self, ui):
+ urllib2.HTTPPasswordMgr.__init__(self)
+ self.ui = ui
+
+ def find_user_password(self, realm, authuri):
+ authinfo = urllib2.HTTPPasswordMgr.find_user_password(
+ self, realm, authuri)
+ if authinfo != (None, None):
+ return authinfo
+
+ if not ui.interactive:
+ raise util.Abort(_('http authorization required'))
+
+ self.ui.write(_("http authorization required\n"))
+ self.ui.status(_("realm: %s\n") % realm)
+ user = self.ui.prompt(_("user:"), default=None)
+ passwd = self.ui.getpass()
+
+ self.add_password(realm, authuri, user, passwd)
+ return (user, passwd)
+
+def netlocsplit(netloc):
+ '''split [user[:passwd]@]host[:port] into 4-tuple.'''
+
+ # NOTE(review): uses the first '@'; a password containing '@' would
+ # be split incorrectly -- assumed to be percent-encoded by callers.
+ a = netloc.find('@')
+ if a == -1:
+ user, passwd = None, None
+ else:
+ userpass, netloc = netloc[:a], netloc[a+1:]
+ c = userpass.find(':')
+ if c == -1:
+ user, passwd = urllib.unquote(userpass), None
+ else:
+ user = urllib.unquote(userpass[:c])
+ passwd = urllib.unquote(userpass[c+1:])
+ c = netloc.find(':')
+ if c == -1:
+ host, port = netloc, None
+ else:
+ host, port = netloc[:c], netloc[c+1:]
+ return host, port, user, passwd
+
+def netlocunsplit(host, port, user=None, passwd=None):
+ '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
+ # inverse of netlocsplit(); user and passwd are re-quoted
+ if port:
+ hostport = host + ':' + port
+ else:
+ hostport = host
+ if user:
+ if passwd:
+ userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
+ else:
+ userpass = urllib.quote(user)
+ return userpass + '@' + hostport
+ return hostport
+
+class httprepository(remoterepository):
+ # Repository proxy speaking the hgweb command protocol over HTTP.
+ # Commands are issued as GET requests ("?cmd=..."); responses are
+ # parsed from plain text or zlib-compressed streams.
+ def __init__(self, ui, path):
+ scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
+ if query or frag:
+ raise util.Abort(_('unsupported URL component: "%s"') %
+ (query or frag))
+ if not urlpath: urlpath = '/'
+ host, port, user, passwd = netlocsplit(netloc)
+
+ # urllib cannot handle URLs with embedded user or passwd
+ self.url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
+ urlpath, '', ''))
+ self.ui = ui
+
+ proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
+ proxyauthinfo = None
+ handler = urllib2.BaseHandler()
+
+ if proxyurl:
+ # proxy can be proper url or host[:port]
+ if not (proxyurl.startswith('http:') or
+ proxyurl.startswith('https:')):
+ proxyurl = 'http://' + proxyurl + '/'
+ snpqf = urlparse.urlsplit(proxyurl)
+ proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
+ hpup = netlocsplit(proxynetloc)
+
+ proxyhost, proxyport, proxyuser, proxypasswd = hpup
+ if not proxyuser:
+ proxyuser = ui.config("http_proxy", "user")
+ proxypasswd = ui.config("http_proxy", "passwd")
+
+ # see if we should use a proxy for this url
+ no_list = [ "localhost", "127.0.0.1" ]
+ no_list.extend([p.strip().lower() for
+ p in ui.config("http_proxy", "no", '').split(',')
+ if p.strip()])
+ no_list.extend([p.strip().lower() for
+ p in os.getenv("no_proxy", '').split(',')
+ if p.strip()])
+ # "http_proxy.always" config is for running tests on localhost
+ if (not ui.configbool("http_proxy", "always") and
+ host.lower() in no_list):
+ ui.debug(_('disabling proxy for %s\n') % host)
+ else:
+ proxyurl = urlparse.urlunsplit((
+ proxyscheme, netlocunsplit(proxyhost, proxyport,
+ proxyuser, proxypasswd or ''),
+ proxypath, proxyquery, proxyfrag))
+ handler = urllib2.ProxyHandler({scheme: proxyurl})
+ ui.debug(_('proxying through %s\n') % proxyurl)
+
+ # urllib2 takes proxy values from the environment and those
+ # will take precedence if found, so drop them
+ for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
+ try:
+ if os.environ.has_key(env):
+ del os.environ[env]
+ except OSError:
+ pass
+
+ passmgr = passwordmgr(ui)
+ if user:
+ ui.debug(_('will use user %s for http auth\n') % user)
+ passmgr.add_password(None, host, user, passwd or '')
+
+ opener = urllib2.build_opener(
+ handler,
+ urllib2.HTTPBasicAuthHandler(passmgr),
+ urllib2.HTTPDigestAuthHandler(passmgr))
+
+ # 1.0 here is the _protocol_ version
+ opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
+ urllib2.install_opener(opener)
+
+ def dev(self):
+ # remote repos have no device number
+ return -1
+
+ def lock(self):
+ raise util.Abort(_('operation not supported over http'))
+
+ def do_cmd(self, cmd, **args):
+ # issue one protocol command; returns the open response object
+ self.ui.debug(_("sending %s command\n") % cmd)
+ q = {"cmd": cmd}
+ q.update(args)
+ qs = urllib.urlencode(q)
+ cu = "%s?%s" % (self.url, qs)
+ try:
+ resp = urllib2.urlopen(cu)
+ except httplib.HTTPException, inst:
+ self.ui.debug(_('http error while sending %s command\n') % cmd)
+ self.ui.print_exc()
+ raise IOError(None, inst)
+ proto = resp.headers['content-type']
+
+ # accept old "text/plain" and "application/hg-changegroup" for now
+ if not proto.startswith('application/mercurial') and \
+ not proto.startswith('text/plain') and \
+ not proto.startswith('application/hg-changegroup'):
+ raise hg.RepoError(_("'%s' does not appear to be an hg repository") %
+ self.url)
+
+ if proto.startswith('application/mercurial'):
+ # version follows "application/mercurial-", e.g. "-0.1"
+ version = proto[22:]
+ if float(version) > 0.1:
+ raise hg.RepoError(_("'%s' uses newer protocol %s") %
+ (self.url, version))
+
+ return resp
+
+ def heads(self):
+ # response: space-separated hex nodes, newline-terminated
+ d = self.do_cmd("heads").read()
+ try:
+ return map(bin, d[:-1].split(" "))
+ except:
+ self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
+ raise
+
+ def branches(self, nodes):
+ n = " ".join(map(hex, nodes))
+ d = self.do_cmd("branches", nodes=n).read()
+ try:
+ br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
+ return br
+ except:
+ self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
+ raise
+
+ def between(self, pairs):
+ n = "\n".join(["-".join(map(hex, p)) for p in pairs])
+ d = self.do_cmd("between", pairs=n).read()
+ try:
+ p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
+ return p
+ except:
+ self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
+ raise
+
+ def changegroup(self, nodes, kind):
+ # returns a chunkbuffer over the zlib-decompressed response body
+ n = " ".join(map(hex, nodes))
+ f = self.do_cmd("changegroup", roots=n)
+ bytes = 0
+
+ def zgenerator(f):
+ zd = zlib.decompressobj()
+ try:
+ for chnk in f:
+ yield zd.decompress(chnk)
+ except httplib.HTTPException, inst:
+ raise IOError(None, _('connection ended unexpectedly'))
+ yield zd.flush()
+
+ return util.chunkbuffer(zgenerator(util.filechunkiter(f)))
+
+class httpsrepository(httprepository):
+ # identical to httprepository; https handling is done by urllib2
+ pass
new file mode 100644
--- /dev/null
+++ b/mercurial/i18n.py
@@ -0,0 +1,15 @@
+"""
+i18n.py - internationalization support for mercurial
+
+Copyright 2005 Matt Mackall <mpm@selenic.com>
+
+This software may be used and distributed according to the terms
+of the GNU General Public License, incorporated herein by reference.
+"""
+
+# the import from gettext is _really_ slow
+# for now we use a dummy function
+# (identity translation: every message string is returned unchanged)
+gettext = lambda x: x
+#import gettext
+#t = gettext.translation('hg', '/usr/share/locale', fallback=1)
+#gettext = t.gettext
new file mode 100644
--- /dev/null
+++ b/mercurial/localrepo.py
@@ -0,0 +1,2122 @@
+# localrepo.py - read/write repository class for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os, util
+import filelog, manifest, changelog, dirstate, repo
+from node import *
+from i18n import gettext as _
+from demandload import *
+demandload(globals(), "appendfile changegroup")
+demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
+demandload(globals(), "revlog")
+
+class localrepository(object):
+ def __del__(self):
+ # drop the transaction reference so it can be collected
+ self.transhandle = None
+ def __init__(self, parentui, path=None, create=0):
+ # Open (or create) the repository at path; with no path, walk up
+ # from the cwd looking for a ".hg" directory.
+ if not path:
+ p = os.getcwd()
+ while not os.path.isdir(os.path.join(p, ".hg")):
+ oldp = p
+ p = os.path.dirname(p)
+ if p == oldp:
+ raise repo.RepoError(_("no repo found"))
+ path = p
+ self.path = os.path.join(path, ".hg")
+
+ if not create and not os.path.isdir(self.path):
+ raise repo.RepoError(_("repository %s not found") % path)
+
+ self.root = os.path.abspath(path)
+ self.origroot = path
+ self.ui = ui.ui(parentui=parentui)
+ self.opener = util.opener(self.path)
+ self.wopener = util.opener(self.root)
+
+ try:
+ self.ui.readconfig(self.join("hgrc"), self.root)
+ except IOError:
+ pass
+
+ # revlog format/flags come from the [revlog] config section
+ v = self.ui.revlogopts
+ self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
+ self.revlogv1 = self.revlogversion != revlog.REVLOGV0
+ fl = v.get('flags', None)
+ flags = 0
+ if fl != None:
+ for x in fl.split():
+ flags |= revlog.flagstr(x)
+ elif self.revlogv1:
+ flags = revlog.REVLOG_DEFAULT_FLAGS
+
+ v = self.revlogversion | flags
+ self.manifest = manifest.manifest(self.opener, v)
+ self.changelog = changelog.changelog(self.opener, v)
+
+ # the changelog might not have the inline index flag
+ # on. If the format of the changelog is the same as found in
+ # .hgrc, apply any flags found in the .hgrc as well.
+ # Otherwise, just version from the changelog
+ v = self.changelog.version
+ if v == self.revlogversion:
+ v |= flags
+ self.revlogversion = v
+
+ # lazily-filled caches; see tags()/nodetags()/wread()/wwrite()
+ self.tagscache = None
+ self.nodetagscache = None
+ self.encodepats = None
+ self.decodepats = None
+ self.transhandle = None
+
+ if create:
+ os.mkdir(self.path)
+ os.mkdir(self.join("data"))
+
+ self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
+
+ def hook(self, name, throw=False, **args):
+ # Run all hooks configured for `name` ([hooks] section), in sorted
+ # order. "python:mod.func" entries are called in-process; anything
+ # else is run as a shell command with HG_* environment variables.
+ # Returns True if any hook failed (or raises, if throw=True).
+ def callhook(hname, funcname):
+ '''call python hook. hook is callable object, looked up as
+ name in python module. if callable returns "true", hook
+ fails, else passes. if hook raises exception, treated as
+ hook failure. exception propagates if throw is "true".
+
+ reason for "true" meaning "hook failed" is so that
+ unmodified commands (e.g. mercurial.commands.update) can
+ be run as hooks without wrappers to convert return values.'''
+
+ self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
+ d = funcname.rfind('.')
+ if d == -1:
+ raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
+ % (hname, funcname))
+ modname = funcname[:d]
+ try:
+ obj = __import__(modname)
+ except ImportError:
+ raise util.Abort(_('%s hook is invalid '
+ '(import of "%s" failed)') %
+ (hname, modname))
+ try:
+ # walk dotted path down to the callable
+ for p in funcname.split('.')[1:]:
+ obj = getattr(obj, p)
+ except AttributeError, err:
+ raise util.Abort(_('%s hook is invalid '
+ '("%s" is not defined)') %
+ (hname, funcname))
+ if not callable(obj):
+ raise util.Abort(_('%s hook is invalid '
+ '("%s" is not callable)') %
+ (hname, funcname))
+ try:
+ r = obj(ui=self.ui, repo=self, hooktype=name, **args)
+ except (KeyboardInterrupt, util.SignalInterrupt):
+ raise
+ except Exception, exc:
+ if isinstance(exc, util.Abort):
+ self.ui.warn(_('error: %s hook failed: %s\n') %
+ (hname, exc.args[0] % exc.args[1:]))
+ else:
+ self.ui.warn(_('error: %s hook raised an exception: '
+ '%s\n') % (hname, exc))
+ if throw:
+ raise
+ self.ui.print_exc()
+ return True
+ if r:
+ if throw:
+ raise util.Abort(_('%s hook failed') % hname)
+ self.ui.warn(_('warning: %s hook failed\n') % hname)
+ return r
+
+ def runhook(name, cmd):
+ # external hook: nonzero exit status means failure
+ self.ui.note(_("running hook %s: %s\n") % (name, cmd))
+ env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
+ r = util.system(cmd, environ=env, cwd=self.root)
+ if r:
+ desc, r = util.explain_exit(r)
+ if throw:
+ raise util.Abort(_('%s hook %s') % (name, desc))
+ self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
+ return r
+
+ r = False
+ hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
+ if hname.split(".", 1)[0] == name and cmd]
+ hooks.sort()
+ for hname, cmd in hooks:
+ if cmd.startswith('python:'):
+ r = callhook(hname, cmd[7:].strip()) or r
+ else:
+ r = runhook(hname, cmd) or r
+ return r
+
+ def tags(self):
+ '''return a mapping of tag to node'''
+ if not self.tagscache:
+ self.tagscache = {}
+
+ def parsetag(line, context):
+ if not line:
+ return
+ s = l.split(" ", 1)
+ if len(s) != 2:
+ self.ui.warn(_("%s: cannot parse entry\n") % context)
+ return
+ node, key = s
+ key = key.strip()
+ try:
+ bin_n = bin(node)
+ except TypeError:
+ self.ui.warn(_("%s: node '%s' is not well formed\n") %
+ (context, node))
+ return
+ if bin_n not in self.changelog.nodemap:
+ self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
+ (context, key))
+ return
+ self.tagscache[key] = bin_n
+
+ # read the tags file from each head, ending with the tip,
+ # and add each tag found to the map, with "newer" ones
+ # taking precedence
+ heads = self.heads()
+ heads.reverse()
+ fl = self.file(".hgtags")
+ for node in heads:
+ change = self.changelog.read(node)
+ rev = self.changelog.rev(node)
+ fn, ff = self.manifest.find(change[0], '.hgtags')
+ if fn is None: continue
+ count = 0
+ for l in fl.read(fn).splitlines():
+ count += 1
+ parsetag(l, _(".hgtags (rev %d:%s), line %d") %
+ (rev, short(node), count))
+ try:
+ f = self.opener("localtags")
+ count = 0
+ for l in f:
+ count += 1
+ parsetag(l, _("localtags, line %d") % count)
+ except IOError:
+ pass
+
+ self.tagscache['tip'] = self.changelog.tip()
+
+ return self.tagscache
+
+ def tagslist(self):
+ '''return a list of tags ordered by revision'''
+ l = []
+ for t, n in self.tags().items():
+ try:
+ r = self.changelog.rev(n)
+ except:
+ r = -2 # sort to the beginning of the list if unknown
+ l.append((r, t, n))
+ l.sort()
+ return [(t, n) for r, t, n in l]
+
+ def nodetags(self, node):
+ '''return the tags associated with a node'''
+ if not self.nodetagscache:
+ # build the reverse (node -> tags) map once, lazily
+ self.nodetagscache = {}
+ for t, n in self.tags().items():
+ self.nodetagscache.setdefault(n, []).append(t)
+ return self.nodetagscache.get(node, [])
+
+ def lookup(self, key):
+ # resolve a tag name or changelog identifier to a node
+ try:
+ return self.tags()[key]
+ except KeyError:
+ try:
+ return self.changelog.lookup(key)
+ except:
+ raise repo.RepoError(_("unknown revision '%s'") % key)
+
+ def dev(self):
+ # device number of the repo store (identifies local repos)
+ return os.stat(self.path).st_dev
+
+ def local(self):
+ # this is a local (on-disk) repository
+ return True
+
+ def join(self, f):
+ # path inside .hg/
+ return os.path.join(self.path, f)
+
+ def wjoin(self, f):
+ # path inside the working directory
+ return os.path.join(self.root, f)
+
+ def file(self, f):
+ # return the filelog for tracked file f
+ if f[0] == '/':
+ f = f[1:]
+ return filelog.filelog(self.opener, f, self.revlogversion)
+
+ def getcwd(self):
+ return self.dirstate.getcwd()
+
+ def wfile(self, f, mode='r'):
+ # open a working-directory file
+ return self.wopener(f, mode)
+
+ def wread(self, filename):
+ # read a working-dir file, applying any matching [encode] filters
+ if self.encodepats == None:
+ l = []
+ for pat, cmd in self.ui.configitems("encode"):
+ mf = util.matcher(self.root, "", [pat], [], [])[1]
+ l.append((mf, cmd))
+ self.encodepats = l
+
+ data = self.wopener(filename, 'r').read()
+
+ # only the first matching filter is applied
+ for mf, cmd in self.encodepats:
+ if mf(filename):
+ self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
+ data = util.filter(data, cmd)
+ break
+
+ return data
+
+ def wwrite(self, filename, data, fd=None):
+ # write a working-dir file, applying any matching [decode] filters
+ if self.decodepats == None:
+ l = []
+ for pat, cmd in self.ui.configitems("decode"):
+ mf = util.matcher(self.root, "", [pat], [], [])[1]
+ l.append((mf, cmd))
+ self.decodepats = l
+
+ for mf, cmd in self.decodepats:
+ if mf(filename):
+ self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
+ data = util.filter(data, cmd)
+ break
+
+ if fd:
+ return fd.write(data)
+ return self.wopener(filename, 'w').write(data)
+
+ def transaction(self):
+ # Return a (possibly nested) transaction; an existing running
+ # transaction is reused via nest().
+ tr = self.transhandle
+ if tr != None and tr.running():
+ return tr.nest()
+
+ # save dirstate for rollback
+ try:
+ ds = self.opener("dirstate").read()
+ except IOError:
+ ds = ""
+ self.opener("journal.dirstate", "w").write(ds)
+
+ tr = transaction.transaction(self.ui.warn, self.opener,
+ self.join("journal"),
+ aftertrans(self.path))
+ self.transhandle = tr
+ return tr
+
+ def recover(self):
+ # roll back a transaction interrupted mid-write, if any
+ l = self.lock()
+ if os.path.exists(self.join("journal")):
+ self.ui.status(_("rolling back interrupted transaction\n"))
+ transaction.rollback(self.opener, self.join("journal"))
+ self.reload()
+ return True
+ else:
+ self.ui.warn(_("no interrupted transaction available\n"))
+ return False
+
+ def rollback(self, wlock=None):
+ # undo the last committed transaction using the "undo" journal
+ if not wlock:
+ wlock = self.wlock()
+ l = self.lock()
+ if os.path.exists(self.join("undo")):
+ self.ui.status(_("rolling back last transaction\n"))
+ transaction.rollback(self.opener, self.join("undo"))
+ util.rename(self.join("undo.dirstate"), self.join("dirstate"))
+ self.reload()
+ self.wreload()
+ else:
+ self.ui.warn(_("no rollback information available\n"))
+
+ def wreload(self):
+ # re-read working-directory state
+ self.dirstate.read()
+
+ def reload(self):
+ # re-read store state and invalidate tag caches
+ self.changelog.load()
+ self.manifest.load()
+ self.tagscache = None
+ self.nodetagscache = None
+
+ def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
+ desc=None):
+ # Acquire a repo lock, optionally waiting (ui.timeout seconds,
+ # default 600) if it is already held.
+ try:
+ l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
+ except lock.LockHeld, inst:
+ if not wait:
+ raise
+ self.ui.warn(_("waiting for lock on %s held by %s\n") %
+ (desc, inst.args[0]))
+ # default to 600 seconds timeout
+ l = lock.lock(self.join(lockname),
+ int(self.ui.config("ui", "timeout") or 600),
+ releasefn, desc=desc)
+ if acquirefn:
+ acquirefn()
+ return l
+
+ def lock(self, wait=1):
+ # store lock
+ return self.do_lock("lock", wait, acquirefn=self.reload,
+ desc=_('repository %s') % self.origroot)
+
+ def wlock(self, wait=1):
+ # working-directory lock; flushes dirstate on release
+ return self.do_lock("wlock", wait, self.dirstate.write,
+ self.wreload,
+ desc=_('working directory of %s') % self.origroot)
+
+ def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
+ "determine whether a new filenode is needed"
+ # returns (existing-entry, None, None) if the file is unchanged,
+ # else (None, fp1, fp2) parents for the new filelog revision
+ fp1 = manifest1.get(filename, nullid)
+ fp2 = manifest2.get(filename, nullid)
+
+ if fp2 != nullid:
+ # is one parent an ancestor of the other?
+ fpa = filelog.ancestor(fp1, fp2)
+ if fpa == fp1:
+ fp1, fp2 = fp2, nullid
+ elif fpa == fp2:
+ fp2 = nullid
+
+ # is the file unmodified from the parent? report existing entry
+ if fp2 == nullid and text == filelog.read(fp1):
+ return (fp1, None, None)
+
+ return (None, fp1, fp2)
+
+ def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
+ # Low-level commit used by import/debug commands: commits exactly
+ # `files` with explicit parents, bypassing the usual status checks.
+ orig_parent = self.dirstate.parents()[0] or nullid
+ p1 = p1 or self.dirstate.parents()[0] or nullid
+ p2 = p2 or self.dirstate.parents()[1] or nullid
+ c1 = self.changelog.read(p1)
+ c2 = self.changelog.read(p2)
+ m1 = self.manifest.read(c1[0])
+ mf1 = self.manifest.readflags(c1[0])
+ m2 = self.manifest.read(c2[0])
+ changed = []
+
+ # only touch dirstate if we're committing on top of its parent
+ if orig_parent == p1:
+ update_dirstate = 1
+ else:
+ update_dirstate = 0
+
+ if not wlock:
+ wlock = self.wlock()
+ l = self.lock()
+ tr = self.transaction()
+ mm = m1.copy()
+ mfm = mf1.copy()
+ linkrev = self.changelog.count()
+ for f in files:
+ try:
+ t = self.wread(f)
+ tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
+ r = self.file(f)
+ mfm[f] = tm
+
+ (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
+ if entry:
+ mm[f] = entry
+ continue
+
+ mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
+ changed.append(f)
+ if update_dirstate:
+ self.dirstate.update([f], "n")
+ except IOError:
+ # file missing from working dir: treat as removal
+ try:
+ del mm[f]
+ del mfm[f]
+ if update_dirstate:
+ self.dirstate.forget([f])
+ except:
+ # deleted from p2?
+ pass
+
+ mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
+ user = user or self.ui.username()
+ n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
+ tr.close()
+ if update_dirstate:
+ self.dirstate.setparents(n, nullid)
+
+ def commit(self, files=None, text="", user=None, date=None,
+ match=util.always, force=False, lock=None, wlock=None,
+ force_editor=False):
+ commit = []
+ remove = []
+ changed = []
+
+ if files:
+ for f in files:
+ s = self.dirstate.state(f)
+ if s in 'nmai':
+ commit.append(f)
+ elif s == 'r':
+ remove.append(f)
+ else:
+ self.ui.warn(_("%s not tracked!\n") % f)
+ else:
+ modified, added, removed, deleted, unknown = self.changes(match=match)
+ commit = modified + added
+ remove = removed
+
+ p1, p2 = self.dirstate.parents()
+ c1 = self.changelog.read(p1)
+ c2 = self.changelog.read(p2)
+ m1 = self.manifest.read(c1[0])
+ mf1 = self.manifest.readflags(c1[0])
+ m2 = self.manifest.read(c2[0])
+
+ if not commit and not remove and not force and p2 == nullid:
+ self.ui.status(_("nothing changed\n"))
+ return None
+
+ xp1 = hex(p1)
+ if p2 == nullid: xp2 = ''
+ else: xp2 = hex(p2)
+
+ self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
+
+ if not wlock:
+ wlock = self.wlock()
+ if not lock:
+ lock = self.lock()
+ tr = self.transaction()
+
+ # check in files
+ new = {}
+ linkrev = self.changelog.count()
+ commit.sort()
+ for f in commit:
+ self.ui.note(f + "\n")
+ try:
+ mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
+ t = self.wread(f)
+ except IOError:
+ self.ui.warn(_("trouble committing %s!\n") % f)
+ raise
+
+ r = self.file(f)
+
+ meta = {}
+ cp = self.dirstate.copied(f)
+ if cp:
+ meta["copy"] = cp
+ meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
+ self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
+ fp1, fp2 = nullid, nullid
+ else:
+ entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
+ if entry:
+ new[f] = entry
+ continue
+
+ new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
+ # remember what we've added so that we can later calculate
+ # the files to pull from a set of changesets
+ changed.append(f)
+
+ # update manifest
+ m1 = m1.copy()
+ m1.update(new)
+ for f in remove:
+ if f in m1:
+ del m1[f]
+ mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
+ (new, remove))
+
+ # add changeset
+ new = new.keys()
+ new.sort()
+
+ user = user or self.ui.username()
+ if not text or force_editor:
+ edittext = []
+ if text:
+ edittext.append(text)
+ edittext.append("")
+ if p2 != nullid:
+ edittext.append("HG: branch merge")
+ edittext.extend(["HG: changed %s" % f for f in changed])
+ edittext.extend(["HG: removed %s" % f for f in remove])
+ if not changed and not remove:
+ edittext.append("HG: no files changed")
+ edittext.append("")
+ # run editor in the repository root
+ olddir = os.getcwd()
+ os.chdir(self.root)
+ text = self.ui.edit("\n".join(edittext), user)
+ os.chdir(olddir)
+
+ lines = [line.rstrip() for line in text.rstrip().splitlines()]
+ while lines and not lines[0]:
+ del lines[0]
+ if not lines:
+ return None
+ text = '\n'.join(lines)
+ n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
+ self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
+ parent2=xp2)
+ tr.close()
+
+ self.dirstate.setparents(n)
+ self.dirstate.update(new, "n")
+ self.dirstate.forget(remove)
+
+ self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
+ return n
+
+ def walk(self, node=None, files=[], match=util.always, badmatch=None):
+ # Yield (src, filename) pairs: 'm' = from manifest of `node`,
+ # 'b' = badmatch hit; without a node, walk the dirstate instead.
+ if node:
+ fdict = dict.fromkeys(files)
+ for fn in self.manifest.read(self.changelog.read(node)[0]):
+ fdict.pop(fn, None)
+ if match(fn):
+ yield 'm', fn
+ # anything left in fdict was requested but not in the manifest
+ for fn in fdict:
+ if badmatch and badmatch(fn):
+ if match(fn):
+ yield 'b', fn
+ else:
+ self.ui.warn(_('%s: No such file in rev %s\n') % (
+ util.pathto(self.getcwd(), fn), short(node)))
+ else:
+ for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
+ yield src, fn
+
+ def changes(self, node1=None, node2=None, files=[], match=util.always,
+ wlock=None, show_ignored=None):
+ """return changes between two nodes or node and working directory
+
+ If node1 is None, use the first dirstate parent instead.
+ If node2 is None, compare node1 with working directory.
+ """
+
+ def fcmp(fn, mf):
+ # full-content compare of working file vs manifest revision
+ t1 = self.wread(fn)
+ t2 = self.file(fn).read(mf.get(fn, nullid))
+ return cmp(t1, t2)
+
+ def mfmatches(node):
+ # manifest of `node` restricted to files accepted by `match`
+ change = self.changelog.read(node)
+ mf = dict(self.manifest.read(change[0]))
+ for fn in mf.keys():
+ if not match(fn):
+ del mf[fn]
+ return mf
+
+ if node1:
+ # read the manifest from node1 before the manifest from node2,
+ # so that we'll hit the manifest cache if we're going through
+ # all the revisions in parent->child order.
+ mf1 = mfmatches(node1)
+
+ # are we comparing the working directory?
+ if not node2:
+ if not wlock:
+ try:
+ wlock = self.wlock(wait=0)
+ except lock.LockException:
+ wlock = None
+ lookup, modified, added, removed, deleted, unknown, ignored = (
+ self.dirstate.changes(files, match, show_ignored))
+
+ # are we comparing working dir against its parent?
+ if not node1:
+ if lookup:
+ # do a full compare of any files that might have changed
+ mf2 = mfmatches(self.dirstate.parents()[0])
+ for f in lookup:
+ if fcmp(f, mf2):
+ modified.append(f)
+ elif wlock is not None:
+ # file is unchanged: refresh its dirstate entry
+ self.dirstate.update([f], "n")
+ else:
+ # we are comparing working dir against non-parent
+ # generate a pseudo-manifest for the working dir
+ mf2 = mfmatches(self.dirstate.parents()[0])
+ for f in lookup + modified + added:
+ mf2[f] = ""
+ for f in removed:
+ if f in mf2:
+ del mf2[f]
+ else:
+ # we are comparing two revisions
+ deleted, unknown, ignored = [], [], []
+ mf2 = mfmatches(node2)
+
+ if node1:
+ # flush lists from dirstate before comparing manifests
+ modified, added = [], []
+
+ for fn in mf2:
+ if mf1.has_key(fn):
+ if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
+ modified.append(fn)
+ del mf1[fn]
+ else:
+ added.append(fn)
+
+ # whatever is left in mf1 exists only on the node1 side
+ removed = mf1.keys()
+
+ # sort and return results:
+ for l in modified, added, removed, deleted, unknown, ignored:
+ l.sort()
+ if show_ignored is None:
+ return (modified, added, removed, deleted, unknown)
+ else:
+ return (modified, added, removed, deleted, unknown, ignored)
+
+ def add(self, list, wlock=None):
+ # schedule files for addition ('a' state); warns and skips
+ # missing files, directories, and already-tracked files
+ if not wlock:
+ wlock = self.wlock()
+ for f in list:
+ p = self.wjoin(f)
+ if not os.path.exists(p):
+ self.ui.warn(_("%s does not exist!\n") % f)
+ elif not os.path.isfile(p):
+ self.ui.warn(_("%s not added: only files supported currently\n")
+ % f)
+ elif self.dirstate.state(f) in 'an':
+ self.ui.warn(_("%s already tracked!\n") % f)
+ else:
+ self.dirstate.update([f], "a")
+
+ def forget(self, list, wlock=None):
+ # undo a pending add without touching the working file
+ if not wlock:
+ wlock = self.wlock()
+ for f in list:
+ if self.dirstate.state(f) not in 'ai':
+ self.ui.warn(_("%s not added!\n") % f)
+ else:
+ self.dirstate.forget([f])
+
+ def remove(self, list, unlink=False, wlock=None):
+ # schedule files for removal; with unlink=True also delete them
+ # from the working directory first
+ if unlink:
+ for f in list:
+ try:
+ util.unlink(self.wjoin(f))
+ except OSError, inst:
+ if inst.errno != errno.ENOENT:
+ raise
+ if not wlock:
+ wlock = self.wlock()
+ for f in list:
+ p = self.wjoin(f)
+ if os.path.exists(p):
+ self.ui.warn(_("%s still exists!\n") % f)
+ elif self.dirstate.state(f) == 'a':
+ self.dirstate.forget([f])
+ elif f not in self.dirstate:
+ self.ui.warn(_("%s not tracked!\n") % f)
+ else:
+ self.dirstate.update([f], "r")
+
+ def undelete(self, list, wlock=None):
+ # restore files scheduled for removal from the first parent
+ p = self.dirstate.parents()[0]
+ mn = self.changelog.read(p)[0]
+ mf = self.manifest.readflags(mn)
+ m = self.manifest.read(mn)
+ if not wlock:
+ wlock = self.wlock()
+ for f in list:
+ if self.dirstate.state(f) not in "r":
+ self.ui.warn("%s not removed!\n" % f)
+ else:
+ t = self.file(f).read(m[f])
+ self.wwrite(f, t)
+ util.set_exec(self.wjoin(f), mf[f])
+ self.dirstate.update([f], "n")
+
+ def copy(self, source, dest, wlock=None):
+ # record dest as a copy of source in the dirstate
+ p = self.wjoin(dest)
+ if not os.path.exists(p):
+ self.ui.warn(_("%s does not exist!\n") % dest)
+ elif not os.path.isfile(p):
+ self.ui.warn(_("copy failed: %s is not a file\n") % dest)
+ else:
+ if not wlock:
+ wlock = self.wlock()
+ if self.dirstate.state(dest) == '?':
+ self.dirstate.update([dest], "a")
+ self.dirstate.copy(source, dest)
+
+ def heads(self, start=None):
+ heads = self.changelog.heads(start)
+ # sort the output in rev descending order
+ heads = [(-self.changelog.rev(h), h) for h in heads]
+ heads.sort()
+ return [n for (r, n) in heads]
+
+ # branchlookup returns a dict giving a list of branches for
+ # each head. A branch is defined as the tag of a node or
+ # the branch of the node's parents. If a node has multiple
+ # branch tags, tags are eliminated if they are visible from other
+ # branch tags.
+ #
+ # So, for this graph: a->b->c->d->e
+ # \ /
+ # aa -----/
+ # a has tag 2.6.12
+ # d has tag 2.6.13
+ # e would have branch tags for 2.6.12 and 2.6.13. Because the node
+ # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
+ # from the list.
+ #
+ # It is possible that more than one head will have the same branch tag.
+ # callers need to check the result for multiple heads under the same
+ # branch tag if that is a problem for them (ie checkout of a specific
+ # branch).
+ #
+ # passing in a specific branch will limit the depth of the search
+ # through the parents. It won't limit the branches returned in the
+ # result though.
+ def branchlookup(self, heads=None, branch=None):
+ if not heads:
+ heads = self.heads()
+ headt = [ h for h in heads ]
+ chlog = self.changelog
+ branches = {}
+ merges = []
+ seenmerge = {}
+
+ # traverse the tree once for each head, recording in the branches
+ # dict which tags are visible from this head. The branches
+ # dict also records which tags are visible from each tag
+ # while we traverse.
+ while headt or merges:
+ if merges:
+ n, found = merges.pop()
+ visit = [n]
+ else:
+ h = headt.pop()
+ visit = [h]
+ found = [h]
+ seen = {}
+ while visit:
+ n = visit.pop()
+ if n in seen:
+ continue
+ pp = chlog.parents(n)
+ tags = self.nodetags(n)
+ if tags:
+ for x in tags:
+ if x == 'tip':
+ continue
+ for f in found:
+ branches.setdefault(f, {})[n] = 1
+ branches.setdefault(n, {})[n] = 1
+ break
+ if n not in found:
+ found.append(n)
+ if branch in tags:
+ continue
+ seen[n] = 1
+ # second parents are queued for a later pass
+ if pp[1] != nullid and n not in seenmerge:
+ merges.append((pp[1], [x for x in found]))
+ seenmerge[n] = 1
+ if pp[0] != nullid:
+ visit.append(pp[0])
+ # traverse the branches dict, eliminating branch tags from each
+ # head that are visible from another branch tag for that head.
+ out = {}
+ viscache = {}
+ for h in heads:
+ def visible(node):
+ # memoized set of tag nodes reachable from `node`
+ if node in viscache:
+ return viscache[node]
+ ret = {}
+ visit = [node]
+ while visit:
+ x = visit.pop()
+ if x in viscache:
+ ret.update(viscache[x])
+ elif x not in ret:
+ ret[x] = 1
+ if x in branches:
+ visit[len(visit):] = branches[x].keys()
+ viscache[node] = ret
+ return ret
+ if h not in branches:
+ continue
+ # O(n^2), but somewhat limited. This only searches the
+ # tags visible from a specific head, not all the tags in the
+ # whole repo.
+ for b in branches[h]:
+ vis = False
+ for bb in branches[h].keys():
+ if b != bb:
+ if b in visible(bb):
+ vis = True
+ break
+ if not vis:
+ l = out.setdefault(h, [])
+ l[len(l):] = self.nodetags(b)
+ return out
+
+ def branches(self, nodes):
+ # For each node, walk first parents back to a branch point or
+ # root; return (head, root, parent1, parent2) tuples.
+ if not nodes:
+ nodes = [self.changelog.tip()]
+ b = []
+ for n in nodes:
+ t = n
+ while 1:
+ p = self.changelog.parents(n)
+ if p[1] != nullid or p[0] == nullid:
+ b.append((t, n, p[0], p[1]))
+ break
+ n = p[0]
+ return b
+
+ def between(self, pairs):
+ # For each (top, bottom) pair, collect nodes on the first-parent
+ # path at exponentially increasing distances from top.
+ r = []
+
+ for top, bottom in pairs:
+ n, l, i = top, [], 0
+ f = 1
+
+ while n != bottom:
+ p = self.changelog.parents(n)[0]
+ if i == f:
+ l.append(n)
+ f = f * 2
+ n = p
+ i += 1
+
+ r.append(l)
+
+ return r
+
+ def findincoming(self, remote, base=None, heads=None, force=False):
+ """Return list of roots of the subsets of missing nodes from remote
+
+ If base dict is specified, assume that these nodes and their parents
+ exist on the remote side and that no child of a node of base exists
+ in both remote and self.
+ Furthermore base will be updated to include the nodes that exists
+ in self and remote but no children exists in self and remote.
+ If a list of heads is specified, return only nodes which are heads
+ or ancestors of these heads.
+
+ All the ancestors of base are in self and in remote.
+ All the descendants of the list returned are missing in self.
+ (and so we know that the rest of the nodes are missing in remote, see
+ outgoing)
+ """
+ # nodemap gives a cheap "do we have this node locally?" test
+ m = self.changelog.nodemap
+ search = []
+ fetch = {}
+ seen = {}
+ seenbranch = {}
+ if base == None:
+ base = {}
+
+ if not heads:
+ heads = remote.heads()
+
+ if self.changelog.tip() == nullid:
+ # local repo is empty: everything on the remote is missing
+ base[nullid] = 1
+ if heads != [nullid]:
+ return [nullid]
+ return []
+
+ # assume we're closer to the tip than the root
+ # and start by examining the heads
+ self.ui.status(_("searching for changes\n"))
+
+ unknown = []
+ for h in heads:
+ if h not in m:
+ unknown.append(h)
+ else:
+ base[h] = 1
+
+ if not unknown:
+ return []
+
+ # req remembers which nodes we have already asked the remote about
+ req = dict.fromkeys(unknown)
+ reqcnt = 0
+
+ # search through remote branches
+ # a 'branch' here is a linear segment of history, with four parts:
+ # head, root, first parent, second parent
+ # (a branch always has two parents (or none) by definition)
+ unknown = remote.branches(unknown)
+ while unknown:
+ r = []
+ while unknown:
+ n = unknown.pop(0)
+ if n[0] in seen:
+ continue
+
+ self.ui.debug(_("examining %s:%s\n")
+ % (short(n[0]), short(n[1])))
+ if n[0] == nullid: # found the end of the branch
+ pass
+ elif n in seenbranch:
+ self.ui.debug(_("branch already found\n"))
+ continue
+ elif n[1] and n[1] in m: # do we know the base?
+ self.ui.debug(_("found incomplete branch %s:%s\n")
+ % (short(n[0]), short(n[1])))
+ search.append(n) # schedule branch range for scanning
+ seenbranch[n] = 1
+ else:
+ if n[1] not in seen and n[1] not in fetch:
+ if n[2] in m and n[3] in m:
+ # both parents known: the segment root is the
+ # earliest unknown changeset on this branch
+ self.ui.debug(_("found new changeset %s\n") %
+ short(n[1]))
+ fetch[n[1]] = 1 # earliest unknown
+ for p in n[2:4]:
+ if p in m:
+ base[p] = 1 # latest known
+
+ for p in n[2:4]:
+ if p not in req and p not in m:
+ r.append(p)
+ req[p] = 1
+ seen[n[0]] = 1
+
+ if r:
+ reqcnt += 1
+ self.ui.debug(_("request %d: %s\n") %
+ (reqcnt, " ".join(map(short, r))))
+ # query the remote in batches of ten nodes per request
+ for p in range(0, len(r), 10):
+ for b in remote.branches(r[p:p+10]):
+ self.ui.debug(_("received %s:%s\n") %
+ (short(b[0]), short(b[1])))
+ unknown.append(b)
+
+ # do binary search on the branches we found
+ while search:
+ n = search.pop(0)
+ reqcnt += 1
+ l = remote.between([(n[0], n[1])])[0]
+ l.append(n[1])
+ p = n[0]
+ f = 1
+ for i in l:
+ self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
+ if i in m:
+ if f <= 2:
+ # gap between samples is small enough: p is the
+ # first unknown changeset on this segment
+ self.ui.debug(_("found new branch changeset %s\n") %
+ short(p))
+ fetch[p] = 1
+ base[i] = 1
+ else:
+ self.ui.debug(_("narrowed branch search to %s:%s\n")
+ % (short(p), short(i)))
+ search.append((p, i))
+ break
+ p, f = i, f * 2
+
+ # sanity check our fetch list
+ for f in fetch.keys():
+ if f in m:
+ raise repo.RepoError(_("already have changeset ") + short(f[:4]))
+
+ if base.keys() == [nullid]:
+ # only the null node is common: the repos share no history
+ if force:
+ self.ui.warn(_("warning: repository is unrelated\n"))
+ else:
+ raise util.Abort(_("repository is unrelated"))
+
+ self.ui.note(_("found new changesets starting at ") +
+ " ".join([short(f) for f in fetch]) + "\n")
+
+ self.ui.debug(_("%d total queries\n") % reqcnt)
+
+ return fetch.keys()
+
+ def findoutgoing(self, remote, base=None, heads=None, force=False):
+ """Return list of nodes that are roots of subsets not in remote
+
+ If base dict is specified, assume that these nodes and their parents
+ exist on the remote side.
+ If a list of heads is specified, return only nodes which are heads
+ or ancestors of these heads, and return a second element which
+ contains all remote heads which get new children.
+ """
+ # NOTE(review): when the caller supplies a pre-filled base dict
+ # (as push() does), the findincoming call below is redundant work
+ # it has already done — confirm against upstream indentation.
+ if base == None:
+ base = {}
+ self.findincoming(remote, base, heads, force=force)
+
+ self.ui.debug(_("common changesets up to ")
+ + " ".join(map(short, base.keys())) + "\n")
+
+ # start from every node we have locally ...
+ remain = dict.fromkeys(self.changelog.nodemap)
+
+ # prune everything remote has from the tree
+ del remain[nullid]
+ remove = base.keys()
+ while remove:
+ n = remove.pop(0)
+ if n in remain:
+ del remain[n]
+ for p in self.changelog.parents(n):
+ remove.append(p)
+
+ # find every node whose parents have been pruned
+ subset = []
+ # find every remote head that will get new children
+ updated_heads = {}
+ for n in remain:
+ p1, p2 = self.changelog.parents(n)
+ if p1 not in remain and p2 not in remain:
+ subset.append(n)
+ if heads:
+ if p1 in heads:
+ updated_heads[p1] = True
+ if p2 in heads:
+ updated_heads[p2] = True
+
+ # this is the set of all roots we have to push
+ if heads:
+ return subset, updated_heads.keys()
+ else:
+ return subset
+
def pull(self, remote, heads=None, force=False):
    """Pull missing changesets from *remote* into this repository.

    Returns 0 when there was nothing to pull, otherwise the result of
    addchangegroup()."""
    # hold the repo lock for the whole operation; it is released when
    # this local reference goes away
    plock = self.lock()

    fetch = self.findincoming(remote, force=force)
    if fetch == [nullid]:
        self.ui.status(_("requesting all changes\n"))

    if not fetch:
        self.ui.status(_("no changes found\n"))
        return 0

    if heads is None:
        cg = remote.changegroup(fetch, 'pull')
    else:
        cg = remote.changegroupsubset(fetch, heads, 'pull')
    return self.addchangegroup(cg, 'pull')
+
+ def push(self, remote, force=False, revs=None):
+ """Push outgoing changesets (optionally limited to ancestors of
+ revs) to remote.
+
+ Returns 1 when nothing was pushed or the push was refused
+ (unsynced remote changes, or new remote heads without force);
+ otherwise returns remote.addchangegroup()'s result."""
+ # take the remote lock for the whole push; released when this
+ # reference is dropped
+ lock = remote.lock()
+
+ base = {}
+ remote_heads = remote.heads()
+ # findincoming fills `base` with the common nodes as a side effect
+ inc = self.findincoming(remote, base, remote_heads, force=force)
+ if not force and inc:
+ self.ui.warn(_("abort: unsynced remote changes!\n"))
+ self.ui.status(_("(did you forget to sync?"
+ " use push -f to force)\n"))
+ return 1
+
+ update, updated_heads = self.findoutgoing(remote, base, remote_heads)
+ if revs is not None:
+ # restrict the outgoing set to ancestors of the requested revs
+ msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
+ else:
+ bases, heads = update, self.changelog.heads()
+
+ if not bases:
+ self.ui.status(_("no changes found\n"))
+ return 1
+ elif not force:
+ # FIXME we don't properly detect creation of new heads
+ # in the push -r case, assume the user knows what he's doing
+ if not revs and len(remote_heads) < len(heads) \
+ and remote_heads != [nullid]:
+ self.ui.warn(_("abort: push creates new remote branches!\n"))
+ self.ui.status(_("(did you forget to merge?"
+ " use push -f to force)\n"))
+ return 1
+
+ if revs is None:
+ cg = self.changegroup(update, 'push')
+ else:
+ cg = self.changegroupsubset(update, revs, 'push')
+ return remote.addchangegroup(cg, 'push')
+
+ def changegroupsubset(self, bases, heads, source):
+ """This function generates a changegroup consisting of all the nodes
+ that are descendents of any of the bases, and ancestors of any of
+ the heads.
+
+ It is fairly complex as determining which filenodes and which
+ manifest nodes need to be included for the changeset to be complete
+ is non-trivial.
+
+ Another wrinkle is doing the reverse, figuring out which changeset in
+ the changegroup a particular filenode or manifestnode belongs to."""
+
+ # give hooks a chance to veto or observe the outgoing transfer
+ self.hook('preoutgoing', throw=True, source=source)
+
+ # Set up some initial variables
+ # Make it easy to refer to self.changelog
+ cl = self.changelog
+ # msng is short for missing - compute the list of changesets in this
+ # changegroup.
+ msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
+ # Some bases may turn out to be superfluous, and some heads may be
+ # too. nodesbetween will return the minimal set of bases and heads
+ # necessary to re-create the changegroup.
+
+ # Known heads are the list of heads that it is assumed the recipient
+ # of this changegroup will know about.
+ knownheads = {}
+ # We assume that all parents of bases are known heads.
+ for n in bases:
+ for p in cl.parents(n):
+ if p != nullid:
+ knownheads[p] = 1
+ knownheads = knownheads.keys()
+ if knownheads:
+ # Now that we know what heads are known, we can compute which
+ # changesets are known. The recipient must know about all
+ # changesets required to reach the known heads from the null
+ # changeset.
+ has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
+ junk = None
+ # Transform the list into an ersatz set.
+ has_cl_set = dict.fromkeys(has_cl_set)
+ else:
+ # If there were no known heads, the recipient cannot be assumed to
+ # know about any changesets.
+ has_cl_set = {}
+
+ # Make it easy to refer to self.manifest
+ mnfst = self.manifest
+ # We don't know which manifests are missing yet
+ msng_mnfst_set = {}
+ # Nor do we know which filenodes are missing.
+ msng_filenode_set = {}
+
+ junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
+ junk = None
+
+ # A changeset always belongs to itself, so the changenode lookup
+ # function for a changenode is identity.
+ def identity(x):
+ return x
+
+ # A function generating function. Sets up an environment for the
+ # inner function.
+ def cmp_by_rev_func(revlog):
+ # Compare two nodes by their revision number in the environment's
+ # revision history. Since the revision number both represents the
+ # most efficient order to read the nodes in, and represents a
+ # topological sorting of the nodes, this function is often useful.
+ def cmp_by_rev(a, b):
+ return cmp(revlog.rev(a), revlog.rev(b))
+ return cmp_by_rev
+
+ # If we determine that a particular file or manifest node must be a
+ # node that the recipient of the changegroup will already have, we can
+ # also assume the recipient will have all the parents. This function
+ # prunes them from the set of missing nodes.
+ def prune_parents(revlog, hasset, msngset):
+ haslst = hasset.keys()
+ haslst.sort(cmp_by_rev_func(revlog))
+ for node in haslst:
+ parentlst = [p for p in revlog.parents(node) if p != nullid]
+ while parentlst:
+ n = parentlst.pop()
+ if n not in hasset:
+ hasset[n] = 1
+ p = [p for p in revlog.parents(n) if p != nullid]
+ parentlst.extend(p)
+ for n in hasset:
+ msngset.pop(n, None)
+
+ # This is a function generating function used to set up an environment
+ # for the inner function to execute in.
+ def manifest_and_file_collector(changedfileset):
+ # This is an information gathering function that gathers
+ # information from each changeset node that goes out as part of
+ # the changegroup. The information gathered is a list of which
+ # manifest nodes are potentially required (the recipient may
+ # already have them) and total list of all files which were
+ # changed in any changeset in the changegroup.
+ #
+ # We also remember the first changenode we saw any manifest
+ # referenced by so we can later determine which changenode 'owns'
+ # the manifest.
+ def collect_manifests_and_files(clnode):
+ c = cl.read(clnode)
+ for f in c[3]:
+ # This is to make sure we only have one instance of each
+ # filename string for each filename.
+ changedfileset.setdefault(f, f)
+ msng_mnfst_set.setdefault(c[0], clnode)
+ return collect_manifests_and_files
+
+ # Figure out which manifest nodes (of the ones we think might be part
+ # of the changegroup) the recipient must know about and remove them
+ # from the changegroup.
+ def prune_manifests():
+ has_mnfst_set = {}
+ for n in msng_mnfst_set:
+ # If a 'missing' manifest thinks it belongs to a changenode
+ # the recipient is assumed to have, obviously the recipient
+ # must have that manifest.
+ linknode = cl.node(mnfst.linkrev(n))
+ if linknode in has_cl_set:
+ has_mnfst_set[n] = 1
+ prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
+
+ # Use the information collected in collect_manifests_and_files to say
+ # which changenode any manifestnode belongs to.
+ def lookup_manifest_link(mnfstnode):
+ return msng_mnfst_set[mnfstnode]
+
+ # A function generating function that sets up the initial environment
+ # the inner function.
+ def filenode_collector(changedfiles):
+ next_rev = [0]
+ # This gathers information from each manifestnode included in the
+ # changegroup about which filenodes the manifest node references
+ # so we can include those in the changegroup too.
+ #
+ # It also remembers which changenode each filenode belongs to. It
+ # does this by assuming the a filenode belongs to the changenode
+ # the first manifest that references it belongs to.
+ def collect_msng_filenodes(mnfstnode):
+ r = mnfst.rev(mnfstnode)
+ if r == next_rev[0]:
+ # If the last rev we looked at was the one just previous,
+ # we only need to see a diff.
+ delta = mdiff.patchtext(mnfst.delta(mnfstnode))
+ # For each line in the delta
+ for dline in delta.splitlines():
+ # get the filename and filenode for that line
+ f, fnode = dline.split('\0')
+ fnode = bin(fnode[:40])
+ f = changedfiles.get(f, None)
+ # And if the file is in the list of files we care
+ # about.
+ if f is not None:
+ # Get the changenode this manifest belongs to
+ clnode = msng_mnfst_set[mnfstnode]
+ # Create the set of filenodes for the file if
+ # there isn't one already.
+ ndset = msng_filenode_set.setdefault(f, {})
+ # And set the filenode's changelog node to the
+ # manifest's if it hasn't been set already.
+ ndset.setdefault(fnode, clnode)
+ else:
+ # Otherwise we need a full manifest.
+ m = mnfst.read(mnfstnode)
+ # For every file in we care about.
+ for f in changedfiles:
+ fnode = m.get(f, None)
+ # If it's in the manifest
+ if fnode is not None:
+ # See comments above.
+ clnode = msng_mnfst_set[mnfstnode]
+ ndset = msng_filenode_set.setdefault(f, {})
+ ndset.setdefault(fnode, clnode)
+ # Remember the revision we hope to see next.
+ next_rev[0] = r + 1
+ return collect_msng_filenodes
+
+ # We have a list of filenodes we think we need for a file, lets remove
+ # all those we now the recipient must have.
+ def prune_filenodes(f, filerevlog):
+ msngset = msng_filenode_set[f]
+ hasset = {}
+ # If a 'missing' filenode thinks it belongs to a changenode we
+ # assume the recipient must have, then the recipient must have
+ # that filenode.
+ for n in msngset:
+ clnode = cl.node(filerevlog.linkrev(n))
+ if clnode in has_cl_set:
+ hasset[n] = 1
+ prune_parents(filerevlog, hasset, msngset)
+
+ # A function generator function that sets up the a context for the
+ # inner function.
+ def lookup_filenode_link_func(fname):
+ msngset = msng_filenode_set[fname]
+ # Lookup the changenode the filenode belongs to.
+ def lookup_filenode_link(fnode):
+ return msngset[fnode]
+ return lookup_filenode_link
+
+ # Now that we have all theses utility functions to help out and
+ # logically divide up the task, generate the group.
+ def gengroup():
+ # The set of changed files starts empty.
+ changedfiles = {}
+ # Create a changenode group generator that will call our functions
+ # back to lookup the owning changenode and collect information.
+ group = cl.group(msng_cl_lst, identity,
+ manifest_and_file_collector(changedfiles))
+ for chnk in group:
+ yield chnk
+
+ # The list of manifests has been collected by the generator
+ # calling our functions back.
+ prune_manifests()
+ msng_mnfst_lst = msng_mnfst_set.keys()
+ # Sort the manifestnodes by revision number.
+ msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
+ # Create a generator for the manifestnodes that calls our lookup
+ # and data collection functions back.
+ group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
+ filenode_collector(changedfiles))
+ for chnk in group:
+ yield chnk
+
+ # These are no longer needed, dereference and toss the memory for
+ # them.
+ msng_mnfst_lst = None
+ msng_mnfst_set.clear()
+
+ changedfiles = changedfiles.keys()
+ changedfiles.sort()
+ # Go through all our files in order sorted by name.
+ for fname in changedfiles:
+ filerevlog = self.file(fname)
+ # Toss out the filenodes that the recipient isn't really
+ # missing.
+ if msng_filenode_set.has_key(fname):
+ prune_filenodes(fname, filerevlog)
+ msng_filenode_lst = msng_filenode_set[fname].keys()
+ else:
+ msng_filenode_lst = []
+ # If any filenodes are left, generate the group for them,
+ # otherwise don't bother.
+ if len(msng_filenode_lst) > 0:
+ yield changegroup.genchunk(fname)
+ # Sort the filenodes by their revision #
+ msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
+ # Create a group generator and only pass in a changenode
+ # lookup function as we need to collect no information
+ # from filenodes.
+ group = filerevlog.group(msng_filenode_lst,
+ lookup_filenode_link_func(fname))
+ for chnk in group:
+ yield chnk
+ if msng_filenode_set.has_key(fname):
+ # Don't need this anymore, toss it to free memory.
+ del msng_filenode_set[fname]
+ # Signal that no more groups are left.
+ yield changegroup.closechunk()
+
+ if msng_cl_lst:
+ self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
+
+ # wrap the generator in a file-like buffer for the wire protocol
+ return util.chunkbuffer(gengroup())
+
+ def changegroup(self, basenodes, source):
+ """Generate a changegroup of all nodes that we have that a recipient
+ doesn't.
+
+ This is much easier than the previous function as we can assume that
+ the recipient has any changenode we aren't sending them."""
+
+ # let hooks veto or observe the outgoing transfer
+ self.hook('preoutgoing', throw=True, source=source)
+
+ cl = self.changelog
+ # every descendant of the bases goes into the group
+ nodes = cl.nodesbetween(basenodes, None)[0]
+ # ersatz set of the changelog revision numbers being sent, used to
+ # select the matching manifest/file revisions via their linkrevs
+ revset = dict.fromkeys([cl.rev(n) for n in nodes])
+
+ def identity(x):
+ return x
+
+ # yield the nodes of a revlog whose linked changeset is in revset
+ def gennodelst(revlog):
+ for r in xrange(0, revlog.count()):
+ n = revlog.node(r)
+ if revlog.linkrev(n) in revset:
+ yield n
+
+ def changed_file_collector(changedfileset):
+ def collect_changed_files(clnode):
+ c = cl.read(clnode)
+ for fname in c[3]:
+ changedfileset[fname] = 1
+ return collect_changed_files
+
+ # map a revlog node back to the changelog node that introduced it
+ def lookuprevlink_func(revlog):
+ def lookuprevlink(n):
+ return cl.node(revlog.linkrev(n))
+ return lookuprevlink
+
+ def gengroup():
+ # construct a list of all changed files
+ changedfiles = {}
+
+ for chnk in cl.group(nodes, identity,
+ changed_file_collector(changedfiles)):
+ yield chnk
+ changedfiles = changedfiles.keys()
+ changedfiles.sort()
+
+ mnfst = self.manifest
+ nodeiter = gennodelst(mnfst)
+ for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
+ yield chnk
+
+ for fname in changedfiles:
+ filerevlog = self.file(fname)
+ nodeiter = gennodelst(filerevlog)
+ nodeiter = list(nodeiter)
+ if nodeiter:
+ yield changegroup.genchunk(fname)
+ lookup = lookuprevlink_func(filerevlog)
+ for chnk in filerevlog.group(nodeiter, lookup):
+ yield chnk
+
+ # empty chunk: end-of-stream marker
+ yield changegroup.closechunk()
+
+ if nodes:
+ self.hook('outgoing', node=hex(nodes[0]), source=source)
+
+ # wrap the generator in a file-like buffer for the wire protocol
+ return util.chunkbuffer(gengroup())
+
+ def addchangegroup(self, source, srctype):
+ """add changegroup to repo.
+ returns number of heads modified or added + 1."""
+
+ # debug-print each incoming changeset and link it to the next
+ # changelog revision number
+ def csmap(x):
+ self.ui.debug(_("add changeset %s\n") % short(x))
+ return cl.count()
+
+ def revmap(x):
+ return cl.rev(x)
+
+ if not source:
+ return 0
+
+ self.hook('prechangegroup', throw=True, source=srctype)
+
+ changesets = files = revisions = 0
+
+ tr = self.transaction()
+
+ # write changelog and manifest data to temp files so
+ # concurrent readers will not see inconsistent view
+ cl = None
+ try:
+ cl = appendfile.appendchangelog(self.opener, self.changelog.version)
+
+ oldheads = len(cl.heads())
+
+ # pull off the changeset group
+ self.ui.status(_("adding changesets\n"))
+ # cor/cnr: changelog revision count before and after the add
+ cor = cl.count() - 1
+ chunkiter = changegroup.chunkiter(source)
+ if cl.addgroup(chunkiter, csmap, tr, 1) is None:
+ raise util.Abort(_("received changelog group is empty"))
+ cnr = cl.count() - 1
+ changesets = cnr - cor
+
+ mf = None
+ try:
+ mf = appendfile.appendmanifest(self.opener,
+ self.manifest.version)
+
+ # pull off the manifest group
+ self.ui.status(_("adding manifests\n"))
+ chunkiter = changegroup.chunkiter(source)
+ # no need to check for empty manifest group here:
+ # if the result of the merge of 1 and 2 is the same in 3 and 4,
+ # no new manifest will be created and the manifest group will
+ # be empty during the pull
+ mf.addgroup(chunkiter, revmap, tr)
+
+ # process the files
+ self.ui.status(_("adding file changes\n"))
+ while 1:
+ f = changegroup.getchunk(source)
+ if not f:
+ break
+ self.ui.debug(_("adding %s revisions\n") % f)
+ fl = self.file(f)
+ o = fl.count()
+ chunkiter = changegroup.chunkiter(source)
+ if fl.addgroup(chunkiter, revmap, tr) is None:
+ raise util.Abort(_("received file revlog group is empty"))
+ revisions += fl.count() - o
+ files += 1
+
+ # write order here is important so concurrent readers will see
+ # consistent view of repo
+ mf.writedata()
+ finally:
+ if mf:
+ mf.cleanup()
+ cl.writedata()
+ finally:
+ if cl:
+ cl.cleanup()
+
+ # make changelog and manifest see real files again
+ self.changelog = changelog.changelog(self.opener, self.changelog.version)
+ self.manifest = manifest.manifest(self.opener, self.manifest.version)
+ self.changelog.checkinlinesize(tr)
+ self.manifest.checkinlinesize(tr)
+
+ newheads = len(self.changelog.heads())
+ heads = ""
+ if oldheads and newheads > oldheads:
+ heads = _(" (+%d heads)") % (newheads - oldheads)
+
+ self.ui.status(_("added %d changesets"
+ " with %d changes to %d files%s\n")
+ % (changesets, revisions, files, heads))
+
+ if changesets > 0:
+ # cor+1 is the first newly added changelog revision
+ self.hook('pretxnchangegroup', throw=True,
+ node=hex(self.changelog.node(cor+1)), source=srctype)
+
+ tr.close()
+
+ if changesets > 0:
+ self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
+ source=srctype)
+
+ # run the per-changeset hook for each new revision
+ for i in range(cor + 1, cnr + 1):
+ self.hook("incoming", node=hex(self.changelog.node(i)),
+ source=srctype)
+
+ return newheads - oldheads + 1
+
+ def update(self, node, allow=False, force=False, choose=None,
+ moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
+ """Update the working directory to *node*, merging local changes
+ when the path is non-linear and *allow* is set.
+
+ Returns True/1 on error or refused update, falsy on success."""
+ pl = self.dirstate.parents()
+ if not force and pl[1] != nullid:
+ raise util.Abort(_("outstanding uncommitted merges"))
+
+ err = False
+
+ # p1: current working dir parent, p2: update target
+ p1, p2 = pl[0], node
+ pa = self.changelog.ancestor(p1, p2)
+ m1n = self.changelog.read(p1)[0]
+ m2n = self.changelog.read(p2)[0]
+ man = self.manifest.ancestor(m1n, m2n)
+ m1 = self.manifest.read(m1n)
+ mf1 = self.manifest.readflags(m1n)
+ m2 = self.manifest.read(m2n).copy()
+ mf2 = self.manifest.readflags(m2n)
+ ma = self.manifest.read(man)
+ mfa = self.manifest.readflags(man)
+
+ modified, added, removed, deleted, unknown = self.changes()
+
+ # is this a jump, or a merge? i.e. is there a linear path
+ # from p1 to p2?
+ linear_path = (pa == p1 or pa == p2)
+
+ if allow and linear_path:
+ raise util.Abort(_("there is nothing to merge, "
+ "just use 'hg update'"))
+ if allow and not forcemerge:
+ if modified or added or removed:
+ raise util.Abort(_("outstanding uncommitted changes"))
+
+ # refuse to clobber untracked files that differ from the target
+ if not forcemerge and not force:
+ for f in unknown:
+ if f in m2:
+ t1 = self.wread(f)
+ t2 = self.file(f).read(m2[f])
+ if cmp(t1, t2) != 0:
+ raise util.Abort(_("'%s' already exists in the working"
+ " dir and differs from remote") % f)
+
+ # resolve the manifest to determine which files
+ # we care about merging
+ self.ui.note(_("resolving manifests\n"))
+ self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
+ (force, allow, moddirstate, linear_path))
+ self.ui.debug(_(" ancestor %s local %s remote %s\n") %
+ (short(man), short(m1n), short(m2n)))
+
+ # merge: files needing a 3-way merge; get: files to fetch as-is;
+ # remove: files to delete from the working dir
+ merge = {}
+ get = {}
+ remove = []
+
+ # construct a working dir manifest
+ mw = m1.copy()
+ mfw = mf1.copy()
+ umap = dict.fromkeys(unknown)
+
+ for f in added + modified + unknown:
+ mw[f] = ""
+ mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
+
+ if moddirstate and not wlock:
+ wlock = self.wlock()
+
+ for f in deleted + removed:
+ if f in mw:
+ del mw[f]
+
+ # If we're jumping between revisions (as opposed to merging),
+ # and if neither the working directory nor the target rev has
+ # the file, then we need to remove it from the dirstate, to
+ # prevent the dirstate from listing the file when it is no
+ # longer in the manifest.
+ if moddirstate and linear_path and f not in m2:
+ self.dirstate.forget((f,))
+
+ # Compare manifests
+ for f, n in mw.iteritems():
+ if choose and not choose(f):
+ continue
+ if f in m2:
+ s = 0
+
+ # is the wfile new since m1, and match m2?
+ if f not in m1:
+ t1 = self.wread(f)
+ t2 = self.file(f).read(m2[f])
+ if cmp(t1, t2) == 0:
+ n = m2[f]
+ del t1, t2
+
+ # are files different?
+ if n != m2[f]:
+ a = ma.get(f, nullid)
+ # are both different from the ancestor?
+ if n != a and m2[f] != a:
+ self.ui.debug(_(" %s versions differ, resolve\n") % f)
+ # merge executable bits
+ # "if we changed or they changed, change in merge"
+ a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
+ mode = ((a^b) | (a^c)) ^ a
+ merge[f] = (m1.get(f, nullid), m2[f], mode)
+ s = 1
+ # are we clobbering?
+ # is remote's version newer?
+ # or are we going back in time?
+ elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
+ self.ui.debug(_(" remote %s is newer, get\n") % f)
+ get[f] = m2[f]
+ s = 1
+ elif f in umap or f in added:
+ # this unknown file is the same as the checkout
+ # we need to reset the dirstate if the file was added
+ get[f] = m2[f]
+
+ if not s and mfw[f] != mf2[f]:
+ if force:
+ self.ui.debug(_(" updating permissions for %s\n") % f)
+ util.set_exec(self.wjoin(f), mf2[f])
+ else:
+ a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
+ mode = ((a^b) | (a^c)) ^ a
+ if mode != b:
+ self.ui.debug(_(" updating permissions for %s\n")
+ % f)
+ util.set_exec(self.wjoin(f), mode)
+ del m2[f]
+ elif f in ma:
+ if n != ma[f]:
+ r = _("d")
+ if not force and (linear_path or allow):
+ r = self.ui.prompt(
+ (_(" local changed %s which remote deleted\n") % f) +
+ _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
+ if r == _("d"):
+ remove.append(f)
+ else:
+ self.ui.debug(_("other deleted %s\n") % f)
+ remove.append(f) # other deleted it
+ else:
+ # file is created on branch or in working directory
+ if force and f not in umap:
+ self.ui.debug(_("remote deleted %s, clobbering\n") % f)
+ remove.append(f)
+ elif n == m1.get(f, nullid): # same as parent
+ if p2 == pa: # going backwards?
+ self.ui.debug(_("remote deleted %s\n") % f)
+ remove.append(f)
+ else:
+ self.ui.debug(_("local modified %s, keeping\n") % f)
+ else:
+ self.ui.debug(_("working dir created %s, keeping\n") % f)
+
+ # files only present in the target manifest (m2 entries handled
+ # above were deleted as they were matched)
+ for f, n in m2.iteritems():
+ if choose and not choose(f):
+ continue
+ if f[0] == "/":
+ continue
+ if f in ma and n != ma[f]:
+ r = _("k")
+ if not force and (linear_path or allow):
+ r = self.ui.prompt(
+ (_("remote changed %s which local deleted\n") % f) +
+ _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
+ if r == _("k"):
+ get[f] = n
+ elif f not in ma:
+ self.ui.debug(_("remote created %s\n") % f)
+ get[f] = n
+ else:
+ if force or p2 == pa: # going backwards?
+ self.ui.debug(_("local deleted %s, recreating\n") % f)
+ get[f] = n
+ else:
+ self.ui.debug(_("local deleted %s\n") % f)
+
+ del mw, m1, m2, ma
+
+ if force:
+ # forced update: take the remote side of every merge
+ for f in merge:
+ get[f] = merge[f][1]
+ merge = {}
+
+ if linear_path or force:
+ # we don't need to do any magic, just jump to the new rev
+ branch_merge = False
+ p1, p2 = p2, nullid
+ else:
+ if not allow:
+ self.ui.status(_("this update spans a branch"
+ " affecting the following files:\n"))
+ fl = merge.keys() + get.keys()
+ fl.sort()
+ for f in fl:
+ cf = ""
+ if f in merge:
+ cf = _(" (resolve)")
+ self.ui.status(" %s%s\n" % (f, cf))
+ self.ui.warn(_("aborting update spanning branches!\n"))
+ self.ui.status(_("(use 'hg merge' to merge across branches"
+ " or 'hg update -C' to lose changes)\n"))
+ return 1
+ branch_merge = True
+
+ xp1 = hex(p1)
+ xp2 = hex(p2)
+ if p2 == nullid: xxp2 = ''
+ else: xxp2 = xp2
+
+ self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
+
+ # get the files we don't need to change
+ files = get.keys()
+ files.sort()
+ for f in files:
+ if f[0] == "/":
+ continue
+ self.ui.note(_("getting %s\n") % f)
+ t = self.file(f).read(get[f])
+ self.wwrite(f, t)
+ util.set_exec(self.wjoin(f), mf2[f])
+ if moddirstate:
+ if branch_merge:
+ self.dirstate.update([f], 'n', st_mtime=-1)
+ else:
+ self.dirstate.update([f], 'n')
+
+ # merge the tricky bits
+ failedmerge = []
+ files = merge.keys()
+ files.sort()
+ for f in files:
+ self.ui.status(_("merging %s\n") % f)
+ my, other, flag = merge[f]
+ ret = self.merge3(f, my, other, xp1, xp2)
+ if ret:
+ err = True
+ failedmerge.append(f)
+ util.set_exec(self.wjoin(f), flag)
+ if moddirstate:
+ if branch_merge:
+ # We've done a branch merge, mark this file as merged
+ # so that we properly record the merger later
+ self.dirstate.update([f], 'm')
+ else:
+ # We've update-merged a locally modified file, so
+ # we set the dirstate to emulate a normal checkout
+ # of that file some time in the past. Thus our
+ # merge will appear as a normal local file
+ # modification.
+ f_len = len(self.file(f).read(other))
+ self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
+
+ remove.sort()
+ for f in remove:
+ self.ui.note(_("removing %s\n") % f)
+ util.audit_path(f)
+ try:
+ util.unlink(self.wjoin(f))
+ except OSError, inst:
+ # already gone is fine; anything else is worth a warning
+ if inst.errno != errno.ENOENT:
+ self.ui.warn(_("update failed to remove %s: %s!\n") %
+ (f, inst.strerror))
+ if moddirstate:
+ if branch_merge:
+ self.dirstate.update(remove, 'r')
+ else:
+ self.dirstate.forget(remove)
+
+ if moddirstate:
+ self.dirstate.setparents(p1, p2)
+
+ if show_stats:
+ stats = ((len(get), _("updated")),
+ (len(merge) - len(failedmerge), _("merged")),
+ (len(remove), _("removed")),
+ (len(failedmerge), _("unresolved")))
+ note = ", ".join([_("%d files %s") % s for s in stats])
+ self.ui.status("%s\n" % note)
+ if moddirstate:
+ if branch_merge:
+ if failedmerge:
+ self.ui.status(_("There are unresolved merges,"
+ " you can redo the full merge using:\n"
+ " hg update -C %s\n"
+ " hg merge %s\n"
+ % (self.changelog.rev(p1),
+ self.changelog.rev(p2))))
+ else:
+ self.ui.status(_("(branch merge, don't forget to commit)\n"))
+ elif failedmerge:
+ self.ui.status(_("There are unresolved merges with"
+ " locally modified files.\n"))
+
+ self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
+ return err
+
def merge3(self, fn, my, other, p1, p2):
    """perform a 3-way merge in the working directory

    Runs the external merge tool (HGMERGE, ui.merge or 'hgmerge') on
    the working copy of fn against temp copies of the base and other
    revisions; returns the tool's exit status (non-zero on failure)."""

    def mktemp(prefix, node):
        # dump the given revision of fn into a uniquely named temp
        # file and return its path
        pre = "%s~%s." % (os.path.basename(fn), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        fp = os.fdopen(fd, "wb")
        self.wwrite(fn, fl.read(node), fp)
        fp.close()
        return name

    fl = self.file(fn)
    base = fl.ancestor(my, other)
    a = self.wjoin(fn)
    b = mktemp("base", base)
    c = mktemp("other", other)

    self.ui.note(_("resolving %s\n") % fn)
    self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
                  (fn, short(my), short(other), short(base)))

    cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
           or "hgmerge")
    r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
                    environ={'HG_FILE': fn,
                             'HG_MY_NODE': p1,
                             'HG_OTHER_NODE': p2,
                             'HG_FILE_MY_NODE': hex(my),
                             'HG_FILE_OTHER_NODE': hex(other),
                             'HG_FILE_BASE_NODE': hex(base)})
    if r:
        self.ui.warn(_("merging %s failed!\n") % fn)

    os.unlink(b)
    os.unlink(c)
    return r
+
+ def verify(self):
+ filelinkrevs = {}
+ filenodes = {}
+ changesets = revisions = files = 0
+ errors = [0]
+ warnings = [0]
+ neededmanifests = {}
+
+ def err(msg):
+ self.ui.warn(msg + "\n")
+ errors[0] += 1
+
+ def warn(msg):
+ self.ui.warn(msg + "\n")
+ warnings[0] += 1
+
+ def checksize(obj, name):
+ d = obj.checksize()
+ if d[0]:
+ err(_("%s data length off by %d bytes") % (name, d[0]))
+ if d[1]:
+ err(_("%s index contains %d extra bytes") % (name, d[1]))
+
+ def checkversion(obj, name):
+ if obj.version != revlog.REVLOGV0:
+ if not revlogv1:
+ warn(_("warning: `%s' uses revlog format 1") % name)
+ elif revlogv1:
+ warn(_("warning: `%s' uses revlog format 0") % name)
+
+ revlogv1 = self.revlogversion != revlog.REVLOGV0
+ if self.ui.verbose or revlogv1 != self.revlogv1:
+ self.ui.status(_("repository uses revlog format %d\n") %
+ (revlogv1 and 1 or 0))
+
+ seen = {}
+ self.ui.status(_("checking changesets\n"))
+ checksize(self.changelog, "changelog")
+
+ for i in range(self.changelog.count()):
+ changesets += 1
+ n = self.changelog.node(i)
+ l = self.changelog.linkrev(n)
+ if l != i:
+ err(_("incorrect link (%d) for changeset revision %d") %(l, i))
+ if n in seen:
+ err(_("duplicate changeset at revision %d") % i)
+ seen[n] = 1
+
+ for p in self.changelog.parents(n):
+ if p not in self.changelog.nodemap:
+ err(_("changeset %s has unknown parent %s") %
+ (short(n), short(p)))
+ try:
+ changes = self.changelog.read(n)
+ except KeyboardInterrupt:
+ self.ui.warn(_("interrupted"))
+ raise
+ except Exception, inst:
+ err(_("unpacking changeset %s: %s") % (short(n), inst))
+ continue
+
+ neededmanifests[changes[0]] = n
+
+ for f in changes[3]:
+ filelinkrevs.setdefault(f, []).append(i)
+
+ seen = {}
+ self.ui.status(_("checking manifests\n"))
+ checkversion(self.manifest, "manifest")
+ checksize(self.manifest, "manifest")
+
+ for i in range(self.manifest.count()):
+ n = self.manifest.node(i)
+ l = self.manifest.linkrev(n)
+
+ if l < 0 or l >= self.changelog.count():
+ err(_("bad manifest link (%d) at revision %d") % (l, i))
+
+ if n in neededmanifests:
+ del neededmanifests[n]
+
+ if n in seen:
+ err(_("duplicate manifest at revision %d") % i)
+
+ seen[n] = 1
+
+ for p in self.manifest.parents(n):
+ if p not in self.manifest.nodemap:
+ err(_("manifest %s has unknown parent %s") %
+ (short(n), short(p)))
+
+ try:
+ delta = mdiff.patchtext(self.manifest.delta(n))
+ except KeyboardInterrupt:
+ self.ui.warn(_("interrupted"))
+ raise
+ except Exception, inst:
+ err(_("unpacking manifest %s: %s") % (short(n), inst))
+ continue
+
+ try:
+ ff = [ l.split('\0') for l in delta.splitlines() ]
+ for f, fn in ff:
+ filenodes.setdefault(f, {})[bin(fn[:40])] = 1
+ except (ValueError, TypeError), inst:
+ err(_("broken delta in manifest %s: %s") % (short(n), inst))
+
+ self.ui.status(_("crosschecking files in changesets and manifests\n"))
+
+ for m, c in neededmanifests.items():
+ err(_("Changeset %s refers to unknown manifest %s") %
+ (short(m), short(c)))
+ del neededmanifests
+
+ for f in filenodes:
+ if f not in filelinkrevs:
+ err(_("file %s in manifest but not in changesets") % f)
+
+ for f in filelinkrevs:
+ if f not in filenodes:
+ err(_("file %s in changeset but not in manifest") % f)
+
+ self.ui.status(_("checking files\n"))
+ ff = filenodes.keys()
+ ff.sort()
+ for f in ff:
+ if f == "/dev/null":
+ continue
+ files += 1
+ if not f:
+ err(_("file without name in manifest %s") % short(n))
+ continue
+ fl = self.file(f)
+ checkversion(fl, f)
+ checksize(fl, f)
+
+ nodes = {nullid: 1}
+ seen = {}
+ for i in range(fl.count()):
+ revisions += 1
+ n = fl.node(i)
+
+ if n in seen:
+ err(_("%s: duplicate revision %d") % (f, i))
+ if n not in filenodes[f]:
+ err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
+ else:
+ del filenodes[f][n]
+
+ flr = fl.linkrev(n)
+ if flr not in filelinkrevs.get(f, []):
+ err(_("%s:%s points to unexpected changeset %d")
+ % (f, short(n), flr))
+ else:
+ filelinkrevs[f].remove(flr)
+
+ # verify contents
+ try:
+ t = fl.read(n)
+ except KeyboardInterrupt:
+ self.ui.warn(_("interrupted"))
+ raise
+ except Exception, inst:
+ err(_("unpacking file %s %s: %s") % (f, short(n), inst))
+
+ # verify parents
+ (p1, p2) = fl.parents(n)
+ if p1 not in nodes:
+ err(_("file %s:%s unknown parent 1 %s") %
+ (f, short(n), short(p1)))
+ if p2 not in nodes:
+ err(_("file %s:%s unknown parent 2 %s") %
+ (f, short(n), short(p1)))
+ nodes[n] = 1
+
+ # cross-check
+ for node in filenodes[f]:
+ err(_("node %s in manifests not in %s") % (hex(node), f))
+
+ self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
+ (files, changesets, revisions))
+
+ if warnings[0]:
+ self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
+ if errors[0]:
+ self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
+ return 1
+
# used to avoid circular references so destructors work
def aftertrans(base):
    """Return a callable that renames the journal files under *base*
    to their undo names after a transaction completes."""
    journal_dir = base
    def renamer():
        util.rename(os.path.join(journal_dir, "journal"),
                    os.path.join(journal_dir, "undo"))
        util.rename(os.path.join(journal_dir, "journal.dirstate"),
                    os.path.join(journal_dir, "undo.dirstate"))
    return renamer
+
new file mode 100644
--- /dev/null
+++ b/mercurial/lock.py
@@ -0,0 +1,118 @@
+# lock.py - simple locking scheme for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from demandload import *
+demandload(globals(), 'errno os socket time util')
+
+class LockException(IOError):  # base class for locking errors; carries errno/strerror/filename like IOError
+    def __init__(self, errno, strerror, filename, desc):
+        IOError.__init__(self, errno, strerror, filename)
+        self.desc = desc  # human-readable description of what was being locked
+
+class LockHeld(LockException):  # another process currently holds the lock
+    def __init__(self, errno, filename, desc, locker):
+        LockException.__init__(self, errno, 'Lock held', filename, desc)
+        self.locker = locker  # holder id string (host:pid, or bare pid for old-style locks)
+
+class LockUnavailable(LockException):  # lock file could not be created for a non-EEXIST reason
+    pass
+
+class lock(object):
+    # lock is symlink on platforms that support it, file on others.
+
+    # symlink is used because create of directory entry and contents
+    # are atomic even over nfs.
+
+    # old-style lock: symlink to pid
+    # new-style lock: symlink to hostname:pid
+
+    def __init__(self, file, timeout=-1, releasefn=None, desc=None):  # timeout < 0 waits forever
+        self.f = file  # path of the lock file/symlink
+        self.held = 0
+        self.timeout = timeout  # seconds to retry before giving up (one attempt per second)
+        self.releasefn = releasefn  # optional callback invoked on release()
+        self.id = None  # lazily computed 'host:pid' identity, set in trylock()
+        self.host = None
+        self.pid = None
+        self.desc = desc
+        self.lock()  # acquire immediately; raises LockHeld/LockUnavailable on failure
+
+    def __del__(self):
+        self.release()
+
+    def lock(self):  # retry trylock() once a second until success or timeout
+        timeout = self.timeout
+        while 1:
+            try:
+                self.trylock()
+                return 1
+            except LockHeld, inst:
+                if timeout != 0:  # negative timeout never decrements: loop forever by design
+                    time.sleep(1)
+                    if timeout > 0:
+                        timeout -= 1
+                    continue
+                raise LockHeld(errno.ETIMEDOUT, inst.filename, self.desc,
+                               inst.locker)
+
+    def trylock(self):  # single acquisition attempt; loops only to retry after breaking a dead lock
+        if self.id is None:
+            self.host = socket.gethostname()
+            self.pid = os.getpid()
+            self.id = '%s:%s' % (self.host, self.pid)
+        while not self.held:
+            try:
+                util.makelock(self.id, self.f)  # atomic create (symlink where supported)
+                self.held = 1
+            except (OSError, IOError), why:
+                if why.errno == errno.EEXIST:  # somebody else got there first
+                    locker = self.testlock()  # returns None if a dead lock was broken
+                    if locker:
+                        raise LockHeld(errno.EAGAIN, self.f, self.desc,
+                                       locker)
+                else:
+                    raise LockUnavailable(why.errno, why.strerror,
+                                          why.filename, self.desc)
+
+    def testlock(self):
+        '''return id of locker if lock is valid, else None.'''
+        # if old-style lock, we cannot tell what machine locker is on.
+        # with new-style lock, if locker is on this machine, we can
+        # see if locker is alive.  if locker is on this machine but
+        # not alive, we can safely break lock.
+        locker = util.readlock(self.f)
+        c = locker.find(':')
+        if c == -1:  # old-style pid-only lock: cannot verify, assume valid
+            return locker
+        host = locker[:c]
+        if host != self.host:  # remote holder: cannot check liveness, assume valid
+            return locker
+        try:
+            pid = int(locker[c+1:])
+        except:  # malformed pid: play safe and treat the lock as valid
+            return locker
+        if util.testpid(pid):  # holder process still running
+            return locker
+        # if locker dead, break lock.  must do this with another lock
+        # held, or can race and break valid lock.
+        try:
+            l = lock(self.f + '.break')  # meta-lock serializes concurrent breakers
+            l.trylock()
+            os.unlink(self.f)
+            l.release()
+        except (LockHeld, LockUnavailable):  # someone else is breaking it; report as still held
+            return locker
+
+    def release(self):  # idempotent; also invoked from __del__
+        if self.held:
+            self.held = 0
+            if self.releasefn:
+                self.releasefn()
+            try:
+                os.unlink(self.f)
+            except: pass  # best-effort: lock file may already be gone
+
new file mode 100644
--- /dev/null
+++ b/mercurial/manifest.py
@@ -0,0 +1,189 @@
+# manifest.py - manifest revision class for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import struct
+from revlog import *
+from i18n import gettext as _
+from demandload import *
+demandload(globals(), "bisect array")
+
+class manifest(revlog):
+    def __init__(self, opener, defversion=REVLOGV0):
+        self.mapcache = None  # (node, {file: filenode}, {file: is-exec}) of last read()
+        self.listcache = None  # array('c') copy of last manifest text, reused as delta base by add()
+        revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
+                        defversion)
+
+    def read(self, node):  # return {filename: filenode} for the given manifest revision
+        if node == nullid: return {} # don't upset local cache
+        if self.mapcache and self.mapcache[0] == node:
+            return self.mapcache[1]
+        text = self.revision(node)
+        map = {}
+        flag = {}
+        self.listcache = array.array('c', text)
+        lines = text.splitlines(1)
+        for l in lines:
+            (f, n) = l.split('\0')  # each line: "<file>\0<40-hex-sha><flags>\n"
+            map[f] = bin(n[:40])
+            flag[f] = (n[40:-1] == "x")  # "x" between hash and newline marks executable
+        self.mapcache = (node, map, flag)
+        return map
+
+    def readflags(self, node):  # return {filename: is-executable} for the given revision
+        if node == nullid: return {} # don't upset local cache
+        if not self.mapcache or self.mapcache[0] != node:
+            self.read(node)  # populates mapcache as a side effect
+        return self.mapcache[2]
+
+    def diff(self, a, b):  # binary delta between two manifest texts
+        return mdiff.textdiff(str(a), str(b))
+
+    def _search(self, m, s, lo=0, hi=None):
+        '''return a tuple (start, end) that says where to find s within m.
+
+        If the string is found m[start:end] are the line containing
+        that string.  If start == end the string was not found and
+        they indicate the proper sorted insertion point.  This was
+        taken from bisect_left, and modified to find line start/end as
+        it goes along.
+
+        m should be a buffer or a string
+        s is a string'''
+        def advance(i, c):  # index of next occurrence of char c at or after i (or lenm)
+            while i < lenm and m[i] != c:
+                i += 1
+            return i
+        lenm = len(m)
+        if not hi:
+            hi = lenm
+        while lo < hi:
+            mid = (lo + hi) // 2
+            start = mid
+            while start > 0 and m[start-1] != '\n':  # back up to beginning of line
+                start -= 1
+            end = advance(start, '\0')  # filename ends at the NUL separator
+            if m[start:end] < s:
+                # we know that after the null there are 40 bytes of sha1
+                # this translates to the bisect lo = mid + 1
+                lo = advance(end + 40, '\n') + 1
+            else:
+                # this translates to the bisect hi = mid
+                hi = start
+        end = advance(lo, '\0')
+        found = m[lo:end]
+        if cmp(s, found) == 0:
+            # we know that after the null there are 40 bytes of sha1
+            end = advance(end + 40, '\n')
+            return (lo, end+1)
+        else:
+            return (lo, lo)  # not found: both values are the insertion point
+
+    def find(self, node, f):
+        '''look up entry for a single file efficiently.
+        return (node, flag) pair if found, (None, None) if not.'''
+        if self.mapcache and node == self.mapcache[0]:
+            return self.mapcache[1].get(f), self.mapcache[2].get(f)
+        text = self.revision(node)
+        start, end = self._search(text, f)  # bisect over the raw text, no full parse
+        if start == end:
+            return None, None
+        l = text[start:end]
+        f, n = l.split('\0')
+        return bin(n[:40]), n[40:-1] == 'x'
+
+    def add(self, map, flags, transaction, link, p1=None, p2=None,
+            changed=None):  # changed is optional ([added/modified], [removed]) for the delta fast path
+        # apply the changes collected during the bisect loop to our addlist
+        # return a delta suitable for addrevision
+        def addlistdelta(addlist, x):
+            # start from the bottom up
+            # so changes to the offsets don't mess things up.
+            i = len(x)
+            while i > 0:
+                i -= 1
+                start = x[i][0]
+                end = x[i][1]
+                if x[i][2]:
+                    addlist[start:end] = array.array('c', x[i][2])
+                else:
+                    del addlist[start:end]
+            return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
+                            for d in x ])
+
+        # if we're using the listcache, make sure it is valid and
+        # parented by the same node we're diffing against
+        if not changed or not self.listcache or not p1 or \
+               self.mapcache[0] != p1:
+            files = map.keys()
+            files.sort()
+
+            # if this is changed to support newlines in filenames,
+            # be sure to check the templates/ dir again (especially *-raw.tmpl)
+            text = ["%s\000%s%s\n" %
+                    (f, hex(map[f]), flags[f] and "x" or '')
+                    for f in files]
+            self.listcache = array.array('c', "".join(text))
+            cachedelta = None  # rebuilt full text: no delta for addrevision
+        else:
+            addlist = self.listcache
+
+            # combine the changed lists into one list for sorting
+            work = [[x, 0] for x in changed[0]]  # 0 = added/modified entry
+            work[len(work):] = [[x, 1] for x in changed[1]]  # 1 = removed entry
+            work.sort()
+
+            delta = []
+            dstart = None
+            dend = None
+            dline = [""]
+            start = 0
+            # zero copy representation of addlist as a buffer
+            addbuf = buffer(addlist)
+
+            # start with a readonly loop that finds the offset of
+            # each line and creates the deltas
+            for w in work:
+                f = w[0]
+                # bs will either be the index of the item or the insert point
+                start, end = self._search(addbuf, f, start)  # files sorted: resume from last hit
+                if w[1] == 0:
+                    l = "%s\000%s%s\n" % (f, hex(map[f]),
+                                          flags[f] and "x" or '')
+                else:
+                    l = ""  # removal: replace the line with nothing
+                if start == end and w[1] == 1:
+                    # item we want to delete was not found, error out
+                    raise AssertionError(
+                            _("failed to remove %s from manifest\n") % f)
+                if dstart != None and dstart <= start and dend >= start:
+                    if dend < end:  # extend the current delta chunk
+                        dend = end
+                    if l:
+                        dline.append(l)
+                else:
+                    if dstart != None:  # flush the previous chunk
+                        delta.append([dstart, dend, "".join(dline)])
+                    dstart = start
+                    dend = end
+                    dline = [l]
+
+            if dstart != None:  # flush the final chunk
+                delta.append([dstart, dend, "".join(dline)])
+            # apply the delta to the addlist, and get a delta for addrevision
+            cachedelta = addlistdelta(addlist, delta)
+
+            # the delta is only valid if we've been processing the tip revision
+            if self.mapcache[0] != self.tip():
+                cachedelta = None
+            self.listcache = addlist
+
+        n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
+                             p2, cachedelta)
+        self.mapcache = (n, map, flags)
+
+        return n
new file mode 100644
--- /dev/null
+++ b/mercurial/mdiff.py
@@ -0,0 +1,205 @@
+# mdiff.py - diff and patch routines for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from demandload import demandload
+import struct, bdiff, util, mpatch
+demandload(globals(), "re")
+
+def splitnewlines(text):
+    '''like str.splitlines, but only split on newlines.'''
+    lines = [l + '\n' for l in text.split('\n')]
+    if lines:
+        if lines[-1] == '\n':  # text ended with '\n': drop the spurious empty tail
+            lines.pop()
+        else:
+            lines[-1] = lines[-1][:-1]  # strip the '\n' we appended to the unterminated last line
+    return lines
+
+def unidiff(a, ad, b, bd, fn, r=None, text=False,  # render a unified diff of text a -> b for file fn
+            showfunc=False, ignorews=False):  # ad/bd are date strings; r is an optional revision pair for the header
+
+    if not a and not b: return ""  # both sides empty/missing: nothing to show
+    epoch = util.datestr((0, 0))
+
+    if not text and (util.binary(a) or util.binary(b)):  # binary unless forced with text=True
+        l = ['Binary file %s has changed\n' % fn]
+    elif not a:
+        b = splitnewlines(b)
+        if a is None:  # file did not exist before (None), as opposed to being empty ("")
+            l1 = "--- %s\t%s\n" % ("/dev/null", epoch)
+        else:
+            l1 = "--- %s\t%s\n" % ("a/" + fn, ad)
+        l2 = "+++ %s\t%s\n" % ("b/" + fn, bd)
+        l3 = "@@ -0,0 +1,%d @@\n" % len(b)
+        l = [l1, l2, l3] + ["+" + e for e in b]
+    elif not b:
+        a = splitnewlines(a)
+        l1 = "--- %s\t%s\n" % ("a/" + fn, ad)
+        if b is None:  # file removed entirely, not merely emptied
+            l2 = "+++ %s\t%s\n" % ("/dev/null", epoch)
+        else:
+            l2 = "+++ %s\t%s\n" % ("b/" + fn, bd)
+        l3 = "@@ -1,%d +0,0 @@\n" % len(a)
+        l = [l1, l2, l3] + ["-" + e for e in a]
+    else:
+        al = splitnewlines(a)
+        bl = splitnewlines(b)
+        l = list(bunidiff(a, b, al, bl, "a/" + fn, "b/" + fn,
+                          showfunc=showfunc, ignorews=ignorews))
+        if not l: return ""
+        # difflib uses a space, rather than a tab
+        l[0] = "%s\t%s\n" % (l[0][:-2], ad)
+        l[1] = "%s\t%s\n" % (l[1][:-2], bd)
+
+    for ln in xrange(len(l)):
+        if l[ln][-1] != '\n':  # annotate unterminated final lines, diff(1) style
+            l[ln] += "\n\ No newline at end of file\n"
+
+    if r:
+        l.insert(0, "diff %s %s\n" %
+                 (' '.join(["-r %s" % rev for rev in r]), fn))
+
+    return "".join(l)
+
+# somewhat self contained replacement for difflib.unified_diff
+# t1 and t2 are the text to be diffed
+# l1 and l2 are the text broken up into lines
+# header1 and header2 are the filenames for the diff output
+# context is the number of context lines
+# showfunc enables diff -p output
+# ignorews ignores all whitespace changes in the diff
+def bunidiff(t1, t2, l1, l2, header1, header2, context=3, showfunc=False,
+             ignorews=False):  # generator yielding diff output lines
+    def contextend(l, len):  # clamp l + context to the end of the file
+        ret = l + context
+        if ret > len:
+            ret = len
+        return ret
+
+    def contextstart(l):  # clamp l - context to the start of the file
+        ret = l - context
+        if ret < 0:
+            return 0
+        return ret
+
+    def yieldhunk(hunk, header):  # emit file header (once), @@ line, body, trailing context
+        if header:
+            for x in header:
+                yield x
+        (astart, a2, bstart, b2, delta) = hunk
+        aend = contextend(a2, len(l1))
+        alen = aend - astart
+        blen = b2 - bstart + aend - a2  # b side shares the trailing context with a
+
+        func = ""
+        if showfunc:
+            # walk backwards from the start of the context
+            # to find a line starting with an alphanumeric char.
+            for x in xrange(astart, -1, -1):
+                t = l1[x].rstrip()
+                if funcre.match(t):
+                    func = ' ' + t[:40]  # truncate the function hint to 40 chars
+                    break
+
+        yield "@@ -%d,%d +%d,%d @@%s\n" % (astart + 1, alen,
+                                           bstart + 1, blen, func)
+        for x in delta:
+            yield x
+        for x in xrange(a2, aend):  # trailing context lines
+            yield ' ' + l1[x]
+
+    header = [ "--- %s\t\n" % header1, "+++ %s\t\n" % header2 ]
+
+    if showfunc:
+        funcre = re.compile('\w')
+    if ignorews:
+        wsre = re.compile('[ \t]')
+
+    # bdiff.blocks gives us the matching sequences in the files.  The loop
+    # below finds the spaces between those matching sequences and translates
+    # them into diff output.
+    #
+    diff = bdiff.blocks(t1, t2)
+    hunk = None
+    for i in xrange(len(diff)):
+        # The first match is special.
+        # we've either found a match starting at line 0 or a match later
+        # in the file.  If it starts later, old and new below will both be
+        # empty and we'll continue to the next match.
+        if i > 0:
+            s = diff[i-1]
+        else:
+            s = [0, 0, 0, 0]  # sentinel "previous match" ending at the top of both files
+        delta = []
+        s1 = diff[i]
+        a1 = s[1]  # changed region in a runs from end of previous match...
+        a2 = s1[0]  # ...to start of this match
+        b1 = s[3]  # same bounds for the b side
+        b2 = s1[2]
+
+        old = l1[a1:a2]
+        new = l2[b1:b2]
+
+        # bdiff sometimes gives huge matches past eof, this check eats them,
+        # and deals with the special first match case described above
+        if not old and not new:
+            continue
+
+        if ignorews:
+            wsold = wsre.sub('', "".join(old))
+            wsnew = wsre.sub('', "".join(new))
+            if wsold == wsnew:  # only whitespace changed: suppress this hunk
+                continue
+
+        astart = contextstart(a1)
+        bstart = contextstart(b1)
+        prev = None
+        if hunk:
+            # join with the previous hunk if it falls inside the context
+            if astart < hunk[1] + context + 1:
+                prev = hunk
+                astart = hunk[1]
+                bstart = hunk[3]
+            else:
+                for x in yieldhunk(hunk, header):
+                    yield x
+                # we only want to yield the header if the files differ, and
+                # we only want to yield it once.
+                header = None
+        if prev:
+            # we've joined the previous hunk, record the new ending points.
+            hunk[1] = a2
+            hunk[3] = b2
+            delta = hunk[4]  # keep appending to the joined hunk's body
+        else:
+            # create a new hunk
+            hunk = [ astart, a2, bstart, b2, delta ]
+
+        delta[len(delta):] = [ ' ' + x for x in l1[astart:a1] ]
+        delta[len(delta):] = [ '-' + x for x in old ]
+        delta[len(delta):] = [ '+' + x for x in new ]
+
+    if hunk:  # flush the last pending hunk
+        for x in yieldhunk(hunk, header):
+            yield x
+
+def patchtext(bin):  # extract just the inserted text fragments from a binary delta
+    pos = 0
+    t = []
+    while pos < len(bin):
+        p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])  # (start, end, data-length) hunk header
+        pos += 12
+        t.append(bin[pos:pos + l])
+        pos += l
+    return "".join(t)
+
+def patch(a, bin):  # apply a single binary patch to text a
+    return mpatch.patches(a, [bin])
+
+patches = mpatch.patches  # apply a list of binary patches (C implementation)
+patchedsize = mpatch.patchedsize  # compute the patched size without applying
+textdiff = bdiff.bdiff  # binary delta between two texts (C implementation)
new file mode 100644
--- /dev/null
+++ b/mercurial/mpatch.c
@@ -0,0 +1,404 @@
+/*
+ mpatch.c - efficient binary patching for Mercurial
+
+ This implements a patch algorithm that's O(m + nlog n) where m is the
+ size of the output and n is the number of patches.
+
+ Given a list of binary patches, it unpacks each into a hunk list,
+ then combines the hunk lists with a treewise recursion to form a
+ single hunk list. This hunk list is then applied to the original
+ text.
+
+ The text (or binary) fragments are copied directly from their source
+ Python objects into a preallocated output string to avoid the
+ allocation of intermediate Python objects. Working memory is about 2x
+ the total number of hunks.
+
+ Copyright 2005 Matt Mackall <mpm@selenic.com>
+
+ This software may be used and distributed according to the terms
+ of the GNU General Public License, incorporated herein by reference.
+*/
+
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+#ifdef _WIN32
+#ifdef _MSC_VER
+#define inline __inline
+typedef unsigned long uint32_t;
+#else
+#include <stdint.h>
+#endif
+static uint32_t ntohl(uint32_t x)
+{
+ return ((x & 0x000000ffUL) << 24) |
+ ((x & 0x0000ff00UL) << 8) |
+ ((x & 0x00ff0000UL) >> 8) |
+ ((x & 0xff000000UL) >> 24);
+}
+#else
+#include <sys/types.h>
+#include <arpa/inet.h>
+#endif
+
+static char mpatch_doc[] = "Efficient binary patching.";
+static PyObject *mpatch_Error;
+
+struct frag {
+ int start, end, len;
+ char *data;
+};
+
+struct flist {
+ struct frag *base, *head, *tail;
+};
+
+static struct flist *lalloc(int size)  /* allocate a hunk list with room for 'size' frags */
+{
+	struct flist *a = NULL;
+
+	a = (struct flist *)malloc(sizeof(struct flist));
+	if (a) {
+		a->base = (struct frag *)malloc(sizeof(struct frag) * size);
+		if (a->base) {
+			a->head = a->tail = a->base;  /* empty list: head == tail */
+			return a;
+		}
+		free(a);
+		a = NULL;
+	}
+	if (!PyErr_Occurred())  /* don't clobber an earlier, more specific error */
+		PyErr_NoMemory();
+	return NULL;
+}
+
+static void lfree(struct flist *a)  /* free a hunk list; NULL-safe */
+{
+	if (a) {
+		free(a->base);
+		free(a);
+	}
+}
+
+static int lsize(struct flist *a)  /* number of unconsumed hunks in the list */
+{
+	return a->tail - a->head;
+}
+
+/* move hunks in source that sort before the cut point to dest,
+   compensating for changes in offset. the last hunk may be split if necessary.
+*/
+static int gather(struct flist *dest, struct flist *src, int cut, int offset)
+{
+	struct frag *d = dest->tail, *s = src->head;
+	int postend, c, l;
+
+	while (s != src->tail) {
+		if (s->start + offset >= cut)
+			break; /* we've gone far enough */
+
+		postend = offset + s->start + s->len;
+		if (postend <= cut) {
+			/* save this hunk */
+			offset += s->start + s->len - s->end;  /* track growth/shrink from this hunk */
+			*d++ = *s++;
+		}
+		else {
+			/* break up this hunk */
+			c = cut - offset;
+			if (s->end < c)
+				c = s->end;
+			l = cut - offset - s->start;
+			if (s->len < l)
+				l = s->len;
+
+			offset += s->start + l - c;
+
+			d->start = s->start;
+			d->end = c;
+			d->len = l;
+			d->data = s->data;
+			d++;
+			s->start = c;  /* leave the tail of the split hunk in src */
+			s->len = s->len - l;
+			s->data = s->data + l;
+
+			break;
+		}
+	}
+
+	dest->tail = d;
+	src->head = s;
+	return offset;
+}
+
+/* like gather, but with no output list */
+static int discard(struct flist *src, int cut, int offset)
+{
+	struct frag *s = src->head;
+	int postend, c, l;
+
+	while (s != src->tail) {
+		if (s->start + offset >= cut)
+			break;  /* remaining hunks all lie past the cut */
+
+		postend = offset + s->start + s->len;
+		if (postend <= cut) {
+			offset += s->start + s->len - s->end;  /* hunk dropped whole */
+			s++;
+		}
+		else {
+			c = cut - offset;  /* split the hunk at the cut point, keep the tail */
+			if (s->end < c)
+				c = s->end;
+			l = cut - offset - s->start;
+			if (s->len < l)
+				l = s->len;
+
+			offset += s->start + l - c;
+			s->start = c;
+			s->len = s->len - l;
+			s->data = s->data + l;
+
+			break;
+		}
+	}
+
+	src->head = s;
+	return offset;
+}
+
+/* combine hunk lists a and b, while adjusting b for offset changes in a;
+   this deletes a and b and returns the resultant list. */
+static struct flist *combine(struct flist *a, struct flist *b)
+{
+	struct flist *c = NULL;
+	struct frag *bh, *ct;
+	int offset = 0, post;
+
+	if (a && b)
+		c = lalloc((lsize(a) + lsize(b)) * 2);  /* worst case: every hunk split */
+
+	if (c) {
+
+		for (bh = b->head; bh != b->tail; bh++) {
+			/* save old hunks */
+			offset = gather(c, a, bh->start, offset);
+
+			/* discard replaced hunks */
+			post = discard(a, bh->end, offset);
+
+			/* insert new hunk */
+			ct = c->tail;
+			ct->start = bh->start - offset;  /* rebase b's coords onto the original text */
+			ct->end = bh->end - post;
+			ct->len = bh->len;
+			ct->data = bh->data;
+			c->tail++;
+			offset = post;
+		}
+
+		/* hold on to tail from a */
+		memcpy(c->tail, a->head, sizeof(struct frag) * lsize(a));
+		c->tail += lsize(a);
+	}
+
+	lfree(a);  /* inputs are always consumed, even on allocation failure */
+	lfree(b);
+	return c;
+}
+
+/* decode a binary patch into a hunk list */
+static struct flist *decode(char *bin, int len)
+{
+	struct flist *l;
+	struct frag *lt;
+	char *end = bin + len;
+	char decode[12]; /* for dealing with alignment issues */
+
+	/* assume worst case size, we won't have many of these lists */
+	l = lalloc(len / 12);
+	if (!l)
+		return NULL;
+
+	lt = l->tail;
+
+	while (bin < end) {
+		memcpy(decode, bin, 12);  /* 12-byte header: start, end, data length (big-endian) */
+		lt->start = ntohl(*(uint32_t *)decode);
+		lt->end = ntohl(*(uint32_t *)(decode + 4));
+		lt->len = ntohl(*(uint32_t *)(decode + 8));
+		lt->data = bin + 12;  /* points into the caller's buffer; no copy made */
+		bin += 12 + lt->len;
+		lt++;
+	}
+
+	if (bin != end) {  /* overshoot means a corrupt length field */
+		if (!PyErr_Occurred())
+			PyErr_SetString(mpatch_Error, "patch cannot be decoded");
+		lfree(l);
+		return NULL;
+	}
+
+	l->tail = lt;
+	return l;
+}
+
+/* calculate the size of resultant text */
+static int calcsize(int len, struct flist *l)
+{
+	int outlen = 0, last = 0;
+	struct frag *f = l->head;
+
+	while (f != l->tail) {
+		if (f->start < last || f->end > len) {  /* hunks must be sorted and in range */
+			if (!PyErr_Occurred())
+				PyErr_SetString(mpatch_Error,
+				                "invalid patch");
+			return -1;
+		}
+		outlen += f->start - last;  /* untouched span copied from the original */
+		last = f->end;
+		outlen += f->len;  /* replacement data */
+		f++;
+	}
+
+	outlen += len - last;  /* original tail after the last hunk */
+	return outlen;
+}
+
+static int apply(char *buf, char *orig, int len, struct flist *l)  /* write patched text into buf; 0 on invalid patch */
+{
+	struct frag *f = l->head;
+	int last = 0;
+	char *p = buf;
+
+	while (f != l->tail) {
+		if (f->start < last || f->end > len) {  /* same validation as calcsize */
+			if (!PyErr_Occurred())
+				PyErr_SetString(mpatch_Error,
+				                "invalid patch");
+			return 0;
+		}
+		memcpy(p, orig + last, f->start - last);  /* untouched span */
+		p += f->start - last;
+		memcpy(p, f->data, f->len);  /* replacement data */
+		last = f->end;
+		p += f->len;
+		f++;
+	}
+	memcpy(p, orig + last, len - last);  /* original tail */
+	return 1;
+}
+
+/* recursively generate a patch of all bins between start and end */
+static struct flist *fold(PyObject *bins, int start, int end)
+{
+	int len;
+
+	if (start + 1 == end) {
+		/* trivial case, output a decoded list */
+		PyObject *tmp = PyList_GetItem(bins, start);  /* borrowed reference */
+		if (!tmp)
+			return NULL;
+		return decode(PyString_AsString(tmp), PyString_Size(tmp));
+	}
+
+	/* divide and conquer, memory management is elsewhere */
+	len = (end - start) / 2;
+	return combine(fold(bins, start, start + len),
+	               fold(bins, start + len, end));  /* combine frees both sublists */
+}
+
+static PyObject *
+patches(PyObject *self, PyObject *args)  /* python entry: patches(text, [bin, ...]) -> patched string */
+{
+	PyObject *text, *bins, *result;
+	struct flist *patch;
+	char *in, *out;
+	int len, outlen;
+
+	if (!PyArg_ParseTuple(args, "SO:mpatch", &text, &bins))
+		return NULL;
+
+	len = PyList_Size(bins);
+	if (!len) {
+		/* nothing to do */
+		Py_INCREF(text);
+		return text;
+	}
+
+	patch = fold(bins, 0, len);  /* merge all deltas into a single hunk list */
+	if (!patch)
+		return NULL;
+
+	outlen = calcsize(PyString_Size(text), patch);
+	if (outlen < 0) {
+		result = NULL;
+		goto cleanup;
+	}
+	result = PyString_FromStringAndSize(NULL, outlen);  /* preallocate; filled in place by apply() */
+	if (!result) {
+		result = NULL;
+		goto cleanup;
+	}
+	in = PyString_AsString(text);
+	out = PyString_AsString(result);
+	if (!apply(out, in, PyString_Size(text), patch)) {
+		Py_DECREF(result);
+		result = NULL;
+	}
+cleanup:
+	lfree(patch);  /* always free the hunk list, success or failure */
+	return result;
+}
+
+/* calculate size of a patched file directly */
+static PyObject *
+patchedsize(PyObject *self, PyObject *args)
+{
+	long orig, start, end, len, outlen = 0, last = 0;
+	int patchlen;
+	char *bin, *binend;
+	char decode[12]; /* for dealing with alignment issues */
+
+	if (!PyArg_ParseTuple(args, "ls#", &orig, &bin, &patchlen))
+		return NULL;
+
+	binend = bin + patchlen;
+
+	while (bin < binend) {
+		memcpy(decode, bin, 12);  /* same 12-byte hunk header as decode() */
+		start = ntohl(*(uint32_t *)decode);
+		end = ntohl(*(uint32_t *)(decode + 4));
+		len = ntohl(*(uint32_t *)(decode + 8));
+		bin += 12 + len;
+		outlen += start - last;  /* mirrors calcsize() without building a hunk list */
+		last = end;
+		outlen += len;
+	}
+
+	if (bin != binend) {  /* overshoot means a corrupt length field */
+		if (!PyErr_Occurred())
+			PyErr_SetString(mpatch_Error, "patch cannot be decoded");
+		return NULL;
+	}
+
+	outlen += orig - last;
+	return Py_BuildValue("l", outlen);
+}
+
+static PyMethodDef methods[] = {
+	{"patches", patches, METH_VARARGS, "apply a series of patches\n"},
+	{"patchedsize", patchedsize, METH_VARARGS, "calculate patched size\n"},  /* was "calculed": typo in the user-visible docstring */
+	{NULL, NULL}
+};
+
+PyMODINIT_FUNC
+initmpatch(void)  /* module init entry point required by CPython 2's import machinery */
+{
+	Py_InitModule3("mpatch", methods, mpatch_doc);
+	mpatch_Error = PyErr_NewException("mpatch.mpatchError", NULL, NULL);
+}
+
new file mode 100644
--- /dev/null
+++ b/mercurial/node.py
@@ -0,0 +1,21 @@
+"""
+node.py - basic nodeid manipulation for mercurial
+
+Copyright 2005 Matt Mackall <mpm@selenic.com>
+
+This software may be used and distributed according to the terms
+of the GNU General Public License, incorporated herein by reference.
+"""
+
+import binascii
+
+nullid = "\0" * 20
+
+def hex(node):  # binary nodeid -> 40-char hex string
+    return binascii.hexlify(node)
+
+def bin(node):  # 40-char hex string -> binary nodeid
+    return binascii.unhexlify(node)
+
+def short(node):  # abbreviated 12-char hex form (first 6 binary bytes)
+    return hex(node[:6])
new file mode 100644
--- /dev/null
+++ b/mercurial/packagescan.py
@@ -0,0 +1,113 @@
+# packagescan.py - Helper module for identifying used modules.
+# Used for the py2exe distutil.
+# This module must be the first mercurial module imported in setup.py
+#
+# Copyright 2005 Volker Kleinfeld <Volker.Kleinfeld@gmx.de>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+import glob
+import os
+import sys
+import ihooks
+import types
+import string
+
+# Install this module as fake demandload module
+sys.modules['mercurial.demandload'] = sys.modules[__name__]
+
+# Requiredmodules contains the modules imported by demandload.
+# Please note that demandload can be invoked before the
+# mercurial.packagescan.scan method is invoked in case a mercurial
+# module is imported.
+requiredmodules = {}
+def demandload(scope, modules):
+    """ fake demandload function that collects the required modules
+        foo            import foo
+        foo bar        import foo, bar
+        foo.bar        import foo.bar
+        foo:bar        from foo import bar
+        foo:bar,quux   from foo import bar, quux
+        foo.bar:quux   from foo.bar import quux"""
+
+    for m in modules.split():
+        mod = None
+        try:
+            module, fromlist = m.split(':')
+            fromlist = fromlist.split(',')
+        except:  # no ':' present: plain "import module" form
+            module = m
+            fromlist = []
+        mod = __import__(module, scope, scope, fromlist)
+        if fromlist == []:
+            # mod is only the top package, but we need all packages
+            comp = module.split('.')
+            i = 1
+            mn = comp[0]
+            while True:
+                # mn and mod.__name__ might not be the same
+                scope[mn] = mod
+                requiredmodules[mod.__name__] = 1  # record every package along the dotted path
+                if len(comp) == i: break
+                mod = getattr(mod,comp[i])
+                mn = string.join(comp[:i+1],'.')
+                i += 1
+        else:
+            # mod is the last package in the component list
+            requiredmodules[mod.__name__] = 1
+            for f in fromlist:
+                scope[f] = getattr(mod,f)
+                if type(scope[f]) == types.ModuleType:  # "from pkg import submodule" case
+                    requiredmodules[scope[f].__name__] = 1
+
+def scan(libpath,packagename):
+    """ helper for finding all required modules of package <packagename> """
+    # Use the package in the build directory
+    libpath = os.path.abspath(libpath)
+    sys.path.insert(0,libpath)  # make the build dir shadow any installed copy
+    packdir = os.path.join(libpath,packagename)
+    # A normal import would not find the package in
+    # the build directory. ihook is used to force the import.
+    # After the package is imported the import scope for
+    # the following imports is settled.
+    p = importfrom(packdir)
+    globals()[packagename] = p
+    sys.modules[packagename] = p
+    # Fetch the python modules in the package
+    cwd = os.getcwd()
+    os.chdir(packdir)
+    pymodulefiles = glob.glob('*.py')
+    extmodulefiles = glob.glob('*.pyd')  # win32 extension modules (the py2exe target)
+    os.chdir(cwd)
+    # Import all python modules and by that run the fake demandload
+    for m in pymodulefiles:
+        if m == '__init__.py': continue  # already imported as the package itself
+        tmp = {}
+        mname,ext = os.path.splitext(m)
+        fullname = packagename+'.'+mname
+        __import__(fullname,tmp,tmp)
+        requiredmodules[fullname] = 1
+    # Import all extension modules and by that run the fake demandload
+    for m in extmodulefiles:
+        tmp = {}
+        mname,ext = os.path.splitext(m)
+        fullname = packagename+'.'+mname
+        __import__(fullname,tmp,tmp)
+        requiredmodules[fullname] = 1
+
+def getmodules():
+    return requiredmodules.keys()  # names of all modules recorded by the fake demandload
+
+def importfrom(filename):
+    """
+    import module/package from a named file and returns the module.
+    It does not check on sys.modules or includes the module in the scope.
+    """
+    loader = ihooks.BasicModuleLoader()  # bypasses the normal sys.path lookup
+    path, file = os.path.split(filename)
+    name, ext = os.path.splitext(file)
+    m = loader.find_module_in_dir(name, path)
+    if not m:
+        raise ImportError, name
+    m = loader.load_module(name, m)
+    return m
new file mode 100644
--- /dev/null
+++ b/mercurial/remoterepo.py
@@ -0,0 +1,20 @@
+# remoterepo - remote repository proxy classes for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+class remoterepository(object):  # common base for remote repository proxies
+    def local(self):
+        return False  # distinguishes proxies from a local repository
+
+class remotelock(object):  # lock proxy that unlocks the remote repo on release
+    def __init__(self, repo):
+        self.repo = repo
+    def release(self):
+        self.repo.unlock()
+        self.repo = None  # drop the reference so __del__ won't unlock twice
+    def __del__(self):
+        if self.repo:
+            self.release()
new file mode 100644
--- /dev/null
+++ b/mercurial/repo.py
@@ -0,0 +1,8 @@
+# repo.py - repository base classes for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+class RepoError(Exception): pass  # base exception for repository access failures
new file mode 100644
--- /dev/null
+++ b/mercurial/revlog.py
@@ -0,0 +1,1272 @@
+"""
+revlog.py - storage back-end for mercurial
+
+This provides efficient delta storage with O(1) retrieve and append
+and O(changes) merge between branches
+
+Copyright 2005 Matt Mackall <mpm@selenic.com>
+
+This software may be used and distributed according to the terms
+of the GNU General Public License, incorporated herein by reference.
+"""
+
+from node import *
+from i18n import gettext as _
+from demandload import demandload
+demandload(globals(), "binascii changegroup errno heapq mdiff os")
+demandload(globals(), "sha struct util zlib")
+
+# revlog version strings
+REVLOGV0 = 0
+REVLOGNG = 1
+
+# revlog flags
+REVLOGNGINLINEDATA = (1 << 16)
+REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
+
+REVLOG_DEFAULT_FORMAT = REVLOGNG
+REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
+
+def flagstr(flag):
+ if flag == "inline":
+ return REVLOGNGINLINEDATA
+ raise RevlogError(_("unknown revlog flag %s" % flag))
+
+def hash(text, p1, p2):
+ """generate a hash from the given text and its parent hashes
+
+ This hash combines both the current file contents and its history
+ in a manner that makes it easy to distinguish nodes with the same
+ content in the revision graph.
+ """
+ l = [p1, p2]
+ l.sort()
+ s = sha.new(l[0])
+ s.update(l[1])
+ s.update(text)
+ return s.digest()
+
+def compress(text):
+ """ generate a possibly-compressed representation of text """
+ if not text: return ("", text)
+ if len(text) < 44:
+ if text[0] == '\0': return ("", text)
+ return ('u', text)
+ bin = zlib.compress(text)
+ if len(bin) > len(text):
+ if text[0] == '\0': return ("", text)
+ return ('u', text)
+ return ("", bin)
+
+def decompress(bin):
+ """ decompress the given input """
+ if not bin: return bin
+ t = bin[0]
+ if t == '\0': return bin
+ if t == 'x': return zlib.decompress(bin)
+ if t == 'u': return bin[1:]
+ raise RevlogError(_("unknown compression type %r") % t)
+
+indexformatv0 = ">4l20s20s20s"
+v0shaoffset = 56
+# index ng:
+# 6 bytes offset
+# 2 bytes flags
+# 4 bytes compressed length
+# 4 bytes uncompressed length
+# 4 bytes: base rev
+# 4 bytes link rev
+# 4 bytes parent 1 rev
+# 4 bytes parent 2 rev
+# 32 bytes: nodeid
+indexformatng = ">Qiiiiii20s12x"
+ngshaoffset = 32
+versionformat = ">i"
+
+class lazyparser(object):
+ """
+ this class avoids the need to parse the entirety of large indices
+ """
+
+ # lazyparser is not safe to use on windows if win32 extensions not
+ # available. it keeps file handle open, which make it not possible
+ # to break hardlinks on local cloned repos.
+ safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
+ hasattr(util, 'win32api'))
+
+ def __init__(self, dataf, size, indexformat, shaoffset):
+ self.dataf = dataf
+ self.format = indexformat
+ self.s = struct.calcsize(indexformat)
+ self.indexformat = indexformat
+ self.datasize = size
+ self.l = size/self.s
+ self.index = [None] * self.l
+ self.map = {nullid: -1}
+ self.allmap = 0
+ self.all = 0
+ self.mapfind_count = 0
+ self.shaoffset = shaoffset
+
+ def loadmap(self):
+ """
+ during a commit, we need to make sure the rev being added is
+ not a duplicate. This requires loading the entire index,
+ which is fairly slow. loadmap can load up just the node map,
+ which takes much less time.
+ """
+ if self.allmap: return
+ start = 0
+ end = self.datasize
+ self.allmap = 1
+ cur = 0
+ count = 0
+ blocksize = self.s * 256
+ self.dataf.seek(0)
+ while cur < end:
+ data = self.dataf.read(blocksize)
+ off = 0
+ for x in xrange(256):
+ n = data[off + self.shaoffset:off + self.shaoffset + 20]
+ self.map[n] = count
+ count += 1
+ if count >= self.l:
+ break
+ off += self.s
+ cur += blocksize
+
+ def loadblock(self, blockstart, blocksize, data=None):
+ if self.all: return
+ if data is None:
+ self.dataf.seek(blockstart)
+ data = self.dataf.read(blocksize)
+ lend = len(data) / self.s
+ i = blockstart / self.s
+ off = 0
+ for x in xrange(lend):
+ if self.index[i + x] == None:
+ b = data[off : off + self.s]
+ self.index[i + x] = b
+ n = b[self.shaoffset:self.shaoffset + 20]
+ self.map[n] = i + x
+ off += self.s
+
+ def findnode(self, node):
+ """search backwards through the index file for a specific node"""
+ if self.allmap: return None
+
+ # hg log will cause many many searches for the manifest
+ # nodes. After we get called a few times, just load the whole
+ # thing.
+ if self.mapfind_count > 8:
+ self.loadmap()
+ if node in self.map:
+ return node
+ return None
+ self.mapfind_count += 1
+ last = self.l - 1
+ while self.index[last] != None:
+ if last == 0:
+ self.all = 1
+ self.allmap = 1
+ return None
+ last -= 1
+ end = (last + 1) * self.s
+ blocksize = self.s * 256
+ while end >= 0:
+ start = max(end - blocksize, 0)
+ self.dataf.seek(start)
+ data = self.dataf.read(end - start)
+ findend = end - start
+ while True:
+            # we're searching backwards, so we have to make sure
+ # we don't find a changeset where this node is a parent
+ off = data.rfind(node, 0, findend)
+ findend = off
+ if off >= 0:
+ i = off / self.s
+ off = i * self.s
+ n = data[off + self.shaoffset:off + self.shaoffset + 20]
+ if n == node:
+ self.map[n] = i + start / self.s
+ return node
+ else:
+ break
+ end -= blocksize
+ return None
+
+ def loadindex(self, i=None, end=None):
+ if self.all: return
+ all = False
+ if i == None:
+ blockstart = 0
+ blocksize = (512 / self.s) * self.s
+ end = self.datasize
+ all = True
+ else:
+ if end:
+ blockstart = i * self.s
+ end = end * self.s
+ blocksize = end - blockstart
+ else:
+ blockstart = (i & ~(32)) * self.s
+ blocksize = self.s * 64
+ end = blockstart + blocksize
+ while blockstart < end:
+ self.loadblock(blockstart, blocksize)
+ blockstart += blocksize
+ if all: self.all = True
+
+class lazyindex(object):
+ """a lazy version of the index array"""
+ def __init__(self, parser):
+ self.p = parser
+ def __len__(self):
+ return len(self.p.index)
+ def load(self, pos):
+ if pos < 0:
+ pos += len(self.p.index)
+ self.p.loadindex(pos)
+ return self.p.index[pos]
+ def __getitem__(self, pos):
+ ret = self.p.index[pos] or self.load(pos)
+ if isinstance(ret, str):
+ ret = struct.unpack(self.p.indexformat, ret)
+ return ret
+ def __setitem__(self, pos, item):
+ self.p.index[pos] = item
+ def __delitem__(self, pos):
+ del self.p.index[pos]
+ def append(self, e):
+ self.p.index.append(e)
+
+class lazymap(object):
+ """a lazy version of the node map"""
+ def __init__(self, parser):
+ self.p = parser
+ def load(self, key):
+ n = self.p.findnode(key)
+ if n == None:
+ raise KeyError(key)
+ def __contains__(self, key):
+ if key in self.p.map:
+ return True
+ self.p.loadmap()
+ return key in self.p.map
+ def __iter__(self):
+ yield nullid
+ for i in xrange(self.p.l):
+ ret = self.p.index[i]
+ if not ret:
+ self.p.loadindex(i)
+ ret = self.p.index[i]
+ if isinstance(ret, str):
+ ret = struct.unpack(self.p.indexformat, ret)
+ yield ret[-1]
+ def __getitem__(self, key):
+ try:
+ return self.p.map[key]
+ except KeyError:
+ try:
+ self.load(key)
+ return self.p.map[key]
+ except KeyError:
+ raise KeyError("node " + hex(key))
+ def __setitem__(self, key, val):
+ self.p.map[key] = val
+ def __delitem__(self, key):
+ del self.p.map[key]
+
+class RevlogError(Exception): pass
+
+class revlog(object):
+ """
+ the underlying revision storage object
+
+ A revlog consists of two parts, an index and the revision data.
+
+ The index is a file with a fixed record size containing
+    information on each revision, including its nodeid (hash), the
+ nodeids of its parents, the position and offset of its data within
+ the data file, and the revision it's based on. Finally, each entry
+ contains a linkrev entry that can serve as a pointer to external
+ data.
+
+ The revision data itself is a linear collection of data chunks.
+ Each chunk represents a revision and is usually represented as a
+ delta against the previous chunk. To bound lookup time, runs of
+ deltas are limited to about 2 times the length of the original
+ version data. This makes retrieval of a version proportional to
+ its size, or O(1) relative to the number of revisions.
+
+ Both pieces of the revlog are written to in an append-only
+ fashion, which means we never need to rewrite a file to insert or
+ remove data, and can use some simple techniques to avoid the need
+ for locking while reading.
+ """
+ def __init__(self, opener, indexfile, datafile,
+ defversion=REVLOG_DEFAULT_VERSION):
+ """
+ create a revlog object
+
+ opener is a function that abstracts the file opening operation
+ and can be used to implement COW semantics or the like.
+ """
+ self.indexfile = indexfile
+ self.datafile = datafile
+ self.opener = opener
+
+ self.indexstat = None
+ self.cache = None
+ self.chunkcache = None
+ self.defversion = defversion
+ self.load()
+
+ def load(self):
+ v = self.defversion
+ try:
+ f = self.opener(self.indexfile)
+ i = f.read(4)
+ f.seek(0)
+ except IOError, inst:
+ if inst.errno != errno.ENOENT:
+ raise
+ i = ""
+ else:
+ try:
+ st = util.fstat(f)
+ except AttributeError, inst:
+ st = None
+ else:
+ oldst = self.indexstat
+ if (oldst and st.st_dev == oldst.st_dev
+ and st.st_ino == oldst.st_ino
+ and st.st_mtime == oldst.st_mtime
+ and st.st_ctime == oldst.st_ctime):
+ return
+ self.indexstat = st
+ if len(i) > 0:
+ v = struct.unpack(versionformat, i)[0]
+ flags = v & ~0xFFFF
+ fmt = v & 0xFFFF
+ if fmt == REVLOGV0:
+ if flags:
+ raise RevlogError(_("index %s invalid flags %x for format v0" %
+ (self.indexfile, flags)))
+ elif fmt == REVLOGNG:
+ if flags & ~REVLOGNGINLINEDATA:
+ raise RevlogError(_("index %s invalid flags %x for revlogng" %
+ (self.indexfile, flags)))
+ else:
+ raise RevlogError(_("index %s invalid format %d" %
+ (self.indexfile, fmt)))
+ self.version = v
+ if v == REVLOGV0:
+ self.indexformat = indexformatv0
+ shaoffset = v0shaoffset
+ else:
+ self.indexformat = indexformatng
+ shaoffset = ngshaoffset
+
+ if i:
+ if (lazyparser.safe_to_use and not self.inlinedata() and
+ st and st.st_size > 10000):
+ # big index, let's parse it on demand
+ parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
+ self.index = lazyindex(parser)
+ self.nodemap = lazymap(parser)
+ else:
+ self.parseindex(f, st)
+ if self.version != REVLOGV0:
+ e = list(self.index[0])
+ type = self.ngtype(e[0])
+ e[0] = self.offset_type(0, type)
+ self.index[0] = e
+ else:
+ self.nodemap = { nullid: -1}
+ self.index = []
+
+
+ def parseindex(self, fp, st):
+ s = struct.calcsize(self.indexformat)
+ self.index = []
+ self.nodemap = {nullid: -1}
+ inline = self.inlinedata()
+ n = 0
+ leftover = None
+ while True:
+ if st:
+ data = fp.read(65536)
+ else:
+ # hack for httprangereader, it doesn't do partial reads well
+ data = fp.read()
+ if not data:
+ break
+ if n == 0 and self.inlinedata():
+ # cache the first chunk
+ self.chunkcache = (0, data)
+ if leftover:
+ data = leftover + data
+ leftover = None
+ off = 0
+ l = len(data)
+ while off < l:
+ if l - off < s:
+ leftover = data[off:]
+ break
+ cur = data[off:off + s]
+ off += s
+ e = struct.unpack(self.indexformat, cur)
+ self.index.append(e)
+ self.nodemap[e[-1]] = n
+ n += 1
+ if inline:
+ off += e[1]
+ if off > l:
+ # some things don't seek well, just read it
+ fp.read(off - l)
+ if not st:
+ break
+
+
+ def ngoffset(self, q):
+ if q & 0xFFFF:
+ raise RevlogError(_('%s: incompatible revision flag %x') %
+ (self.indexfile, q))
+ return long(q >> 16)
+
+ def ngtype(self, q):
+ return int(q & 0xFFFF)
+
+ def offset_type(self, offset, type):
+ return long(long(offset) << 16 | type)
+
+ def loadindex(self, start, end):
+ """load a block of indexes all at once from the lazy parser"""
+ if isinstance(self.index, lazyindex):
+ self.index.p.loadindex(start, end)
+
+ def loadindexmap(self):
+ """loads both the map and the index from the lazy parser"""
+ if isinstance(self.index, lazyindex):
+ p = self.index.p
+ p.loadindex()
+ self.nodemap = p.map
+
+ def loadmap(self):
+ """loads the map from the lazy parser"""
+ if isinstance(self.nodemap, lazymap):
+ self.nodemap.p.loadmap()
+ self.nodemap = self.nodemap.p.map
+
+ def inlinedata(self): return self.version & REVLOGNGINLINEDATA
+ def tip(self): return self.node(len(self.index) - 1)
+ def count(self): return len(self.index)
+ def node(self, rev):
+ return (rev < 0) and nullid or self.index[rev][-1]
+ def rev(self, node):
+ try:
+ return self.nodemap[node]
+ except KeyError:
+ raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
+ def linkrev(self, node): return self.index[self.rev(node)][-4]
+ def parents(self, node):
+ if node == nullid: return (nullid, nullid)
+ r = self.rev(node)
+ d = self.index[r][-3:-1]
+ if self.version == REVLOGV0:
+ return d
+ return [ self.node(x) for x in d ]
+ def start(self, rev):
+ if rev < 0:
+ return -1
+ if self.version != REVLOGV0:
+ return self.ngoffset(self.index[rev][0])
+ return self.index[rev][0]
+
+ def end(self, rev): return self.start(rev) + self.length(rev)
+
+ def size(self, rev):
+ """return the length of the uncompressed text for a given revision"""
+ l = -1
+ if self.version != REVLOGV0:
+ l = self.index[rev][2]
+ if l >= 0:
+ return l
+
+ t = self.revision(self.node(rev))
+ return len(t)
+
+ # alternate implementation, The advantage to this code is it
+ # will be faster for a single revision. But, the results are not
+ # cached, so finding the size of every revision will be slower.
+ """
+ if self.cache and self.cache[1] == rev:
+ return len(self.cache[2])
+
+ base = self.base(rev)
+ if self.cache and self.cache[1] >= base and self.cache[1] < rev:
+ base = self.cache[1]
+ text = self.cache[2]
+ else:
+ text = self.revision(self.node(base))
+
+ l = len(text)
+ for x in xrange(base + 1, rev + 1):
+ l = mdiff.patchedsize(l, self.chunk(x))
+ return l
+ """
+
+ def length(self, rev):
+ if rev < 0:
+ return 0
+ else:
+ return self.index[rev][1]
+ def base(self, rev): return (rev < 0) and rev or self.index[rev][-5]
+
+ def reachable(self, rev, stop=None):
+ reachable = {}
+ visit = [rev]
+ reachable[rev] = 1
+ if stop:
+ stopn = self.rev(stop)
+ else:
+ stopn = 0
+ while visit:
+ n = visit.pop(0)
+ if n == stop:
+ continue
+ if n == nullid:
+ continue
+ for p in self.parents(n):
+ if self.rev(p) < stopn:
+ continue
+ if p not in reachable:
+ reachable[p] = 1
+ visit.append(p)
+ return reachable
+
+ def nodesbetween(self, roots=None, heads=None):
+ """Return a tuple containing three elements. Elements 1 and 2 contain
+ a final list bases and heads after all the unreachable ones have been
+ pruned. Element 0 contains a topologically sorted list of all
+
+ nodes that satisfy these constraints:
+ 1. All nodes must be descended from a node in roots (the nodes on
+ roots are considered descended from themselves).
+ 2. All nodes must also be ancestors of a node in heads (the nodes in
+ heads are considered to be their own ancestors).
+
+ If roots is unspecified, nullid is assumed as the only root.
+ If heads is unspecified, it is taken to be the output of the
+ heads method (i.e. a list of all nodes in the repository that
+ have no children)."""
+ nonodes = ([], [], [])
+ if roots is not None:
+ roots = list(roots)
+ if not roots:
+ return nonodes
+ lowestrev = min([self.rev(n) for n in roots])
+ else:
+ roots = [nullid] # Everybody's a descendent of nullid
+ lowestrev = -1
+ if (lowestrev == -1) and (heads is None):
+ # We want _all_ the nodes!
+ return ([self.node(r) for r in xrange(0, self.count())],
+ [nullid], list(self.heads()))
+ if heads is None:
+ # All nodes are ancestors, so the latest ancestor is the last
+ # node.
+ highestrev = self.count() - 1
+ # Set ancestors to None to signal that every node is an ancestor.
+ ancestors = None
+ # Set heads to an empty dictionary for later discovery of heads
+ heads = {}
+ else:
+ heads = list(heads)
+ if not heads:
+ return nonodes
+ ancestors = {}
+ # Start at the top and keep marking parents until we're done.
+ nodestotag = heads[:]
+ # Turn heads into a dictionary so we can remove 'fake' heads.
+ # Also, later we will be using it to filter out the heads we can't
+ # find from roots.
+ heads = dict.fromkeys(heads, 0)
+ # Remember where the top was so we can use it as a limit later.
+ highestrev = max([self.rev(n) for n in nodestotag])
+ while nodestotag:
+ # grab a node to tag
+ n = nodestotag.pop()
+ # Never tag nullid
+ if n == nullid:
+ continue
+ # A node's revision number represents its place in a
+ # topologically sorted list of nodes.
+ r = self.rev(n)
+ if r >= lowestrev:
+ if n not in ancestors:
+ # If we are possibly a descendent of one of the roots
+ # and we haven't already been marked as an ancestor
+ ancestors[n] = 1 # Mark as ancestor
+ # Add non-nullid parents to list of nodes to tag.
+ nodestotag.extend([p for p in self.parents(n) if
+ p != nullid])
+ elif n in heads: # We've seen it before, is it a fake head?
+ # So it is, real heads should not be the ancestors of
+ # any other heads.
+ heads.pop(n)
+ if not ancestors:
+ return nonodes
+ # Now that we have our set of ancestors, we want to remove any
+ # roots that are not ancestors.
+
+ # If one of the roots was nullid, everything is included anyway.
+ if lowestrev > -1:
+ # But, since we weren't, let's recompute the lowest rev to not
+ # include roots that aren't ancestors.
+
+ # Filter out roots that aren't ancestors of heads
+ roots = [n for n in roots if n in ancestors]
+ # Recompute the lowest revision
+ if roots:
+ lowestrev = min([self.rev(n) for n in roots])
+ else:
+ # No more roots? Return empty list
+ return nonodes
+ else:
+ # We are descending from nullid, and don't need to care about
+ # any other roots.
+ lowestrev = -1
+ roots = [nullid]
+        # Transform our roots list into a 'set' (i.e. a dictionary where the
+        # values don't matter).
+ descendents = dict.fromkeys(roots, 1)
+ # Also, keep the original roots so we can filter out roots that aren't
+ # 'real' roots (i.e. are descended from other roots).
+ roots = descendents.copy()
+ # Our topologically sorted list of output nodes.
+ orderedout = []
+ # Don't start at nullid since we don't want nullid in our output list,
+        # and if nullid shows up in descendents, empty parents will look like
+ # they're descendents.
+ for r in xrange(max(lowestrev, 0), highestrev + 1):
+ n = self.node(r)
+ isdescendent = False
+ if lowestrev == -1: # Everybody is a descendent of nullid
+ isdescendent = True
+ elif n in descendents:
+ # n is already a descendent
+ isdescendent = True
+ # This check only needs to be done here because all the roots
+                # will start being marked as descendents before the loop.
+ if n in roots:
+ # If n was a root, check if it's a 'real' root.
+ p = tuple(self.parents(n))
+ # If any of its parents are descendents, it's not a root.
+ if (p[0] in descendents) or (p[1] in descendents):
+ roots.pop(n)
+ else:
+ p = tuple(self.parents(n))
+ # A node is a descendent if either of its parents are
+                # descendents. (We seeded the descendents list with the roots
+ # up there, remember?)
+ if (p[0] in descendents) or (p[1] in descendents):
+ descendents[n] = 1
+ isdescendent = True
+ if isdescendent and ((ancestors is None) or (n in ancestors)):
+ # Only include nodes that are both descendents and ancestors.
+ orderedout.append(n)
+ if (ancestors is not None) and (n in heads):
+ # We're trying to figure out which heads are reachable
+ # from roots.
+ # Mark this head as having been reached
+ heads[n] = 1
+ elif ancestors is None:
+ # Otherwise, we're trying to discover the heads.
+ # Assume this is a head because if it isn't, the next step
+ # will eventually remove it.
+ heads[n] = 1
+ # But, obviously its parents aren't.
+ for p in self.parents(n):
+ heads.pop(p, None)
+ heads = [n for n in heads.iterkeys() if heads[n] != 0]
+ roots = roots.keys()
+ assert orderedout
+ assert roots
+ assert heads
+ return (orderedout, roots, heads)
+
+ def heads(self, start=None):
+ """return the list of all nodes that have no children
+
+ if start is specified, only heads that are descendants of
+ start will be returned
+
+ """
+ if start is None:
+ start = nullid
+ reachable = {start: 1}
+ heads = {start: 1}
+ startrev = self.rev(start)
+
+ for r in xrange(startrev + 1, self.count()):
+ n = self.node(r)
+ for pn in self.parents(n):
+ if pn in reachable:
+ reachable[n] = 1
+ heads[n] = 1
+ if pn in heads:
+ del heads[pn]
+ return heads.keys()
+
+ def children(self, node):
+ """find the children of a given node"""
+ c = []
+ p = self.rev(node)
+ for r in range(p + 1, self.count()):
+ n = self.node(r)
+ for pn in self.parents(n):
+ if pn == node:
+ c.append(n)
+ continue
+ elif pn == nullid:
+ continue
+ return c
+
+ def lookup(self, id):
+ """locate a node based on revision number or subset of hex nodeid"""
+ try:
+ rev = int(id)
+ if str(rev) != id: raise ValueError
+ if rev < 0: rev = self.count() + rev
+ if rev < 0 or rev >= self.count(): raise ValueError
+ return self.node(rev)
+ except (ValueError, OverflowError):
+ c = []
+ for n in self.nodemap:
+ if hex(n).startswith(id):
+ c.append(n)
+ if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
+ if len(c) < 1: raise RevlogError(_("No match found"))
+ return c[0]
+
+ return None
+
+ def diff(self, a, b):
+ """return a delta between two revisions"""
+ return mdiff.textdiff(a, b)
+
+ def patches(self, t, pl):
+ """apply a list of patches to a string"""
+ return mdiff.patches(t, pl)
+
+ def chunk(self, rev, df=None, cachelen=4096):
+ start, length = self.start(rev), self.length(rev)
+ inline = self.inlinedata()
+ if inline:
+ start += (rev + 1) * struct.calcsize(self.indexformat)
+ end = start + length
+ def loadcache(df):
+ cache_length = max(cachelen, length) # 4k
+ if not df:
+ if inline:
+ df = self.opener(self.indexfile)
+ else:
+ df = self.opener(self.datafile)
+ df.seek(start)
+ self.chunkcache = (start, df.read(cache_length))
+
+ if not self.chunkcache:
+ loadcache(df)
+
+ cache_start = self.chunkcache[0]
+ cache_end = cache_start + len(self.chunkcache[1])
+ if start >= cache_start and end <= cache_end:
+ # it is cached
+ offset = start - cache_start
+ else:
+ loadcache(df)
+ offset = 0
+
+ #def checkchunk():
+ # df = self.opener(self.datafile)
+ # df.seek(start)
+ # return df.read(length)
+ #assert s == checkchunk()
+ return decompress(self.chunkcache[1][offset:offset + length])
+
+ def delta(self, node):
+ """return or calculate a delta between a node and its predecessor"""
+ r = self.rev(node)
+ return self.revdiff(r - 1, r)
+
+ def revdiff(self, rev1, rev2):
+ """return or calculate a delta between two revisions"""
+ b1 = self.base(rev1)
+ b2 = self.base(rev2)
+ if b1 == b2 and rev1 + 1 == rev2:
+ return self.chunk(rev2)
+ else:
+ return self.diff(self.revision(self.node(rev1)),
+ self.revision(self.node(rev2)))
+
+ def revision(self, node):
+ """return an uncompressed revision of a given"""
+ if node == nullid: return ""
+ if self.cache and self.cache[0] == node: return self.cache[2]
+
+ # look up what we need to read
+ text = None
+ rev = self.rev(node)
+ base = self.base(rev)
+
+ if self.inlinedata():
+ # we probably have the whole chunk cached
+ df = None
+ else:
+ df = self.opener(self.datafile)
+
+ # do we have useful data cached?
+ if self.cache and self.cache[1] >= base and self.cache[1] < rev:
+ base = self.cache[1]
+ text = self.cache[2]
+ self.loadindex(base, rev + 1)
+ else:
+ self.loadindex(base, rev + 1)
+ text = self.chunk(base, df=df)
+
+ bins = []
+ for r in xrange(base + 1, rev + 1):
+ bins.append(self.chunk(r, df=df))
+
+ text = self.patches(text, bins)
+
+ p1, p2 = self.parents(node)
+ if node != hash(text, p1, p2):
+ raise RevlogError(_("integrity check failed on %s:%d")
+ % (self.datafile, rev))
+
+ self.cache = (node, rev, text)
+ return text
+
+ def checkinlinesize(self, tr, fp=None):
+ if not self.inlinedata():
+ return
+ if not fp:
+ fp = self.opener(self.indexfile, 'r')
+ fp.seek(0, 2)
+ size = fp.tell()
+ if size < 131072:
+ return
+ trinfo = tr.find(self.indexfile)
+ if trinfo == None:
+ raise RevlogError(_("%s not found in the transaction" %
+ self.indexfile))
+
+ trindex = trinfo[2]
+ dataoff = self.start(trindex)
+
+ tr.add(self.datafile, dataoff)
+ df = self.opener(self.datafile, 'w')
+ calc = struct.calcsize(self.indexformat)
+ for r in xrange(self.count()):
+ start = self.start(r) + (r + 1) * calc
+ length = self.length(r)
+ fp.seek(start)
+ d = fp.read(length)
+ df.write(d)
+ fp.close()
+ df.close()
+ fp = self.opener(self.indexfile, 'w', atomictemp=True)
+ self.version &= ~(REVLOGNGINLINEDATA)
+ if self.count():
+ x = self.index[0]
+ e = struct.pack(self.indexformat, *x)[4:]
+ l = struct.pack(versionformat, self.version)
+ fp.write(l)
+ fp.write(e)
+
+ for i in xrange(1, self.count()):
+ x = self.index[i]
+ e = struct.pack(self.indexformat, *x)
+ fp.write(e)
+
+ # if we don't call rename, the temp file will never replace the
+ # real index
+ fp.rename()
+
+ tr.replace(self.indexfile, trindex * calc)
+ self.chunkcache = None
+
+ def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
+ """add a revision to the log
+
+ text - the revision data to add
+ transaction - the transaction object used for rollback
+ link - the linkrev data to add
+ p1, p2 - the parent nodeids of the revision
+ d - an optional precomputed delta
+ """
+ if text is None: text = ""
+ if p1 is None: p1 = self.tip()
+ if p2 is None: p2 = nullid
+
+ node = hash(text, p1, p2)
+
+ if node in self.nodemap:
+ return node
+
+ n = self.count()
+ t = n - 1
+
+ if n:
+ base = self.base(t)
+ start = self.start(base)
+ end = self.end(t)
+ if not d:
+ prev = self.revision(self.tip())
+ d = self.diff(prev, str(text))
+ data = compress(d)
+ l = len(data[1]) + len(data[0])
+ dist = end - start + l
+
+ # full versions are inserted when the needed deltas
+ # become comparable to the uncompressed text
+ if not n or dist > len(text) * 2:
+ data = compress(text)
+ l = len(data[1]) + len(data[0])
+ base = n
+ else:
+ base = self.base(t)
+
+ offset = 0
+ if t >= 0:
+ offset = self.end(t)
+
+ if self.version == REVLOGV0:
+ e = (offset, l, base, link, p1, p2, node)
+ else:
+ e = (self.offset_type(offset, 0), l, len(text),
+ base, link, self.rev(p1), self.rev(p2), node)
+
+ self.index.append(e)
+ self.nodemap[node] = n
+ entry = struct.pack(self.indexformat, *e)
+
+ if not self.inlinedata():
+ transaction.add(self.datafile, offset)
+ transaction.add(self.indexfile, n * len(entry))
+ f = self.opener(self.datafile, "a")
+ if data[0]:
+ f.write(data[0])
+ f.write(data[1])
+ f.close()
+ f = self.opener(self.indexfile, "a")
+ else:
+ f = self.opener(self.indexfile, "a+")
+ f.seek(0, 2)
+ transaction.add(self.indexfile, f.tell(), self.count() - 1)
+
+ if len(self.index) == 1 and self.version != REVLOGV0:
+ l = struct.pack(versionformat, self.version)
+ f.write(l)
+ entry = entry[4:]
+
+ f.write(entry)
+
+ if self.inlinedata():
+ f.write(data[0])
+ f.write(data[1])
+ self.checkinlinesize(transaction, f)
+
+ self.cache = (node, n, text)
+ return node
+
+ def ancestor(self, a, b):
+ """calculate the least common ancestor of nodes a and b"""
+
+ # start with some short cuts for the linear cases
+ if a == b:
+ return a
+ ra = self.rev(a)
+ rb = self.rev(b)
+ if ra < rb:
+ last = b
+ first = a
+ else:
+ last = a
+ first = b
+
+ # reachable won't include stop in the list, so we have to use a parent
+ reachable = self.reachable(last, stop=self.parents(first)[0])
+ if first in reachable:
+ return first
+
+ # calculate the distance of every node from root
+ dist = {nullid: 0}
+ for i in xrange(self.count()):
+ n = self.node(i)
+ p1, p2 = self.parents(n)
+ dist[n] = max(dist[p1], dist[p2]) + 1
+
+ # traverse ancestors in order of decreasing distance from root
+ def ancestors(node):
+ # we store negative distances because heap returns smallest member
+ h = [(-dist[node], node)]
+ seen = {}
+ while h:
+ d, n = heapq.heappop(h)
+ if n not in seen:
+ seen[n] = 1
+ yield (-d, n)
+ for p in self.parents(n):
+ heapq.heappush(h, (-dist[p], p))
+
+ def generations(node):
+ sg, s = None, {}
+ for g,n in ancestors(node):
+ if g != sg:
+ if sg:
+ yield sg, s
+ sg, s = g, {n:1}
+ else:
+ s[n] = 1
+ yield sg, s
+
+ x = generations(a)
+ y = generations(b)
+ gx = x.next()
+ gy = y.next()
+
+ # increment each ancestor list until it is closer to root than
+ # the other, or they match
+ while 1:
+ #print "ancestor gen %s %s" % (gx[0], gy[0])
+ if gx[0] == gy[0]:
+ # find the intersection
+ i = [ n for n in gx[1] if n in gy[1] ]
+ if i:
+ return i[0]
+ else:
+ #print "next"
+ gy = y.next()
+ gx = x.next()
+ elif gx[0] < gy[0]:
+ #print "next y"
+ gy = y.next()
+ else:
+ #print "next x"
+ gx = x.next()
+
+ def group(self, nodelist, lookup, infocollect=None):
+ """calculate a delta group
+
+ Given a list of changeset revs, return a set of deltas and
+ metadata corresponding to nodes. the first delta is
+ parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
+ have this parent as it has all history before these
+ changesets. parent is parent[0]
+ """
+ revs = [self.rev(n) for n in nodelist]
+
+ # if we don't have any revisions touched by these changesets, bail
+ if not revs:
+ yield changegroup.closechunk()
+ return
+
+ # add the parent of the first rev
+ p = self.parents(self.node(revs[0]))[0]
+ revs.insert(0, self.rev(p))
+
+ # build deltas
+ for d in xrange(0, len(revs) - 1):
+ a, b = revs[d], revs[d + 1]
+ nb = self.node(b)
+
+ if infocollect is not None:
+ infocollect(nb)
+
+ d = self.revdiff(a, b)
+ p = self.parents(nb)
+ meta = nb + p[0] + p[1] + lookup(nb)
+ yield changegroup.genchunk("%s%s" % (meta, d))
+
+ yield changegroup.closechunk()
+
+ def addgroup(self, revs, linkmapper, transaction, unique=0):
+ """
+ add a delta group
+
+ given a set of deltas, add them to the revision log. the
+ first delta is against its parent, which should be in our
+ log, the rest are against the previous delta.
+ """
+
+ #track the base of the current delta log
+ r = self.count()
+ t = r - 1
+ node = None
+
+ base = prev = -1
+ start = end = textlen = 0
+ if r:
+ end = self.end(t)
+
+ ifh = self.opener(self.indexfile, "a+")
+ ifh.seek(0, 2)
+ transaction.add(self.indexfile, ifh.tell(), self.count())
+ if self.inlinedata():
+ dfh = None
+ else:
+ transaction.add(self.datafile, end)
+ dfh = self.opener(self.datafile, "a")
+
+ # loop through our set of deltas
+ chain = None
+ for chunk in revs:
+ node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
+ link = linkmapper(cs)
+ if node in self.nodemap:
+ # this can happen if two branches make the same change
+ # if unique:
+ # raise RevlogError(_("already have %s") % hex(node[:4]))
+ chain = node
+ continue
+ delta = chunk[80:]
+
+ for p in (p1, p2):
+ if not p in self.nodemap:
+ raise RevlogError(_("unknown parent %s") % short(p))
+
+ if not chain:
+ # retrieve the parent revision of the delta chain
+ chain = p1
+ if not chain in self.nodemap:
+ raise RevlogError(_("unknown base %s") % short(chain[:4]))
+
+ # full versions are inserted when the needed deltas become
+ # comparable to the uncompressed text or when the previous
+ # version is not the one we have a delta against. We use
+ # the size of the previous full rev as a proxy for the
+ # current size.
+
+ if chain == prev:
+ tempd = compress(delta)
+ cdelta = tempd[0] + tempd[1]
+ textlen = mdiff.patchedsize(textlen, delta)
+
+ if chain != prev or (end - start + len(cdelta)) > textlen * 2:
+ # flush our writes here so we can read it in revision
+ if dfh:
+ dfh.flush()
+ ifh.flush()
+ text = self.revision(chain)
+ text = self.patches(text, [delta])
+ chk = self.addrevision(text, transaction, link, p1, p2)
+ if chk != node:
+ raise RevlogError(_("consistency error adding group"))
+ textlen = len(text)
+ else:
+ if self.version == REVLOGV0:
+ e = (end, len(cdelta), base, link, p1, p2, node)
+ else:
+ e = (self.offset_type(end, 0), len(cdelta), textlen, base,
+ link, self.rev(p1), self.rev(p2), node)
+ self.index.append(e)
+ self.nodemap[node] = r
+ if self.inlinedata():
+ ifh.write(struct.pack(self.indexformat, *e))
+ ifh.write(cdelta)
+ self.checkinlinesize(transaction, ifh)
+ if not self.inlinedata():
+ dfh = self.opener(self.datafile, "a")
+ ifh = self.opener(self.indexfile, "a")
+ else:
+ if not dfh:
+ # addrevision switched from inline to conventional
+ # reopen the index
+ dfh = self.opener(self.datafile, "a")
+ ifh = self.opener(self.indexfile, "a")
+ dfh.write(cdelta)
+ ifh.write(struct.pack(self.indexformat, *e))
+
+ t, r, chain, prev = r, r + 1, node, node
+ base = self.base(t)
+ start = self.start(base)
+ end = self.end(t)
+
+ return node
+
+ def strip(self, rev, minlink):
+ if self.count() == 0 or rev >= self.count():
+ return
+
+ if isinstance(self.index, lazyindex):
+ self.loadindexmap()
+
+ # When stripping away a revision, we need to make sure it
+ # does not actually belong to an older changeset.
+ # The minlink parameter defines the oldest revision
+ # we're allowed to strip away.
+ while minlink > self.index[rev][-4]:
+ rev += 1
+ if rev >= self.count():
+ return
+
+ # first truncate the files on disk
+ end = self.start(rev)
+ if not self.inlinedata():
+ df = self.opener(self.datafile, "a")
+ df.truncate(end)
+ end = rev * struct.calcsize(self.indexformat)
+ else:
+ end += rev * struct.calcsize(self.indexformat)
+
+ indexf = self.opener(self.indexfile, "a")
+ indexf.truncate(end)
+
+ # then reset internal state in memory to forget those revisions
+ self.cache = None
+ self.chunkcache = None
+ for x in xrange(rev, self.count()):
+ del self.nodemap[self.node(x)]
+
+ del self.index[rev:]
+
+ def checksize(self):
+ expected = 0
+ if self.count():
+ expected = self.end(self.count() - 1)
+
+ try:
+ f = self.opener(self.datafile)
+ f.seek(0, 2)
+ actual = f.tell()
+ dd = actual - expected
+ except IOError, inst:
+ if inst.errno != errno.ENOENT:
+ raise
+ dd = 0
+
+ try:
+ f = self.opener(self.indexfile)
+ f.seek(0, 2)
+ actual = f.tell()
+ s = struct.calcsize(self.indexformat)
+ i = actual / s
+ di = actual - (i * s)
+ if self.inlinedata():
+ databytes = 0
+ for r in xrange(self.count()):
+ databytes += self.length(r)
+ dd = 0
+ di = actual - self.count() * s - databytes
+ except IOError, inst:
+ if inst.errno != errno.ENOENT:
+ raise
+ di = 0
+
+ return (dd, di)
+
+
new file mode 100644
--- /dev/null
+++ b/mercurial/sshrepo.py
@@ -0,0 +1,155 @@
+# sshrepo.py - ssh repository proxy class for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from node import *
+from remoterepo import *
+from i18n import gettext as _
+from demandload import *
+demandload(globals(), "hg os re stat util")
+
class sshrepository(remoterepository):
    """Repository proxy that starts 'hg serve --stdio' on a remote host
    over ssh and speaks the command protocol across the process pipes."""

    def __init__(self, ui, path):
        self.url = path
        self.ui = ui

        # ssh://[user@]host[:port][/path]
        m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
        if not m:
            raise hg.RepoError(_("couldn't parse destination %s") % path)

        self.user = m.group(2)
        self.host = m.group(3)
        self.port = m.group(5)
        self.path = m.group(7) or "."

        # build the ssh argument list: optional user@ prefix, -p port
        args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
        args = self.port and ("%s -p %s") % (args, self.port) or args

        sshcmd = self.ui.config("ui", "ssh", "ssh")
        remotecmd = self.ui.config("ui", "remotecmd", "hg")
        cmd = '%s %s "%s -R %s serve --stdio"'
        cmd = cmd % (sshcmd, args, remotecmd, self.path)

        ui.note('running %s\n' % cmd)
        self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')

        # skip any noise generated by remote shell startup files: send a
        # harmless "between" probe and scan (at most max_noise lines)
        # for its known "1\n" + "\n" reply
        r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
        l1 = ""
        l2 = "dummy"
        max_noise = 500
        while l2 and max_noise:
            l2 = r.readline()
            self.readerr()
            if l1 == "1\n" and l2 == "\n":
                break
            if l1:
                ui.debug(_("remote: "), l1)
            l1 = l2
            max_noise -= 1
        else:
            # loop exhausted without seeing the probe reply
            if l1:
                ui.debug(_("remote: "), l1)
            raise hg.RepoError(_("no response from remote hg"))

    def readerr(self):
        """relay buffered remote stderr output without blocking.

        Only reads while fstat reports data in the pipe, so a quiet
        remote end never stalls us."""
        while 1:
            size = util.fstat(self.pipee).st_size
            if size == 0: break
            l = self.pipee.readline()
            if not l: break
            self.ui.status(_("remote: "), l)

    def __del__(self):
        # best-effort shutdown: close the pipes and drain any remaining
        # stderr; never raise from a destructor
        try:
            self.pipeo.close()
            self.pipei.close()
            # read the error descriptor until EOF
            for l in self.pipee:
                self.ui.status(_("remote: "), l)
            self.pipee.close()
        except:
            pass

    def dev(self):
        # a remote repository has no local device number
        return -1

    def do_cmd(self, cmd, **args):
        """send cmd with length-prefixed keyword arguments; return the
        input pipe on which the response will arrive."""
        self.ui.debug(_("sending %s command\n") % cmd)
        self.pipeo.write("%s\n" % cmd)
        for k, v in args.items():
            self.pipeo.write("%s %d\n" % (k, len(v)))
            self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei

    def call(self, cmd, **args):
        """run cmd and return its complete length-prefixed response."""
        r = self.do_cmd(cmd, **args)
        l = r.readline()
        self.readerr()
        try:
            l = int(l)
        except:
            raise hg.RepoError(_("unexpected response '%s'") % l)
        return r.read(l)

    def lock(self):
        self.call("lock")
        return remotelock(self)

    def unlock(self):
        self.call("unlock")

    def heads(self):
        """return the list of head nodes of the remote repository."""
        d = self.call("heads")
        try:
            # response is space-separated hex nodes plus a newline
            return map(bin, d[:-1].split(" "))
        except:
            raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.call("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.call("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))

    def changegroup(self, nodes, kind):
        """request a changegroup rooted at nodes; the caller streams it
        straight off the returned pipe.  kind is unused here."""
        n = " ".join(map(hex, nodes))
        return self.do_cmd("changegroup", roots=n)

    def addchangegroup(self, cg, source):
        """push the changegroup read from cg to the remote side."""
        d = self.call("addchangegroup")
        if d:
            # bug fix: the refusal text must be %-formatted into the
            # message; previously the format string and d were passed
            # as two separate exception arguments
            raise hg.RepoError(_("push refused: %s") % d)

        while 1:
            d = cg.read(4096)
            if not d: break
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        l = int(self.pipei.readline())
        r = self.pipei.read(l)
        if not r:
            return 1
        return int(r)
new file mode 100644
--- /dev/null
+++ b/mercurial/statichttprepo.py
@@ -0,0 +1,48 @@
+# statichttprepo.py - simple http repository class for mercurial
+#
+# This provides read-only repo access to repositories exported via static http
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from demandload import demandload
+demandload(globals(), "changelog filelog httprangereader")
+demandload(globals(), "localrepo manifest os urllib urllib2")
+
class rangereader(httprangereader.httprangereader):
    # Adapter that converts urllib2 errors raised during ranged HTTP
    # reads into IOError, which is what opener/revlog callers expect.
    def read(self, size=None):
        try:
            return httprangereader.httprangereader.read(self, size)
        except urllib2.HTTPError, inst:
            # wrap the HTTPError object itself as the IOError "message"
            raise IOError(None, inst)
        except urllib2.URLError, inst:
            # reason is (errno, message); keep only the message text
            raise IOError(None, inst.reason[1])
+
def opener(base):
    """return a function that opens files over http"""
    root = base
    def o(path, mode="r"):
        # mode is accepted only for opener-interface compatibility;
        # access is read-only via ranged http requests
        url = os.path.join(root, urllib.quote(path))
        return rangereader(url)
    return o
+
class statichttprepository(localrepo.localrepository):
    # Read-only repository served as plain static files over http.
    # Deliberately skips localrepository.__init__ and sets up only the
    # attributes needed for read operations (no dirstate, no locking).
    def __init__(self, ui, path):
        self.path = (path + "/.hg")
        self.ui = ui
        self.revlogversion = 0
        # all file access goes through the http range-request opener
        self.opener = opener(self.path)
        self.manifest = manifest.manifest(self.opener)
        self.changelog = changelog.changelog(self.opener)
        # caches normally initialized by localrepository.__init__
        self.tagscache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None

    def dev(self):
        # no local device number for a remote repository
        return -1

    def local(self):
        return False
new file mode 100644
--- /dev/null
+++ b/mercurial/templater.py
@@ -0,0 +1,515 @@
+# templater.py - template expansion for output
+#
+# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import re
+from demandload import demandload
+from i18n import gettext as _
+from node import *
+demandload(globals(), "cStringIO cgi re sys os time urllib util textwrap")
+
# Mapping of escape characters (the character following a '\' in a
# quoted template string) to the literal character they produce.
esctable = {
    '\\': '\\',
    'r': '\r',
    't': '\t',
    'n': '\n',
    'v': '\v',
    }
+
def parsestring(s, quoted=True):
    '''parse a string using simple c-like syntax.
    string must be in quotes if quoted is True.'''
    chars = []
    if quoted:
        # validate and strip the surrounding quote pair
        first = s[0]
        if len(s) < 2: raise SyntaxError(_('string too short'))
        if first not in "'\"": raise SyntaxError(_('invalid quote'))
        if s[-1] != first: raise SyntaxError(_('unmatched quotes'))
        s = s[1:-1]
    else:
        first = None
    pending_escape = False
    for ch in s:
        if pending_escape:
            # resolve '\x' via esctable; unknown escapes pass through
            chars.append(esctable.get(ch, ch))
            pending_escape = False
        elif ch == '\\':
            pending_escape = True
        elif quoted and ch == first:
            # an unescaped quote may only appear at the very end
            raise SyntaxError(_('string ends early'))
        else:
            chars.append(ch)
    if pending_escape: raise SyntaxError(_('unterminated escape'))
    return ''.join(chars)
+
class templater(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, mapfile, filters={}, defaults={}, cache={}):
        '''set up template engine.
        mapfile is name of file to read map definitions from.
        filters is dict of functions. each transforms a value into another.
        defaults is dict of default map definitions.'''
        self.mapfile = mapfile or 'template'
        # copy the (shared default) cache so per-instance additions do
        # not leak into other templater instances
        self.cache = cache.copy()
        self.map = {}
        self.base = (mapfile and os.path.dirname(mapfile)) or ''
        self.filters = filters
        self.defaults = defaults

        if not mapfile:
            return
        # parse the map file: quoted values become cached template
        # strings, unquoted values are paths to template files
        i = 0
        for l in file(mapfile):
            l = l.strip()
            i += 1
            if not l or l[0] in '#;': continue
            m = re.match(r'([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.+)$', l)
            if m:
                key, val = m.groups()
                if val[0] in "'\"":
                    try:
                        self.cache[key] = parsestring(val)
                    except SyntaxError, inst:
                        # prefix the error with file:line for the user
                        raise SyntaxError('%s:%s: %s' %
                                          (mapfile, i, inst.args[0]))
                else:
                    self.map[key] = os.path.join(self.base, val)
            else:
                raise SyntaxError(_("%s:%s: parse error") % (mapfile, i))

    def __contains__(self, key):
        # membership tests only the cache (parsed strings), not self.map
        return key in self.cache

    def __call__(self, t, **map):
        '''perform expansion.
        t is name of map element to expand.
        map is added elements to use during expansion.'''
        m = self.defaults.copy()
        m.update(map)
        try:
            tmpl = self.cache[t]
        except KeyError:
            # lazily load and cache the template file on first use
            try:
                tmpl = self.cache[t] = file(self.map[t]).read()
            except IOError, inst:
                raise IOError(inst.args[0], _('template file %s: %s') %
                              (self.map[t], inst.args[1]))
        return self.template(tmpl, self.filters, **m)

    # matches {key}, #key#, {key%format} and {key|filter|...} tokens
    template_re = re.compile(r"[#{]([a-zA-Z_][a-zA-Z0-9_]*)"
                             r"((%[a-zA-Z_][a-zA-Z0-9_]*)*)"
                             r"((\|[a-zA-Z_][a-zA-Z0-9_]*)*)[#}]")

    def template(self, tmpl, filters={}, **map):
        # generator: yields literal text and expanded values in order
        lm = map.copy()
        while tmpl:
            m = self.template_re.search(tmpl)
            if m:
                start, end = m.span(0)
                # s/e are the delimiters; they must match ('#...#' or
                # '{...}'), otherwise the template is malformed
                s, e = tmpl[start], tmpl[end - 1]
                key = m.group(1)
                if ((s == '#' and e != '#') or (s == '{' and e != '}')):
                    raise SyntaxError(_("'%s'/'%s' mismatch expanding '%s'") %
                                      (s, e, key))
                if start:
                    yield tmpl[:start]
                v = map.get(key, "")
                # callables are expanded with the current map
                v = callable(v) and v(**map) or v

                format = m.group(2)
                fl = m.group(4)

                if format:
                    # {key%format}: expand 'format' once per item of v
                    q = v.__iter__
                    for i in q():
                        lm.update(i)
                        yield self(format[1:], **lm)

                    v = ""

                elif fl:
                    # {key|f1|f2}: pipe the value through each filter
                    for f in fl.split("|")[1:]:
                        v = filters[f](v)

                yield v
                tmpl = tmpl[end:]
            else:
                yield tmpl
                break
+
# (unit name, unit length in seconds) pairs used by age(); reversed
# below so the largest unit is tried first
agescales = [("second", 1),
             ("minute", 60),
             ("hour", 3600),
             ("day", 3600 * 24),
             ("week", 3600 * 24 * 7),
             ("month", 3600 * 24 * 30),
             ("year", 3600 * 24 * 365)]

agescales.reverse()
+
def age(date):
    '''turn a (timestamp, tzoff) tuple into an age string.'''

    def plural(t, c):
        if c == 1:
            return t
        return t + "s"
    def fmt(t, c):
        return "%d %s" % (c, plural(t, c))

    now = time.time()
    then = date[0]
    # clamp to at least one second so the loop always terminates at
    # the "second" scale
    delta = max(1, int(now - then))

    # pick the largest unit of which at least two fit (integer
    # division under python 2), falling back to seconds
    for t, s in agescales:
        n = delta / s
        if n >= 2 or s == 1:
            return fmt(t, n)
+
def stringify(thing):
    '''turn nested template iterator into string.'''
    cs = cStringIO.StringIO()
    def walk(things):
        # depth-first walk; relies on python 2 strings NOT having
        # __iter__, so strings are written and iterables are recursed
        for t in things:
            if hasattr(t, '__iter__'):
                walk(t)
            else:
                cs.write(t)
    walk(thing)
    return cs.getvalue()
+
# paragraph separators for fill(): a blank line, or a line starting a
# '-'/'*' bullet item
para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
# runs of spaces, collapsed to a single space by fill()
space_re = re.compile(r' +')
+
def fill(text, width):
    '''fill many paragraphs.'''
    def findparas():
        # yield (paragraph, separator) pairs; the final pair carries
        # the trailing whitespace of the text as its separator
        start = 0
        while True:
            m = para_re.search(text, start)
            if not m:
                w = len(text)
                # strip trailing whitespace from the last paragraph but
                # keep it as the separator so nothing is lost
                while w > start and text[w-1].isspace(): w -= 1
                yield text[start:w], text[w:]
                break
            yield text[start:m.start(0)], m.group(1)
            start = m.end(1)

    fp = cStringIO.StringIO()
    for para, rest in findparas():
        # wrap each paragraph, collapse space runs, re-emit separators
        fp.write(space_re.sub(' ', textwrap.fill(para, width)))
        fp.write(rest)
    return fp.getvalue()
+
def isodate(date):
    '''turn a (timestamp, tzoff) tuple into an ISO 8601 date and time.'''
    return util.datestr(date, format='%Y-%m-%d %H:%M')
+
def nl2br(text):
    '''replace raw newlines with xhtml line breaks.'''
    return '<br/>\n'.join(text.split('\n'))
+
def obfuscate(text):
    # render every character as an html numeric entity (a mild
    # address-harvester deterrent)
    result = ''
    for ch in text:
        result += '&#%d;' % ord(ch)
    return result
+
def domain(author):
    '''get domain of author, or empty string if none.'''
    if '@' not in author:
        return ''
    # everything after the '@', cut short at a closing '>' if present
    dom = author.split('@', 1)[1]
    close = dom.find('>')
    if close >= 0:
        dom = dom[:close]
    return dom
+
def email(author):
    '''get email of author.'''
    # take what lies between '<' and '>'; with neither bracket the
    # whole string is returned (find('<')+1 == 0)
    start = author.find('<') + 1
    close = author.find('>')
    if close == -1:
        return author[start:]
    return author[start:close]
+
def person(author):
    '''get name of author, or else username.'''
    lt = author.find('<')
    if lt == -1:
        # no bracketed address: fall back to the short username form
        return util.shortuser(author)
    return author[:lt].rstrip()
+
def shortdate(date):
    '''turn (timestamp, tzoff) tuple into ISO 8601 date.'''
    return util.datestr(date, format='%Y-%m-%d', timezone=False)
+
def indent(text, prefix):
    '''indent each non-empty line of text after first with prefix.'''
    out = []
    lines = text.splitlines()
    last = len(lines) - 1
    for pos, line in enumerate(lines):
        # the first line and blank lines are never prefixed
        if pos and line.strip():
            out.append(prefix)
        out.append(line)
        # restore the newline splitlines() removed, except after a
        # final line that did not originally end with one
        if pos < last or text.endswith('\n'):
            out.append('\n')
    return ''.join(out)
+
# Map of filter name (as used in '{key|filter}' template syntax) to the
# function implementing it.  Date-related filters take a
# (timestamp, tzoff) tuple; the rest operate on strings.
common_filters = {
    "addbreaks": nl2br,
    "basename": os.path.basename,
    "age": age,
    "date": lambda x: util.datestr(x),
    "domain": domain,
    "email": email,
    "escape": lambda x: cgi.escape(x, True),
    "fill68": lambda x: fill(x, width=68),
    "fill76": lambda x: fill(x, width=76),
    "firstline": lambda x: x.splitlines(1)[0].rstrip('\r\n'),
    "tabindent": lambda x: indent(x, '\t'),
    "isodate": isodate,
    "obfuscate": obfuscate,
    "permissions": lambda x: x and "-rwxr-xr-x" or "-rw-r--r--",
    "person": person,
    "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
    "short": lambda x: x[:12],
    "shortdate": shortdate,
    "stringify": stringify,
    "strip": lambda x: x.strip(),
    "urlescape": lambda x: urllib.quote(x),
    "user": lambda x: util.shortuser(x),
    }
+
def templatepath(name=None):
    '''return location of template file or directory (if no name).
    returns None if not found.'''

    # executable version (py2exe) doesn't support __file__; anchor the
    # search on the executable instead
    if hasattr(sys, 'frozen'):
        module = sys.executable
    else:
        module = __file__
    base = os.path.dirname(module)
    # look beside this module first, then one level up
    for candidate in ('templates',), ('..', 'templates'):
        parts = list(candidate)
        if name:
            parts.append(name)
        p = os.path.join(base, *parts)
        if (name and os.path.exists(p)) or os.path.isdir(p):
            return os.path.normpath(p)
+
class changeset_templater(object):
    '''format changeset information.'''

    def __init__(self, ui, repo, mapfile, dest=None):
        # pre-seed the cache with compact default parent/manifest forms
        self.t = templater(mapfile, common_filters,
                           cache={'parent': '{rev}:{node|short} ',
                                  'manifest': '{rev}:{node|short}'})
        self.ui = ui
        self.dest = dest
        self.repo = repo

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def write(self, thing, header=False):
        '''write expanded template.
        uses in-order recursive traverse of iterators.'''
        # output goes to dest if given, otherwise to the ui object
        dest = self.dest or self.ui
        for t in thing:
            if hasattr(t, '__iter__'):
                self.write(t, header=header)
            elif header:
                dest.write_header(t)
            else:
                dest.write(t)

    def write_header(self, thing):
        self.write(thing, header=True)

    def show(self, rev=0, changenode=None, brinfo=None, changes=None,
             **props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # fill in whichever of rev/changenode/changes was not supplied
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)
        if changes is None:
            changes = log.read(changenode)

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                # merge v into the expansion args whatever its shape:
                # dict, sequence of (key, value) pairs, or plain value
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        # each show* below is either a generator bound into the props
        # map or '' when it has nothing to contribute
        if brinfo:
            def showbranches(**args):
                if changenode in brinfo:
                    for x in showlist('branch', brinfo[changenode],
                                      plural='branches', **args):
                        yield x
        else:
            showbranches = ''

        if self.ui.debugflag:
            def showmanifest(**args):
                args = args.copy()
                args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                                 node=hex(changes[0])))
                yield self.t('manifest', **args)
        else:
            showmanifest = ''

        def showparents(**args):
            parents = [[('rev', log.rev(p)), ('node', hex(p))]
                       for p in log.parents(changenode)
                       if self.ui.debugflag or p != nullid]
            # suppress the trivial single parent (rev - 1) in
            # non-debug output
            if (not self.ui.debugflag and len(parents) == 1 and
                parents[0][0][1] == rev - 1):
                return
            for x in showlist('parent', parents, **args):
                yield x

        def showtags(**args):
            for x in showlist('tag', self.repo.nodetags(changenode), **args):
                yield x

        if self.ui.debugflag:
            # debug output splits files into changed/added/deleted
            files = self.repo.changes(log.parents(changenode)[0], changenode)
            def showfiles(**args):
                for x in showlist('file', files[0], **args): yield x
            def showadds(**args):
                for x in showlist('file_add', files[1], **args): yield x
            def showdels(**args):
                for x in showlist('file_del', files[2], **args): yield x
        else:
            def showfiles(**args):
                for x in showlist('file', changes[3], **args): yield x
            showadds = ''
            showdels = ''

        defprops = {
            'author': changes[1],
            'branches': showbranches,
            'date': changes[2],
            'desc': changes[4],
            'file_adds': showadds,
            'file_dels': showdels,
            'files': showfiles,
            'manifest': showmanifest,
            'node': hex(changenode),
            'parents': showparents,
            'rev': rev,
            'tags': showtags,
            }
        # caller-supplied props win; copy so we don't mutate the input
        props = props.copy()
        props.update(defprops)

        try:
            # pick the most specific header/changeset template variant
            # available for the current verbosity level
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                self.write_header(self.t(key, **props))
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.write(self.t(key, **props))
        except KeyError, inst:
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
+
class stringio(object):
    '''wrap cStringIO for use by changeset_templater.'''
    def __init__(self):
        self.fp = cStringIO.StringIO()

    def write(self, *args):
        # accept multiple chunks, mirroring ui.write's signature
        for chunk in args:
            self.fp.write(chunk)

    # headers are buffered the same way as body text here
    write_header = write

    def __getattr__(self, key):
        # everything else (getvalue, ...) is delegated to the buffer
        return getattr(self.fp, key)
new file mode 100644
--- /dev/null
+++ b/mercurial/transaction.py
@@ -0,0 +1,106 @@
+# transaction.py - simple journalling scheme for mercurial
+#
+# This transaction scheme is intended to gracefully handle program
+# errors and interruptions. More serious failures like system crashes
+# can be recovered with an fsck-like tool. As the whole repository is
+# effectively log-structured, this should amount to simply truncating
+# anything that isn't referenced in the changelog.
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os
+from i18n import gettext as _
+
class transaction(object):
    """Journalled transaction over append-only files.

    Every file touched is recorded with its pre-transaction size in a
    journal file; abort() truncates each file back to that size.
    report is a callable used for user messages, opener opens files by
    name, journal is the journal file path, and after (if given) is
    called instead of deleting the journal on close."""

    def __init__(self, report, opener, journal, after=None):
        # set before the existence check so __del__ is safe even if
        # the constructor raises below
        self.journal = None

        # abort here if the journal already exists
        if os.path.exists(journal):
            raise AssertionError(_("journal already exists - run hg recover"))

        self.count = 1
        self.report = report
        self.opener = opener
        self.after = after
        self.entries = []
        self.map = {}
        self.journal = journal

        self.file = open(self.journal, "w")

    def __del__(self):
        # a transaction never closed is rolled back automatically
        if self.journal:
            if self.entries: self.abort()
            self.file.close()
            try: os.unlink(self.journal)
            except: pass

    def add(self, file, offset, data=None):
        """record file's pre-transaction size (offset) once."""
        if file in self.map: return
        self.entries.append((file, offset, data))
        self.map[file] = len(self.entries) - 1
        # add enough data to the journal to do the truncate
        self.file.write("%s\0%d\n" % (file, offset))
        self.file.flush()

    def find(self, file):
        # return the recorded (file, offset, data) entry, or None
        if file in self.map:
            return self.entries[self.map[file]]
        return None

    def replace(self, file, offset, data=None):
        """overwrite the recorded entry for an already-added file."""
        if file not in self.map:
            raise KeyError(file)
        index = self.map[file]
        self.entries[index] = (file, offset, data)
        # journal replays last-entry-wins, so just append the new record
        self.file.write("%s\0%d\n" % (file, offset))
        self.file.flush()

    def nest(self):
        # nested transactions share this object; only the outermost
        # close() actually commits
        self.count += 1
        return self

    def running(self):
        return self.count > 0

    def close(self):
        """commit: discard the journal (or hand off via after)."""
        self.count -= 1
        if self.count != 0:
            return
        self.file.close()
        self.entries = []
        if self.after:
            self.after()
        else:
            os.unlink(self.journal)
        self.journal = None

    def abort(self):
        """roll back: truncate every recorded file to its old size."""
        if not self.entries: return

        self.report(_("transaction abort!\n"))

        for f, o, ignore in self.entries:
            try:
                self.opener(f, "a").truncate(o)
            except:
                # keep going; report what could not be restored
                self.report(_("failed to truncate %s\n") % f)

        self.entries = []

        self.report(_("rollback completed\n"))
+
def rollback(opener, file):
    '''replay journal FILE: truncate every recorded file back to its
    recorded offset (last record wins), then remove the journal.'''
    offsets = {}
    for line in open(file).readlines():
        name, offset = line.split('\0')
        offsets[name] = offset
    for name, offset in offsets.items():
        opener(name, "a").truncate(int(offset))
    os.unlink(file)
+
new file mode 100644
--- /dev/null
+++ b/mercurial/ui.py
@@ -0,0 +1,349 @@
+# ui.py - user interface bits for mercurial
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import ConfigParser
+from i18n import gettext as _
+from demandload import *
+demandload(globals(), "errno getpass os re smtplib socket sys tempfile")
+demandload(globals(), "templater traceback util")
+
class ui(object):
    """User interface and configuration access.

    A ui can be a root (reads hgrc files itself) or a child whose
    lookups cascade to a parent ui; setconfig values live in a
    per-instance overlay that always wins over file-based config."""

    def __init__(self, verbose=False, debug=False, quiet=False,
                 interactive=True, traceback=False, parentui=None):
        # per-instance (section, name) -> value overrides
        self.overlay = {}
        if parentui is None:
            # this is the parent of all ui children
            self.parentui = None
            self.cdata = ConfigParser.SafeConfigParser()
            self.readconfig(util.rcpath())

            self.quiet = self.configbool("ui", "quiet")
            self.verbose = self.configbool("ui", "verbose")
            self.debugflag = self.configbool("ui", "debug")
            self.interactive = self.configbool("ui", "interactive", True)
            self.traceback = traceback

            self.updateopts(verbose, debug, quiet, interactive)
            self.diffcache = None
            # buffered header lines, flushed by write()
            self.header = []
            self.prev_header = []
            self.revlogopts = self.configrevlog()
        else:
            # parentui may point to an ui object which is already a child
            self.parentui = parentui.parentui or parentui
            parent_cdata = self.parentui.cdata
            self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
            # make interpolation work
            for section in parent_cdata.sections():
                self.cdata.add_section(section)
                for name, value in parent_cdata.items(section, raw=True):
                    self.cdata.set(section, name, value)

    def __getattr__(self, key):
        # attributes not set on a child (quiet, verbose, ...) fall
        # through to the parent ui
        return getattr(self.parentui, key)

    def updateopts(self, verbose=False, debug=False, quiet=False,
                   interactive=True, traceback=False, config=[]):
        # merge command-line flags into the current state; debug
        # implies verbose, and verbose/debug override quiet
        self.quiet = (self.quiet or quiet) and not verbose and not debug
        self.verbose = (self.verbose or verbose) or debug
        self.debugflag = (self.debugflag or debug)
        self.interactive = (self.interactive and interactive)
        self.traceback = self.traceback or traceback
        # apply --config section.name=value overrides
        for cfg in config:
            try:
                name, value = cfg.split('=', 1)
                section, name = name.split('.', 1)
                if not self.cdata.has_section(section):
                    self.cdata.add_section(section)
                if not section or not name:
                    raise IndexError
                self.cdata.set(section, name, value)
            except (IndexError, ValueError):
                raise util.Abort(_('malformed --config option: %s') % cfg)

    def readconfig(self, fn, root=None):
        """read one or more hgrc files into the config data."""
        if isinstance(fn, basestring):
            fn = [fn]
        for f in fn:
            try:
                self.cdata.read(f)
            except ConfigParser.ParsingError, inst:
                raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
        # translate paths relative to root (or home) into absolute paths
        if root is None:
            root = os.path.expanduser('~')
        for name, path in self.configitems("paths"):
            if path and path.find("://") == -1 and not os.path.isabs(path):
                self.cdata.set("paths", name, os.path.join(root, path))

    def setconfig(self, section, name, val):
        # overlay values take priority over anything read from files
        self.overlay[(section, name)] = val

    def config(self, section, name, default=None):
        """return a config value: overlay, then file, then parent."""
        if self.overlay.has_key((section, name)):
            return self.overlay[(section, name)]
        if self.cdata.has_option(section, name):
            try:
                return self.cdata.get(section, name)
            except ConfigParser.InterpolationError, inst:
                raise util.Abort(_("Error in configuration:\n%s") % inst)
        if self.parentui is None:
            return default
        else:
            return self.parentui.config(section, name, default)

    def configbool(self, section, name, default=False):
        """like config(), but coerce the value to a boolean."""
        if self.overlay.has_key((section, name)):
            return self.overlay[(section, name)]
        if self.cdata.has_option(section, name):
            try:
                return self.cdata.getboolean(section, name)
            except ConfigParser.InterpolationError, inst:
                raise util.Abort(_("Error in configuration:\n%s") % inst)
        if self.parentui is None:
            return default
        else:
            return self.parentui.configbool(section, name, default)

    def has_config(self, section):
        '''tell whether section exists in config.'''
        return self.cdata.has_section(section)

    def configitems(self, section):
        """return sorted (name, value) pairs of a section, with this
        ui's values overriding the parent's."""
        items = {}
        if self.parentui is not None:
            items = dict(self.parentui.configitems(section))
        if self.cdata.has_section(section):
            try:
                items.update(dict(self.cdata.items(section)))
            except ConfigParser.InterpolationError, inst:
                raise util.Abort(_("Error in configuration:\n%s") % inst)
        x = items.items()
        x.sort()
        return x

    def walkconfig(self, seen=None):
        """generate every (section, name, value) triple once, nearest
        definition first (overlay, then own file config, then parent)."""
        if seen is None:
            seen = {}
        for (section, name), value in self.overlay.iteritems():
            yield section, name, value
            seen[section, name] = 1
        for section in self.cdata.sections():
            for name, value in self.cdata.items(section):
                if (section, name) in seen: continue
                yield section, name, value.replace('\n', '\\n')
                seen[section, name] = 1
        if self.parentui is not None:
            for parent in self.parentui.walkconfig(seen):
                yield parent

    def extensions(self):
        return self.configitems("extensions")

    def hgignorefiles(self):
        """return paths of extra ignore files from [ui] ignore keys."""
        result = []
        cfgitems = self.configitems("ui")
        for key, value in cfgitems:
            if key == 'ignore' or key.startswith('ignore.'):
                path = os.path.expanduser(value)
                result.append(path)
        return result

    def configrevlog(self):
        # return the [revlog] section as a lowercase-keyed dict
        ret = {}
        for x in self.configitems("revlog"):
            k = x[0].lower()
            ret[k] = x[1]
        return ret
    def diffopts(self):
        """return diff options from [diff] as booleans, cached."""
        if self.diffcache:
            return self.diffcache
        ret = { 'showfunc' : True, 'ignorews' : False}
        for x in self.configitems("diff"):
            k = x[0].lower()
            v = x[1]
            if v:
                v = v.lower()
                if v == 'true':
                    value = True
                else:
                    value = False
                ret[k] = value
        self.diffcache = ret
        return ret

    def username(self):
        """Return default username to be used in commits.

        Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
        and stop searching if one of these is set.
        Abort if found username is an empty string to force specifying
        the commit user elsewhere, e.g. with line option or repo hgrc.
        If not found, use ($LOGNAME or $USER or $LNAME or
        $USERNAME) +"@full.hostname".
        """
        user = os.environ.get("HGUSER")
        if user is None:
            user = self.config("ui", "username")
        if user is None:
            user = os.environ.get("EMAIL")
        if user is None:
            try:
                user = '%s@%s' % (getpass.getuser(), socket.getfqdn())
            except KeyError:
                raise util.Abort(_("Please specify a username."))
        return user

    def shortuser(self, user):
        """Return a short representation of a user name or email address."""
        if not self.verbose: user = util.shortuser(user)
        return user

    def expandpath(self, loc):
        """Return repository location relative to cwd or from [paths]"""
        if loc.find("://") != -1 or os.path.exists(loc):
            return loc

        return self.config("paths", loc, loc)

    def write(self, *args):
        # flush any pending header lines first, but only when they
        # differ from the previously emitted header
        if self.header:
            if self.header != self.prev_header:
                self.prev_header = self.header
                self.write(*self.header)
            self.header = []
        for a in args:
            sys.stdout.write(str(a))

    def write_header(self, *args):
        # headers are buffered and only emitted by the next write()
        for a in args:
            self.header.append(str(a))

    def write_err(self, *args):
        try:
            # keep stdout/stderr ordering sane before writing errors
            if not sys.stdout.closed: sys.stdout.flush()
            for a in args:
                sys.stderr.write(str(a))
        except IOError, inst:
            # broken pipes on error output are not themselves errors
            if inst.errno != errno.EPIPE:
                raise

    def flush(self):
        try: sys.stdout.flush()
        except: pass
        try: sys.stderr.flush()
        except: pass

    def readline(self):
        # read one line of user input, without the trailing newline
        return sys.stdin.readline()[:-1]
    def prompt(self, msg, pat=None, default="y"):
        """prompt until the reply matches pat; default when
        non-interactive."""
        if not self.interactive: return default
        while 1:
            self.write(msg, " ")
            r = self.readline()
            if not pat or re.match(pat, r):
                return r
            else:
                self.write(_("unrecognized response\n"))
    def getpass(self, prompt=None, default=None):
        if not self.interactive: return default
        return getpass.getpass(prompt or _('password: '))
    def status(self, *msg):
        # suppressed by --quiet
        if not self.quiet: self.write(*msg)
    def warn(self, *msg):
        self.write_err(*msg)
    def note(self, *msg):
        # shown only with --verbose
        if self.verbose: self.write(*msg)
    def debug(self, *msg):
        # shown only with --debug
        if self.debugflag: self.write(*msg)
    def edit(self, text, user):
        """run the user's editor on text; return the result with
        HG: comment lines stripped."""
        (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
                                      text=True)
        try:
            f = os.fdopen(fd, "w")
            f.write(text)
            f.close()

            editor = (os.environ.get("HGEDITOR") or
                      self.config("ui", "editor") or
                      os.environ.get("EDITOR", "vi"))

            util.system("%s \"%s\"" % (editor, name),
                        environ={'HGUSER': user},
                        onerr=util.Abort, errprefix=_("edit failed"))

            f = open(name)
            t = f.read()
            f.close()
            t = re.sub("(?m)^HG:.*\n", "", t)
        finally:
            os.unlink(name)

        return t

    def sendmail(self):
        '''send mail message. object returned has one method, sendmail.
        call as sendmail(sender, list-of-recipients, msg).'''

        def smtp():
            '''send mail using smtp.'''

            s = smtplib.SMTP()
            mailhost = self.config('smtp', 'host')
            if not mailhost:
                raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
            mailport = int(self.config('smtp', 'port', 25))
            self.note(_('sending mail: smtp host %s, port %s\n') %
                      (mailhost, mailport))
            s.connect(host=mailhost, port=mailport)
            if self.configbool('smtp', 'tls'):
                self.note(_('(using tls)\n'))
                s.ehlo()
                s.starttls()
                s.ehlo()
            username = self.config('smtp', 'username')
            password = self.config('smtp', 'password')
            if username and password:
                self.note(_('(authenticating to mail server as %s)\n') %
                          (username))
                s.login(username, password)
            return s

        class sendmail(object):
            '''send mail using sendmail.'''

            def __init__(self, ui, program):
                self.ui = ui
                self.program = program

            def sendmail(self, sender, recipients, msg):
                cmdline = '%s -f %s %s' % (
                    self.program, templater.email(sender),
                    ' '.join(map(templater.email, recipients)))
                self.ui.note(_('sending mail: %s\n') % cmdline)
                fp = os.popen(cmdline, 'w')
                fp.write(msg)
                ret = fp.close()
                if ret:
                    raise util.Abort('%s %s' % (
                        os.path.basename(self.program.split(None, 1)[0]),
                        util.explain_exit(ret)[0]))

        # [email] method selects smtp or an external sendmail program
        method = self.config('email', 'method', 'smtp')
        if method == 'smtp':
            mail = smtp()
        else:
            mail = sendmail(self, method)
        return mail

    def print_exc(self):
        '''print exception traceback if traceback printing enabled.
        only to call in exception handler. returns true if traceback
        printed.'''
        if self.traceback:
            traceback.print_exc()
        return self.traceback
new file mode 100644
--- /dev/null
+++ b/mercurial/util.py
@@ -0,0 +1,900 @@
+"""
+util.py - Mercurial utility functions and platform specific implementations
+
+ Copyright 2005 K. Thananchayan <thananck@yahoo.com>
+
+This software may be used and distributed according to the terms
+of the GNU General Public License, incorporated herein by reference.
+
+This contains helper routines that are independent of the SCM core and hide
+platform-specific details from the core.
+"""
+
+import os, errno
+from i18n import gettext as _
+from demandload import *
+demandload(globals(), "cStringIO errno popen2 re shutil sys tempfile")
+demandload(globals(), "threading time")
+
class SignalInterrupt(Exception):
    """Exception raised on SIGTERM and SIGHUP."""
    # NOTE(review): presumably raised from a signal handler installed
    # elsewhere so cleanup code can run — confirm at the install site
+
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    # 'b' = binary pipes; -1 = fully buffered
    (pout, pin) = popen2.popen2(cmd, -1, 'b')
    def writer():
        try:
            pin.write(s)
            pin.close()
        except IOError, inst:
            # EPIPE just means the child stopped reading its input
            # early; its output is still what we want
            if inst.errno != errno.EPIPE:
                raise

    # we should use select instead on UNIX, but this will work on most
    # systems, including Windows
    # (a second thread feeds stdin while we drain stdout, so neither
    # pipe can fill up and deadlock us)
    w = threading.Thread(target=writer)
    w.start()
    f = pout.read()
    pout.close()
    w.join()
    return f
+
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.

    Returns the content of OUTFILE after the command succeeds; raises
    Abort if the command exits non-zero.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        # use only the human-readable half of explain_exit()'s
        # (message, code) tuple, as patch() does; previously the whole
        # tuple was interpolated into the error message
        if code: raise Abort(_("command '%s' failed: %s") %
                             (cmd, explain_exit(code)[0]))
        return open(outname, 'rb').read()
    finally:
        # best-effort cleanup of both temp files; never mask the
        # original exception
        try:
            if inname: os.unlink(inname)
        except: pass
        try:
            if outname: os.unlink(outname)
        except: pass
+
# maps a command prefix to the filter implementation that handles it
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # NOTE: shadows the builtin filter() within this module
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            # strip the "kind:" prefix (and surrounding space) before
            # handing the command to the chosen implementation
            return fn(s, cmd[len(name):].lstrip())
    # no recognized prefix: default to a pipe filter
    return pipefilter(s, cmd)
+
def find_in_path(name, path, default=None):
    '''find name in search path. path can be string (will be split
    with os.pathsep), or iterable thing that returns strings. if name
    found, return path to name. else return default.'''
    if isinstance(path, str):
        path = path.split(os.pathsep)
    for directory in path:
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return default
+
def patch(strip, patchname, ui):
    """apply the patch <patchname> to the working directory.
    a list of patched files is returned

    strip is the -p level passed to the patch program. Prefers GNU
    patch ('gpatch') when found on $PATH, falling back to 'patch'.
    Raises Abort if the patch program exits non-zero."""
    patcher = find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
    fp = os.popen('%s -p%d < "%s"' % (patcher, strip, patchname))
    files = {}
    for line in fp:
        line = line.rstrip()
        # echo the patch program's progress to the user
        ui.status("%s\n" % line)
        # collect file names from "patching file ..." lines; the
        # platform-specific parse_patch_output() strips any quoting
        if line.startswith('patching file '):
            pf = parse_patch_output(line)
            files.setdefault(pf, 1)
    code = fp.close()
    if code:
        raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
    return files.keys()
+
def binary(s):
    """return true if a string is binary data using diff's heuristic"""
    # diff treats data as binary when a NUL byte appears in the first 4k
    return bool(s) and '\0' in s[:4096]

def unique(g):
    """return the uniq elements of iterable g"""
    # order-preserving: yield each element the first time it is seen
    seen = set()
    for item in g:
        if item not in seen:
            seen.add(item)
            yield item
+
class Abort(Exception):
    """Raised if a command needs to print an error and exit."""

# trivial match-everything / match-nothing predicates, used as
# defaults where a matcher function is expected
def always(fn): return True
def never(fn): return False
+
def patkind(name, dflt_pat='glob'):
    """Split a string into an optional pattern kind prefix and the
    actual pattern.

    Returns the [kind, pattern] pair from splitting on ':' when a known
    prefix is present, otherwise the (dflt_pat, name) pair.
    (Note the historical list-vs-tuple asymmetry between the two
    return paths; callers unpack two values either way.)"""
    known = ('re', 'glob', 'path', 'relglob', 'relpath', 'relre')
    for prefix in known:
        if name.startswith(prefix + ':'):
            return name.split(':', 1)
    return dflt_pat, name
+
def globre(pat, head='^', tail='$'):
    "convert a glob pattern into a regexp"
    # hand-rolled scanner; supports '*', '**', '?', '[...]' character
    # classes, '{a,b}' alternation, and backslash escaping
    i, n = 0, len(pat)
    res = ''
    group = False
    # closure reads the *current* i, so this always looks one char ahead
    def peek(): return i < n and pat[i]
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            if peek() == '*':
                # '**' crosses directory separators...
                i += 1
                res += '.*'
            else:
                # ...while a single '*' stops at '/'
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            # scan ahead for the closing ']'; a ']' (or '!') directly
            # after '[' is part of the class, not its terminator
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                # unterminated class: treat '[' as a literal
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # glob negation -> regex class negation
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    # literal '^' must be escaped inside a regex class
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            # '{a,b}' alternation -> non-capturing group '(?:a|b)'
            group = True
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group = False
        elif c == ',' and group:
            res += '|'
        elif c == '\\':
            # escape next char literally; a trailing backslash escapes itself
            p = peek()
            if p:
                i += 1
                res += re.escape(p)
            else:
                res += re.escape(c)
        else:
            res += re.escape(c)
    return head + res + tail
+
# characters whose presence marks a pattern as a glob
_globchars = {'[': 1, '{': 1, '*': 1, '?': 1}

def pathto(n1, n2):
    '''return the relative path from one place to another.
    this returns a path in the form used by the local filesystem, not hg.'''
    if not n1:
        return localpath(n2)
    parts1, parts2 = n1.split('/'), n2.split('/')
    # skip the common leading components of both paths
    i = 0
    limit = min(len(parts1), len(parts2))
    while i < limit and parts1[i] == parts2[i]:
        i += 1
    # climb out of what remains of n1, then descend into n2
    return os.sep.join(['..'] * (len(parts1) - i) + parts2[i:])
+
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root

    The result is a root-relative, '/'-separated path. Raises Abort
    when myname lies outside root or contains illegal components."""
    # normalize root so it always ends with exactly one separator
    if root == os.sep:
        rootsep = os.sep
    elif root.endswith(os.sep):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        # interpret relative names against root/cwd
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    if name != rootsep and name.startswith(rootsep):
        # simple case: textually inside root; strip the root prefix
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                # reached root (possibly via a symlinked prefix)
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                # dirname() stopped shrinking: we hit the filesystem top
                break
            name = dirname

        raise Abort('%s not under root' % myname)
+
def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
    # build a matcher with 'glob' as the default pattern kind; see
    # _matcher() for the full contract. (The mutable defaults are
    # never modified, only read.)
    return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)

def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
    # command-line variant: on windows the shell does not expand
    # globs, so bare names default to 'glob'; elsewhere the shell has
    # already expanded them, so they default to 'relpath'
    if os.name == 'nt':
        dflt_pat = 'glob'
    else:
        dflt_pat = 'relpath'
    return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
+
def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    head - a regex to prepend to patterns to control whether a match is rooted
    dflt_pat - pattern kind assumed when a name carries no 'kind:' prefix
    src - where the patterns came from (e.g. a file name), for error messages

    a pattern is one of:
    'glob:<rooted glob>'
    're:<rooted regexp>'
    'path:<rooted path>'
    'relglob:<relative glob>'
    'relpath:<relative path>'
    'relre:<relative regexp>'
    '<rooted path or regexp>'

    returns:
    a 3-tuple containing
    - list of explicit non-pattern names passed in
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in

    todo:
    make head regex a rooted bool
    """

    def contains_glob(name):
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if kind == 're':
            return name
        elif kind == 'path':
            # exact path or anything beneath it
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            # may match at any directory level
            return head + globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return head + re.escape(name) + tail
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return head + globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        # returns None when there are no patterns; callers substitute
        # a default (always / never) in that case
        if not pats:
            return
        matches = []
        for k, p in pats:
            try:
                pat = '(?:%s)' % regex(k, p, tail)
                matches.append(re.compile(pat).match)
            except re.error:
                if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
                else: raise Abort("invalid pattern (%s): %s" % (k, p))

        def buildfn(text):
            # first successful match wins
            for m in matches:
                r = m(text)
                if r:
                    return r

        return buildfn

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split(os.sep):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root)

    # sort incoming names into regex-style patterns vs. literal files,
    # collecting directory roots that a walker could restrict itself to
    pats = []
    files = []
    roots = []
    for kind, name in [patkind(p, dflt_pat) for p in names]:
        if kind in ('glob', 'relpath'):
            name = canonpath(canonroot, cwd, name)
            if name == '':
                # the root itself: match everything
                kind, name = 'glob', '**'
        if kind in ('glob', 'path', 're'):
            pats.append((kind, name))
            if kind == 'glob':
                root = globprefix(name)
                if root: roots.append(root)
        elif kind == 'relpath':
            files.append((kind, name))
            roots.append(name)

    patmatch = matchfn(pats, '$') or always
    filematch = matchfn(files, '(?:/|$)') or always
    incmatch = always
    if inc:
        incmatch = matchfn(map(patkind, inc), '(?:/|$)')
    excmatch = lambda fn: False
    if exc:
        excmatch = matchfn(map(patkind, exc), '(?:/|$)')

    # the match function: must be included, not excluded, and match
    # either a pattern or an explicit file (or everything when no
    # patterns/files were given; trailing '/' marks a directory)
    return (roots,
            lambda fn: (incmatch(fn) and not excmatch(fn) and
                        (fn.endswith('/') or
                         (not pats and not files) or
                         (pats and patmatch(fn)) or
                         (files and filematch(fn)))),
            (inc or exc or (pats and pats != [('glob', '**')])) and True)
+
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status.  if ui object,
    print error message and return status, else raise onerr object as
    exception.

    environ and cwd changes are always undone before returning, even
    when the command or error handling raises.'''
    # remember the original values of every variable we are about to
    # override (None = was unset) so they can be restored afterwards
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    try:
        for k, v in environ.iteritems():
            os.environ[k] = str(v)
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            # duck-typing: a ui-like object has warn(); anything else
            # is assumed to be an exception class to raise
            try:
                onerr.warn(errmsg + '\n')
            except AttributeError:
                raise onerr(errmsg)
        return rc
    finally:
        # restore environment and working directory
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)
+
def rename(src, dst):
    """forcibly rename a file"""
    try:
        os.rename(src, dst)
    except OSError, err:
        # on windows, rename to existing file is not allowed, so we
        # must delete destination first. but if file is open, unlink
        # schedules it for delete but does not delete it. rename
        # happens immediately even for open files, so we create
        # temporary file, delete it, rename destination to that name,
        # then delete that. then rename is safe to do.
        fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
        os.close(fd)
        os.unlink(temp)
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)

def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # try removing directories that might now be empty
    # (removedirs walks upward; OSError simply means a parent
    # directory is not empty, which is fine)
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass
+
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # default: hardlink only when source and destination share a device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if not os.path.isdir(src):
        # single file: try to link, fall back to a plain copy
        if hardlink:
            try:
                os_link(src, dst)
                return
            except (IOError, OSError):
                hardlink = False
        shutil.copy(src, dst)
        return

    # directory: recreate it and recurse over its entries
    os.mkdir(dst)
    for entry in os.listdir(src):
        copyfiles(os.path.join(src, entry), os.path.join(dst, entry),
                  hardlink)
+
def audit_path(path):
    """Abort if path contains dangerous components.

    Rejects drive-qualified or absolute paths, paths whose first
    component is '.hg', and paths containing '..'.
    """
    parts = os.path.normcase(path).split(os.sep)
    if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
        or os.pardir in parts):
        # no trailing newline in the message: other Abort messages in
        # this module (tempfilter, patch, canonpath) carry none, and
        # the caller adds its own newline when printing
        raise Abort(_("path contains illegal component: %s") % path)
+
def _makelock_file(info, pathname):
    # portable lock: create the file exclusively (O_EXCL fails if it
    # already exists) and store info inside it
    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)

def _readlock_file(pathname):
    # counterpart of _makelock_file: return the stored lock info
    return posixfile(pathname).read()

def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    return os.stat(pathname).st_nlink

# os.link is missing on some platforms (e.g. win32 without win32all);
# provide a stub that fails cleanly
if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))
+
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        # fall back to stat'ing by name for file-like objects
        return os.stat(fp.name)

# default file implementation; replaced with posixfile_nt on windows
# NT (see the platform-specific section below)
posixfile = file

def is_win_9x():
    '''return true if run on windows 95, 98 or me.'''
    try:
        # getwindowsversion()[3] is the platform id; 1 = win9x family
        return sys.getwindowsversion()[3] == 1
    except AttributeError:
        # no getwindowsversion(): guess from COMSPEC (command.com on 9x)
        return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
+
# Platform specific variants
if os.name == 'nt':
    demandload(globals(), "msvcrt")
    nulldev = 'NUL:'

    class winstdout:
        '''stdout on windows misbehaves if sent through a pipe'''

        def __init__(self, fp):
            self.fp = fp

        def __getattr__(self, key):
            # delegate everything else to the wrapped file
            return getattr(self.fp, key)

        def close(self):
            try:
                self.fp.close()
            except: pass

        def write(self, s):
            try:
                return self.fp.write(s)
            except IOError, inst:
                # win32 reports a closed pipe as errno 0; translate it
                # to the EPIPE unix callers expect
                if inst.errno != 0: raise
                self.close()
                raise IOError(errno.EPIPE, 'Broken pipe')

    sys.stdout = winstdout(sys.stdout)

    def system_rcpath():
        try:
            return system_rcpath_win32()
        except:
            # win32 API unavailable: fall back to a fixed location
            return [r'c:\mercurial\mercurial.ini']

    def os_rcpath():
        '''return default os-specific hgrc search path'''
        path = system_rcpath()
        path.append(user_rcpath())
        userprofile = os.environ.get('USERPROFILE')
        if userprofile:
            path.append(os.path.join(userprofile, 'mercurial.ini'))
        return path

    def user_rcpath():
        '''return os-specific hgrc search path to the user dir'''
        return os.path.join(os.path.expanduser('~'), 'mercurial.ini')

    def parse_patch_output(output_line):
        """parses the output produced by patch and returns the file name"""
        # skip the "patching file " prefix (14 chars)
        pf = output_line[14:]
        if pf[0] == '`':
            pf = pf[1:-1] # Remove the quotes
        return pf

    def testpid(pid):
        '''return False if pid dead, True if running or not known'''
        # no cheap way to probe a pid here; util_win32 overrides this
        return True

    def is_exec(f, last):
        # FAT/NTFS have no exec bit; preserve whatever was recorded
        return last

    def set_exec(f, mode):
        pass

    def set_binary(fd):
        # stop msvcrt from translating line endings on this descriptor
        msvcrt.setmode(fd.fileno(), os.O_BINARY)

    def pconvert(path):
        return path.replace("\\", "/")

    def localpath(path):
        return path.replace('/', '\\')

    def normpath(path):
        return pconvert(os.path.normpath(path))

    makelock = _makelock_file
    readlock = _readlock_file

    def samestat(s1, s2):
        # no usable st_dev/st_ino on windows stat results
        return False

    def explain_exit(code):
        return _("exited with status %d") % code, code

    try:
        # override functions with win32 versions if possible
        from util_win32 import *
        if not is_win_9x():
            posixfile = posixfile_nt
    except ImportError:
        pass
+
else:
    nulldev = '/dev/null'

    def rcfiles(path):
        # hgrc plus every *.rc file from an hgrc.d directory, if present
        rcs = [os.path.join(path, 'hgrc')]
        rcdir = os.path.join(path, 'hgrc.d')
        try:
            rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
                        if f.endswith(".rc")])
        except OSError, inst: pass
        return rcs

    def os_rcpath():
        '''return default os-specific hgrc search path'''
        path = []
        # old mod_python does not set sys.argv
        if len(getattr(sys, 'argv', [])) > 0:
            # installation-relative config, next to the hg executable
            path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
                                '/../etc/mercurial'))
        path.extend(rcfiles('/etc/mercurial'))
        path.append(os.path.expanduser('~/.hgrc'))
        path = [os.path.normpath(f) for f in path]
        return path

    def parse_patch_output(output_line):
        """parses the output produced by patch and returns the file name"""
        # skip the "patching file " prefix (14 chars)
        pf = output_line[14:]
        # GNU patch quotes names containing spaces
        if pf.startswith("'") and pf.endswith("'") and pf.find(" ") >= 0:
            pf = pf[1:-1] # Remove the quotes
        return pf

    def is_exec(f, last):
        """check whether a file is executable"""
        return (os.stat(f).st_mode & 0100 != 0)

    def set_exec(f, mode):
        s = os.stat(f).st_mode
        if (s & 0100 != 0) == mode:
            return
        if mode:
            # Turn on +x for every +r bit when making a file executable
            # and obey umask.
            umask = os.umask(0)
            os.umask(umask)
            os.chmod(f, s | (s & 0444) >> 2 & ~umask)
        else:
            os.chmod(f, s & 0666)

    def set_binary(fd):
        # no text/binary distinction on posix
        pass

    def pconvert(path):
        return path

    def localpath(path):
        return path

    normpath = os.path.normpath
    samestat = os.path.samestat

    def makelock(info, pathname):
        # symlinks are an atomic lock primitive; fall back to an
        # exclusive file where symlinks are unsupported
        try:
            os.symlink(info, pathname)
        except OSError, why:
            if why.errno == errno.EEXIST:
                # lock already held
                raise
            else:
                _makelock_file(info, pathname)

    def readlock(pathname):
        try:
            return os.readlink(pathname)
        except OSError, why:
            if why.errno == errno.EINVAL:
                # not a symlink: must be a lock file
                return _readlock_file(pathname)
            else:
                raise

    def testpid(pid):
        '''return False if pid dead, True if running or not sure'''
        try:
            # signal 0 just probes for existence/permission
            os.kill(pid, 0)
            return True
        except OSError, inst:
            # EPERM means the pid exists but is not ours: still running
            return inst.errno != errno.ESRCH

    def explain_exit(code):
        """return a 2-tuple (desc, code) describing a process's status"""
        if os.WIFEXITED(code):
            val = os.WEXITSTATUS(code)
            return _("exited with status %d") % val, val
        elif os.WIFSIGNALED(code):
            val = os.WTERMSIG(code)
            return _("killed by signal %d") % val, val
        elif os.WIFSTOPPED(code):
            val = os.WSTOPSIG(code)
            return _("stopped by signal %d") % val, val
        raise ValueError(_("invalid exit code"))
+
def opener(base, audit=True):
    """
    return a function that opens files relative to base

    this function is used to hide the details of COW semantics and
    remote file access from higher level code.

    The returned callable has the signature
    o(path, mode='r', text=False, atomic=False, atomictemp=False);
    when audit is true every path is checked with audit_path() first.
    """
    p = base
    audit_p = audit

    def mktempcopy(name):
        # copy `name' to a temp file in the same directory, preserving
        # its mode; used to break hardlinks before writing
        d, fn = os.path.split(name)
        fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
        os.close(fd)
        ofp = posixfile(temp, "wb")
        try:
            try:
                ifp = posixfile(name, "rb")
            except IOError, inst:
                # make the error message point at the file we failed on
                if not getattr(inst, 'filename', None):
                    inst.filename = name
                raise
            for chunk in filechunkiter(ifp):
                ofp.write(chunk)
            ifp.close()
            ofp.close()
        except:
            # do not leave a partial temp file behind
            try: os.unlink(temp)
            except: pass
            raise
        st = os.lstat(name)
        os.chmod(temp, st.st_mode)
        return temp

    class atomictempfile(posixfile):
        """the file will only be copied when rename is called"""
        def __init__(self, name, mode):
            self.__name = name
            self.temp = mktempcopy(name)
            posixfile.__init__(self, self.temp, mode)
        def rename(self):
            # commit: move the temp file over the real name
            if not self.closed:
                posixfile.close(self)
                rename(self.temp, localpath(self.__name))
        def __del__(self):
            # never committed: discard the temp file
            if not self.closed:
                try:
                    os.unlink(self.temp)
                except: pass
                posixfile.close(self)

    class atomicfile(atomictempfile):
        """the file will only be copied on close"""
        def __init__(self, name, mode):
            atomictempfile.__init__(self, name, mode)
        def close(self):
            self.rename()
        def __del__(self):
            # NOTE(review): committing from __del__ means a file left
            # open when an exception unwinds is still renamed into
            # place — confirm this partial-write behavior is intended
            self.rename()

    def o(path, mode="r", text=False, atomic=False, atomictemp=False):
        if audit_p:
            audit_path(path)
        f = os.path.join(p, path)

        if not text:
            mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                nlink = nlinks(f)
            except OSError:
                # file does not exist yet; make sure its directory does
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if atomic:
                    return atomicfile(f, mode)
                elif atomictemp:
                    return atomictempfile(f, mode)
                # copy-on-write: break up hardlinked revlog files
                # before modifying them
                if nlink > 1:
                    rename(mktempcopy(f), f)
        return posixfile(f, mode)

    return o
+
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter, targetsize = 2**16):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.in_iter = iter(in_iter)
        # buf holds data pulled from in_iter but not yet read()
        self.buf = ''
        self.targetsize = int(targetsize)
        if self.targetsize <= 0:
            raise ValueError(_("targetsize must be greater than 0, was %d") %
                             targetsize)
        # set once in_iter has been exhausted
        self.iterempty = False

    def fillbuf(self):
        """Ignore target size; read every chunk from iterator until empty."""
        if not self.iterempty:
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            for ch in self.in_iter:
                collector.write(ch)
            self.buf = collector.getvalue()
            self.iterempty = True

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and not self.iterempty:
            # Clamp to a multiple of self.targetsize
            targetsize = self.targetsize * ((l // self.targetsize) + 1)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.in_iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                # iterator ran dry before reaching the target
                self.iterempty = True
            self.buf = collector.getvalue()
        # buffer() gives a zero-copy view of the unread remainder
        s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s
+
def filechunkiter(f, size = 65536):
    """Create a generator that produces all the data in the file size
    (default 65536) bytes at a time.  Chunks may be less than size
    bytes if the chunk is the last chunk in the file, or the file is a
    socket or some other type of file that sometimes reads less data
    than is requested."""
    while True:
        chunk = f.read(size)
        if not chunk:
            break
        yield chunk
+
def makedate():
    # return the current time as a (unixtime, tz-offset-seconds) pair;
    # altzone applies when DST is in effect (lt[8] == 1)
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    s = time.strftime(format, time.gmtime(float(t) - tz))
    if timezone:
        # format offset as +HHMM / -HHMM
        # NOTE: relies on Python 2 integer division semantics
        s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
    return s
+
def shortuser(user):
    """Return a short representation of a user name or email address.

    e.g. "First Last <first@example.com>" -> "first"."""
    # drop the domain part, then anything before an opening '<'
    user = user.split('@', 1)[0]
    if '<' in user:
        user = user.split('<', 1)[1]
    return user
+
def walkrepos(path):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        # errors on the top-level path are fatal; anything deeper is
        # silently skipped
        if err.filename == path:
            raise err

    for root, dirs, files in os.walk(path, onerror=errhandler):
        if '.hg' in dirs:
            yield root
            # do not descend into a repository's working copy
            dirs[:] = []
+
# memoized result of rcpath(); computed at most once per process
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                # empty entries are skipped, so HGRCPATH="" yields []
                if not p: continue
                if os.path.isdir(p):
                    for f in os.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath
new file mode 100644
--- /dev/null
+++ b/mercurial/util_win32.py
@@ -0,0 +1,299 @@
+# util_win32.py - utility functions that use win32 API
+#
+# Copyright 2005 Matt Mackall <mpm@selenic.com>
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms of
+# the GNU General Public License, incorporated herein by reference.
+
+# Mark Hammond's win32all package allows better functionality on
+# Windows. this module overrides definitions in util.py. if not
+# available, import of this module will fail, and generic code will be
+# used.
+
+import win32api
+
+from demandload import *
+from i18n import gettext as _
+demandload(globals(), 'errno os pywintypes win32con win32file win32process')
+demandload(globals(), 'cStringIO win32com.shell:shell,shellcon winerror')
+
class WinError:
    """Mixin that translates a pywintypes.error into errno terms.

    Subclasses combine this with IOError/OSError so win32 failures can
    be handled by generic code that inspects errno."""
    # best-effort mapping from win32 error codes to errno values
    winerror_map = {
        winerror.ERROR_ACCESS_DENIED: errno.EACCES,
        winerror.ERROR_ACCOUNT_DISABLED: errno.EACCES,
        winerror.ERROR_ACCOUNT_RESTRICTION: errno.EACCES,
        winerror.ERROR_ALREADY_ASSIGNED: errno.EBUSY,
        winerror.ERROR_ALREADY_EXISTS: errno.EEXIST,
        winerror.ERROR_ARITHMETIC_OVERFLOW: errno.ERANGE,
        winerror.ERROR_BAD_COMMAND: errno.EIO,
        winerror.ERROR_BAD_DEVICE: errno.ENODEV,
        winerror.ERROR_BAD_DRIVER_LEVEL: errno.ENXIO,
        winerror.ERROR_BAD_EXE_FORMAT: errno.ENOEXEC,
        winerror.ERROR_BAD_FORMAT: errno.ENOEXEC,
        winerror.ERROR_BAD_LENGTH: errno.EINVAL,
        winerror.ERROR_BAD_PATHNAME: errno.ENOENT,
        winerror.ERROR_BAD_PIPE: errno.EPIPE,
        winerror.ERROR_BAD_UNIT: errno.ENODEV,
        winerror.ERROR_BAD_USERNAME: errno.EINVAL,
        winerror.ERROR_BROKEN_PIPE: errno.EPIPE,
        winerror.ERROR_BUFFER_OVERFLOW: errno.ENAMETOOLONG,
        winerror.ERROR_BUSY: errno.EBUSY,
        winerror.ERROR_BUSY_DRIVE: errno.EBUSY,
        winerror.ERROR_CALL_NOT_IMPLEMENTED: errno.ENOSYS,
        winerror.ERROR_CANNOT_MAKE: errno.EACCES,
        winerror.ERROR_CANTOPEN: errno.EIO,
        winerror.ERROR_CANTREAD: errno.EIO,
        winerror.ERROR_CANTWRITE: errno.EIO,
        winerror.ERROR_CRC: errno.EIO,
        winerror.ERROR_CURRENT_DIRECTORY: errno.EACCES,
        winerror.ERROR_DEVICE_IN_USE: errno.EBUSY,
        winerror.ERROR_DEV_NOT_EXIST: errno.ENODEV,
        winerror.ERROR_DIRECTORY: errno.EINVAL,
        winerror.ERROR_DIR_NOT_EMPTY: errno.ENOTEMPTY,
        winerror.ERROR_DISK_CHANGE: errno.EIO,
        winerror.ERROR_DISK_FULL: errno.ENOSPC,
        winerror.ERROR_DRIVE_LOCKED: errno.EBUSY,
        winerror.ERROR_ENVVAR_NOT_FOUND: errno.EINVAL,
        winerror.ERROR_EXE_MARKED_INVALID: errno.ENOEXEC,
        winerror.ERROR_FILENAME_EXCED_RANGE: errno.ENAMETOOLONG,
        winerror.ERROR_FILE_EXISTS: errno.EEXIST,
        winerror.ERROR_FILE_INVALID: errno.ENODEV,
        winerror.ERROR_FILE_NOT_FOUND: errno.ENOENT,
        winerror.ERROR_GEN_FAILURE: errno.EIO,
        winerror.ERROR_HANDLE_DISK_FULL: errno.ENOSPC,
        winerror.ERROR_INSUFFICIENT_BUFFER: errno.ENOMEM,
        winerror.ERROR_INVALID_ACCESS: errno.EACCES,
        winerror.ERROR_INVALID_ADDRESS: errno.EFAULT,
        winerror.ERROR_INVALID_BLOCK: errno.EFAULT,
        winerror.ERROR_INVALID_DATA: errno.EINVAL,
        winerror.ERROR_INVALID_DRIVE: errno.ENODEV,
        winerror.ERROR_INVALID_EXE_SIGNATURE: errno.ENOEXEC,
        winerror.ERROR_INVALID_FLAGS: errno.EINVAL,
        winerror.ERROR_INVALID_FUNCTION: errno.ENOSYS,
        winerror.ERROR_INVALID_HANDLE: errno.EBADF,
        winerror.ERROR_INVALID_LOGON_HOURS: errno.EACCES,
        winerror.ERROR_INVALID_NAME: errno.EINVAL,
        winerror.ERROR_INVALID_OWNER: errno.EINVAL,
        winerror.ERROR_INVALID_PARAMETER: errno.EINVAL,
        winerror.ERROR_INVALID_PASSWORD: errno.EPERM,
        winerror.ERROR_INVALID_PRIMARY_GROUP: errno.EINVAL,
        winerror.ERROR_INVALID_SIGNAL_NUMBER: errno.EINVAL,
        winerror.ERROR_INVALID_TARGET_HANDLE: errno.EIO,
        winerror.ERROR_INVALID_WORKSTATION: errno.EACCES,
        winerror.ERROR_IO_DEVICE: errno.EIO,
        winerror.ERROR_IO_INCOMPLETE: errno.EINTR,
        winerror.ERROR_LOCKED: errno.EBUSY,
        winerror.ERROR_LOCK_VIOLATION: errno.EACCES,
        winerror.ERROR_LOGON_FAILURE: errno.EACCES,
        winerror.ERROR_MAPPED_ALIGNMENT: errno.EINVAL,
        winerror.ERROR_META_EXPANSION_TOO_LONG: errno.E2BIG,
        winerror.ERROR_MORE_DATA: errno.EPIPE,
        winerror.ERROR_NEGATIVE_SEEK: errno.ESPIPE,
        winerror.ERROR_NOACCESS: errno.EFAULT,
        winerror.ERROR_NONE_MAPPED: errno.EINVAL,
        winerror.ERROR_NOT_ENOUGH_MEMORY: errno.ENOMEM,
        winerror.ERROR_NOT_READY: errno.EAGAIN,
        winerror.ERROR_NOT_SAME_DEVICE: errno.EXDEV,
        winerror.ERROR_NO_DATA: errno.EPIPE,
        winerror.ERROR_NO_MORE_SEARCH_HANDLES: errno.EIO,
        winerror.ERROR_NO_PROC_SLOTS: errno.EAGAIN,
        winerror.ERROR_NO_SUCH_PRIVILEGE: errno.EACCES,
        winerror.ERROR_OPEN_FAILED: errno.EIO,
        winerror.ERROR_OPEN_FILES: errno.EBUSY,
        winerror.ERROR_OPERATION_ABORTED: errno.EINTR,
        winerror.ERROR_OUTOFMEMORY: errno.ENOMEM,
        winerror.ERROR_PASSWORD_EXPIRED: errno.EACCES,
        winerror.ERROR_PATH_BUSY: errno.EBUSY,
        winerror.ERROR_PATH_NOT_FOUND: errno.ENOENT,
        winerror.ERROR_PIPE_BUSY: errno.EBUSY,
        winerror.ERROR_PIPE_CONNECTED: errno.EPIPE,
        winerror.ERROR_PIPE_LISTENING: errno.EPIPE,
        winerror.ERROR_PIPE_NOT_CONNECTED: errno.EPIPE,
        winerror.ERROR_PRIVILEGE_NOT_HELD: errno.EACCES,
        winerror.ERROR_READ_FAULT: errno.EIO,
        winerror.ERROR_SEEK: errno.EIO,
        winerror.ERROR_SEEK_ON_DEVICE: errno.ESPIPE,
        winerror.ERROR_SHARING_BUFFER_EXCEEDED: errno.ENFILE,
        winerror.ERROR_SHARING_VIOLATION: errno.EACCES,
        winerror.ERROR_STACK_OVERFLOW: errno.ENOMEM,
        winerror.ERROR_SWAPERROR: errno.ENOENT,
        winerror.ERROR_TOO_MANY_MODULES: errno.EMFILE,
        winerror.ERROR_TOO_MANY_OPEN_FILES: errno.EMFILE,
        winerror.ERROR_UNRECOGNIZED_MEDIA: errno.ENXIO,
        winerror.ERROR_UNRECOGNIZED_VOLUME: errno.ENODEV,
        winerror.ERROR_WAIT_NO_CHILDREN: errno.ECHILD,
        winerror.ERROR_WRITE_FAULT: errno.EIO,
        winerror.ERROR_WRITE_PROTECT: errno.EROFS,
        }

    def __init__(self, err):
        # err is a pywintypes.error tuple: (winerror, funcname, message)
        self.win_errno, self.win_function, self.win_strerror = err
        if self.win_strerror.endswith('.'):
            self.win_strerror = self.win_strerror[:-1]

class WinIOError(WinError, IOError):
    # IOError with errno derived from a win32 error code
    def __init__(self, err, filename=None):
        WinError.__init__(self, err)
        IOError.__init__(self, self.winerror_map.get(self.win_errno, 0),
                         self.win_strerror)
        self.filename = filename

class WinOSError(WinError, OSError):
    # OSError with errno derived from a win32 error code
    def __init__(self, err):
        WinError.__init__(self, err)
        OSError.__init__(self, self.winerror_map.get(self.win_errno, 0),
                         self.win_strerror)
+
def os_link(src, dst):
    # NB will only succeed on NTFS
    try:
        win32file.CreateHardLink(dst, src)
    except pywintypes.error, details:
        raise WinOSError(details)

def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    try:
        fh = win32file.CreateFile(pathname,
            win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
            None, win32file.OPEN_EXISTING, 0, None)
        # GetFileInformationByHandle()[7] is the link count
        res = win32file.GetFileInformationByHandle(fh)
        fh.Close()
        return res[7]
    except pywintypes.error:
        # fall back to stat (which reports 1 on FAT and friends)
        return os.stat(pathname).st_nlink
+
def testpid(pid):
    '''return True if pid is still running or unable to
    determine, False otherwise'''
    try:
        handle = win32api.OpenProcess(
            win32con.PROCESS_QUERY_INFORMATION, False, pid)
        if handle:
            # a process that has exited still opens; check its status
            status = win32process.GetExitCodeProcess(handle)
            return status == win32con.STILL_ACTIVE
    except pywintypes.error, details:
        # ERROR_INVALID_PARAMETER means no such pid; any other error
        # leaves the answer unknown, so report "running"
        return details[0] != winerror.ERROR_INVALID_PARAMETER
    return True
+
def system_rcpath_win32():
    '''return default os-specific hgrc search path'''
    # look for mercurial.ini next to the running executable
    proc = win32api.GetCurrentProcess()
    try:
        # This will fail on windows < NT
        filename = win32process.GetModuleFileNameEx(proc, 0)
    except:
        filename = win32api.GetModuleFileName(0)
    return [os.path.join(os.path.dirname(filename), 'mercurial.ini')]

def user_rcpath():
    '''return os-specific hgrc search path to the user dir'''
    userdir = os.path.expanduser('~')
    if userdir == '~':
        # We are on win < nt: fetch the APPDATA directory location and use
        # the parent directory as the user home dir.
        appdir = shell.SHGetPathFromIDList(
            shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA))
        userdir = os.path.dirname(appdir)
    return os.path.join(userdir, 'mercurial.ini')
+
class posixfile_nt(object):
    '''file object with posix-like semantics.  on windows, normal
    files can not be deleted or renamed if they are open.  must open
    with win32file.FILE_SHARE_DELETE.  this flag does not exist on
    windows < nt, so do not use this class there.'''

    # tried to use win32file._open_osfhandle to pass fd to os.fdopen,
    # but does not work at all.  wrap win32 file api instead.

    def __init__(self, name, mode='rb'):
        # translate a stdio mode string into win32 access/creation flags
        access = 0
        if 'r' in mode or '+' in mode:
            access |= win32file.GENERIC_READ
        if 'w' in mode or 'a' in mode:
            access |= win32file.GENERIC_WRITE
        if 'r' in mode:
            creation = win32file.OPEN_EXISTING
        elif 'a' in mode:
            creation = win32file.OPEN_ALWAYS
        else:
            creation = win32file.CREATE_ALWAYS
        try:
            self.handle = win32file.CreateFile(name,
                                               access,
                                               win32file.FILE_SHARE_READ |
                                               win32file.FILE_SHARE_WRITE |
                                               win32file.FILE_SHARE_DELETE,
                                               None,
                                               creation,
                                               win32file.FILE_ATTRIBUTE_NORMAL,
                                               0)
        except pywintypes.error, err:
            raise WinIOError(err, name)
        self.closed = False
        self.name = name
        self.mode = mode

    def __iter__(self):
        # NOTE(review): reads the whole file to iterate lines — fine
        # for config-sized files this class is used for
        for line in self.read().splitlines(True):
            yield line

    def read(self, count=-1):
        try:
            cs = cStringIO.StringIO()
            while count:
                # read in at most 1MB slices when count is unbounded
                wincount = int(count)
                if wincount == -1:
                    wincount = 1048576
                val, data = win32file.ReadFile(self.handle, wincount)
                if not data: break
                cs.write(data)
                if count != -1:
                    count -= len(data)
            return cs.getvalue()
        except pywintypes.error, err:
            raise WinIOError(err)

    def write(self, data):
        try:
            if 'a' in self.mode:
                # append mode: always write at end of file
                win32file.SetFilePointer(self.handle, 0, win32file.FILE_END)
            nwrit = 0
            # WriteFile may write fewer bytes than asked; loop until done
            while nwrit < len(data):
                val, nwrit = win32file.WriteFile(self.handle, data)
                data = data[nwrit:]
        except pywintypes.error, err:
            raise WinIOError(err)

    def seek(self, pos, whence=0):
        try:
            win32file.SetFilePointer(self.handle, int(pos), whence)
        except pywintypes.error, err:
            raise WinIOError(err)

    def tell(self):
        try:
            # a zero-byte move relative to the current position
            # returns the current offset
            return win32file.SetFilePointer(self.handle, 0,
                                            win32file.FILE_CURRENT)
        except pywintypes.error, err:
            raise WinIOError(err)

    def close(self):
        if not self.closed:
            # dropping the last reference closes the win32 handle
            self.handle = None
            self.closed = True

    def flush(self):
        try:
            win32file.FlushFileBuffers(self.handle)
        except pywintypes.error, err:
            raise WinIOError(err)

    def truncate(self, pos=0):
        try:
            win32file.SetFilePointer(self.handle, int(pos),
                                     win32file.FILE_BEGIN)
            win32file.SetEndOfFile(self.handle)
        except pywintypes.error, err:
            raise WinIOError(err)
new file mode 100644
--- /dev/null
+++ b/mercurial/version.py
@@ -0,0 +1,74 @@
+# Copyright (C) 2005 by Intevation GmbH
+# Author(s):
+# Thomas Arendsen Hein <thomas@intevation.de>
+#
+# This program is free software under the GNU GPL (>=v2)
+# Read the file COPYING coming with the software for details.
+
+"""
+Mercurial version
+"""
+
+import os
+import os.path
+import re
+import time
+import util
+
+unknown_version = 'unknown'
+remembered_version = False
+
+def get_version():
+ """Return version information if available."""
+ try:
+ from mercurial.__version__ import version
+ except ImportError:
+ version = unknown_version
+ return version
+
+def write_version(version):
+ """Overwrite version file."""
+ if version == get_version():
+ return
+ directory = os.path.dirname(__file__)
+ for suffix in ['py', 'pyc', 'pyo']:
+ try:
+ os.unlink(os.path.join(directory, '__version__.%s' % suffix))
+ except OSError:
+ pass
+ f = open(os.path.join(directory, '__version__.py'), 'w')
+ f.write("# This file is auto-generated.\n")
+ f.write("version = %r\n" % version)
+ f.close()
+
+def remember_version(version=None):
+ """Store version information."""
+ global remembered_version
+ if not version and os.path.isdir(".hg"):
+ f = os.popen("hg identify 2> %s" % util.nulldev) # use real hg installation
+ ident = f.read()[:-1]
+ if not f.close() and ident:
+ ids = ident.split(' ', 1)
+ version = ids.pop(0)
+ if version[-1] == '+':
+ version = version[:-1]
+ modified = True
+ else:
+ modified = False
+ if version.isalnum() and ids:
+ for tag in ids[0].split('/'):
+ # is a tag suitable as a version number?
+ if re.match(r'^(\d+\.)+[\w.-]+$', tag):
+ version = tag
+ break
+ if modified:
+ version += time.strftime('+%Y%m%d')
+ if version:
+ remembered_version = True
+ write_version(version)
+
+def forget_version():
+ """Remove version information."""
+ if remembered_version:
+ write_version(unknown_version)
+
new file mode 100644
--- /dev/null
+++ b/notes.txt
@@ -0,0 +1,146 @@
+Some notes about Mercurial's design
+
+Revlogs:
+
+The fundamental storage type in Mercurial is a "revlog". A revlog is
+the set of all revisions to a file. Each revision is either stored
+compressed in its entirety or as a compressed binary delta against the
+previous version. The decision of when to store a full version is made
+based on how much data would be needed to reconstruct the file. This
+lets us ensure that we never need to read huge amounts of data to
+reconstruct a file, regardless of how many revisions of it we store.
+
+In fact, we should always be able to do it with a single read,
+provided we know when and where to read. This is where the index comes
+in. Each revlog has an index containing a special hash (nodeid) of the
+text, hashes for its parents, and where and how much of the revlog
+data we need to read to reconstruct it. Thus, with one read of the
+index and one read of the data, we can reconstruct any version in time
+proportional to the file size.
+
+Similarly, revlogs and their indices are append-only. This means that
+adding a new version is also O(1) seeks.
+
+Generally revlogs are used to represent revisions of files, but they
+also are used to represent manifests and changesets.
+
+Manifests:
+
+A manifest is simply a list of all files in a given revision of a
+project along with the nodeids of the corresponding file revisions. So
+grabbing a given version of the project means simply looking up its
+manifest and reconstructing all the file revisions pointed to by it.
+
+Changesets:
+
+A changeset is a list of all files changed in a check-in along with a
+change description and some metadata like user and date. It also
+contains a nodeid to the relevant revision of the manifest. Changesets
+and manifests are one-to-one, but contain different data for
+convenience.
+
+Nodeids:
+
+Nodeids are unique ids that are used to represent the contents of a
+file AND its position in the project history. That is, if you change a
+file and then change it back, the result will have a different nodeid
+because it has different history. This is accomplished by including
+the parents of a given revision's nodeids with the revision's text
+when calculating the hash.
+
+Graph merging:
+
+Nodeids are implemented as they are to simplify merging. Merging a
+pair of directed acyclic graphs (aka "the family tree" of the file
+history) requires some method of determining if nodes in different
+graphs correspond. Simply comparing the contents of the node (by
+comparing text of given revisions or their hashes) can get confused by
+identical revisions in the tree.
+
+The nodeid approach makes it trivial - the hash uniquely describes a
+revision's contents and its graph position relative to the root, so
+merge is simply checking whether each nodeid in graph A is in the hash
+table of graph B. If not, we pull them in, adding them sequentially to
+the revlog.
+
+Branching and merging:
+
+Everything in Mercurial is potentially a branch and every user
+effectively works in their own branch. When you do a checkout,
+Mercurial remembers what the parent changeset was and uses it for the
+next check in.
+
+To do a merge of branches in Mercurial, you check out the heads of the
+two branches into the same working directory which causes a merge to
+be performed, and then check in the result once you're happy with it.
+The resulting checkin will have two parents.
+
+It decides when a merge is necessary by first determining if there are
+any uncommitted changes in the working directory. This effectively
+makes the working directory a branch off the checked in version it's
+based on. Then it also determines if the working directory is a direct
+ancestor or descendant of the second version we're attempting to
+checkout. If neither is true, we simply replace the working directory
+version with the new version. Otherwise we perform a merge between the
+two versions.
+
+Merging files and manifests:
+
+We begin by comparing the two versions' manifests and deciding which files
+need to be added, deleted, and merged.
+
+Then for each file, we perform a graph merge and resolve as above.
+It's important to merge files using per-file DAGs rather than just
+changeset level DAGs as this diagram illustrates:
+
+M M1 M2
+
+AB
+ |`-------v M2 clones M
+aB AB file A is changed in mainline
+ |`---v AB' file B is changed in M2
+ | aB / | M1 clones M
+ | ab/ | M1 changes B
+ | ab' | M1 merges from M2, changes to B conflict
+ | | A'B' M2 changes A
+ `---+--.|
+ | a'B' M2 merges from mainline, changes to A conflict
+ `--.|
+ ??? depending on which ancestor we choose, we will have
+ to redo A hand-merge, B hand-merge, or both
+ but if we look at the files independently, everything
+ is fine
+
+The result is a merged version in the working directory, waiting for
+check-in.
+
+Rollback:
+
+When performing a commit or a merge, we order things so that the
+changeset entry gets added last. We keep a transaction log of the name
+of each file touched and its length prior to the transaction. On
+abort, we simply truncate each file to its prior length. This is one
+of the nice properties of the append-only structure of the revlogs.
+We can also reuse this journal for "rollback".
+
+Merging between repositories:
+
+One of the key features of Mercurial is the ability to merge between
+independent repositories in a decentralized fashion. Each repository
+can act as a read-only server or a client. Clients operate by
+pulling all branches that they haven't seen from the server and adding
+them into their graph. This is done in two steps: searching for new
+"roots" and pulling a "changegroup"
+
+Searching for new "roots" begins by finding all new heads and
+searching backwards from those heads to the first unknown nodes in
+their respective branches. These nodes are the 'roots' that are used
+to calculate the 'changegroup': the set of all changesets starting at
+those roots. Mercurial takes pains to make this search efficient in
+both bandwidth and round-trips.
+
+Once the roots are found, the changegroup can be transferred as a
+single streaming transfer. This is organized as an ordered set of
+deltas for changesets, manifests, and files. Large chunks of deltas
+can be directly added to the repository without unpacking so it's
+fairly fast.
new file mode 100644
--- /dev/null
+++ b/rewrite-log
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+import sys, os
+from mercurial import hg
+
+f = sys.argv[1]
+
+r1 = hg.revlog(open, f + ".i", f + ".d")
+r2 = hg.revlog(open, f + ".i2", f + ".d2")
+
+tr = hg.transaction(open, "journal")
+
+for i in xrange(r1.count()):
+ n = r1.node(i)
+ p1, p2 = r1.parents(n)
+ l = r1.linkrev(n)
+ t = r1.revision(n)
+ n2 = r2.addrevision(t, tr, l, p1, p2)
+tr.close()
+
+os.rename(f + ".i", f + ".i.old")
+os.rename(f + ".d", f + ".d.old")
+os.rename(f + ".i2", f + ".i")
+os.rename(f + ".d2", f + ".d")
new file mode 100644
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+#
+# This is the mercurial setup script.
+#
+# './setup.py install', or
+# './setup.py --help' for more options
+
+import sys
+if not hasattr(sys, 'version_info') or sys.version_info < (2, 3):
+ raise SystemExit, "Mercurial requires python 2.3 or later."
+
+import glob
+from distutils.core import setup, Extension
+from distutils.command.install_data import install_data
+
+# mercurial.packagescan must be the first mercurial module imported
+import mercurial.packagescan
+import mercurial.version
+
+# py2exe needs to be installed to work
+try:
+ import py2exe
+
+ # Help py2exe to find win32com.shell
+ try:
+ import modulefinder
+ import win32com
+ for p in win32com.__path__[1:]: # Take the path to win32comext
+ modulefinder.AddPackagePath("win32com", p)
+ pn = "win32com.shell"
+ __import__(pn)
+ m = sys.modules[pn]
+ for p in m.__path__[1:]:
+ modulefinder.AddPackagePath(pn, p)
+ except ImportError:
+ pass
+
+ # Due to the use of demandload py2exe is not finding the modules.
+ # packagescan.getmodules creates a list of modules included in
+ # the mercurial package plus dependent modules.
+ from py2exe.build_exe import py2exe as build_exe
+
+ class py2exe_for_demandload(build_exe):
+ """ overwrites the py2exe command class for getting the build
+ directory and for setting the 'includes' option."""
+ def initialize_options(self):
+ self.build_lib = None
+ build_exe.initialize_options(self)
+ def finalize_options(self):
+ # Get the build directory, ie. where to search for modules.
+ self.set_undefined_options('build',
+ ('build_lib', 'build_lib'))
+ # Sets the 'includes' option with the list of needed modules
+ if not self.includes:
+ self.includes = []
+ else:
+ self.includes = self.includes.split(',')
+ mercurial.packagescan.scan(self.build_lib,'mercurial')
+ mercurial.packagescan.scan(self.build_lib,'mercurial/hgweb')
+ mercurial.packagescan.scan(self.build_lib,'hgext')
+ self.includes += mercurial.packagescan.getmodules()
+ build_exe.finalize_options(self)
+except ImportError:
+ py2exe_for_demandload = None
+
+
+# specify version string, otherwise 'hg identify' will be used:
+version = ''
+
+class install_package_data(install_data):
+ def finalize_options(self):
+ self.set_undefined_options('install',
+ ('install_lib', 'install_dir'))
+ install_data.finalize_options(self)
+
+mercurial.version.remember_version(version)
+cmdclass = {'install_data': install_package_data}
+py2exe_opts = {}
+if py2exe_for_demandload is not None:
+ cmdclass['py2exe'] = py2exe_for_demandload
+ py2exe_opts['console'] = ['hg']
+setup(name='mercurial',
+ version=mercurial.version.get_version(),
+ author='Matt Mackall',
+ author_email='mpm@selenic.com',
+ url='http://selenic.com/mercurial',
+ description='Scalable distributed SCM',
+ license='GNU GPL',
+ packages=['mercurial', 'mercurial.hgweb', 'hgext'],
+ ext_modules=[Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
+ Extension('mercurial.bdiff', ['mercurial/bdiff.c'])],
+ data_files=[('mercurial/templates',
+ ['templates/map'] +
+ glob.glob('templates/map-*') +
+ glob.glob('templates/*.tmpl')),
+ ('mercurial/templates/static',
+ glob.glob('templates/static/*'))],
+ cmdclass=cmdclass,
+ scripts=['hg', 'hgmerge'],
+ options=dict(bdist_mpkg=dict(zipdist=True,
+ license='COPYING',
+ readme='contrib/macosx/Readme.html',
+ welcome='contrib/macosx/Welcome.html')),
+ **py2exe_opts)
new file mode 100644
--- /dev/null
+++ b/templates/changelog-gitweb.tmpl
@@ -0,0 +1,34 @@
+#header#
+<title>#repo|escape#: Changelog</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
+</head>
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / changelog
+</div>
+
+<form action="#">
+<div class="search">
+<input type="hidden" name="repo" value="#repo|escape#" />
+<input type="hidden" name="style" value="gitweb" />
+<input type="hidden" name="cmd" value="changelog" />
+<input type="text" name="rev" />
+</div>
+</form>
+</div>
+
+<div class="page_nav">
+<a href="?cmd=summary;style=gitweb">summary</a> | changelog | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a><br/>
+<br/>
+#changenav%naventry#<br/>
+</div>
+
+#entries%changelogentry#
+
+<div class="page_nav">
+#changenav%naventry#<br/>
+</div>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/changelog-rss.tmpl
@@ -0,0 +1,6 @@
+#header#
+ <title>#repo|escape# Changelog</title>
+ <description>#repo|escape# Changelog</description>
+ #entries%changelogentry#
+ </channel>
+</rss>
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/templates/changelog.tmpl
@@ -0,0 +1,37 @@
+#header#
+<title>#repo|escape#: changelog</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
+</head>
+<body>
+
+<div class="buttons">
+<a href="?cmd=tags">tags</a>
+<a href="?mf=#manifest|short#;path=/">manifest</a>
+#archives%archiveentry#
+<a type="application/rss+xml" href="?style=rss">rss</a>
+</div>
+
+<h2>changelog for #repo|escape#</h2>
+
+<form action="#">
+<p>
+<label for="search1">search:</label>
+<input type="hidden" name="cmd" value="changelog">
+<input name="rev" id="search1" type="text" size="30">
+navigate: <small class="navigate">#changenav%naventry#</small>
+</p>
+</form>
+
+#entries%changelogentry#
+
+<form action="#">
+<p>
+<label for="search2">search:</label>
+<input type="hidden" name="cmd" value="changelog">
+<input name="rev" id="search2" type="text" size="30">
+navigate: <small class="navigate">#changenav%naventry#</small>
+</p>
+</form>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/changelogentry-gitweb.tmpl
@@ -0,0 +1,14 @@
+<div>
+<a class="title" href="?cmd=changeset;node=#node#;style=gitweb"><span class="age">#date|age# ago</span>#desc|strip|firstline|escape#</a>
+</div>
+<div class="title_text">
+<div class="log_link">
+<a href="?cmd=changeset;node=#node#;style=gitweb">changeset</a><br/>
+</div>
+<i>#author|obfuscate# [#date|rfc822date#] rev #rev#</i><br/>
+</div>
+<div class="log_body">
+#desc|strip|escape|addbreaks#
+<br/>
+<br/>
+</div>
new file mode 100644
--- /dev/null
+++ b/templates/changelogentry-rss.tmpl
@@ -0,0 +1,7 @@
+<item>
+ <title>#desc|strip|firstline|strip|escape#</title>
+ <link>#url#?cs=#node|short#</link>
+ <description><![CDATA[#desc|strip|escape|addbreaks#]]></description>
+ <author>#author|obfuscate#</author>
+ <pubDate>#date|rfc822date#</pubDate>
+</item>
new file mode 100644
--- /dev/null
+++ b/templates/changelogentry.tmpl
@@ -0,0 +1,25 @@
+<table class="logEntry parity#parity#">
+ <tr>
+ <th class="age">#date|age# ago:</th>
+ <th class="firstline">#desc|strip|firstline|escape#</th>
+ </tr>
+ <tr>
+ <th class="revision">changeset #rev#:</th>
+ <td class="node"><a href="?cs=#node|short#">#node|short#</a></td>
+ </tr>
+ #parent%changelogparent#
+ #child%changelogchild#
+ #changelogtag#
+ <tr>
+ <th class="author">author:</th>
+ <td class="author">#author|obfuscate#</td>
+ </tr>
+ <tr>
+ <th class="date">date:</th>
+ <td class="date">#date|date#</td>
+ </tr>
+ <tr>
+ <th class="files"><a href="?mf=#manifest|short#;path=/">files</a>:</th>
+ <td class="files">#files#</td>
+ </tr>
+</table>
new file mode 100644
--- /dev/null
+++ b/templates/changeset-gitweb.tmpl
@@ -0,0 +1,41 @@
+#header#
+<title>#repo|escape#: Changeset</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
+</head>
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / changeset
+</div>
+
+<div class="page_nav">
+<a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;rev=#rev#;style=gitweb">changelog</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a> | changeset | <a href="?cmd=changeset;node=#node#;style=raw">raw</a> #archives%archiveentry#<br/>
+</div>
+
+<div>
+<a class="title" href="?cmd=changeset;node=#node#;style=raw">#desc|strip|escape|firstline#</a>
+</div>
+<div class="title_text">
+<table cellspacing="0">
+<tr><td>author</td><td>#author|obfuscate#</td></tr>
+<tr><td></td><td>#date|date# (#date|age# ago)</td></tr>
+<tr><td>changeset</td><td style="font-family:monospace">#node|short#</td></tr>
+<tr><td>manifest</td><td style="font-family:monospace"><a class="list" href="?cmd=manifest;manifest=#manifest|short#;path=/;style=gitweb">#manifest|short#</a></td></tr>
+#parent%changesetparent#
+#child%changesetchild#
+#changesettag#
+</table></div>
+
+<div class="title_text">
+#desc|strip|escape|addbreaks#
+</div>
+
+<div class="title_text">
+<table cellspacing="0">
+#files#
+</table></div>
+
+<div class="page_body">#diff#</div>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/changeset-raw.tmpl
@@ -0,0 +1,9 @@
+#header#
+# HG changeset patch
+# User #author#
+# Date #date|date#
+# Node ID #node#
+#parent%changesetparent#
+#desc#
+
+#diff#
new file mode 100644
--- /dev/null
+++ b/templates/changeset.tmpl
@@ -0,0 +1,46 @@
+#header#
+<title>#repo|escape#: changeset #node|short#</title>
+</head>
+<body>
+
+<div class="buttons">
+<a href="?cl=#rev#">changelog</a>
+<a href="?cmd=tags">tags</a>
+<a href="?mf=#manifest|short#;path=/">manifest</a>
+<a href="?cs=#node|short#;style=raw">raw</a>
+#archives%archiveentry#
+</div>
+
+<h2>changeset: #desc|strip|escape|firstline#</h2>
+
+<table id="changesetEntry">
+<tr>
+ <th class="changeset">changeset #rev#:</th>
+ <td class="changeset"><a href="?cs=#node|short#">#node|short#</a></td>
+</tr>
+#parent%changesetparent#
+#child%changesetchild#
+#changesettag#
+<tr>
+ <th class="author">author:</th>
+ <td class="author">#author|obfuscate#</td>
+</tr>
+<tr>
+ <th class="date">date:</th>
+ <td class="date">#date|date# (#date|age# ago)</td></tr>
+<tr>
+ <th class="files">files:</th>
+ <td class="files">#files#</td></tr>
+<tr>
+ <th class="description">description:</th>
+ <td class="description">#desc|strip|escape|addbreaks#</td>
+</tr>
+</table>
+
+<div id="changesetDiff">
+#diff#
+</div>
+
+#footer#
+
+
new file mode 100644
--- /dev/null
+++ b/templates/error-gitweb.tmpl
@@ -0,0 +1,22 @@
+#header#
+<title>#repo|escape#: Error</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
+</head>
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / error
+</div>
+
+<div class="page_nav">
+<a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">changelog</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a><br/>
+</div>
+
+<div>
+<br/>
+<i>An error occurred while processing your request</i><br/>
+<br/>
+</div>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/error.tmpl
@@ -0,0 +1,15 @@
+#header#
+<title>Mercurial Error</title>
+</head>
+<body>
+
+<h2>Mercurial Error</h2>
+
+<p>
+An error occurred while processing your request:
+</p>
+<p>
+#error|escape#
+</p>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/fileannotate-gitweb.tmpl
@@ -0,0 +1,45 @@
+#header#
+<title>#repo|escape#: Annotate</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
+</head>
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / annotate
+</div>
+
+<div class="page_nav">
+<a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">changelog</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=#path|urlescape#;style=gitweb">manifest</a> | <a href="?cmd=changeset;node=#node#;style=gitweb">changeset</a> | <a href="?cmd=file;file=#file|urlescape#;filenode=#filenode#;style=gitweb">file</a> | <a href="?cmd=filelog;file=#file|urlescape#;filenode=#filenode#;style=gitweb">revisions</a> | annotate | <a href="?cmd=annotate;file=#file|urlescape#;filenode=#filenode#;style=raw">raw</a><br/>
+</div>
+
+<div class="title">#file|escape#</div>
+
+<table>
+<tr>
+ <td class="metatag">changeset #rev#:</td>
+ <td><a href="?cs=#node|short#;style=gitweb">#node|short#</a></td></tr>
+#rename%filerename#
+#parent%fileannotateparent#
+#child%fileannotatechild#
+<tr>
+ <td class="metatag">manifest:</td>
+ <td><a href="?mf=#manifest|short#;path=/;style=gitweb">#manifest|short#</a></td></tr>
+<tr>
+ <td class="metatag">author:</td>
+ <td>#author|obfuscate#</td></tr>
+<tr>
+ <td class="metatag">date:</td>
+ <td>#date|date# (#date|age# ago)</td></tr>
+<tr>
+ <td class="metatag">permissions:</td>
+ <td>#permissions|permissions#</td></tr>
+</table>
+
+<div class="page_body">
+<table>
+#annotate%annotateline#
+</table>
+</div>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/fileannotate-raw.tmpl
@@ -0,0 +1,5 @@
+#header#
+#annotate%annotateline#
+#footer#
+
+
new file mode 100644
--- /dev/null
+++ b/templates/fileannotate.tmpl
@@ -0,0 +1,42 @@
+#header#
+<title>#repo|escape#: #file|escape# annotate</title>
+</head>
+<body>
+
+<div class="buttons">
+<a href="?cl=#rev#">changelog</a>
+<a href="?tags=">tags</a>
+<a href="?cs=#node|short#">changeset</a>
+<a href="?mf=#manifest|short#;path=#path|urlescape#">manifest</a>
+<a href="?f=#filenode|short#;file=#file|urlescape#">file</a>
+<a href="?fl=#filenode|short#;file=#file|urlescape#">revisions</a>
+<a href="?fa=#filenode|short#;file=#file|urlescape#;style=raw">raw</a>
+</div>
+
+<h2>Annotate #file|escape#</h2>
+
+<table>
+<tr>
+ <td class="metatag">changeset #rev#:</td>
+ <td><a href="?cs=#node|short#">#node|short#</a></td></tr>
+#rename%filerename#
+#parent%fileannotateparent#
+#child%fileannotatechild#
+<tr>
+ <td class="metatag">author:</td>
+ <td>#author|obfuscate#</td></tr>
+<tr>
+ <td class="metatag">date:</td>
+ <td>#date|date# (#date|age# ago)</td></tr>
+<tr>
+ <td class="metatag">permissions:</td>
+ <td>#permissions|permissions#</td></tr>
+</table>
+
+<br/>
+
+<table cellspacing="0" cellpadding="0">
+#annotate%annotateline#
+</table>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/filediff-raw.tmpl
@@ -0,0 +1,5 @@
+#header#
+#diff#
+#footer#
+
+
new file mode 100644
--- /dev/null
+++ b/templates/filediff.tmpl
@@ -0,0 +1,33 @@
+#header#
+<title>#repo|escape#: #file|escape# diff</title>
+</head>
+<body>
+
+<div class="buttons">
+<a href="?cl=#rev#">changelog</a>
+<a href="?tags=">tags</a>
+<a href="?cs=#node|short#">changeset</a>
+<a href="?f=#filenode|short#;file=#file|urlescape#">file</a>
+<a href="?fl=#filenode|short#;file=#file|urlescape#">revisions</a>
+<a href="?fa=#filenode|short#;file=#file|urlescape#">annotate</a>
+<a href="?fd=#node|short#;file=#file|urlescape#;style=raw">raw</a>
+</div>
+
+<h2>#file|escape#</h2>
+
+<table id="filediffEntry">
+<tr>
+ <th class="revision">revision #rev#:</th>
+ <td class="revision"><a href="?cs=#node|short#">#node|short#</a></td>
+</tr>
+#parent%filediffparent#
+#child%filediffchild#
+</table>
+
+<div id="fileDiff">
+#diff#
+</div>
+
+#footer#
+
+
new file mode 100644
--- /dev/null
+++ b/templates/filelog-gitweb.tmpl
@@ -0,0 +1,22 @@
+#header#
+<title>#repo|escape#: File revisions</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
+</head>
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / file revisions
+</div>
+
+<div class="page_nav">
+<a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">changelog</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=file;file=#file|urlescape#;filenode=#filenode#;style=gitweb">file</a> | revisions | <a href="?cmd=annotate;file=#file|urlescape#;filenode=#filenode#;style=gitweb">annotate</a> | <a href="?fl=#filenode|short#;file=#file|urlescape#;style=rss">rss</a><br/>
+</div>
+
+<div class="title" >#file|urlescape#</div>
+
+<table>
+#entries%filelogentry#
+</table>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/filelog-rss.tmpl
@@ -0,0 +1,6 @@
+#header#
+ <title>#repo|escape#: #file|escape# history</title>
+ <description>#file|escape# revision history</description>
+ #entries%filelogentry#
+ </channel>
+</rss>
new file mode 100644
--- /dev/null
+++ b/templates/filelog.tmpl
@@ -0,0 +1,21 @@
+#header#
+<title>#repo|escape#: #file|escape# history</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?fl=0;file=#file|urlescape#;style=rss" title="RSS feed for #repo|escape#:#file#">
+</head>
+</head>
+<body>
+
+<div class="buttons">
+<a href="?cl=tip">changelog</a>
+<a href="?tags=">tags</a>
+<a href="?f=#filenode|short#;file=#file|urlescape#">file</a>
+<a href="?fa=#filenode|short#;file=#file|urlescape#">annotate</a>
+<a type="application/rss+xml" href="?fl=0;file=#file|urlescape#;style=rss">rss</a>
+</div>
+
+<h2>#file|escape# revision history</h2>
+
+#entries%filelogentry#
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/filelogentry-rss.tmpl
@@ -0,0 +1,7 @@
+<item>
+ <title>#desc|strip|firstline|strip|escape#</title>
+ <link>#url#?f=#filenode|short#;file=#file|urlescape#</link>
+ <description><![CDATA[#desc|strip|escape|addbreaks#]]></description>
+ <author>#author|obfuscate#</author>
+ <pubDate>#date|rfc822date#</pubDate>
+</item>
new file mode 100644
--- /dev/null
+++ b/templates/filelogentry.tmpl
@@ -0,0 +1,25 @@
+<table class="logEntry parity#parity#">
+ <tr>
+ <th class="age">#date|age# ago:</th>
+ <th class="firstline"><a href="?cs=#node|short#">#desc|strip|firstline|escape#</a></th>
+ </tr>
+ <tr>
+ <th class="revision">revision #filerev#:</td>
+ <td class="node">
+ <a href="?f=#filenode|short#;file=#file|urlescape#">#filenode|short#</a>
+ <a href="?fd=#node|short#;file=#file|urlescape#">(diff)</a>
+ <a href="?fa=#filenode|short#;file=#file|urlescape#">(annotate)</a>
+ </td>
+ </tr>
+ #rename%filelogrename#
+ <tr>
+ <th class="author">author:</th>
+ <td class="author">#author|obfuscate#</td>
+ </tr>
+ <tr>
+ <th class="date">date:</th>
+ <td class="date">#date|date#</td>
+ </tr>
+</table>
+
+
new file mode 100644
--- /dev/null
+++ b/templates/filerevision-gitweb.tmpl
@@ -0,0 +1,43 @@
+#header#
+<title>#repo|escape#: File revision</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
+</head>
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / file revision
+</div>
+
+<div class="page_nav">
+<a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">changelog</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?mf=#manifest|short#;path=#path|urlescape#;style=gitweb">manifest</a> | <a href="?cmd=changeset;node=#node#;style=gitweb">changeset</a> | file | <a href="?cmd=filelog;file=#file|urlescape#;filenode=#filenode#;style=gitweb">revisions</a> | <a href="?cmd=annotate;file=#file|urlescape#;filenode=#filenode#;style=gitweb">annotate</a> | <a href="?cmd=file;file=#file|urlescape#;filenode=#filenode#;style=raw">raw</a><br/>
+</div>
+
+<div class="title">#file|escape#</div>
+
+<table>
+<tr>
+ <td class="metatag">changeset #rev#:</td>
+ <td><a href="?cs=#node|short#;style=gitweb">#node|short#</a></td></tr>
+#rename%filerename#
+#parent%fileannotateparent#
+#child%fileannotatechild#
+<tr>
+ <td class="metatag">manifest:</td>
+ <td><a href="?mf=#manifest|short#;path=/;style=gitweb">#manifest|short#</a></td></tr>
+<tr>
+ <td class="metatag">author:</td>
+ <td>#author|obfuscate#</td></tr>
+<tr>
+ <td class="metatag">date:</td>
+ <td>#date|date# (#date|age# ago)</td></tr>
+<tr>
+ <td class="metatag">permissions:</td>
+ <td>#permissions|permissions#</td></tr>
+</table>
+
+<div class="page_body">
+#text%fileline#
+</div>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/filerevision.tmpl
@@ -0,0 +1,40 @@
+#header#
+<title>#repo|escape#:#file|escape#</title>
+</head>
+<body>
+
+<div class="buttons">
+<a href="?cl=#rev#">changelog</a>
+<a href="?tags=">tags</a>
+<a href="?cs=#node|short#">changeset</a>
+<a href="?mf=#manifest|short#;path=#path|urlescape#">manifest</a>
+<a href="?fl=#filenode|short#;file=#file|urlescape#">revisions</a>
+<a href="?fa=#filenode|short#;file=#file|urlescape#">annotate</a>
+<a href="?f=#filenode|short#;file=#file|urlescape#;style=raw">raw</a>
+</div>
+
+<h2>#file|escape#</h2>
+
+<table>
+<tr>
+ <td class="metatag">changeset #rev#:</td>
+ <td><a href="?cs=#node|short#">#node|short#</a></td></tr>
+#rename%filerename#
+#parent%filerevparent#
+#child%filerevchild#
+<tr>
+ <td class="metatag">author:</td>
+ <td>#author|obfuscate#</td></tr>
+<tr>
+ <td class="metatag">date:</td>
+ <td>#date|date# (#date|age# ago)</td></tr>
+<tr>
+ <td class="metatag">permissions:</td>
+ <td>#permissions|permissions#</td></tr>
+</table>
+
+<pre>
+#text%fileline#
+</pre>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/footer-gitweb.tmpl
@@ -0,0 +1,6 @@
+<div class="page_footer">
+<div class="page_footer_text">#repo|escape#</div>
+<a class="rss_logo" href="?cmd=changelog;style=rss">RSS</a>
+</div>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/templates/footer.tmpl
@@ -0,0 +1,8 @@
+#motd#
+<div class="logo">
+powered by<br/>
+<a href="http://www.selenic.com/mercurial/">mercurial</a>
+</div>
+
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/templates/header-gitweb.tmpl
@@ -0,0 +1,11 @@
+Content-type: text/html
+
+<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
+<head>
+<link rel="icon" href="?static=hgicon.png" type="image/png">
+<meta http-equiv="content-type" content="text/html; charset=utf-8"/>
+<meta name="robots" content="index, nofollow"/>
+<link rel="stylesheet" href="?static=style-gitweb.css" type="text/css" />
+
new file mode 100644
--- /dev/null
+++ b/templates/header-raw.tmpl
@@ -0,0 +1,1 @@
+Content-type: text/plain
new file mode 100644
--- /dev/null
+++ b/templates/header-rss.tmpl
@@ -0,0 +1,6 @@
+Content-type: text/xml
+
+<rss version="2.0">
+ <channel>
+ <link>#url#</link>
+ <language>en-us</language>
new file mode 100644
--- /dev/null
+++ b/templates/header.tmpl
@@ -0,0 +1,8 @@
+Content-type: text/html
+
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+<link rel="icon" href="?static=hgicon.png" type="image/png">
+<meta name="robots" content="index, nofollow" />
+<link rel="stylesheet" href="?static=style.css" type="text/css" />
new file mode 100644
--- /dev/null
+++ b/templates/index.tmpl
@@ -0,0 +1,19 @@
+#header#
+<title>Mercurial repositories index</title>
+</head>
+<body>
+
+<h2>Mercurial Repositories</h2>
+
+<table>
+ <tr>
+ <td><a href="?sort=#sort_name#">Name</a></td>
+ <td><a href="?sort=#sort_description#">Description</a></td>
+ <td><a href="?sort=#sort_contact#">Contact</a></td>
+ <td><a href="?sort=#sort_lastchange#">Last change</a></td>
+ <td> </td>
+ <tr>
+ #entries%indexentry#
+</table>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/manifest-gitweb.tmpl
@@ -0,0 +1,27 @@
+#header#
+<title>#repo|escape#: Manifest</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
+</head>
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / manifest
+</div>
+
+<div class="page_nav">
+<a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">changelog</a> | <a href="?cmd=tags;style=gitweb">tags</a> | manifest | <a href="?cs=#node|short#;style=gitweb">changeset</a> #archives%archiveentry#<br/>
+</div>
+
+<div class="title" >#path|escape#</div>
+<div class="page_body">
+<table cellspacing="0">
+<tr class="light">
+<td style="font-family:monospace">drwxr-xr-x</td>
+<td><a href="?cmd=manifest;manifest=#manifest#;path=#up|urlescape#;style=gitweb">[up]</a></td>
+<td class="link"> </td>
+</tr>
+#dentries%manifestdirentry#
+#fentries%manifestfileentry#
+</table>
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/manifest.tmpl
@@ -0,0 +1,22 @@
+#header#
+<title>#repo|escape#: manifest #manifest|short#</title>
+</head>
+<body>
+
+<div class="buttons">
+<a href="?cl=#rev#">changelog</a>
+<a href="?tags=">tags</a>
+<a href="?cs=#node|short#">changeset</a>
+#archives%archiveentry#
+</div>
+
+<h2>manifest for changeset #node|short#: #path|escape#</h2>
+
+<table cellpadding="0" cellspacing="0">
+<tr class="parity1">
+ <td><tt>drwxr-xr-x</tt>
+ <td><a href="?mf=#manifest|short#;path=#up|urlescape#">[up]</a>
+#dentries%manifestdirentry#
+#fentries%manifestfileentry#
+</table>
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/map
@@ -0,0 +1,50 @@
+default = 'changelog'
+header = header.tmpl
+footer = footer.tmpl
+search = search.tmpl
+changelog = changelog.tmpl
+naventry = '<a href="?cl=#rev#">#label|escape#</a> '
+filedifflink = '<a href="?fd=#node|short#;file=#file|urlescape#">#file|escape#</a> '
+filenodelink = '<a href="?f=#filenode|short#;file=#file|urlescape#">#file|escape#</a> '
+fileellipses = '...'
+changelogentry = changelogentry.tmpl
+searchentry = changelogentry.tmpl
+changeset = changeset.tmpl
+manifest = manifest.tmpl
+manifestdirentry = '<tr class="parity#parity#"><td><tt>drwxr-xr-x</tt> <td><a href="?cmd=manifest;manifest=#manifest#;path=#path|urlescape#">#basename|escape#/</a>'
+manifestfileentry = '<tr class="parity#parity#"><td><tt>#permissions|permissions#</tt> <td><a href="?f=#filenode|short#;file=#file|urlescape#">#basename|escape#</a>'
+filerevision = filerevision.tmpl
+fileannotate = fileannotate.tmpl
+filediff = filediff.tmpl
+filelog = filelog.tmpl
+fileline = '<div class="parity#parity#"><span class="lineno">#linenumber#</span>#line|escape#</div>'
+filelogentry = filelogentry.tmpl
+annotateline = '<tr class="parity#parity#"><td class="annotate"><a href="?cs=#node|short#">#author|obfuscate#@#rev#</a></td><td><pre>#line|escape#</pre></td></tr>'
+difflineplus = '<span class="plusline">#line|escape#</span>'
+difflineminus = '<span class="minusline">#line|escape#</span>'
+difflineat = '<span class="atline">#line|escape#</span>'
+diffline = '#line|escape#'
+changelogparent = '<tr><th class="parent">parent #rev#:</th><td class="parent"><a href="?cs=#node|short#">#node|short#</a></td></tr>'
+changesetparent = '<tr><th class="parent">parent #rev#:</th><td class="parent"><a href="?cs=#node|short#">#node|short#</a></td></tr>'
+filerevparent = '<tr><td class="metatag">parent:</td><td><a href="?f=#node|short#;file=#file|urlescape#">#node|short#</a></td></tr>'
+filerename = '<tr><td class="metatag">parent:</td><td><a href="?f=#node|short#;file=#file|urlescape#">#file|escape#@#node|short#</a></td></tr>'
+filelogrename = '<tr><th>base:</th><td><a href="?f=#node|short#;file=#file|urlescape#">#file|escape#@#node|short#</a></td></tr>'
+fileannotateparent = '<tr><td class="metatag">parent:</td><td><a href="?fa=#filenode|short#;file=#file|urlescape#">#node|short#</a></td></tr>'
+changesetchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="?cs=#node|short#">#node|short#</a></td></tr>'
+changelogchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="?cs=#node|short#">#node|short#</a></td></tr>'
+filerevchild = '<tr><td class="metatag">child:</td><td><a href="?f=#node|short#;file=#file|urlescape#">#node|short#</a></td></tr>'
+fileannotatechild = '<tr><td class="metatag">child:</td><td><a href="?fa=#filenode|short#;file=#file|urlescape#">#node|short#</a></td></tr>'
+tags = tags.tmpl
+tagentry = '<li class="tagEntry parity#parity#"><tt class="node">#node#</tt> <a href="?cs=#node|short#">#tag|escape#</a></li>'
+diffblock = '<pre class="parity#parity#">#lines#</pre>'
+changelogtag = '<tr><th class="tag">tag:</th><td class="tag">#tag|escape#</td></tr>'
+changesettag = '<tr><th class="tag">tag:</th><td class="tag">#tag|escape#</td></tr>'
+filediffparent = '<tr><th class="parent">parent #rev#:</th><td class="parent"><a href="?cs=#node|short#">#node|short#</a></td></tr>'
+filelogparent = '<tr><th>parent #rev#:</th><td><a href="?f=#node|short#;file=#file|urlescape#">#node|short#</a></td></tr>'
+filediffchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="?cs=#node|short#">#node|short#</a></td></tr>'
+filelogchild = '<tr><th>child #rev#:</th><td><a href="?f=#node|short#;file=#file|urlescape#">#node|short#</a></td></tr>'
+indexentry = '<tr class="parity#parity#"><td><a href="#url#">#name|escape#</a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a href="#url#?cl=tip;style=rss">RSS</a> #archives%archiveentry#</td></tr>'
+index = index.tmpl
+archiveentry = '<a href="#url#?ca=#node|short#;type=#type|urlescape#">#type|escape#</a> '
+notfound = notfound.tmpl
+error = error.tmpl
new file mode 100644
--- /dev/null
+++ b/templates/map-cmdline.changelog
@@ -0,0 +1,14 @@
+header = '{date|shortdate} {author|person} <{author|email}>\n\n'
+header_verbose = ''
+changeset = '\t* {files|stringify|fill68|tabindent}{desc|fill68|tabindent|strip}\n\t[{node|short}]{tags}\n\n'
+changeset_quiet = '\t* {desc|firstline|fill68|tabindent|strip}\n\n'
+changeset_verbose = '{date|isodate} {author|person} <{author|email}> ({node|short}{tags})\n\n\t* {file_adds|stringify|fill68|tabindent}{file_dels|stringify|fill68|tabindent}{files|stringify|fill68|tabindent}{desc|fill68|tabindent|strip}\n\n'
+start_tags = ' ['
+tag = '{tag}, '
+last_tag = '{tag}]'
+file = '{file}, '
+last_file = '{file}:\n\t'
+file_add = '{file_add}, '
+last_file_add = '{file_add}: new file.\n* '
+file_del = '{file_del}, '
+last_file_del = '{file_del}: deleted file.\n* '
new file mode 100644
--- /dev/null
+++ b/templates/map-cmdline.compact
@@ -0,0 +1,8 @@
+changeset = '{rev}{tags}{parents} {node|short} {date|isodate} {author|user}\n {desc|firstline|strip}\n\n'
+changeset_quiet = '{rev}:{node|short}\n'
+start_tags = '['
+tag = '{tag},'
+last_tag = '{tag}]'
+start_parents = ':'
+parent = '{rev},'
+last_parent = '{rev}'
new file mode 100644
--- /dev/null
+++ b/templates/map-cmdline.default
@@ -0,0 +1,13 @@
+changeset = 'changeset: {rev}:{node|short}\n{tags}{short_parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n'
+changeset_quiet = '{rev}:{node|short}\n'
+changeset_verbose = 'changeset: {rev}:{node}\n{tags}{parents}{manifest}user: {author}\ndate: {date|date}\nfiles: {files}\n{file_adds}{file_dels}description:\n{desc|strip}\n\n\n'
+start_file_adds = 'files+: '
+file_add = ' {file_add}'
+end_file_adds = '\n'
+start_file_dels = 'files-: '
+file_del = ' {file_del}'
+end_file_dels = '\n'
+short_parent = 'parent: {rev}:{node|short}\n'
+parent = 'parent: {rev}:{node}\n'
+manifest = 'manifest: {rev}:{node}\n'
+tag = 'tag: {tag}\n'
new file mode 100644
--- /dev/null
+++ b/templates/map-gitweb
@@ -0,0 +1,50 @@
+default = 'summary'
+header = header-gitweb.tmpl
+footer = footer-gitweb.tmpl
+search = search-gitweb.tmpl
+changelog = changelog-gitweb.tmpl
+summary = summary-gitweb.tmpl
+error = error-gitweb.tmpl
+naventry = '<a href="?cmd=changelog;rev=#rev#;style=gitweb">#label|escape#</a> '
+navshortentry = '<a href="?cmd=shortlog;rev=#rev#;style=gitweb">#label|escape#</a> '
+filedifflink = '<a href="?cmd=filediff;node=#node#;file=#file|urlescape#;style=gitweb">#file|escape#</a> '
+filenodelink = '<tr class="light"><td><a class="list" href="">#file|escape#</a></td><td></td><td class="link"><a href="?cmd=file;filenode=#filenode#;file=#file|urlescape#;style=gitweb">file</a> | <!-- FIXME: <a href="?fd=#filenode|short#;file=#file|urlescape#;style=gitweb">diff</a> | --> <a href="?cmd=filelog;filenode=#filenode|short#;file=#file|urlescape#;style=gitweb">revisions</a></td></tr>'
+fileellipses = '...'
+changelogentry = changelogentry-gitweb.tmpl
+searchentry = changelogentry-gitweb.tmpl
+changeset = changeset-gitweb.tmpl
+manifest = manifest-gitweb.tmpl
+manifestdirentry = '<tr class="parity#parity#"><td style="font-family:monospace">drwxr-xr-x</td><td><a href="?mf=#manifest|short#;path=#path|urlescape#;style=gitweb">#basename|escape#/</a></td><td class="link"><a href="?mf=#manifest|short#;path=#path|urlescape#;style=gitweb">manifest</a></td></tr>'
+manifestfileentry = '<tr class="parity#parity#"><td style="font-family:monospace">#permissions|permissions#</td><td class="list"><a class="list" href="?f=#filenode|short#;file=#file|urlescape#;style=gitweb">#basename|escape#</a></td><td class="link"><a href="?f=#filenode|short#;file=#file|urlescape#;style=gitweb">file</a> | <a href="?fl=#filenode|short#;file=#file|urlescape#;style=gitweb">revisions</a> | <a href="?fa=#filenode|short#;file=#file|urlescape#;style=gitweb">annotate</a></td></tr>'
+filerevision = filerevision-gitweb.tmpl
+fileannotate = fileannotate-gitweb.tmpl
+filelog = filelog-gitweb.tmpl
+fileline = '<div style="font-family:monospace; white-space: pre;" class="parity#parity#"><span class="linenr"> #linenumber#</span> #line|escape#</div>'
+annotateline = '<tr style="font-family:monospace; white-space: pre;" class="parity#parity#"><td class="linenr" style="text-align: right;"><a href="?cs=#node|short#;style=gitweb">#author|obfuscate#@#rev#</a></td><td>#line|escape#</td></tr>'
+difflineplus = '<div class="pre" style="color:#008800;">#line|escape#</div>'
+difflineminus = '<div class="pre" style="color:#cc0000;">#line|escape#</div>'
+difflineat = '<div class="pre" style="color:#990099;">#line|escape#</div>'
+diffline = '<div class="pre">#line|escape#</div>'
+changelogparent = '<tr><th class="parent">parent #rev#:</th><td class="parent"><a href="?cmd=changeset;node=#node#;style=gitweb">#node|short#</a></td></tr>'
+changesetparent = '<tr><td>parent</td><td style="font-family:monospace"><a class="list" href="?cmd=changeset;node=#node|short#;style=gitweb">#node|short#</a></td></tr>'
+filerevparent = '<tr><td class="metatag">parent:</td><td><a href="?cmd=file;file=#file|urlescape#;filenode=#node#;style=gitweb">#node|short#</a></td></tr>'
+filerename = '<tr><td class="metatag">parent:</td><td><a href="?f=#node|short#;file=#file|urlescape#;style=gitweb">#file|escape#@#node|short#</a></td></tr>'
+filelogrename = '| <a href="?f=#node|short#;file=#file|urlescape#;style=gitweb">base</a>'
+fileannotateparent = '<tr><td class="metatag">parent:</td><td><a href="?cmd=annotate;file=#file|urlescape#;filenode=#node#;style=gitweb">#node|short#</a></td></tr>'
+changelogchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="?cmd=changeset;node=#node#;style=gitweb">#node|short#</a></td></tr>'
+changesetchild = '<tr><td>child</td><td style="font-family:monospace"><a class="list" href="?cmd=changeset;node=#node|short#;style=gitweb">#node|short#</a></td></tr>'
+filerevchild = '<tr><td class="metatag">child:</td><td><a href="?cmd=file;file=#file|urlescape#;filenode=#node#;style=gitweb">#node|short#</a></td></tr>'
+fileannotatechild = '<tr><td class="metatag">child:</td><td><a href="?cmd=annotate;file=#file|urlescape#;filenode=#node#;style=gitweb">#node|short#</a></td></tr>'
+tags = tags-gitweb.tmpl
+tagentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="?cmd=changeset;node=#node|short#;style=gitweb"><b>#tag|escape#</b></a></td><td class="link"><a href="?cmd=changeset;node=#node|short#;style=gitweb">changeset</a> | <a href="?cmd=changelog;rev=#node|short#;style=gitweb">changelog</a> | <a href="?mf=#tagmanifest|short#;path=/;style=gitweb">manifest</a></td></tr>'
+diffblock = '#lines#'
+changelogtag = '<tr><th class="tag">tag:</th><td class="tag">#tag|escape#</td></tr>'
+changesettag = '<tr><td>tag</td><td>#tag|escape#</td></tr>'
+filediffparent = '<tr><th class="parent">parent #rev#:</th><td class="parent"><a href="?cmd=changeset;node=#node#;style=gitweb">#node|short#</a></td></tr>'
+filelogparent = '<tr><td align="right">parent #rev#: </td><td><a href="?cmd=file;file=#file|urlescape#;filenode=#node#;style=gitweb">#node|short#</a></td></tr>'
+filediffchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="?cmd=changeset;node=#node#;style=gitweb">#node|short#</a></td></tr>'
+filelogchild = '<tr><td align="right">child #rev#: </td><td><a href="?cmd=file;file=#file|urlescape#;filenode=#node#;style=gitweb">#node|short#</a></td></tr>'
+shortlog = shortlog-gitweb.tmpl
+shortlogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><i>#author#</i></td><td><a class="list" href="?cmd=changeset;node=#node|short#;style=gitweb"><b>#desc|strip|firstline|escape#</b></a></td><td class="link"><a href="?cmd=changeset;node=#node|short#;style=gitweb">changeset</a> | <a href="?cmd=manifest;manifest=#manifest|short#;path=/;style=gitweb">manifest</a></td></tr>'
+filelogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="?cmd=changeset;node=#node|short#;style=gitweb"><b>#desc|strip|firstline|escape#</b></a></td><td class="link"><!-- FIXME: <a href="?fd=#node|short#;file=#file|urlescape#;style=gitweb">diff</a> | --> <a href="?fa=#filenode|short#;file=#file|urlescape#;style=gitweb">annotate</a> #rename%filelogrename#</td></tr>'
+archiveentry = ' | <a href="?ca=#node|short#;type=#type|urlescape#">#type|escape#</a> '
new file mode 100644
--- /dev/null
+++ b/templates/map-raw
@@ -0,0 +1,16 @@
+header = header-raw.tmpl
+footer = ''
+changeset = changeset-raw.tmpl
+difflineplus = '#line#'
+difflineminus = '#line#'
+difflineat = '#line#'
+diffline = '#line#'
+changesetparent = '# parent: #node#'
+changesetchild = '# child: #node#'
+filenodelink = ''
+filerevision = 'Content-Type: #mimetype#\nContent-Disposition: filename=#file#\n\n#raw#'
+fileline = '#line#'
+diffblock = '#lines#'
+filediff = filediff-raw.tmpl
+fileannotate = fileannotate-raw.tmpl
+annotateline = '#author#@#rev#: #line#'
new file mode 100644
--- /dev/null
+++ b/templates/map-rss
@@ -0,0 +1,8 @@
+default = 'changelog'
+header = header-rss.tmpl
+changelog = changelog-rss.tmpl
+changelogentry = changelogentry-rss.tmpl
+filelog = filelog-rss.tmpl
+filelogentry = filelogentry-rss.tmpl
+tags = tags-rss.tmpl
+tagentry = tagentry-rss.tmpl
new file mode 100644
--- /dev/null
+++ b/templates/notfound.tmpl
@@ -0,0 +1,12 @@
+#header#
+<title>Mercurial repositories index</title>
+</head>
+<body>
+
+<h2>Mercurial Repositories</h2>
+
+The specified repository "#repo|escape#" is unknown, sorry.
+
+Please go back to the main repository list page.
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/search-gitweb.tmpl
@@ -0,0 +1,24 @@
+#header#
+<div class="page_nav">
+<a href="?cmd=summary;style=gitweb">summary</a> | log | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a><br/>
+</div>
+
+<h2>searching for #query|escape#</h2>
+
+<form>
+search:
+<input type="hidden" name="cmd" value="changelog">
+<input type="hidden" name="style" value="gitweb">
+<input name="rev" type="text" width="30" value="#query|escape#">
+</form>
+
+#entries#
+
+<form>
+search:
+<input type="hidden" name="cmd" value="changelog">
+<input type="hidden" name="style" value="gitweb">
+<input name="rev" type="text" width="30">
+</form>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/search.tmpl
@@ -0,0 +1,32 @@
+#header#
+<title>#repo|escape#: searching for #query|escape#</title>
+</head>
+<body>
+
+<div class="buttons">
+<a href="?cl=tip">changelog</a>
+<a href="?tags=">tags</a>
+<a href="?mf=#manifest|short#;path=/">manifest</a>
+</div>
+
+<h2>searching for #query|escape#</h2>
+
+<form>
+<p>
+search:
+<input type="hidden" name="cmd" value="changelog">
+<input name="rev" type="text" width="30" value="#query|escape#">
+</p>
+</form>
+
+#entries#
+
+<form>
+<p>
+search:
+<input type="hidden" name="cmd" value="changelog">
+<input name="rev" type="text" width="30" value="#query|escape#">
+</p>
+</form>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/shortlog-gitweb.tmpl
@@ -0,0 +1,13 @@
+#header#
+
+<div class="page_nav">
+<a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">log</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a><br/>
+
+#changenav%naventry#<br/>
+</div>
+
+<table cellspacing="0">
+#entries#
+</table>
+
+#footer#
new file mode 100644
index 0000000000000000000000000000000000000000..37ba3d1c6f17137d9c5f5776fa040caf5fe73ff9
GIT binary patch
literal 593
zc$@)I0<QguP)<h;3K|Lk000e1NJLTq000mG000mO0ssI2kdbIM00009a7bBm000XU
z000XU0RWnu7ytkO2XskIMF-Uh9TW;VpMjwv0005-Nkl<ZD9@FWPs=e;7{<>W$NUkd
zX$nnYLt$-$V!?uy+1V%`z&Eh=ah|duER<4|QWhju3gb^nF*8iYobxWG-qqXl=2~5M
z*IoDB)sG^CfNuoBmqLTVU^<;@nwHP!1wrWd`{(mHo6VNXWtyh{alzqmsH*yYzpvLT
zLdY<T=ks|woh-`&01!ej#(xbV1f|pI*=%;d-%F*E*X#ZH`4I%6SS+$EJDE&ct=8po
ziN#{?_j|kD%Cd|oiqds`xm@;oJ-^?NG3Gdqrs?5u*zI;{nogxsx~^|Fn^Y?Gdc6<;
zfMJ+iF1J`LMx&A2?dEwNW8ClebzPTbIh{@$hS6*`kH@1d%Lo7fA#}N1)oN7`gm$~V
z+wDx#)OFqMcE{s!JN0-xhG8ItAjVkJwEcb`3WWlJfU2r?;Pd%dmR+q@mSri5q9_W-
zaR2~ECX?B2w+zELozC0s*6Z~|QG^f{3I#<`?)Q7U-JZ|q5W;9Q8i_=pBuSzunx=U;
z9C)5jBoYw9^?EHyQl(M}1OlQcCX>lXB*ODN003Z&P17_@)3Pi=i0wb04<W?v-u}7K
zXmmQA+wDgE!qR9o8jr`%=ab_&uh(l?R=r;Tjiqon91I2-hIu?57~@*4h7h9uORK#=
fQItJW-{SoTm)8|5##k|m00000NkvXXu0mjf{mKw4
new file mode 100644
--- /dev/null
+++ b/templates/static/style-gitweb.css
@@ -0,0 +1,49 @@
+body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; }
+a { color:#0000cc; }
+a:hover, a:visited, a:active { color:#880000; }
+div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
+div.page_header a:visited { color:#0000cc; }
+div.page_header a:hover { color:#880000; }
+div.page_nav { padding:8px; }
+div.page_nav a:visited { color:#0000cc; }
+div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px}
+div.page_footer { height:17px; padding:4px 8px; background-color: #d9d8d1; }
+div.page_footer_text { float:left; color:#555555; font-style:italic; }
+div.page_body { padding:8px; }
+div.title, a.title {
+ display:block; padding:6px 8px;
+ font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000;
+}
+a.title:hover { background-color: #d9d8d1; }
+div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; }
+div.log_body { padding:8px 8px 8px 150px; }
+.age { white-space:nowrap; }
+span.age { position:relative; float:left; width:142px; font-style:italic; }
+div.log_link {
+ padding:0px 8px;
+ font-size:10px; font-family:sans-serif; font-style:normal;
+ position:relative; float:left; width:136px;
+}
+div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; }
+a.list { text-decoration:none; color:#000000; }
+a.list:hover { text-decoration:underline; color:#880000; }
+table { padding:8px 4px; }
+th { padding:2px 5px; font-size:12px; text-align:left; }
+tr.light:hover, .parity0:hover { background-color:#edece6; }
+tr.dark, .parity1 { background-color:#f6f6f0; }
+tr.dark:hover, .parity1:hover { background-color:#edece6; }
+td { padding:2px 5px; font-size:12px; vertical-align:top; }
+td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; }
+div.pre { font-family:monospace; font-size:12px; white-space:pre; }
+div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
+div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
+div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
+.linenr { color:#999999; text-decoration:none }
+a.rss_logo {
+ float:right; padding:3px 0px; width:35px; line-height:10px;
+ border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e;
+ color:#ffffff; background-color:#ff6600;
+ font-weight:bold; font-family:sans-serif; font-size:10px;
+ text-align:center; text-decoration:none;
+}
+a.rss_logo:hover { background-color:#ee5500; }
new file mode 100644
--- /dev/null
+++ b/templates/static/style.css
@@ -0,0 +1,72 @@
+a { text-decoration:none; }
+.age { white-space:nowrap; }
+.indexlinks { white-space:nowrap; }
+.parity0 { background-color: #dddddd; }
+.parity1 { background-color: #eeeeee; }
+.lineno { width: 60px; color: #aaaaaa; font-size: smaller;
+ text-align: right; padding-right:1em; }
+.plusline { color: green; }
+.minusline { color: red; }
+.atline { color: purple; }
+.annotate { font-size: smaller; text-align: right; padding-right: 1em; }
+.buttons a {
+ background-color: #666666;
+ padding: 2pt;
+ color: white;
+ font-family: sans;
+ font-weight: bold;
+}
+.navigate a {
+ background-color: #ccc;
+ padding: 2pt;
+ font-family: sans;
+ color: black;
+}
+
+.metatag {
+ background-color: #888888;
+ color: white;
+ text-align: right;
+}
+
+/* Common */
+pre { margin: 0; }
+
+.logo {
+ background-color: #333;
+ padding: 4pt;
+ margin: 8pt 0 8pt 8pt;
+ font-family: sans;
+ font-size: 60%;
+ color: white;
+ float: right;
+ clear: right;
+ text-align: left;
+}
+
+.logo a {
+ font-weight: bold;
+ font-size: 150%;
+ color: #999;
+}
+
+/* Changelog/Filelog entries */
+.logEntry { width: 100%; }
+.logEntry .age { width: 15%; }
+.logEntry th { font-weight: normal; text-align: right; vertical-align: top; }
+.logEntry th.age, .logEntry th.firstline { font-weight: bold; }
+.logEntry th.firstline { text-align: left; width: inherit; }
+
+/* Tag entries */
+#tagEntries { list-style: none; margin: 0; padding: 0; }
+#tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; }
+
+/* Changeset entry */
+#changesetEntry { }
+#changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; }
+#changesetEntry th.files, #changesetEntry th.description { vertical-align: top; }
+
+/* File diff view */
+#filediffEntry { }
+#filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; }
+
new file mode 100644
--- /dev/null
+++ b/templates/summary-gitweb.tmpl
@@ -0,0 +1,34 @@
+#header#
+<title>#repo|escape#: Summary</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
+</head>
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / summary
+</div>
+<div class="page_nav">
+summary | <a href="?cmd=changelog;style=gitweb">changelog</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a><br/>
+</div>
+
+<div class="title"> </div>
+<table cellspacing="0">
+<tr><td>description</td><td>#desc#</td></tr>
+<tr><td>owner</td><td>#owner|escape#</td></tr>
+<!-- <tr><td>last change</td><td>#lastchange|rfc822date#</td></tr> -->
+</table>
+
+<div><a class="title" href="?cmd=changelog;style=gitweb">changes</a></div>
+<table cellspacing="0">
+#shortlog#
+<tr class="light"><td colspan="3"><a class="list" href="?cmd=changelog;style=gitweb">...</a></td></tr>
+</table>
+
+<div><a class="title" href="?cmd=tags;style=gitweb">tags</a></div>
+<table cellspacing="0">
+#tags#
+<tr class="light"><td colspan="3"><a class="list" href="?cmd=tags;style=gitweb">...</a></td></tr>
+</table>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/tagentry-rss.tmpl
@@ -0,0 +1,6 @@
+<item>
+ <title>#tag|escape#</title>
+ <link>#url#?cs=#node|short#</link>
+ <description><![CDATA[#tag|strip|escape|addbreaks#]]></description>
+ <pubDate>#date|rfc822date#</pubDate>
+</item>
new file mode 100644
--- /dev/null
+++ b/templates/tags-gitweb.tmpl
@@ -0,0 +1,21 @@
+#header#
+<title>#repo|escape#: Tags</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#">
+</head>
+<body>
+
+<div class="page_header">
+<a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / tags
+</div>
+
+<div class="page_nav">
+<a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">changelog</a> | tags | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a>
+<br/>
+</div>
+
+<table cellspacing="0">
+#entries%tagentry#
+</table>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/tags-rss.tmpl
@@ -0,0 +1,6 @@
+#header#
+ <title>#repo|escape#: tags </title>
+ <description>#repo|escape# tag history</description>
+ #entriesnotip%tagentry#
+ </channel>
+</rss>
new file mode 100644
--- /dev/null
+++ b/templates/tags.tmpl
@@ -0,0 +1,20 @@
+#header#
+<title>#repo|escape#: tags</title>
+<link rel="alternate" type="application/rss+xml"
+ href="?cmd=tags;style=rss" title="RSS feed for #repo|escape#: tags">
+</head>
+<body>
+
+<div class="buttons">
+<a href="?cl=tip">changelog</a>
+<a href="?mf=#manifest|short#;path=/">manifest</a>
+<a type="application/rss+xml" href="?cmd=tags;style=rss">rss</a>
+</div>
+
+<h2>tags:</h2>
+
+<ul id="tagEntries">
+#entries%tagentry#
+</ul>
+
+#footer#
new file mode 100644
--- /dev/null
+++ b/templates/template-vars.txt
@@ -0,0 +1,39 @@
+repo the name of the repo
+rev a changeset.manifest revision
+node a changeset node
+changesets total number of changesets
+file a filename
+filenode a file node
+filerev a file revision
+filerevs total number of file revisions
+up the directory of the relevant file
+path a path in the manifest, starting with "/"
+basename a short pathname
+manifest a manifest node
+manifestrev a manifest revision
+date a date string
+age age in hours, days, etc
+line a line of text (escaped)
+desc a description (escaped, with breaks)
+shortdesc a short description (escaped)
+author a name or email address (obfuscated)
+parent a list of the parents
+child a list of the children
+tags a list of tags
+
+header the global page header
+footer the global page footer
+
+files a list of file links
+dirs a set of directory links
+diff a diff of one or more files
+annotate an annotated file
+entries the entries relevant to the page
+
+Templates and commands:
+ changelog(rev) - a page for browsing changesets
+ naventry - a link for jumping to a changeset number
+ filenodelink - jump to file diff
+ fileellipses - printed after maxfiles
+ changelogentry - an entry in the log
+ manifest - browse a manifest as a directory tree
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/tests/README
@@ -0,0 +1,33 @@
+A simple testing framework
+
+To run the tests, do:
+
+cd tests/
+python run-tests.py
+
+This finds all scripts in the test directory named test-* and executes
+them. The scripts can be either shell scripts or Python. Each test is
+run in a temporary directory that is removed when the test is complete.
+
+A test-<x> succeeds if the script returns success and its output
+matches test-<x>.out. If the new output doesn't match, it is stored in
+test-<x>.err.
+
+There are some tricky points here that you should be aware of when
+writing tests:
+
+- hg commit and hg merge want user interaction
+
+ for commit use -m "text"
+ for hg merge, set HGMERGE to something noninteractive (like true or merge)
+
+- changeset hashes will change based on user and date which make
+ things like hg history output change
+
+ use commit -m "test" -u test -d "1000000 0"
+
+- diff will show the current time
+
+ use hg diff | sed "s/\(\(---\|+++\) [a-zA-Z0-9_/.-]*\).*/\1/" to strip
+ dates
+
new file mode 100755
--- /dev/null
+++ b/tests/coverage.py
@@ -0,0 +1,891 @@
+#!/usr/bin/python
+#
+# Perforce Defect Tracking Integration Project
+# <http://www.ravenbrook.com/project/p4dti/>
+#
+# COVERAGE.PY -- COVERAGE TESTING
+#
+# Gareth Rees, Ravenbrook Limited, 2001-12-04
+# Ned Batchelder, 2004-12-12
+# http://nedbatchelder.com/code/modules/coverage.html
+#
+#
+# 1. INTRODUCTION
+#
+# This module provides coverage testing for Python code.
+#
+# The intended readership is all Python developers.
+#
+# This document is not confidential.
+#
+# See [GDR 2001-12-04a] for the command-line interface, programmatic
+# interface and limitations. See [GDR 2001-12-04b] for requirements and
+# design.
+
+"""Usage:
+
+coverage.py -x MODULE.py [ARG1 ARG2 ...]
+ Execute module, passing the given command-line arguments, collecting
+ coverage data.
+
+coverage.py -e
+ Erase collected coverage data.
+
+coverage.py -r [-m] [-o dir1,dir2,...] FILE1 FILE2 ...
+ Report on the statement coverage for the given files. With the -m
+ option, show line numbers of the statements that weren't executed.
+
+coverage.py -a [-d dir] [-o dir1,dir2,...] FILE1 FILE2 ...
+ Make annotated copies of the given files, marking statements that
+ are executed with > and statements that are missed with !. With
+ the -d option, make the copies in that directory. Without the -d
+ option, make each copy in the same directory as the original.
+
+-o dir,dir2,...
+ Omit reporting or annotating files when their filename path starts with
+ a directory listed in the omit list.
+ e.g. python coverage.py -i -r -o c:\python23,lib\enthought\traits
+
+Coverage data is saved in the file .coverage by default. Set the
+COVERAGE_FILE environment variable to save it somewhere else."""
+
+__version__ = "2.5.20051204" # see detailed history at the end of this file.
+
+import compiler
+import compiler.visitor
+import os
+import re
+import string
+import sys
+import threading
+import types
+
+# 2. IMPLEMENTATION
+#
+# This uses the "singleton" pattern.
+#
+# The word "morf" means a module object (from which the source file can
+# be deduced by suitable manipulation of the __file__ attribute) or a
+# filename.
+#
+# When we generate a coverage report we have to canonicalize every
+# filename in the coverage dictionary just in case it refers to the
+# module we are reporting on. It seems a shame to throw away this
+# information so the data in the coverage dictionary is transferred to
+# the 'cexecuted' dictionary under the canonical filenames.
+#
+# The coverage dictionary is called "c" and the trace function "t". The
+# reason for these short names is that Python looks up variables by name
+# at runtime and so execution time depends on the length of variables!
+# In the bottleneck of this application it's appropriate to abbreviate
+# names to increase speed.
+
class StatementFindingAstVisitor(compiler.visitor.ASTVisitor):
    """Walk a `compiler` AST, recording which lines hold executable
    statements (into `statements`) and which lines are excluded (into
    `excluded`).  `suite_spots` maps each line of a multi-line suite
    introduction to (keyword line, colon line) so charges land on the
    introducing keyword."""

    def __init__(self, statements, excluded, suite_spots):
        compiler.visitor.ASTVisitor.__init__(self)
        self.statements = statements    # {lineno: 1} for executable lines
        self.excluded = excluded        # {lineno: 1} for excluded lines
        self.suite_spots = suite_spots  # {lineno: (keyword_line, colon_line)}
        self.excluding_suite = 0        # true while visiting an excluded suite

    def doRecursive(self, node):
        # Record this node's own line, then visit every child.
        self.recordNodeLine(node)
        for n in node.getChildNodes():
            self.dispatch(n)

    visitStmt = visitModule = doRecursive

    def doCode(self, node):
        # Function/class definition: record decorators first (they execute),
        # then treat the body as a suite introduced by the def/class line.
        if hasattr(node, 'decorators') and node.decorators:
            self.dispatch(node.decorators)
        self.doSuite(node, node.code)

    visitFunction = visitClass = doCode

    def getFirstLine(self, node):
        # Find the first line in the tree node.
        lineno = node.lineno
        for n in node.getChildNodes():
            f = self.getFirstLine(n)
            if lineno and f:
                lineno = min(lineno, f)
            else:
                lineno = lineno or f
        return lineno

    def getLastLine(self, node):
        # Find the last line in the tree node.
        lineno = node.lineno
        for n in node.getChildNodes():
            lineno = max(lineno, self.getLastLine(n))
        return lineno

    def doStatement(self, node):
        # Simple statements are charged to their first line.
        self.recordLine(self.getFirstLine(node))

    visitAssert = visitAssign = visitAssTuple = visitDiscard = visitPrint = \
        visitPrintnl = visitRaise = visitSubscript = visitDecorators = \
        doStatement

    def recordNodeLine(self, node):
        return self.recordLine(node.lineno)

    def recordLine(self, lineno):
        # Returns a bool, whether the line is included or excluded.
        if lineno:
            # Multi-line tests introducing suites have to get charged to their
            # keyword.
            if lineno in self.suite_spots:
                lineno = self.suite_spots[lineno][0]
            # If we're inside an excluded suite, record that this line was
            # excluded.
            if self.excluding_suite:
                self.excluded[lineno] = 1
                return 0
            # If this line is excluded, or suite_spots maps this line to
            # another line that is excluded, then we're excluded.
            elif self.excluded.has_key(lineno) or \
                 self.suite_spots.has_key(lineno) and \
                 self.excluded.has_key(self.suite_spots[lineno][1]):
                return 0
            # Otherwise, this is an executable line.
            else:
                self.statements[lineno] = 1
                return 1
        return 0

    default = recordNodeLine

    def recordAndDispatch(self, node):
        self.recordNodeLine(node)
        self.dispatch(node)

    def doSuite(self, intro, body, exclude=0):
        # Visit a suite body; if the introducing line is itself excluded
        # (or exclusion is forced), everything inside is excluded too.
        exsuite = self.excluding_suite
        if exclude or (intro and not self.recordNodeLine(intro)):
            self.excluding_suite = 1
        self.recordAndDispatch(body)
        self.excluding_suite = exsuite

    def doPlainWordSuite(self, prevsuite, suite):
        # Finding the exclude lines for else's is tricky, because they aren't
        # present in the compiler parse tree. Look at the previous suite,
        # and find its last line. If any line between there and the else's
        # first line are excluded, then we exclude the else.
        lastprev = self.getLastLine(prevsuite)
        firstelse = self.getFirstLine(suite)
        for l in range(lastprev+1, firstelse):
            if self.suite_spots.has_key(l):
                self.doSuite(None, suite, exclude=self.excluded.has_key(l))
                break
        else:
            self.doSuite(None, suite)

    def doElse(self, prevsuite, node):
        if node.else_:
            self.doPlainWordSuite(prevsuite, node.else_)

    def visitFor(self, node):
        self.doSuite(node, node.body)
        self.doElse(node.body, node)

    def visitIf(self, node):
        # The first test has to be handled separately from the rest.
        # The first test is credited to the line with the "if", but the others
        # are credited to the line with the test for the elif.
        self.doSuite(node, node.tests[0][1])
        for t, n in node.tests[1:]:
            self.doSuite(t, n)
        self.doElse(node.tests[-1][1], node)

    def visitWhile(self, node):
        self.doSuite(node, node.body)
        self.doElse(node.body, node)

    def visitTryExcept(self, node):
        self.doSuite(node, node.body)
        for i in range(len(node.handlers)):
            a, b, h = node.handlers[i]
            if not a:
                # It's a plain "except:". Find the previous suite.
                if i > 0:
                    prev = node.handlers[i-1][2]
                else:
                    prev = node.body
                self.doPlainWordSuite(prev, h)
            else:
                self.doSuite(a, h)
        self.doElse(node.handlers[-1][2], node)

    def visitTryFinally(self, node):
        self.doSuite(node, node.body)
        self.doPlainWordSuite(node.body, node.final)

    def visitGlobal(self, node):
        # "global" statements don't execute like others (they don't call the
        # trace function), so don't record their line numbers.
        pass
+
# Module-level singleton instance; assigned at the bottom of this file.
the_coverage = None
+
class coverage:
    # Old-style string exception raised for all errors in this module.
    error = "coverage error"

    # Name of the cache file (unless environment variable is set).
    cache_default = ".coverage"

    # Environment variable naming the cache file.
    cache_env = "COVERAGE_FILE"

    # A dictionary with an entry for (Python source file name, line number
    # in that file) if that line has been executed.
    c = {}

    # A map from canonical Python source file name to a dictionary in
    # which there's an entry for each line number that has been
    # executed.
    cexecuted = {}

    # Cache of results of calling the analysis2() method, so that you can
    # specify both -r and -a without doing double work.
    analysis_cache = {}

    # Cache of results of calling the canonical_filename() method, to
    # avoid duplicating work.
    canonical_filename_cache = {}

    def __init__(self):
        # Enforce the singleton pattern: only one coverage object allowed.
        global the_coverage
        if the_coverage:
            raise self.error, "Only one coverage object allowed."
        self.usecache = 1
        self.cache = None       # path of the cache file, set in get_ready()
        self.exclude_re = ''    # alternation of exclusion regexes
        self.nesting = 0        # depth of nested start()/stop() calls
        self.cstack = []        # stacked "c" dicts (see begin_recursive)
        self.xstack = []        # stacked exclude_re values
        # Base directory used to relativize filenames in reports.
        self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.path.sep)
+
    # t(f, x, y). This method is passed to sys.settrace as a trace function.
    # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and
    # the arguments and return value of the trace function.
    # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code
    # objects.

    def t(self, f, w, a): #pragma: no cover
        # On each 'line' event, mark (filename, lineno) as executed in the
        # main dictionary and in any stacked dictionaries.
        #print w, f.f_code.co_filename, f.f_lineno
        if w == 'line':
            self.c[(f.f_code.co_filename, f.f_lineno)] = 1
            for c in self.cstack:
                c[(f.f_code.co_filename, f.f_lineno)] = 1
        return self.t

    def help(self, error=None):
        # Print an optional error message plus the module usage, then exit.
        if error:
            print error
            print
        print __doc__
        sys.exit(1)
+
+ def command_line(self):
+ import getopt
+ settings = {}
+ optmap = {
+ '-a': 'annotate',
+ '-d:': 'directory=',
+ '-e': 'erase',
+ '-h': 'help',
+ '-i': 'ignore-errors',
+ '-m': 'show-missing',
+ '-r': 'report',
+ '-x': 'execute',
+ '-o': 'omit=',
+ }
+ short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '')
+ long_opts = optmap.values()
+ options, args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
+ for o, a in options:
+ if optmap.has_key(o):
+ settings[optmap[o]] = 1
+ elif optmap.has_key(o + ':'):
+ settings[optmap[o + ':']] = a
+ elif o[2:] in long_opts:
+ settings[o[2:]] = 1
+ elif o[2:] + '=' in long_opts:
+ settings[o[2:]] = a
+ else:
+ self.help("Unknown option: '%s'." % o)
+ if settings.get('help'):
+ self.help()
+ for i in ['erase', 'execute']:
+ for j in ['annotate', 'report']:
+ if settings.get(i) and settings.get(j):
+ self.help("You can't specify the '%s' and '%s' "
+ "options at the same time." % (i, j))
+ args_needed = (settings.get('execute')
+ or settings.get('annotate')
+ or settings.get('report'))
+ action = settings.get('erase') or args_needed
+ if not action:
+ self.help("You must specify at least one of -e, -x, -r, or -a.")
+ if not args_needed and args:
+ self.help("Unexpected arguments %s." % args)
+
+ self.get_ready()
+ self.exclude('#pragma[: ]+[nN][oO] [cC][oO][vV][eE][rR]')
+
+ if settings.get('erase'):
+ self.erase()
+ if settings.get('execute'):
+ if not args:
+ self.help("Nothing to do.")
+ sys.argv = args
+ self.start()
+ import __main__
+ sys.path[0] = os.path.dirname(sys.argv[0])
+ execfile(sys.argv[0], __main__.__dict__)
+ if not args:
+ args = self.cexecuted.keys()
+ ignore_errors = settings.get('ignore-errors')
+ show_missing = settings.get('show-missing')
+ directory = settings.get('directory')
+ omit = filter(None, settings.get('omit', '').split(','))
+ omit += ['/<'] # Always skip /<string> etc.
+
+ if settings.get('report'):
+ self.report(args, show_missing, ignore_errors, omit_prefixes=omit)
+ if settings.get('annotate'):
+ self.annotate(args, directory, ignore_errors, omit_prefixes=omit)
+
    def use_cache(self, usecache):
        # Turn the filesystem cache on or off.
        self.usecache = usecache

    def get_ready(self):
        # Locate the cache file (once) and load previously saved data.
        if self.usecache and not self.cache:
            self.cache = os.path.abspath(os.environ.get(self.cache_env,
                                         self.cache_default))
            self.restore()
        self.analysis_cache = {}

    def start(self):
        # Begin collecting coverage data; calls may be nested.
        self.get_ready()
        if self.nesting == 0: #pragma: no cover
            sys.settrace(self.t)
            if hasattr(threading, 'settrace'):
                threading.settrace(self.t)
        self.nesting += 1

    def stop(self):
        # Stop collecting once the outermost start() has been matched.
        self.nesting -= 1
        if self.nesting == 0: #pragma: no cover
            sys.settrace(None)
            if hasattr(threading, 'settrace'):
                threading.settrace(None)

    def erase(self):
        # Discard all collected data, in memory and on disk.
        self.c = {}
        self.analysis_cache = {}
        self.cexecuted = {}
        if self.cache and os.path.exists(self.cache):
            os.remove(self.cache)
        self.exclude_re = ""

    def exclude(self, re):
        # Add a regex to the alternation of exclusion patterns.
        # NOTE: the parameter shadows the `re` module inside this method.
        if self.exclude_re:
            self.exclude_re += "|"
        self.exclude_re += "(" + re + ")"

    def begin_recursive(self):
        # Push collection state so coverage.py can measure itself.
        self.cstack.append(self.c)
        self.xstack.append(self.exclude_re)

    def end_recursive(self):
        # Pop the state pushed by begin_recursive().
        self.c = self.cstack.pop()
        self.exclude_re = self.xstack.pop()
+
    # save(). Save coverage data to the coverage cache.

    def save(self):
        # move to directory that must exist: this runs at interpreter exit,
        # when the original cwd may already be gone.
        os.chdir(os.sep)
        if self.usecache and self.cache:
            self.canonicalize_filenames()
            cache = open(self.cache, 'wb')
            import marshal
            marshal.dump(self.cexecuted, cache)
            cache.close()

    # restore(). Restore coverage data from the coverage cache (if it exists).

    def restore(self):
        self.c = {}
        self.cexecuted = {}
        assert self.usecache
        if not os.path.exists(self.cache):
            return
        try:
            cache = open(self.cache, 'rb')
            import marshal
            cexecuted = marshal.load(cache)
            cache.close()
            # Only accept data of the expected shape; any read/unmarshal
            # error (or wrong type) deliberately leaves the data empty.
            if isinstance(cexecuted, types.DictType):
                self.cexecuted = cexecuted
        except:
            pass
+
    # canonical_filename(filename). Return a canonical filename for the
    # file (that is, an absolute path with no redundant components and
    # normalized case). See [GDR 2001-12-04b, 3.3].

    def canonical_filename(self, filename):
        if not self.canonical_filename_cache.has_key(filename):
            f = filename
            if os.path.isabs(f) and not os.path.exists(f):
                f = os.path.basename(f)
            if not os.path.isabs(f):
                # Resolve a relative name against the cwd and sys.path.
                for path in [os.curdir] + sys.path:
                    g = os.path.join(path, f)
                    if os.path.exists(g):
                        f = g
                        break
            cf = os.path.normcase(os.path.abspath(f))
            self.canonical_filename_cache[filename] = cf
        return self.canonical_filename_cache[filename]

    # canonicalize_filenames(). Copy results from "c" to "cexecuted",
    # canonicalizing filenames on the way. Clear the "c" map.

    def canonicalize_filenames(self):
        for filename, lineno in self.c.keys():
            f = self.canonical_filename(filename)
            if not self.cexecuted.has_key(f):
                self.cexecuted[f] = {}
            self.cexecuted[f][lineno] = 1
        self.c = {}

    # morf_filename(morf). Return the filename for a module or file.

    def morf_filename(self, morf):
        if isinstance(morf, types.ModuleType):
            if not hasattr(morf, '__file__'):
                raise self.error, "Module has no __file__ attribute."
            file = morf.__file__
        else:
            file = morf
        return self.canonical_filename(file)
+
    # analyze_morf(morf). Analyze the module or filename passed as
    # the argument. If the source code can't be found, raise an error.
    # Otherwise, return a tuple of (1) the canonical filename of the
    # source code for the module, (2) a list of lines of statements
    # in the source code, and (3) a list of lines of excluded statements.

    def analyze_morf(self, morf):
        if self.analysis_cache.has_key(morf):
            return self.analysis_cache[morf]
        filename = self.morf_filename(morf)
        ext = os.path.splitext(filename)[1]
        if ext == '.pyc':
            # Fall back from the compiled file to the .py source beside it.
            if not os.path.exists(filename[0:-1]):
                raise self.error, ("No source for compiled code '%s'."
                                   % filename)
            filename = filename[0:-1]
        elif ext != '.py':
            raise self.error, "File '%s' not Python source." % filename
        source = open(filename, 'r')
        lines, excluded_lines = self.find_executable_statements(
            source.read(), exclude=self.exclude_re
            )
        source.close()
        result = filename, lines, excluded_lines
        self.analysis_cache[morf] = result
        return result
+
    def get_suite_spots(self, tree, spots):
        # Walk a parser token tree, filling `spots` so that each line of a
        # suite introduction maps to (keyword line, colon line).
        import symbol, token
        for i in range(1, len(tree)):
            if type(tree[i]) == type(()):
                if tree[i][0] == symbol.suite:
                    # Found a suite, look back for the colon and keyword.
                    lineno_colon = lineno_word = None
                    for j in range(i-1, 0, -1):
                        if tree[j][0] == token.COLON:
                            lineno_colon = tree[j][2]
                        elif tree[j][0] == token.NAME:
                            if tree[j][1] == 'elif':
                                # Find the line number of the first non-terminal
                                # after the keyword.
                                t = tree[j+1]
                                while t and token.ISNONTERMINAL(t[0]):
                                    t = t[1]
                                if t:
                                    lineno_word = t[2]
                            else:
                                lineno_word = tree[j][2]
                            break
                        elif tree[j][0] == symbol.except_clause:
                            # "except" clauses look like:
                            # ('except_clause', ('NAME', 'except', lineno), ...)
                            if tree[j][1][0] == token.NAME:
                                lineno_word = tree[j][1][2]
                                break
                    if lineno_colon and lineno_word:
                        # Found colon and keyword, mark all the lines
                        # between the two with the two line numbers.
                        for l in range(lineno_word, lineno_colon+1):
                            spots[l] = (lineno_word, lineno_colon)
                self.get_suite_spots(tree[i], spots)
+
    def find_executable_statements(self, text, exclude=None):
        # Find lines which match an exclusion pattern.
        excluded = {}
        suite_spots = {}
        if exclude:
            reExclude = re.compile(exclude)
            lines = text.split('\n')
            for i in range(len(lines)):
                if reExclude.search(lines[i]):
                    excluded[i+1] = 1

        # Map suite-introduction lines to their keyword/colon lines.
        import parser
        tree = parser.suite(text+'\n\n').totuple(1)
        self.get_suite_spots(tree, suite_spots)

        # Use the compiler module to parse the text and find the executable
        # statements. We add newlines to be impervious to final partial lines.
        statements = {}
        ast = compiler.parse(text+'\n\n')
        visitor = StatementFindingAstVisitor(statements, excluded, suite_spots)
        compiler.walk(ast, visitor, walker=visitor)

        # Return sorted lists of line numbers.
        lines = statements.keys()
        lines.sort()
        excluded_lines = excluded.keys()
        excluded_lines.sort()
        return lines, excluded_lines
+
+ # format_lines(statements, lines). Format a list of line numbers
+ # for printing by coalescing groups of lines as long as the lines
+ # represent consecutive statements. This will coalesce even if
+ # there are gaps between statements, so if statements =
+ # [1,2,3,4,5,10,11,12,13,14] and lines = [1,2,5,10,11,13,14] then
+ # format_lines will return "1-2, 5-11, 13-14".
+
+ def format_lines(self, statements, lines):
+ pairs = []
+ i = 0
+ j = 0
+ start = None
+ pairs = []
+ while i < len(statements) and j < len(lines):
+ if statements[i] == lines[j]:
+ if start == None:
+ start = lines[j]
+ end = lines[j]
+ j = j + 1
+ elif start:
+ pairs.append((start, end))
+ start = None
+ i = i + 1
+ if start:
+ pairs.append((start, end))
+ def stringify(pair):
+ start, end = pair
+ if start == end:
+ return "%d" % start
+ else:
+ return "%d-%d" % (start, end)
+ return string.join(map(stringify, pairs), ", ")
+
    # Backward compatibility with version 1.
    def analysis(self, morf):
        f, s, _, m, mf = self.analysis2(morf)
        return f, s, m, mf

    def analysis2(self, morf):
        # Return (filename, statement lines, excluded lines, missing lines,
        # readable string of missing lines).
        filename, statements, excluded = self.analyze_morf(morf)
        self.canonicalize_filenames()
        if not self.cexecuted.has_key(filename):
            self.cexecuted[filename] = {}
        missing = []
        for line in statements:
            if not self.cexecuted[filename].has_key(line):
                missing.append(line)
        return (filename, statements, excluded, missing,
                self.format_lines(statements, missing))

    def relative_filename(self, filename):
        """ Convert filename to relative filename from self.relative_dir.
        """
        return filename.replace(self.relative_dir, "")

    def morf_name(self, morf):
        """ Return the name of morf as used in report.
        """
        if isinstance(morf, types.ModuleType):
            return morf.__name__
        else:
            return self.relative_filename(os.path.splitext(morf)[0])
+
    def filter_by_prefix(self, morfs, omit_prefixes):
        """ Return list of morfs where the morf name does not begin
            with any one of the omit_prefixes.
        """
        filtered_morfs = []
        for morf in morfs:
            for prefix in omit_prefixes:
                if self.morf_name(morf).startswith(prefix):
                    break
            else:
                # No prefix matched: keep this morf.
                filtered_morfs.append(morf)

        return filtered_morfs

    def morf_name_compare(self, x, y):
        # cmp-style comparator for sorting morfs by their report name.
        return cmp(self.morf_name(x), self.morf_name(y))
+
    def report(self, morfs, show_missing=1, ignore_errors=0, file=None, omit_prefixes=[]):
        # Print a statement-coverage table for the given morfs to `file`
        # (default sys.stdout), with an optional Missing-lines column and
        # a TOTAL row when reporting on more than one morf.
        if not isinstance(morfs, types.ListType):
            morfs = [morfs]
        morfs = self.filter_by_prefix(morfs, omit_prefixes)
        morfs.sort(self.morf_name_compare)

        # Left-align names in a column wide enough for the longest one.
        max_name = max([5,] + map(len, map(self.morf_name, morfs)))
        fmt_name = "%%- %ds  " % max_name
        fmt_err = fmt_name + "%s: %s"
        header = fmt_name % "Name" + " Stmts   Exec  Cover"
        fmt_coverage = fmt_name + "% 6d % 6d % 5d%%"
        if show_missing:
            header = header + "   Missing"
            fmt_coverage = fmt_coverage + "   %s"
        if not file:
            file = sys.stdout
        print >>file, header
        print >>file, "-" * len(header)
        total_statements = 0
        total_executed = 0
        for morf in morfs:
            name = self.morf_name(morf)
            try:
                _, statements, _, missing, readable = self.analysis2(morf)
                n = len(statements)
                m = n - len(missing)
                if n > 0:
                    pc = 100.0 * m / n
                else:
                    pc = 100.0
                args = (name, n, m, pc)
                if show_missing:
                    args = args + (readable,)
                print >>file, fmt_coverage % args
                total_statements = total_statements + n
                total_executed = total_executed + m
            except KeyboardInterrupt: #pragma: no cover
                raise
            except:
                # Report (rather than abort on) per-file analysis errors
                # unless ignore_errors is set.
                if not ignore_errors:
                    type, msg = sys.exc_info()[0:2]
                    print >>file, fmt_err % (name, type, msg)
        if len(morfs) > 1:
            print >>file, "-" * len(header)
            if total_statements > 0:
                pc = 100.0 * total_executed / total_statements
            else:
                pc = 100.0
            args = ("TOTAL", total_statements, total_executed, pc)
            if show_missing:
                args = args + ("",)
            print >>file, fmt_coverage % args
+
    # annotate(morfs, ignore_errors).

    # Lines that are blank or comment-only get no annotation marker.
    blank_re = re.compile(r"\s*(#|$)")
    # 'else:' lines need special handling (no AST node of their own).
    else_re = re.compile(r"\s*else\s*:\s*(#|$)")

    def annotate(self, morfs, directory=None, ignore_errors=0, omit_prefixes=[]):
        # Write an annotated ",cover" copy of each morf's source file.
        # NOTE: the mutable default for omit_prefixes is shared across
        # calls; it is never mutated here, so this is safe in practice.
        morfs = self.filter_by_prefix(morfs, omit_prefixes)
        for morf in morfs:
            try:
                filename, statements, excluded, missing, _ = self.analysis2(morf)
                self.annotate_file(filename, statements, excluded, missing, directory)
            except KeyboardInterrupt:
                raise
            except:
                if not ignore_errors:
                    raise
+
    def annotate_file(self, filename, statements, excluded, missing, directory=None):
        # Copy `filename` to "<filename>,cover" (or into `directory`),
        # prefixing each line with '> ' (executed), '! ' (missed),
        # '- ' (excluded) or spaces (blank/comment).
        source = open(filename, 'r')
        if directory:
            dest_file = os.path.join(directory,
                                     os.path.basename(filename)
                                     + ',cover')
        else:
            dest_file = filename + ',cover'
        dest = open(dest_file, 'w')
        lineno = 0
        i = 0           # cursor into the sorted `statements` list
        j = 0           # cursor into the sorted `missing` list
        covered = 1
        while 1:
            line = source.readline()
            if line == '':
                break
            lineno = lineno + 1
            # Advance both cursors to the current line.
            while i < len(statements) and statements[i] < lineno:
                i = i + 1
            while j < len(missing) and missing[j] < lineno:
                j = j + 1
            if i < len(statements) and statements[i] == lineno:
                covered = j >= len(missing) or missing[j] > lineno
            if self.blank_re.match(line):
                dest.write('  ')
            elif self.else_re.match(line):
                # Special logic for lines containing only 'else:'.
                # See [GDR 2001-12-04b, 3.2].
                if i >= len(statements) and j >= len(missing):
                    dest.write('! ')
                elif i >= len(statements) or j >= len(missing):
                    dest.write('> ')
                elif statements[i] == missing[j]:
                    dest.write('! ')
                else:
                    dest.write('> ')
            elif lineno in excluded:
                dest.write('- ')
            elif covered:
                dest.write('> ')
            else:
                dest.write('! ')
            dest.write(line)
        source.close()
        dest.close()
+
# Singleton object.
the_coverage = coverage()

# Module functions call methods in the singleton object.
def use_cache(*args, **kw): return the_coverage.use_cache(*args, **kw)
def start(*args, **kw): return the_coverage.start(*args, **kw)
def stop(*args, **kw): return the_coverage.stop(*args, **kw)
def erase(*args, **kw): return the_coverage.erase(*args, **kw)
def begin_recursive(*args, **kw): return the_coverage.begin_recursive(*args, **kw)
def end_recursive(*args, **kw): return the_coverage.end_recursive(*args, **kw)
def exclude(*args, **kw): return the_coverage.exclude(*args, **kw)
def analysis(*args, **kw): return the_coverage.analysis(*args, **kw)
def analysis2(*args, **kw): return the_coverage.analysis2(*args, **kw)
def report(*args, **kw): return the_coverage.report(*args, **kw)
def annotate(*args, **kw): return the_coverage.annotate(*args, **kw)
def annotate_file(*args, **kw): return the_coverage.annotate_file(*args, **kw)

# Save coverage data when Python exits. (The atexit module wasn't
# introduced until Python 2.0, so use sys.exitfunc when it's not
# available.)
try:
    import atexit
    atexit.register(the_coverage.save)
except ImportError:
    sys.exitfunc = the_coverage.save

# Command-line interface.
if __name__ == '__main__':
    the_coverage.command_line()
+
+
+# A. REFERENCES
+#
+# [GDR 2001-12-04a] "Statement coverage for Python"; Gareth Rees;
+# Ravenbrook Limited; 2001-12-04;
+# <http://www.nedbatchelder.com/code/modules/rees-coverage.html>.
+#
+# [GDR 2001-12-04b] "Statement coverage for Python: design and
+# analysis"; Gareth Rees; Ravenbrook Limited; 2001-12-04;
+# <http://www.nedbatchelder.com/code/modules/rees-design.html>.
+#
+# [van Rossum 2001-07-20a] "Python Reference Manual (release 2.1.1)";
+# Guido van Rossum; 2001-07-20;
+# <http://www.python.org/doc/2.1.1/ref/ref.html>.
+#
+# [van Rossum 2001-07-20b] "Python Library Reference"; Guido van Rossum;
+# 2001-07-20; <http://www.python.org/doc/2.1.1/lib/lib.html>.
+#
+#
+# B. DOCUMENT HISTORY
+#
+# 2001-12-04 GDR Created.
+#
+# 2001-12-06 GDR Added command-line interface and source code
+# annotation.
+#
+# 2001-12-09 GDR Moved design and interface to separate documents.
+#
+# 2001-12-10 GDR Open cache file as binary on Windows. Allow
+# simultaneous -e and -x, or -a and -r.
+#
+# 2001-12-12 GDR Added command-line help. Cache analysis so that it
+# only needs to be done once when you specify -a and -r.
+#
+# 2001-12-13 GDR Improved speed while recording. Portable between
+# Python 1.5.2 and 2.1.1.
+#
+# 2002-01-03 GDR Module-level functions work correctly.
+#
+# 2002-01-07 GDR Update sys.path when running a file with the -x option,
+# so that it matches the value the program would get if it were run on
+# its own.
+#
+# 2004-12-12 NMB Significant code changes.
+# - Finding executable statements has been rewritten so that docstrings and
+# other quirks of Python execution aren't mistakenly identified as missing
+# lines.
+# - Lines can be excluded from consideration, even entire suites of lines.
+# - The filesystem cache of covered lines can be disabled programmatically.
+# - Modernized the code.
+#
+# 2004-12-14 NMB Minor tweaks. Return 'analysis' to its original behavior
+# and add 'analysis2'. Add a global for 'annotate', and factor it, adding
+# 'annotate_file'.
+#
+# 2004-12-31 NMB Allow for keyword arguments in the module global functions.
+# Thanks, Allen.
+#
+# 2005-12-02 NMB Call threading.settrace so that all threads are measured.
+# Thanks Martin Fuzzey. Add a file argument to report so that reports can be
+# captured to a different destination.
+#
+# 2005-12-03 NMB coverage.py can now measure itself.
+#
+# 2005-12-04 NMB Adapted Greg Rogers' patch for using relative filenames,
+# and sorting and omitting files to report on.
+#
+# C. COPYRIGHT AND LICENCE
+#
+# Copyright 2001 Gareth Rees. All rights reserved.
+# Copyright 2004-2005 Ned Batchelder. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# 1. Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the
+# distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+# DAMAGE.
+#
+# $Id: coverage.py 26 2005-12-04 18:42:44Z ned $
new file mode 100644
--- /dev/null
+++ b/tests/fish-merge
@@ -0,0 +1,88 @@
#!/bin/sh
# Exercise merges between three clones of one repository (m, m1, m2),
# probing how per-file ancestor selection affects merge conflicts.
# Each "echo" banner states the expected contents of files a and b.

set -e
set -x

# skip commit logs
HGMERGE=tkmerge; export HGMERGE
EDITOR=true; export EDITOR

rm -rf m m1 m2
mkdir m
cd m

# base repo: a="this", b="that"
echo "m this that"
echo "this" > a
echo "that" > b
hg init
hg addremove
hg commit
echo "a:" `hg dump a` "b:" `hg dump b`
echo

# clone the base repo into m2
cd ..
echo "m2 this that "
mkdir m2
cd m2
hg branch ../m
hg checkout
echo "a:" `hg dump a` "b:" `hg dump b`
echo

# change a in the original repo
cd ../m
echo "m this1 that "
echo "this1" > a
hg commit
echo "a:" `hg dump a` "b:" `hg dump b`
echo

# clone the updated repo into m1
cd ..
echo "m1 this1 that "
mkdir m1
cd m1
hg branch ../m
hg checkout
echo "a:" `hg dump a` "b:" `hg dump b`
echo

# diverge b in m1...
cd ../m1
echo "m1 this1 that1"
echo "that1" > b
hg commit
echo "a:" `hg dump a` "b:" `hg dump b`
echo

# ...and in m2
cd ../m2
echo "m2 this that2"
echo "that2" > b
hg commit
echo "a:" `hg dump a` "b:" `hg dump b`
echo

cd ../m1
echo "m1:m2 this1 that1 that2"
hg merge ../m2 # b should conflict, a should be fine
echo "a:" `hg dump a` "b:" `hg dump b`
echo

cd ../m2
echo "m2 this2 that2"
echo "this2" > a
hg commit
echo "a:" `hg dump a` "b:" `hg dump b`
echo

cd ../m2
echo "m2:m this12 that2"
hg merge ../m # a should conflict, b should be fine
echo "a:" `hg dump a` "b:" `hg dump b`
echo

# now here's the interesting bit
# if we choose ancestor by file, no conflicts
# otherwise we've got two equally close ancestors, each with a conflict
# if we go back to the root, we'll have both conflicts again
echo "m2:m1 this12 that12"
hg merge ../m1 # should be clean
echo "a:" `hg dump a` "b:" `hg dump b`
echo
new file mode 100755
--- /dev/null
+++ b/tests/md5sum.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python
+#
+# Based on python's Tools/scripts/md5sum.py
+#
+# This software may be used and distributed according to the terms
+# of the PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2, which is
+# GPL-compatible.
+
+import sys
+import os
+import md5
+
# Print "<hexdigest>  <filename>" for every file named on the command
# line; exit with status 1 on the first file that can't be read.
for filename in sys.argv[1:]:
    try:
        fp = open(filename, 'rb')
    except IOError, msg:
        sys.stderr.write('%s: Can\'t open: %s\n' % (filename, msg))
        sys.exit(1)

    m = md5.new()
    try:
        # Read in fixed-size chunks so large files don't exhaust memory.
        while 1:
            data = fp.read(8192)
            if not data:
                break
            m.update(data)
    except IOError, msg:
        sys.stderr.write('%s: I/O error: %s\n' % (filename, msg))
        sys.exit(1)
    sys.stdout.write('%s  %s\n' % (m.hexdigest(), filename))

sys.exit(0)
new file mode 100755
--- /dev/null
+++ b/tests/run-tests.py
@@ -0,0 +1,271 @@
+#!/usr/bin/env python
+#
+# run-tests.py - Run a set of tests on Mercurial
+#
+# Copyright 2006 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+import os, sys, shutil, re
+import tempfile
+import difflib
+import popen2
+from optparse import OptionParser
+
+required_tools = ["python", "diff", "grep", "unzip", "gunzip", "bunzip2", "sed"]
+
+parser = OptionParser("%prog [options] [tests]")
+parser.add_option("-v", "--verbose", action="store_true",
+ help="output verbose messages")
+parser.add_option("-c", "--cover", action="store_true",
+ help="print a test coverage report")
+parser.add_option("-s", "--cover_stdlib", action="store_true",
+ help="print a test coverage report inc. standard libraries")
+parser.add_option("-C", "--annotate", action="store_true",
+ help="output files annotated with coverage")
+(options, args) = parser.parse_args()
+verbose = options.verbose
+coverage = options.cover or options.cover_stdlib or options.annotate
+
+def vlog(*msg):
+ if verbose:
+ for m in msg:
+ print m,
+ print
+
+def splitnewlines(text):
+ '''like str.splitlines, but only split on newlines.
+ keep line endings.'''
+ i = 0
+ lines = []
+ while True:
+ n = text.find('\n', i)
+ if n == -1:
+ last = text[i:]
+ if last:
+ lines.append(last)
+ return lines
+ lines.append(text[i:n+1])
+ i = n + 1
+
+def show_diff(expected, output):
+ for line in difflib.unified_diff(expected, output,
+ "Expected output", "Test output", lineterm=''):
+ sys.stdout.write(line)
+
+def find_program(program):
+ """Search PATH for a executable program"""
+ for p in os.environ.get('PATH', os.defpath).split(os.pathsep):
+ name = os.path.join(p, program)
+ if os.access(name, os.X_OK):
+ return name
+ return None
+
+def check_required_tools():
+ # Before we go any further, check for pre-requisite tools
+ # stuff from coreutils (cat, rm, etc) are not tested
+ for p in required_tools:
+ if os.name == 'nt':
+ p += '.exe'
+ found = find_program(p)
+ if found:
+ vlog("# Found prerequisite", p, "at", found)
+ else:
+ print "WARNING: Did not find prerequisite tool: "+p
+
+def cleanup_exit():
+ if verbose:
+ print "# Cleaning up HGTMP", HGTMP
+ shutil.rmtree(HGTMP, True)
+
+def install_hg():
+ vlog("# Performing temporary installation of HG")
+ installerrs = os.path.join("tests", "install.err")
+
+ os.chdir("..") # Get back to hg root
+ cmd = ('%s setup.py clean --all'
+ ' install --force --home="%s" --install-lib="%s" >%s 2>&1'
+ % (sys.executable, INST, PYTHONDIR, installerrs))
+ vlog("# Running", cmd)
+ if os.system(cmd) == 0:
+ if not verbose:
+ os.remove(installerrs)
+ else:
+ f = open(installerrs)
+ for line in f:
+ print line,
+ f.close()
+ sys.exit(1)
+ os.chdir(TESTDIR)
+
+ os.environ["PATH"] = "%s%s%s" % (BINDIR, os.pathsep, os.environ["PATH"])
+ os.environ["PYTHONPATH"] = PYTHONDIR
+
+ if coverage:
+ vlog("# Installing coverage wrapper")
+ os.environ['COVERAGE_FILE'] = COVERAGE_FILE
+ if os.path.exists(COVERAGE_FILE):
+ os.unlink(COVERAGE_FILE)
+ # Create a wrapper script to invoke hg via coverage.py
+ os.rename(os.path.join(BINDIR, "hg"), os.path.join(BINDIR, "_hg.py"))
+ f = open(os.path.join(BINDIR, 'hg'), 'w')
+ f.write('#!' + sys.executable + '\n')
+ f.write('import sys, os; os.execv(sys.executable, [sys.executable, '+ \
+ '"%s", "-x", "%s"] + sys.argv[1:])\n' % (
+ os.path.join(TESTDIR, 'coverage.py'),
+ os.path.join(BINDIR, '_hg.py')))
+ f.close()
+ os.chmod(os.path.join(BINDIR, 'hg'), 0700)
+
+def output_coverage():
+ vlog("# Producing coverage report")
+ omit = [BINDIR, TESTDIR, PYTHONDIR]
+ if not options.cover_stdlib:
+ # Exclude as system paths (ignoring empty strings seen on win)
+ omit += [x for x in sys.path if x != '']
+ omit = ','.join(omit)
+ os.chdir(PYTHONDIR)
+ cmd = '"%s" "%s" -r "--omit=%s"' % (
+ sys.executable, os.path.join(TESTDIR, 'coverage.py'), omit)
+ vlog("# Running: "+cmd)
+ os.system(cmd)
+ if options.annotate:
+ adir = os.path.join(TESTDIR, 'annotated')
+ if not os.path.isdir(adir):
+ os.mkdir(adir)
+ cmd = '"%s" "%s" -a "--directory=%s" "--omit=%s"' % (
+ sys.executable, os.path.join(TESTDIR, 'coverage.py'),
+ adir, omit)
+ vlog("# Running: "+cmd)
+ os.system(cmd)
+
+def run(cmd):
+ """Run command in a sub-process, capturing the output (stdout and stderr).
+ Return the exist code, and output."""
+ # TODO: Use subprocess.Popen if we're running on Python 2.4
+ if os.name == 'nt':
+ tochild, fromchild = os.popen4(cmd)
+ tochild.close()
+ output = fromchild.read()
+ ret = fromchild.close()
+ if ret == None:
+ ret = 0
+ else:
+ proc = popen2.Popen4(cmd)
+ proc.tochild.close()
+ output = proc.fromchild.read()
+ ret = proc.wait()
+ return ret, splitnewlines(output)
+
+def run_one(test):
+ vlog("# Test", test)
+ if not verbose:
+ sys.stdout.write('.')
+ sys.stdout.flush()
+
+ err = os.path.join(TESTDIR, test+".err")
+ ref = os.path.join(TESTDIR, test+".out")
+
+ if os.path.exists(err):
+ os.remove(err) # Remove any previous output files
+
+ # Make a tmp subdirectory to work in
+ tmpd = os.path.join(HGTMP, test)
+ os.mkdir(tmpd)
+ os.chdir(tmpd)
+
+ if test.endswith(".py"):
+ cmd = '%s "%s"' % (sys.executable, os.path.join(TESTDIR, test))
+ else:
+ cmd = '"%s"' % (os.path.join(TESTDIR, test))
+
+ # To reliably get the error code from batch files on WinXP,
+ # the "cmd /c call" prefix is needed. Grrr
+ if os.name == 'nt' and test.endswith(".bat"):
+ cmd = 'cmd /c call "%s"' % (os.path.join(TESTDIR, test))
+
+ vlog("# Running", cmd)
+ ret, out = run(cmd)
+ vlog("# Ret was:", ret)
+
+ diffret = 0
+ # If reference output file exists, check test output against it
+ if os.path.exists(ref):
+ f = open(ref, "r")
+ ref_out = splitnewlines(f.read())
+ f.close()
+ else:
+ ref_out = ['']
+ if out != ref_out:
+ diffret = 1
+ print "\nERROR: %s output changed" % (test)
+ show_diff(ref_out, out)
+ if ret:
+ print "\nERROR: %s failed with error code %d" % (test, ret)
+ elif diffret:
+ ret = diffret
+
+ if ret != 0: # Save errors to a file for diagnosis
+ f = open(err, "wb")
+ for line in out:
+ f.write(line)
+ f.close()
+
+ os.chdir(TESTDIR)
+ shutil.rmtree(tmpd, True)
+ return ret == 0
+
+
+os.umask(022)
+
+check_required_tools()
+
+# Reset some environment variables to well-known values so that
+# the tests produce repeatable output.
+os.environ['LANG'] = os.environ['LC_ALL'] = 'C'
+os.environ['TZ'] = 'GMT'
+
+os.environ["HGEDITOR"] = sys.executable + ' -c "import sys; sys.exit(0)"'
+os.environ["HGMERGE"] = sys.executable + ' -c "import sys; sys.exit(0)"'
+os.environ["HGUSER"] = "test"
+os.environ["HGRCPATH"] = ""
+
+TESTDIR = os.environ["TESTDIR"] = os.getcwd()
+HGTMP = os.environ["HGTMP"] = tempfile.mkdtemp("", "hgtests.")
+vlog("# Using TESTDIR", TESTDIR)
+vlog("# Using HGTMP", HGTMP)
+
+INST = os.path.join(HGTMP, "install")
+BINDIR = os.path.join(INST, "bin")
+PYTHONDIR = os.path.join(INST, "lib", "python")
+COVERAGE_FILE = os.path.join(TESTDIR, ".coverage")
+
+try:
+ try:
+ install_hg()
+
+ tests = 0
+ failed = 0
+
+ if len(args) == 0:
+ args = os.listdir(".")
+ for test in args:
+ if test.startswith("test-"):
+ if '~' in test or re.search(r'\.(out|err)$', test):
+ continue
+ if not run_one(test):
+ failed += 1
+ tests += 1
+
+ print "\n# Ran %d tests, %d failed." % (tests, failed)
+ if coverage:
+ output_coverage()
+ except KeyboardInterrupt:
+ failed = True
+ print "\ninterrupted!"
+finally:
+ cleanup_exit()
+
+if failed:
+ sys.exit(1)
new file mode 100755
--- /dev/null
+++ b/tests/test-addremove
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+hg init rep
+cd rep
+mkdir dir
+touch foo dir/bar
+hg -v addremove
+hg -v commit -m "add 1" -d "1000000 0"
+cd dir/
+touch ../foo_2 bar_2
+hg -v addremove
+hg -v commit -m "add 2" -d "1000000 0"
new file mode 100644
--- /dev/null
+++ b/tests/test-addremove.out
@@ -0,0 +1,10 @@
+(the addremove command is deprecated; use add and remove --after instead)
+adding dir/bar
+adding foo
+dir/bar
+foo
+(the addremove command is deprecated; use add and remove --after instead)
+adding dir/bar_2
+adding foo_2
+dir/bar_2
+foo_2
new file mode 100755
--- /dev/null
+++ b/tests/test-archive
@@ -0,0 +1,53 @@
+#!/bin/sh
+
+mkdir test
+cd test
+hg init
+echo foo>foo
+hg addremove
+hg commit -m 1
+echo bar>bar
+hg addremove
+hg commit -m 2
+mkdir baz
+echo bletch>baz/bletch
+hg addremove
+hg commit -m 3
+echo "[web]" >> .hg/hgrc
+echo "name = test-archive" >> .hg/hgrc
+echo "allowzip = true" >> .hg/hgrc
+echo "allowgz = true" >> .hg/hgrc
+echo "allowbz2 = true" >> .hg/hgrc
+hg serve -p 20059 -d --pid-file=hg.pid
+
+TIP=`hg id -v | cut -f1 -d' '`
+QTIP=`hg id -q`
+cat > getarchive.py <<EOF
+import sys, urllib2
+node, archive = sys.argv[1:]
+f = urllib2.urlopen('http://127.0.0.1:20059/?cmd=archive;node=%s;type=%s'
+ % (node, archive))
+sys.stdout.write(f.read())
+EOF
+http_proxy= python getarchive.py "$TIP" gz | gunzip | tar tf - | sed "s/$QTIP/TIP/"
+http_proxy= python getarchive.py "$TIP" bz2 | bunzip2 | tar tf - | sed "s/$QTIP/TIP/"
+http_proxy= python getarchive.py "$TIP" zip > archive.zip
+unzip -t archive.zip | sed "s/$QTIP/TIP/"
+
+kill `cat hg.pid`
+sleep 1 # wait for server to scream and die
+
+hg archive -t tar test.tar
+tar tf test.tar
+
+hg archive -t tbz2 -X baz test.tar.bz2
+bunzip2 -dc test.tar.bz2 | tar tf -
+
+hg archive -t tgz -p %b-%h test-%h.tar.gz
+gzip -dc test-$QTIP.tar.gz | tar tf - | sed "s/$QTIP/TIP/"
+
+hg archive -t zip -p /illegal test.zip
+hg archive -t zip -p very/../bad test.zip
+
+hg archive -t zip -r 2 test.zip
+unzip -t test.zip
new file mode 100644
--- /dev/null
+++ b/tests/test-archive.out
@@ -0,0 +1,38 @@
+(the addremove command is deprecated; use add and remove --after instead)
+adding foo
+(the addremove command is deprecated; use add and remove --after instead)
+adding bar
+(the addremove command is deprecated; use add and remove --after instead)
+adding baz/bletch
+test-archive-TIP/.hg_archival.txt
+test-archive-TIP/bar
+test-archive-TIP/baz/bletch
+test-archive-TIP/foo
+test-archive-TIP/.hg_archival.txt
+test-archive-TIP/bar
+test-archive-TIP/baz/bletch
+test-archive-TIP/foo
+Archive: archive.zip
+ testing: test-archive-TIP/.hg_archival.txt OK
+ testing: test-archive-TIP/bar OK
+ testing: test-archive-TIP/baz/bletch OK
+ testing: test-archive-TIP/foo OK
+No errors detected in compressed data of archive.zip.
+test/.hg_archival.txt
+test/bar
+test/baz/bletch
+test/foo
+test/.hg_archival.txt
+test/bar
+test/foo
+test-TIP/.hg_archival.txt
+test-TIP/bar
+test-TIP/baz/bletch
+test-TIP/foo
+abort: archive prefix contains illegal components
+Archive: test.zip
+ testing: test/.hg_archival.txt OK
+ testing: test/bar OK
+ testing: test/baz/bletch OK
+ testing: test/foo OK
+No errors detected in compressed data of test.zip.
new file mode 100755
--- /dev/null
+++ b/tests/test-backout
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+echo '# basic operation'
+hg init basic
+cd basic
+echo a > a
+hg commit -d '0 0' -A -m a
+echo b >> a
+hg commit -d '1 0' -m b
+
+hg backout -d '2 0' tip
+cat a
+
+echo '# file that was removed is recreated'
+cd ..
+hg init remove
+cd remove
+
+echo content > a
+hg commit -d '0 0' -A -m a
+
+hg rm a
+hg commit -d '1 0' -m b
+
+hg backout -d '2 0' --merge tip
+cat a
+
+echo '# backout of backout is as if nothing happened'
+
+hg backout -d '3 0' --merge tip
+cat a 2>/dev/null || echo cat: a: No such file or directory
+
+echo '# backout with merge'
+cd ..
+hg init merge
+cd merge
+
+echo line 1 > a
+hg commit -d '0 0' -A -m a
+
+echo line 2 >> a
+hg commit -d '1 0' -m b
+
+echo line 3 >> a
+hg commit -d '2 0' -m c
+
+hg backout --merge -d '3 0' 1
+hg commit -d '4 0' -m d
+cat a
+
+exit 0
new file mode 100644
--- /dev/null
+++ b/tests/test-backout.out
@@ -0,0 +1,21 @@
+# basic operation
+adding a
+changeset 2:b38a34ddfd9f backs out changeset 1:a820f4f40a57
+a
+# file that was removed is recreated
+adding a
+adding a
+changeset 2:44cd84c7349a backs out changeset 1:76862dcce372
+content
+# backout of backout is as if nothing happened
+removing a
+changeset 3:0dd8a0ed5e99 backs out changeset 2:44cd84c7349a
+cat: a: No such file or directory
+# backout with merge
+adding a
+changeset 3:6c77ecc28460 backs out changeset 1:314f55b1bf23
+merging with changeset 2:b66ea5b77abb
+merging a
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+line 1
new file mode 100755
--- /dev/null
+++ b/tests/test-backwards-remove
@@ -0,0 +1,12 @@
+#!/bin/sh
+hg init
+echo This is file a1 > a
+hg add a
+hg commit -m "commit #0" -d "1000000 0"
+ls
+echo This is file b1 > b
+hg add b
+hg commit -m "commit #1" -d "1000000 0"
+hg co 0
+# B should disappear
+ls
new file mode 100644
--- /dev/null
+++ b/tests/test-backwards-remove.out
@@ -0,0 +1,3 @@
+a
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+a
new file mode 100755
--- /dev/null
+++ b/tests/test-bad-pull
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+hg clone http://localhost:20059/ copy
+echo $?
+ls copy 2>/dev/null || echo copy: No such file or directory
+
+cat > dumb.py <<EOF
+import BaseHTTPServer, SimpleHTTPServer, signal
+
+def run(server_class=BaseHTTPServer.HTTPServer,
+ handler_class=SimpleHTTPServer.SimpleHTTPRequestHandler):
+ server_address = ('localhost', 20059)
+ httpd = server_class(server_address, handler_class)
+ httpd.serve_forever()
+
+signal.signal(signal.SIGTERM, lambda x: sys.exit(0))
+run()
+EOF
+
+python dumb.py 2>/dev/null &
+
+http_proxy= hg clone http://localhost:20059/foo copy2
+echo $?
+
+kill $!
new file mode 100644
--- /dev/null
+++ b/tests/test-bad-pull.out
@@ -0,0 +1,5 @@
+abort: error: Connection refused
+255
+copy: No such file or directory
+abort: HTTP Error 404: File not found
+255
new file mode 100755
--- /dev/null
+++ b/tests/test-basic
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+echo a > a
+hg add a
+hg commit -m test -d "1000000 0"
+hg history
+hg manifest
+hg cat a
+hg verify
new file mode 100644
--- /dev/null
+++ b/tests/test-basic.out
@@ -0,0 +1,13 @@
+changeset: 0:0acdaf898367
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: test
+
+b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
+a
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
new file mode 100755
--- /dev/null
+++ b/tests/test-bdiff
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+import sys
+from mercurial import bdiff, mpatch
+
+def test1(a, b):
+ d = bdiff.bdiff(a, b)
+ c = a
+ if d:
+ c = mpatch.patches(a, [d])
+ if c != b:
+ print "***", `a`, `b`
+ print "bad:"
+ print `c`[:200]
+ print `d`
+
+def test(a, b):
+ print "***", `a`, `b`
+ test1(a, b)
+ test1(b, a)
+
+test("a\nc\n\n\n\n", "a\nb\n\n\n")
+test("a\nb\nc\n", "a\nc\n")
+test("", "")
+test("a\nb\nc", "a\nb\nc")
+test("a\nb\nc\nd\n", "a\nd\n")
+test("a\nb\nc\nd\n", "a\nc\ne\n")
+test("a\nb\nc\n", "a\nc\n")
+test("a\n", "c\na\nb\n")
+test("a\n", "")
+test("a\n", "b\nc\n")
+test("a\n", "c\na\n")
+test("", "adjfkjdjksdhfksj")
+test("", "ab")
+test("", "abc")
+test("a", "a")
+test("ab", "ab")
+test("abc", "abc")
+test("a\n", "a\n")
+test("a\nb", "a\nb")
+
+print "done"
new file mode 100644
--- /dev/null
+++ b/tests/test-bdiff.out
@@ -0,0 +1,20 @@
+*** 'a\nc\n\n\n\n' 'a\nb\n\n\n'
+*** 'a\nb\nc\n' 'a\nc\n'
+*** '' ''
+*** 'a\nb\nc' 'a\nb\nc'
+*** 'a\nb\nc\nd\n' 'a\nd\n'
+*** 'a\nb\nc\nd\n' 'a\nc\ne\n'
+*** 'a\nb\nc\n' 'a\nc\n'
+*** 'a\n' 'c\na\nb\n'
+*** 'a\n' ''
+*** 'a\n' 'b\nc\n'
+*** 'a\n' 'c\na\n'
+*** '' 'adjfkjdjksdhfksj'
+*** '' 'ab'
+*** '' 'abc'
+*** 'a' 'a'
+*** 'ab' 'ab'
+*** 'abc' 'abc'
+*** 'a\n' 'a\n'
+*** 'a\nb' 'a\nb'
+done
new file mode 100755
--- /dev/null
+++ b/tests/test-bundle
@@ -0,0 +1,54 @@
+#!/bin/sh
+
+hg init test
+cd test
+echo 0 > afile
+hg add afile
+hg commit -m "0.0" -d "1000000 0"
+echo 1 >> afile
+hg commit -m "0.1" -d "1000000 0"
+echo 2 >> afile
+hg commit -m "0.2" -d "1000000 0"
+echo 3 >> afile
+hg commit -m "0.3" -d "1000000 0"
+hg update -C 0
+echo 1 >> afile
+hg commit -m "1.1" -d "1000000 0"
+echo 2 >> afile
+hg commit -m "1.2" -d "1000000 0"
+echo "a line" > fred
+echo 3 >> afile
+hg add fred
+hg commit -m "1.3" -d "1000000 0"
+hg mv afile adifferentfile
+hg commit -m "1.3m" -d "1000000 0"
+hg update -C 3
+hg mv afile anotherfile
+hg commit -m "0.3m" -d "1000000 0"
+hg verify
+cd ..
+hg init empty
+hg -R test bundle full.hg empty
+hg -R test unbundle full.hg
+hg -R empty unbundle full.hg
+hg -R empty heads
+hg -R empty verify
+
+rm -rf empty
+hg init empty
+cd empty
+hg -R bundle://../full.hg log
+#doesn't work (yet ?)
+#hg -R bundle://../full.hg verify
+hg pull bundle://../full.hg
+cd ..
+
+rm -rf empty
+hg init empty
+hg clone -r 3 test partial
+hg clone partial partial2
+cd partial
+hg -R bundle://../full.hg log
+hg incoming bundle://../full.hg
+hg -R bundle://../full.hg outgoing ../partial2
+cd ..
new file mode 100644
--- /dev/null
+++ b/tests/test-bundle.out
@@ -0,0 +1,203 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+4 files, 9 changesets, 7 total revisions
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 0 changesets with 0 changes to 4 files
+(run 'hg update' to get a working copy)
+adding changesets
+adding manifests
+adding file changes
+added 9 changesets with 7 changes to 4 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+changeset: 8:836ac62537ab
+tag: tip
+parent: 3:ac69c658229d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.3m
+
+changeset: 7:80fe151401c2
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.3m
+
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+4 files, 9 changesets, 7 total revisions
+changeset: 8:836ac62537ab
+tag: tip
+parent: 3:ac69c658229d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.3m
+
+changeset: 7:80fe151401c2
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.3m
+
+changeset: 6:1e3f6b843bd6
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.3
+
+changeset: 5:024e4e7df376
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.2
+
+changeset: 4:5f4f3ceb285e
+parent: 0:5649c9d34dd8
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.1
+
+changeset: 3:ac69c658229d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.3
+
+changeset: 2:d62976ca1e50
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.2
+
+changeset: 1:10b2180f755b
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.1
+
+changeset: 0:5649c9d34dd8
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.0
+
+pulling from bundle://../full.hg
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 9 changesets with 7 changes to 4 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 4 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+changeset: 8:836ac62537ab
+tag: tip
+parent: 3:ac69c658229d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.3m
+
+changeset: 7:80fe151401c2
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.3m
+
+changeset: 6:1e3f6b843bd6
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.3
+
+changeset: 5:024e4e7df376
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.2
+
+changeset: 4:5f4f3ceb285e
+parent: 0:5649c9d34dd8
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.1
+
+changeset: 3:ac69c658229d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.3
+
+changeset: 2:d62976ca1e50
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.2
+
+changeset: 1:10b2180f755b
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.1
+
+changeset: 0:5649c9d34dd8
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.0
+
+searching for changes
+changeset: 4:5f4f3ceb285e
+parent: 0:5649c9d34dd8
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.1
+
+changeset: 5:024e4e7df376
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.2
+
+changeset: 6:1e3f6b843bd6
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.3
+
+changeset: 7:80fe151401c2
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.3m
+
+changeset: 8:836ac62537ab
+tag: tip
+parent: 3:ac69c658229d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.3m
+
+searching for changes
+changeset: 4:5f4f3ceb285e
+parent: 0:5649c9d34dd8
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.1
+
+changeset: 5:024e4e7df376
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.2
+
+changeset: 6:1e3f6b843bd6
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.3
+
+changeset: 7:80fe151401c2
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1.3m
+
+changeset: 8:836ac62537ab
+tag: tip
+parent: 3:ac69c658229d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0.3m
+
new file mode 100755
--- /dev/null
+++ b/tests/test-cat
@@ -0,0 +1,18 @@
+#!/bin/sh
+#
+mkdir t
+cd t
+hg init
+echo 0 > a
+echo 0 > b
+hg ci -A -m m -d "1000000 0"
+hg rm a
+hg cat a
+sleep 1 # make sure mtime is changed
+echo 1 > b
+hg ci -m m -d "1000000 0"
+echo 2 > b
+hg cat -r 0 a
+hg cat -r 0 b
+hg cat -r 1 a
+hg cat -r 1 b
new file mode 100644
--- /dev/null
+++ b/tests/test-cat.out
@@ -0,0 +1,7 @@
+adding a
+adding b
+0
+0
+0
+a: No such file in rev 03f6b0774996
+1
new file mode 100755
--- /dev/null
+++ b/tests/test-clone
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+mkdir a
+cd a
+hg init
+echo a > a
+hg add a
+hg commit -m test -d '0 0'
+
+# Default operation
+hg clone . ../b
+cd ../b
+cat a
+hg verify
+
+# No update
+hg clone -U . ../c
+cd ../c
+cat a 2>/dev/null || echo "a not present"
+hg verify
+
+# Default destination
+mkdir ../d
+cd ../d
+hg clone ../a
+cd a
+hg cat a
new file mode 100755
--- /dev/null
+++ b/tests/test-clone-failure
@@ -0,0 +1,41 @@
+#!/bin/sh
+
+# No local source
+hg clone a b
+echo $?
+
+# No remote source
+hg clone http://127.0.0.1:3121/a b
+echo $?
+rm -rf b # work around bug with http clone
+
+# Inaccessible source
+mkdir a
+chmod 000 a
+hg clone a b
+echo $?
+
+# Inaccessible destination
+mkdir b
+cd b
+hg init
+hg clone . ../a
+echo $?
+cd ..
+chmod 700 a
+rm -rf a b
+
+# Source of wrong type
+mkfifo a
+hg clone a b
+echo $?
+rm a
+
+# Default destination, same directory
+mkdir q
+cd q
+hg init
+cd ..
+hg clone q
+
+true
new file mode 100644
--- /dev/null
+++ b/tests/test-clone-failure.out
@@ -0,0 +1,11 @@
+abort: repository a not found!
+255
+abort: error: Connection refused
+255
+abort: repository a not found!
+255
+abort: destination '../a' already exists
+255
+abort: repository a not found!
+255
+abort: destination 'q' already exists
new file mode 100755
--- /dev/null
+++ b/tests/test-clone-pull-corruption
@@ -0,0 +1,32 @@
+#!/bin/sh
+#
+# Corrupt an hg repo with a pull started during an aborted commit
+#
+
+# Create two repos, so that one of them can pull from the other one.
+hg init source
+cd source
+touch foo
+hg add foo
+hg ci -m 'add foo'
+hg clone . ../corrupted
+echo >> foo
+hg ci -m 'change foo'
+
+# Add a hook to wait 5 seconds and then abort the commit
+cd ../corrupted
+echo '[hooks]' >> .hg/hgrc
+echo 'pretxncommit = sleep 5; exit 1' >> .hg/hgrc
+
+# start a commit...
+touch bar
+hg add bar
+hg ci -m 'add bar' &
+
+# ... and start a pull while the commit is still running
+sleep 1
+hg pull ../source 2>/dev/null
+
+# see what happened
+wait
+hg verify
new file mode 100644
--- /dev/null
+++ b/tests/test-clone-pull-corruption.out
@@ -0,0 +1,16 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pulling from ../source
+abort: pretxncommit hook exited with status 1
+transaction abort!
+rollback completed
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+(run 'hg update' to get a working copy)
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 2 changesets, 2 total revisions
new file mode 100755
--- /dev/null
+++ b/tests/test-clone-r
@@ -0,0 +1,59 @@
+#!/bin/sh
+
+hg init test
+cd test
+cat >>afile <<EOF
+0
+EOF
+hg add afile
+hg commit -m "0.0"
+cat >>afile <<EOF
+1
+EOF
+hg commit -m "0.1"
+cat >>afile <<EOF
+2
+EOF
+hg commit -m "0.2"
+cat >>afile <<EOF
+3
+EOF
+hg commit -m "0.3"
+hg update -C 0
+cat >>afile <<EOF
+1
+EOF
+hg commit -m "1.1"
+cat >>afile <<EOF
+2
+EOF
+hg commit -m "1.2"
+cat >fred <<EOF
+a line
+EOF
+cat >>afile <<EOF
+3
+EOF
+hg add fred
+hg commit -m "1.3"
+hg mv afile adifferentfile
+hg commit -m "1.3m"
+hg update -C 3
+hg mv afile anotherfile
+hg commit -m "0.3m"
+hg debugindex .hg/data/afile.i
+hg debugindex .hg/data/adifferentfile.i
+hg debugindex .hg/data/anotherfile.i
+hg debugindex .hg/data/fred.i
+hg debugindex .hg/00manifest.i
+hg verify
+cd ..
+for i in 0 1 2 3 4 5 6 7 8; do
+ hg clone -r "$i" test test-"$i"
+ cd test-"$i"
+ hg verify
+ cd ..
+done
+cd test-8
+hg pull ../test-7
+hg verify
new file mode 100644
--- /dev/null
+++ b/tests/test-clone-r.out
@@ -0,0 +1,137 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 3 0 0 362fef284ce2 000000000000 000000000000
+ 1 3 5 1 1 125144f7e028 362fef284ce2 000000000000
+ 2 8 7 2 2 4c982badb186 125144f7e028 000000000000
+ 3 15 9 3 3 19b1fc555737 4c982badb186 000000000000
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 75 0 7 905359268f77 000000000000 000000000000
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 75 0 8 905359268f77 000000000000 000000000000
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 8 0 6 12ab3bcc5ea4 000000000000 000000000000
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 48 0 0 43eadb1d2d06 000000000000 000000000000
+ 1 48 48 1 1 8b89697eba2c 43eadb1d2d06 000000000000
+ 2 96 48 2 2 626a32663c2f 8b89697eba2c 000000000000
+ 3 144 48 3 3 f54c32f13478 626a32663c2f 000000000000
+ 4 192 58 3 6 de68e904d169 626a32663c2f 000000000000
+ 5 250 68 3 7 3b45cc2ab868 de68e904d169 000000000000
+ 6 318 54 6 8 24d86153a002 f54c32f13478 000000000000
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+4 files, 9 changesets, 7 total revisions
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 2 changesets, 2 total revisions
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 3 changesets, 3 total revisions
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 4 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 4 changesets, 4 total revisions
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 2 changesets, 2 total revisions
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 3 changesets, 3 total revisions
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 5 changes to 2 files
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+2 files, 4 changesets, 5 total revisions
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 6 changes to 3 files
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+3 files, 5 changesets, 6 total revisions
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 5 changes to 2 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+2 files, 5 changesets, 5 total revisions
+pulling from ../test-7
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 2 changes to 3 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+4 files, 9 changesets, 7 total revisions
new file mode 100644
--- /dev/null
+++ b/tests/test-clone.out
@@ -0,0 +1,15 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+a
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+a not present
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+a
new file mode 100755
--- /dev/null
+++ b/tests/test-command-template
@@ -0,0 +1,91 @@
+#!/bin/sh
+
+hg init a
+cd a
+echo a > a
+hg add a
+echo line 1 > b
+echo line 2 >> b
+hg commit -l b -d '1000000 0' -u 'User Name <user@hostname>'
+hg add b
+echo other 1 > c
+echo other 2 >> c
+echo >> c
+echo other 3 >> c
+hg commit -l c -d '1100000 0' -u 'A. N. Other <other@place>'
+hg add c
+hg commit -m 'no person' -d '1200000 0' -u 'other@place'
+echo c >> c
+hg commit -m 'no user, no domain' -d '1300000 0' -u 'person'
+
+# make sure user/global hgrc does not affect tests
+echo '[ui]' > .hg/hgrc
+echo 'logtemplate =' >> .hg/hgrc
+echo 'style =' >> .hg/hgrc
+
+echo '# default style is like normal output'
+hg log > log.out
+hg log --style default > style.out
+diff log.out style.out
+hg log -v > log.out
+hg log -v --style default > style.out
+diff log.out style.out
+hg log --debug > log.out
+hg log --debug --style default > style.out
+diff log.out style.out
+
+echo '# compact style works'
+hg log --style compact
+hg log -v --style compact
+hg log --debug --style compact
+
+echo '# error if style not readable'
+touch q
+chmod 0 q
+hg log --style ./q
+
+echo '# error if no style'
+hg log --style notexist
+
+echo '# error if style missing key'
+echo 'q = q' > t
+hg log --style ./t
+
+echo '# error if include fails'
+echo 'changeset = q' >> t
+hg log --style ./t
+
+echo '# include works'
+rm -f q
+echo '{rev}' > q
+hg log --style ./t
+
+echo '# ui.style works'
+echo '[ui]' > .hg/hgrc
+echo 'style = t' >> .hg/hgrc
+hg log
+
+echo "# keys work"
+for key in author branches date desc file_adds file_dels files \
+ manifest node parents rev tags; do
+ for mode in '' --verbose --debug; do
+ hg log $mode --template "$key$mode: {$key}\n"
+ done
+done
+
+echo '# filters work'
+hg log --template '{author|domain}\n'
+hg log --template '{author|person}\n'
+hg log --template '{author|user}\n'
+hg log --template '{date|age}\n' > /dev/null || exit 1
+hg log --template '{date|date}\n'
+hg log --template '{date|isodate}\n'
+hg log --template '{date|rfc822date}\n'
+hg log --template '{desc|firstline}\n'
+hg log --template '{node|short}\n'
+
+echo '# error on syntax'
+echo 'x = "f' >> t
+hg log
+
+echo '# done'
new file mode 100644
--- /dev/null
+++ b/tests/test-command-template.out
@@ -0,0 +1,255 @@
+# default style is like normal output
+18a19
+> files:
+29a31
+> files:
+43a46
+> files:
+# compact style works
+3[tip] 10e46f2dcbf4 1970-01-16 01:06 +0000 person
+ no user, no domain
+
+2 97054abb4ab8 1970-01-14 21:20 +0000 other
+ no person
+
+1 b608e9d1a3f0 1970-01-13 17:33 +0000 other
+ other 1
+
+0 1e4e1b8f71e0 1970-01-12 13:46 +0000 user
+ line 1
+
+3[tip] 10e46f2dcbf4 1970-01-16 01:06 +0000 person
+ no user, no domain
+
+2 97054abb4ab8 1970-01-14 21:20 +0000 other
+ no person
+
+1 b608e9d1a3f0 1970-01-13 17:33 +0000 other
+ other 1
+
+0 1e4e1b8f71e0 1970-01-12 13:46 +0000 user
+ line 1
+
+3[tip]:2,-1 10e46f2dcbf4 1970-01-16 01:06 +0000 person
+ no user, no domain
+
+2:1,-1 97054abb4ab8 1970-01-14 21:20 +0000 other
+ no person
+
+1:0,-1 b608e9d1a3f0 1970-01-13 17:33 +0000 other
+ other 1
+
+0:-1,-1 1e4e1b8f71e0 1970-01-12 13:46 +0000 user
+ line 1
+
+# error if style not readable
+abort: Permission denied - ./q
+# error if no style
+abort: No such file or directory - notexist
+# error if style missing key
+abort: ./t: no key named 'changeset'
+# error if include fails
+abort: template file ./q: Permission denied
+# include works
+3
+2
+1
+0
+# ui.style works
+3
+2
+1
+0
+# keys work
+author: person
+author: other@place
+author: A. N. Other <other@place>
+author: User Name <user@hostname>
+author--verbose: person
+author--verbose: other@place
+author--verbose: A. N. Other <other@place>
+author--verbose: User Name <user@hostname>
+author--debug: person
+author--debug: other@place
+author--debug: A. N. Other <other@place>
+author--debug: User Name <user@hostname>
+branches:
+branches:
+branches:
+branches:
+branches--verbose:
+branches--verbose:
+branches--verbose:
+branches--verbose:
+branches--debug:
+branches--debug:
+branches--debug:
+branches--debug:
+date: 1300000.00
+date: 1200000.00
+date: 1100000.00
+date: 1000000.00
+date--verbose: 1300000.00
+date--verbose: 1200000.00
+date--verbose: 1100000.00
+date--verbose: 1000000.00
+date--debug: 1300000.00
+date--debug: 1200000.00
+date--debug: 1100000.00
+date--debug: 1000000.00
+desc: no user, no domain
+desc: no person
+desc: other 1
+other 2
+
+other 3
+desc: line 1
+line 2
+desc--verbose: no user, no domain
+desc--verbose: no person
+desc--verbose: other 1
+other 2
+
+other 3
+desc--verbose: line 1
+line 2
+desc--debug: no user, no domain
+desc--debug: no person
+desc--debug: other 1
+other 2
+
+other 3
+desc--debug: line 1
+line 2
+file_adds:
+file_adds:
+file_adds:
+file_adds:
+file_adds--verbose:
+file_adds--verbose:
+file_adds--verbose:
+file_adds--verbose:
+file_adds--debug:
+file_adds--debug: c
+file_adds--debug: b
+file_adds--debug: a
+file_dels:
+file_dels:
+file_dels:
+file_dels:
+file_dels--verbose:
+file_dels--verbose:
+file_dels--verbose:
+file_dels--verbose:
+file_dels--debug:
+file_dels--debug:
+file_dels--debug:
+file_dels--debug:
+files: c
+files: c
+files: b
+files: a
+files--verbose: c
+files--verbose: c
+files--verbose: b
+files--verbose: a
+files--debug: c
+files--debug:
+files--debug:
+files--debug:
+manifest:
+manifest:
+manifest:
+manifest:
+manifest--verbose:
+manifest--verbose:
+manifest--verbose:
+manifest--verbose:
+manifest--debug: 3:cb5a1327723b
+manifest--debug: 2:6e0e82995c35
+manifest--debug: 1:4e8d705b1e53
+manifest--debug: 0:a0c8bcbbb45c
+node: 10e46f2dcbf4823578cf180f33ecf0b957964c47
+node: 97054abb4ab824450e9164180baf491ae0078465
+node: b608e9d1a3f0273ccf70fb85fd6866b3482bf965
+node: 1e4e1b8f71e05681d422154f5421e385fec3454f
+node--verbose: 10e46f2dcbf4823578cf180f33ecf0b957964c47
+node--verbose: 97054abb4ab824450e9164180baf491ae0078465
+node--verbose: b608e9d1a3f0273ccf70fb85fd6866b3482bf965
+node--verbose: 1e4e1b8f71e05681d422154f5421e385fec3454f
+node--debug: 10e46f2dcbf4823578cf180f33ecf0b957964c47
+node--debug: 97054abb4ab824450e9164180baf491ae0078465
+node--debug: b608e9d1a3f0273ccf70fb85fd6866b3482bf965
+node--debug: 1e4e1b8f71e05681d422154f5421e385fec3454f
+parents:
+parents:
+parents:
+parents:
+parents--verbose:
+parents--verbose:
+parents--verbose:
+parents--verbose:
+parents--debug: 2:97054abb4ab8 -1:000000000000
+parents--debug: 1:b608e9d1a3f0 -1:000000000000
+parents--debug: 0:1e4e1b8f71e0 -1:000000000000
+parents--debug: -1:000000000000 -1:000000000000
+rev: 3
+rev: 2
+rev: 1
+rev: 0
+rev--verbose: 3
+rev--verbose: 2
+rev--verbose: 1
+rev--verbose: 0
+rev--debug: 3
+rev--debug: 2
+rev--debug: 1
+rev--debug: 0
+tags: tip
+tags:
+tags:
+tags:
+tags--verbose: tip
+tags--verbose:
+tags--verbose:
+tags--verbose:
+tags--debug: tip
+tags--debug:
+tags--debug:
+tags--debug:
+# filters work
+
+place
+place
+hostname
+person
+other
+A. N. Other
+User Name
+person
+other
+other
+user
+Fri Jan 16 01:06:40 1970 +0000
+Wed Jan 14 21:20:00 1970 +0000
+Tue Jan 13 17:33:20 1970 +0000
+Mon Jan 12 13:46:40 1970 +0000
+1970-01-16 01:06 +0000
+1970-01-14 21:20 +0000
+1970-01-13 17:33 +0000
+1970-01-12 13:46 +0000
+Fri, 16 Jan 1970 01:06:40 +0000
+Wed, 14 Jan 1970 21:20:00 +0000
+Tue, 13 Jan 1970 17:33:20 +0000
+Mon, 12 Jan 1970 13:46:40 +0000
+no user, no domain
+no person
+other 1
+line 1
+10e46f2dcbf4
+97054abb4ab8
+b608e9d1a3f0
+1e4e1b8f71e0
+# error on syntax
+abort: t:3: unmatched quotes
+# done
new file mode 100755
--- /dev/null
+++ b/tests/test-commit
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+hg init test
+cd test
+echo foo > foo
+hg add foo
+hg commit -d '0 0' -m commit-1
+echo foo >> foo
+hg commit -d '1 4444444' -m commit-3
+hg commit -d '1 15.1' -m commit-4
+hg commit -d 'foo bar' -m commit-5
+hg commit -d ' 1 4444' -m commit-6
+hg commit -d '111111111111 0' -m commit-7
+
+echo bar > bar
+hg add bar
+rm bar
+hg commit -d "1000000 0" -m commit-8 2>&1 | sed -e "s:/.*\(/test/.*\):...\1:"
+exit 0
new file mode 100644
--- /dev/null
+++ b/tests/test-commit.out
@@ -0,0 +1,16 @@
+abort: impossible time zone offset: 4444444
+transaction abort!
+rollback completed
+abort: invalid date: '1\t15.1'
+transaction abort!
+rollback completed
+abort: invalid date: 'foo bar'
+transaction abort!
+rollback completed
+abort: invalid date: ' 1 4444'
+transaction abort!
+rollback completed
+abort: date exceeds 32 bits: 111111111111
+transaction abort!
+rollback completed
+abort: No such file or directory: .../test/bar
new file mode 100755
--- /dev/null
+++ b/tests/test-committer
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+unset HGUSER
+EMAIL="My Name <myname@example.com>"
+export EMAIL
+
+hg init test
+cd test
+touch asdf
+hg add asdf
+hg commit -d '1000000 0' -m commit-1
+hg tip
new file mode 100644
--- /dev/null
+++ b/tests/test-committer.out
@@ -0,0 +1,6 @@
+changeset: 0:9426b370c206
+tag: tip
+user: My Name <myname@example.com>
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: commit-1
+
new file mode 100755
--- /dev/null
+++ b/tests/test-conflict
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+hg init
+echo "nothing" > a
+hg add a
+hg commit -m ancestor -d "1000000 0"
+echo "something" > a
+hg commit -m branch1 -d "1000000 0"
+hg co 0
+echo "something else" > a
+hg commit -m branch2 -d "1000000 0"
+HGMERGE=merge; export HGMERGE
+hg merge 1
+hg id
+egrep -v ">>>|<<<" a
+hg status
new file mode 100644
--- /dev/null
+++ b/tests/test-conflict.out
@@ -0,0 +1,13 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+merge: warning: conflicts during merge
+merging a
+merging a failed!
+0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+There are unresolved merges, you can redo the full merge using:
+ hg update -C 2
+ hg merge 1
+e7fe8eb3e180+0d24b7662d3e+ tip
+something else
+=======
+something
+M a
new file mode 100755
--- /dev/null
+++ b/tests/test-confused-revert
@@ -0,0 +1,55 @@
+#!/bin/sh
+
+hg init
+echo foo > a
+hg add a
+hg commit -m "1" -d "1000000 0"
+
+echo bar > b
+hg add b
+hg remove a
+
+echo "%%% should show a removed and b added"
+hg status
+
+echo "reverting..."
+hg revert
+
+echo "%%% should show b unknown and a back to normal"
+hg status
+
+rm b
+
+hg co -C 0
+echo foo-a > a
+hg commit -m "2a" -d "1000000 0"
+
+hg co -C 0
+echo foo-b > a
+hg commit -m "2b" -d "1000000 0"
+
+HGMERGE=true hg merge 1
+
+echo "%%% should show foo-b"
+cat a
+
+echo bar > b
+hg add b
+rm a
+hg remove a
+
+echo "%%% should show a removed and b added"
+hg status
+
+echo "%%% revert should fail"
+hg revert
+
+echo "%%% revert should be ok now"
+hg revert -r2
+
+echo "%%% should show b unknown and a marked modified (merged)"
+hg status
+
+echo "%%% should show foo-b"
+cat a
+
new file mode 100644
--- /dev/null
+++ b/tests/test-confused-revert.out
@@ -0,0 +1,27 @@
+%%% should show a removed and b added
+A b
+R a
+reverting...
+undeleting a
+forgetting b
+%%% should show b unknown and a back to normal
+? b
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+merging a
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+%%% should show foo-b
+foo-b
+%%% should show a removed and b added
+A b
+R a
+%%% revert should fail
+abort: working dir has two parents; you must specify the revision to revert to
+%%% revert should be ok now
+undeleting a
+forgetting b
+%%% should show b unknown and a marked modified (merged)
+? b
+%%% should show foo-b
+foo-b
new file mode 100755
--- /dev/null
+++ b/tests/test-copy
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+hg init
+echo a > a
+hg add a
+hg commit -m "1" -d "1000000 0"
+hg status
+hg copy a b
+hg status
+hg --debug commit -m "2" -d "1000000 0"
+echo "we should see two history entries"
+hg history -v
+echo "we should see one log entry for a"
+hg log a
+echo "this should show a revision linked to changeset 0"
+hg debugindex .hg/data/a.i
+echo "we should see one log entry for b"
+hg log b
+echo "this should show a revision linked to changeset 1"
+hg debugindex .hg/data/b.i
+
+echo "this should show the rename information in the metadata"
+hg debugdata .hg/data/b.d 0 | head -3 | tail -2
+
+$TESTDIR/md5sum.py .hg/data/b.i
+hg cat b > bsum
+$TESTDIR/md5sum.py bsum
+hg cat a > asum
+$TESTDIR/md5sum.py asum
+hg verify
new file mode 100644
--- /dev/null
+++ b/tests/test-copy.out
@@ -0,0 +1,51 @@
+A b
+b
+ b: copy a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
+we should see two history entries
+changeset: 1:386a3cc01532710ca78aed9a54fa2f459c04f29c
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+files: b
+description:
+2
+
+
+changeset: 0:33aaa84a386bd609094aeb21a97c09436c482ef1
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+files: a
+description:
+1
+
+
+we should see one log entry for a
+changeset: 0:33aaa84a386b
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1
+
+this should show a revision linked to changeset 0
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 3 0 0 b789fdd96dc2 000000000000 000000000000
+we should see one log entry for b
+changeset: 1:386a3cc01532
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+this should show a revision linked to changeset 1
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 65 0 1 9a263dd772e0 000000000000 000000000000
+this should show the rename information in the metadata
+copyrev: b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
+copy: a
+ed156f22f0a6fde642de0b5eba0cbbb2 .hg/data/b.i
+60b725f10c9c85c70d97880dfe8191b3 bsum
+60b725f10c9c85c70d97880dfe8191b3 asum
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+2 files, 2 changesets, 2 total revisions
new file mode 100755
--- /dev/null
+++ b/tests/test-copy2
@@ -0,0 +1,41 @@
+#!/bin/sh
+
+hg init
+echo foo > foo
+hg add foo
+hg commit -m1 -d"0 0"
+
+echo "# should show copy"
+hg copy foo bar
+hg debugstate|grep '^copy'
+
+echo "# shouldn't show copy"
+hg commit -m2 -d"0 0"
+hg debugstate|grep '^copy'
+
+echo "# should match"
+hg debugindex .hg/data/foo.i
+hg debugrename bar
+
+echo bleah > foo
+echo quux > bar
+hg commit -m3 -d"0 0"
+
+echo "# should not be renamed"
+hg debugrename bar
+
+hg copy -f foo bar
+echo "# should show copy"
+hg debugstate|grep '^copy'
+hg commit -m3 -d"0 0"
+
+echo "# should show no parents for tip"
+hg debugindex .hg/data/bar.i
+echo "# should match"
+hg debugindex .hg/data/foo.i
+hg debugrename bar
+
+echo "# should show no copies"
+hg debugstate|grep '^copy'
+
+exit 0
new file mode 100644
--- /dev/null
+++ b/tests/test-copy2.out
@@ -0,0 +1,22 @@
+# should show copy
+copy: foo -> bar
+# shouldn't show copy
+# should match
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 5 0 0 2ed2a3912a0b 000000000000 000000000000
+renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
+# should not be renamed
+not renamed
+# should show copy
+copy: foo -> bar
+# should show no parents for tip
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 69 0 1 6ca237634e1f 000000000000 000000000000
+ 1 69 6 1 2 7a1ff8e75f5b 6ca237634e1f 000000000000
+ 2 75 82 1 3 243dfe60f3d9 000000000000 000000000000
+# should match
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 5 0 0 2ed2a3912a0b 000000000000 000000000000
+ 1 5 7 1 2 dd12c926cf16 2ed2a3912a0b 000000000000
+renamed from foo:dd12c926cf165e3eb4cf87b084955cb617221c17
+# should show no copies
new file mode 100755
--- /dev/null
+++ b/tests/test-diff-newlines
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+hg init
+python -c 'print "confuse str.splitlines\nembedded\rnewline"' > a
+hg ci -Ama -d '1 0'
+echo clean diff >> a
+hg ci -mb -d '2 0'
+hg diff -r0 -r1
new file mode 100644
--- /dev/null
+++ b/tests/test-diff-newlines.out
@@ -0,0 +1,8 @@
+adding a
+diff -r 107ba6f817b5 -r 310ce7989cdc a
+--- a/a Thu Jan 01 00:00:01 1970 +0000
++++ b/a Thu Jan 01 00:00:02 1970 +0000
+@@ -1,2 +1,3 @@ confuse str.splitlines
+ confuse str.splitlines
+ embedded
newline
++clean diff
new file mode 100755
--- /dev/null
+++ b/tests/test-diffdir
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+hg init
+touch a
+hg add a
+hg ci -m "a" -d "1000000 0"
+
+echo 123 > b
+hg add b
+hg diff | sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
+ -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/"
+
+hg diff -r tip | sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
+ -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/"
+
+echo foo > a
+hg diff | sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
+ -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/"
new file mode 100644
--- /dev/null
+++ b/tests/test-diffdir.out
@@ -0,0 +1,20 @@
+diff -r acd8075edac9 b
+--- /dev/null
++++ b/b
+@@ -0,0 +1,1 @@
++123
+diff -r acd8075edac9 b
+--- /dev/null
++++ b/b
+@@ -0,0 +1,1 @@
++123
+diff -r acd8075edac9 a
+--- a/a
++++ b/a
+@@ -0,0 +1,1 @@
++foo
+diff -r acd8075edac9 b
+--- /dev/null
++++ b/b
+@@ -0,0 +1,1 @@
++123
new file mode 100755
--- /dev/null
+++ b/tests/test-empty
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+hg init
+hg log
+hg grep wah
+hg manifest
+hg verify
new file mode 100755
--- /dev/null
+++ b/tests/test-empty-dir
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+hg init
+echo 123 > a
+hg add a
+hg commit -m "first" -d "1000000 0" a
+mkdir sub
+echo 321 > sub/b
+hg add sub/b
+hg commit -m "second" -d "1000000 0" sub/b
+cat sub/b
+hg co 0
+cat sub/b 2>/dev/null || echo "sub/b not present"
+ls sub 2>/dev/null || echo "sub not present"
+
+true
new file mode 100644
--- /dev/null
+++ b/tests/test-empty-dir.out
@@ -0,0 +1,4 @@
+321
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+sub/b not present
+sub not present
new file mode 100755
--- /dev/null
+++ b/tests/test-empty-group
@@ -0,0 +1,49 @@
+#!/bin/sh
+#
+# A B
+#
+# 3 4 3
+# |\/| |\
+# |/\| | \
+# 1 2 1 2
+# \ / \ /
+# 0 0
+#
+# if the result of the merge of 1 and 2
+# is the same in 3 and 4, no new manifest
+# will be created and the manifest group
+# will be empty during the pull
+#
+# (plus we test a failure where outgoing
+# wrongly reported the number of csets)
+#
+
+hg init a
+cd a
+touch init
+hg ci -A -m 0 -d "1000000 0"
+touch x y
+hg ci -A -m 1 -d "1000000 0"
+hg update 0
+touch x y
+hg ci -A -m 2 -d "1000000 0"
+hg merge 1
+hg ci -A -m m1 -d "1000000 0"
+#hg log
+#hg debugindex .hg/00manifest.i
+hg update -C 1
+hg merge 2
+hg ci -A -m m2 -d "1000000 0"
+#hg log
+#hg debugindex .hg/00manifest.i
+
+cd ..
+hg clone -r 3 a b
+hg clone -r 4 a c
+hg -R a outgoing b
+hg -R a outgoing c
+hg -R b outgoing c
+hg -R c outgoing b
+
+hg -R b pull a
+hg -R c pull a
new file mode 100644
--- /dev/null
+++ b/tests/test-empty-group.out
@@ -0,0 +1,72 @@
+adding init
+adding x
+adding y
+0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+adding x
+adding y
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 3 changes to 3 files
+3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 3 changes to 3 files
+3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+searching for changes
+changeset: 4:fdb3c546e859
+tag: tip
+parent: 1:1f703b3fcbc6
+parent: 2:de997049e034
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: m2
+
+searching for changes
+changeset: 3:f40f830c0024
+parent: 2:de997049e034
+parent: 1:1f703b3fcbc6
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: m1
+
+searching for changes
+changeset: 3:f40f830c0024
+tag: tip
+parent: 2:de997049e034
+parent: 1:1f703b3fcbc6
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: m1
+
+searching for changes
+changeset: 3:fdb3c546e859
+tag: tip
+parent: 1:1f703b3fcbc6
+parent: 2:de997049e034
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: m2
+
+pulling from a
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 0 changes to 0 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+pulling from a
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 0 changes to 0 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
new file mode 100644
--- /dev/null
+++ b/tests/test-empty.out
@@ -0,0 +1,5 @@
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+0 files, 0 changesets, 0 total revisions
new file mode 100755
--- /dev/null
+++ b/tests/test-encode
@@ -0,0 +1,34 @@
+#!/bin/sh
+
+hg init
+
+cat > .hg/hgrc <<EOF
+[encode]
+*.gz = gunzip
+
+[decode]
+*.gz = gzip
+
+EOF
+
+echo "this is a test" | gzip > a.gz
+hg add a.gz
+hg ci -m "test" -d "1000000 0"
+echo %% no changes
+hg status
+touch a.gz
+
+echo %% no changes
+hg status
+
+echo %% uncompressed contents in repo
+hg debugdata .hg/data/a.gz.d 0
+
+echo %% uncompress our working dir copy
+gunzip < a.gz
+
+rm a.gz
+hg co
+
+echo %% uncompress our new working dir copy
+gunzip < a.gz
new file mode 100644
--- /dev/null
+++ b/tests/test-encode.out
@@ -0,0 +1,9 @@
+%% no changes
+%% no changes
+%% uncompressed contents in repo
+this is a test
+%% uncompress our working dir copy
+this is a test
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+%% uncompress our new working dir copy
+this is a test
new file mode 100755
--- /dev/null
+++ b/tests/test-excessive-merge
@@ -0,0 +1,46 @@
+#!/bin/sh
+
+hg init
+
+echo foo > a
+echo foo > b
+hg add a b
+
+hg ci -m "test" -d "1000000 0"
+
+echo blah > a
+
+hg ci -m "branch a" -d "1000000 0"
+
+hg co 0
+
+echo blah > b
+
+hg ci -m "branch b" -d "1000000 0"
+HGMERGE=true hg merge 1
+
+hg ci -m "merge b/a -> blah" -d "1000000 0"
+
+hg co 1
+HGMERGE=true hg merge 2
+hg ci -m "merge a/b -> blah" -d "1000000 0"
+
+hg log
+hg debugindex .hg/00changelog.i
+
+echo
+
+echo 1
+hg manifest 1
+echo 2
+hg manifest 2
+echo 3
+hg manifest 3
+echo 4
+hg manifest 4
+
+echo
+
+hg debugindex .hg/data/a.i
+
+hg verify
new file mode 100644
--- /dev/null
+++ b/tests/test-excessive-merge.out
@@ -0,0 +1,65 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+changeset: 4:f6c172c6198c
+tag: tip
+parent: 1:448a8c5e42f1
+parent: 2:7c5dc2e857f2
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: merge a/b -> blah
+
+changeset: 3:13d875a22764
+parent: 2:7c5dc2e857f2
+parent: 1:448a8c5e42f1
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: merge b/a -> blah
+
+changeset: 2:7c5dc2e857f2
+parent: 0:dc1751ec2e9d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: branch b
+
+changeset: 1:448a8c5e42f1
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: branch a
+
+changeset: 0:dc1751ec2e9d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: test
+
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 64 0 0 dc1751ec2e9d 000000000000 000000000000
+ 1 64 68 1 1 448a8c5e42f1 dc1751ec2e9d 000000000000
+ 2 132 68 2 2 7c5dc2e857f2 dc1751ec2e9d 000000000000
+ 3 200 75 3 3 13d875a22764 7c5dc2e857f2 448a8c5e42f1
+ 4 275 29 3 4 f6c172c6198c 448a8c5e42f1 7c5dc2e857f2
+
+1
+79d7492df40aa0fa093ec4209be78043c181f094 644 a
+2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 b
+2
+2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 a
+79d7492df40aa0fa093ec4209be78043c181f094 644 b
+3
+79d7492df40aa0fa093ec4209be78043c181f094 644 a
+79d7492df40aa0fa093ec4209be78043c181f094 644 b
+4
+79d7492df40aa0fa093ec4209be78043c181f094 644 a
+79d7492df40aa0fa093ec4209be78043c181f094 644 b
+
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 5 0 0 2ed2a3912a0b 000000000000 000000000000
+ 1 5 6 1 1 79d7492df40a 2ed2a3912a0b 000000000000
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+2 files, 5 changesets, 4 total revisions
new file mode 100755
--- /dev/null
+++ b/tests/test-filebranch
@@ -0,0 +1,79 @@
+#!/bin/sh
+
+# This test makes sure that we don't mark a file as merged with its ancestor
+# when we do a merge.
+
+cat <<'EOF' > merge
+#!/bin/sh
+echo merging for `basename $1`
+EOF
+chmod +x merge
+
+echo creating base
+hg init a
+cd a
+echo 1 > foo
+echo 1 > bar
+echo 1 > baz
+echo 1 > quux
+hg add foo bar baz quux
+hg commit -m "base" -d "1000000 0"
+
+cd ..
+hg clone a b
+
+echo creating branch a
+cd a
+echo 2a > foo
+echo 2a > bar
+hg commit -m "branch a" -d "1000000 0"
+
+echo creating branch b
+
+cd ..
+cd b
+echo 2b > foo
+echo 2b > baz
+hg commit -m "branch b" -d "1000000 0"
+
+echo "we shouldn't have anything but n state here"
+hg debugstate | cut -b 1-16,35-
+
+echo merging
+hg pull ../a
+env HGMERGE=../merge hg merge -v
+
+echo 2m > foo
+echo 2b > baz
+echo new > quux
+
+echo "we shouldn't have anything but foo in merge state here"
+hg debugstate | cut -b 1-16,35- | grep "^m"
+
+hg ci -m "merge" -d "1000000 0"
+
+echo "main: we should have a merge here"
+hg debugindex .hg/00changelog.i
+
+echo "log should show foo and quux changed"
+hg log -v -r tip
+
+echo "foo: we should have a merge here"
+hg debugindex .hg/data/foo.i
+
+echo "bar: we shouldn't have a merge here"
+hg debugindex .hg/data/bar.i
+
+echo "baz: we shouldn't have a merge here"
+hg debugindex .hg/data/baz.i
+
+echo "quux: we shouldn't have a merge here"
+hg debugindex .hg/data/quux.i
+
+echo "manifest entries should match tips of all files"
+hg manifest
+
+echo "everything should be clean now"
+hg status
+
+hg verify
new file mode 100644
--- /dev/null
+++ b/tests/test-filebranch.out
@@ -0,0 +1,73 @@
+creating base
+4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+creating branch a
+creating branch b
+we shouldn't have anything but n state here
+n 644 2 bar
+n 644 3 baz
+n 644 3 foo
+n 644 2 quux
+merging
+pulling from ../a
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 2 changes to 2 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+merging for foo
+resolving manifests
+getting bar
+merging foo
+resolving foo
+1 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+we shouldn't have anything but foo in merge state here
+m 644 3 foo
+main: we should have a merge here
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 77 0 0 c36078bec30d 000000000000 000000000000
+ 1 77 73 1 1 182b283965f1 c36078bec30d 000000000000
+ 2 150 71 2 2 a6aef98656b7 c36078bec30d 000000000000
+ 3 221 72 3 3 0c2cc6fc80e2 182b283965f1 a6aef98656b7
+log should show foo and quux changed
+changeset: 3:0c2cc6fc80e2d4ee289bb658dbbe9ad932380fe9
+tag: tip
+parent: 1:182b283965f1069c0112784e30e7755ad1c0dd52
+parent: 2:a6aef98656b71154cae9d87408abe6d0218c8045
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+files: foo quux
+description:
+merge
+
+
+foo: we should have a merge here
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 3 0 0 b8e02f643373 000000000000 000000000000
+ 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
+ 2 7 4 2 2 33d1fb69067a b8e02f643373 000000000000
+ 3 11 4 3 3 aa27919ee430 2ffeddde1b65 33d1fb69067a
+bar: we shouldn't have a merge here
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 3 0 0 b8e02f643373 000000000000 000000000000
+ 1 3 4 1 2 33d1fb69067a b8e02f643373 000000000000
+baz: we shouldn't have a merge here
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 3 0 0 b8e02f643373 000000000000 000000000000
+ 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
+quux: we shouldn't have a merge here
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 3 0 0 b8e02f643373 000000000000 000000000000
+ 1 3 5 1 3 6128c0f33108 b8e02f643373 000000000000
+manifest entries should match tips of all files
+33d1fb69067a0139622a3fa3b7ba1cdb1367972e 644 bar
+2ffeddde1b65b4827f6746174a145474129fa2ce 644 baz
+aa27919ee4303cfd575e1fb932dd64d75aa08be4 644 foo
+6128c0f33108e8cfbb4e0824d13ae48b466d7280 644 quux
+everything should be clean now
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+4 files, 4 changesets, 10 total revisions
new file mode 100755
--- /dev/null
+++ b/tests/test-flags
@@ -0,0 +1,34 @@
+#!/bin/sh -e
+
+umask 027
+mkdir test1
+cd test1
+
+hg init
+touch a b
+hg add a b
+hg ci -m "added a b" -d "1000000 0"
+
+cd ..
+mkdir test2
+cd test2
+
+hg init
+hg pull ../test1
+hg co
+chmod +x a
+hg ci -m "chmod +x a" -d "1000000 0"
+
+cd ../test1
+echo 123 >>a
+hg ci -m "a updated" -d "1000000 0"
+
+hg pull ../test2
+hg heads
+hg history
+
+hg -v merge
+
+ls -l ../test[12]/a > foo
+cut -b 1-10 < foo
+
new file mode 100644
--- /dev/null
+++ b/tests/test-flags.out
@@ -0,0 +1,51 @@
+pulling from ../test1
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 2 changes to 2 files
+(run 'hg update' to get a working copy)
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pulling from ../test2
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+changeset: 2:b833d578451e
+tag: tip
+parent: 0:4536b1c2ca69
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: chmod +x a
+
+changeset: 1:a187cb361a5a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: a updated
+
+changeset: 2:b833d578451e
+tag: tip
+parent: 0:4536b1c2ca69
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: chmod +x a
+
+changeset: 1:a187cb361a5a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: a updated
+
+changeset: 0:4536b1c2ca69
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: added a b
+
+resolving manifests
+merging a
+resolving a
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+-rwxr-x---
+-rwxr-x---
new file mode 100755
--- /dev/null
+++ b/tests/test-globalopts
@@ -0,0 +1,72 @@
+#!/bin/sh
+
+hg init a
+cd a
+echo a > a
+hg ci -A -d'1 0' -m a
+
+cd ..
+
+hg init b
+cd b
+echo b > b
+hg ci -A -d'1 0' -m b
+
+cd ..
+
+hg clone a c
+cd c
+hg pull -f ../b
+HGMERGE=merge hg merge
+
+cd ..
+
+echo %% -R/--repository
+hg -R a tip
+hg --repository b tip
+
+echo %% abbrev of long option
+hg --repo c tip
+
+echo %% --cwd
+hg --cwd a parents
+
+echo %% -y/--noninteractive - just be sure it is parsed
+hg --cwd a tip -q --noninteractive
+hg --cwd a tip -q -y
+
+echo %% -q/--quiet
+hg -R a -q tip
+hg -R b -q tip
+hg -R c --quiet parents
+
+echo %% -v/--verbose
+hg --cwd c head -v
+hg --cwd b tip --verbose
+
+echo %% --config
+hg --cwd c --config paths.quuxfoo=bar paths | grep -q quuxfoo && echo quuxfoo
+hg --cwd c --config '' tip -q
+hg --cwd c --config a.b tip -q
+hg --cwd c --config a tip -q
+hg --cwd c --config a.= tip -q
+hg --cwd c --config .b= tip -q
+
+echo %% --debug
+hg --cwd c log --debug
+
+echo %% --traceback
+hg --cwd c --config x --traceback tip 2>&1 | grep -i 'traceback'
+
+echo %% --time
+hg --cwd a --time tip 2>&1 | grep '^Time:' | sed 's/[0-9][0-9]*/x/g'
+
+echo %% --version
+hg --version -q | sed 's/version \([a-f0-9+]*\|unknown\)/version xxx/'
+
+echo %% -h/--help
+hg -h
+hg --help
+
+echo %% not tested: --debugger
+
new file mode 100644
--- /dev/null
+++ b/tests/test-globalopts.out
@@ -0,0 +1,205 @@
+adding a
+adding b
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pulling from ../b
+searching for changes
+warning: repository is unrelated
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+%% -R/--repository
+changeset: 0:8580ff50825a
+tag: tip
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+summary: a
+
+changeset: 0:b6c483daf290
+tag: tip
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+summary: b
+
+%% abbrev of long option
+changeset: 1:b6c483daf290
+tag: tip
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+summary: b
+
+%% --cwd
+changeset: 0:8580ff50825a
+tag: tip
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+summary: a
+
+%% -y/--noninteractive - just be sure it is parsed
+0:8580ff50825a
+0:8580ff50825a
+%% -q/--quiet
+0:8580ff50825a
+0:b6c483daf290
+0:8580ff50825a
+1:b6c483daf290
+%% -v/--verbose
+changeset: 1:b6c483daf2907ce5825c0bb50f5716226281cc1a
+tag: tip
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+files: b
+description:
+b
+
+
+changeset: 0:8580ff50825a50c8f716709acdf8de0deddcd6ab
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+files: a
+description:
+a
+
+
+changeset: 0:b6c483daf2907ce5825c0bb50f5716226281cc1a
+tag: tip
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+files: b
+description:
+b
+
+
+%% --config
+quuxfoo
+abort: malformed --config option:
+abort: malformed --config option: a.b
+abort: malformed --config option: a
+abort: malformed --config option: a.=
+abort: malformed --config option: .b=
+%% --debug
+changeset: 1:b6c483daf2907ce5825c0bb50f5716226281cc1a
+tag: tip
+parent: -1:0000000000000000000000000000000000000000
+parent: -1:0000000000000000000000000000000000000000
+manifest: 1:23226e7a252cacdc2d99e4fbdc3653441056de49
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+files+: b
+description:
+b
+
+
+changeset: 0:8580ff50825a50c8f716709acdf8de0deddcd6ab
+parent: -1:0000000000000000000000000000000000000000
+parent: -1:0000000000000000000000000000000000000000
+manifest: 0:a0c8bcbbb45c63b90b70ad007bf38961f64f2af0
+user: test
+date: Thu Jan 01 00:00:01 1970 +0000
+files+: a
+description:
+a
+
+
+%% --traceback
+%% --time
+Time: real x.x secs (user x.x+x.x sys x.x+x.x)
+%% --version
+Mercurial Distributed SCM (version xxx)
+%% -h/--help
+Mercurial Distributed SCM
+
+list of commands (use "hg help -v" to show aliases and global options):
+
+ add add the specified files on the next commit
+ annotate show changeset information per file line
+ archive create unversioned archive of a repository revision
+ backout reverse effect of earlier changeset
+ bundle create a changegroup file
+ cat output the latest or given revisions of files
+ clone make a copy of an existing repository
+ commit commit the specified files or all outstanding changes
+ copy mark files as copied for the next commit
+ diff diff repository (or selected files)
+ export dump the header and diffs for one or more changesets
+ grep search for a pattern in specified files and revisions
+ heads show current repository heads
+ help show help for a given command or all commands
+ identify print information about the working copy
+ import import an ordered set of patches
+ incoming show new changesets found in source
+ init create a new repository in the given directory
+ locate locate files matching specific patterns
+ log show revision history of entire repository or files
+ manifest output the latest or given revision of the project manifest
+ merge Merge working directory with another revision
+ outgoing show changesets not found in destination
+ parents show the parents of the working dir or revision
+ paths show definition of symbolic path names
+ pull pull changes from the specified source
+ push push changes to the specified destination
+ recover roll back an interrupted transaction
+ remove remove the specified files on the next commit
+ rename rename files; equivalent of copy + remove
+ revert revert files or dirs to their states as of some revision
+ rollback roll back the last transaction in this repository
+ root print the root (top) of the current working dir
+ serve export the repository via HTTP
+ status show changed files in the working directory
+ tag add a tag for the current tip or a given revision
+ tags list repository tags
+ tip show the tip revision
+ unbundle apply a changegroup file
+ update update or merge working directory
+ verify verify the integrity of the repository
+ version output version and copyright information
+Mercurial Distributed SCM
+
+list of commands (use "hg help -v" to show aliases and global options):
+
+ add add the specified files on the next commit
+ annotate show changeset information per file line
+ archive create unversioned archive of a repository revision
+ backout reverse effect of earlier changeset
+ bundle create a changegroup file
+ cat output the latest or given revisions of files
+ clone make a copy of an existing repository
+ commit commit the specified files or all outstanding changes
+ copy mark files as copied for the next commit
+ diff diff repository (or selected files)
+ export dump the header and diffs for one or more changesets
+ grep search for a pattern in specified files and revisions
+ heads show current repository heads
+ help show help for a given command or all commands
+ identify print information about the working copy
+ import import an ordered set of patches
+ incoming show new changesets found in source
+ init create a new repository in the given directory
+ locate locate files matching specific patterns
+ log show revision history of entire repository or files
+ manifest output the latest or given revision of the project manifest
+ merge Merge working directory with another revision
+ outgoing show changesets not found in destination
+ parents show the parents of the working dir or revision
+ paths show definition of symbolic path names
+ pull pull changes from the specified source
+ push push changes to the specified destination
+ recover roll back an interrupted transaction
+ remove remove the specified files on the next commit
+ rename rename files; equivalent of copy + remove
+ revert revert files or dirs to their states as of some revision
+ rollback roll back the last transaction in this repository
+ root print the root (top) of the current working dir
+ serve export the repository via HTTP
+ status show changed files in the working directory
+ tag add a tag for the current tip or a given revision
+ tags list repository tags
+ tip show the tip revision
+ unbundle apply a changegroup file
+ update update or merge working directory
+ verify verify the integrity of the repository
+ version output version and copyright information
+%% not tested: --debugger
new file mode 100755
--- /dev/null
+++ b/tests/test-grep
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+echo import > port
+hg add port
+hg commit -m 0 -u spam -d '0 0'
+echo export >> port
+hg commit -m 1 -u eggs -d '1 0'
+echo export > port
+echo vaportight >> port
+echo 'import/export' >> port
+hg commit -m 2 -u spam -d '2 0'
+echo 'import/export' >> port
+hg commit -m 3 -u eggs -d '3 0'
+head -n 3 port > port1
+mv port1 port
+hg commit -m 4 -u spam -d '4 0'
+hg grep port port
+echo 'FIXME: history is wrong here'
+hg grep --all -nu port port
+hg grep import port
new file mode 100644
--- /dev/null
+++ b/tests/test-grep.out
@@ -0,0 +1,10 @@
+port:4:export
+port:4:vaportight
+port:4:import/export
+FIXME: history is wrong here
+port:1:1:-:eggs:import
+port:1:2:+:eggs:vaportight
+port:1:3:+:eggs:import/export
+port:0:2:+:spam:export
+port:0:1:+:spam:import
+port:4:import/export
new file mode 100755
--- /dev/null
+++ b/tests/test-help
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+hg
+hg -q
+hg help
+hg -q help
+hg add -h
+hg add --skjdfks
+hg help diff
+hg help status
+hg -q help status
+hg help foo
+hg skjdfks
+
+exit 0
new file mode 100644
--- /dev/null
+++ b/tests/test-help.out
@@ -0,0 +1,253 @@
+Mercurial Distributed SCM
+
+basic commands (use "hg help" for the full list or option "-v" for details):
+
+ add add the specified files on the next commit
+ annotate show changeset information per file line
+ clone make a copy of an existing repository
+ commit commit the specified files or all outstanding changes
+ diff diff repository (or selected files)
+ export dump the header and diffs for one or more changesets
+ init create a new repository in the given directory
+ log show revision history of entire repository or files
+ parents show the parents of the working dir or revision
+ pull pull changes from the specified source
+ push push changes to the specified destination
+ remove remove the specified files on the next commit
+ revert revert files or dirs to their states as of some revision
+ serve export the repository via HTTP
+ status show changed files in the working directory
+ update update or merge working directory
+ add add the specified files on the next commit
+ annotate show changeset information per file line
+ clone make a copy of an existing repository
+ commit commit the specified files or all outstanding changes
+ diff diff repository (or selected files)
+ export dump the header and diffs for one or more changesets
+ init create a new repository in the given directory
+ log show revision history of entire repository or files
+ parents show the parents of the working dir or revision
+ pull pull changes from the specified source
+ push push changes to the specified destination
+ remove remove the specified files on the next commit
+ revert revert files or dirs to their states as of some revision
+ serve export the repository via HTTP
+ status show changed files in the working directory
+ update update or merge working directory
+Mercurial Distributed SCM
+
+list of commands (use "hg help -v" to show aliases and global options):
+
+ add add the specified files on the next commit
+ annotate show changeset information per file line
+ archive create unversioned archive of a repository revision
+ backout reverse effect of earlier changeset
+ bundle create a changegroup file
+ cat output the latest or given revisions of files
+ clone make a copy of an existing repository
+ commit commit the specified files or all outstanding changes
+ copy mark files as copied for the next commit
+ diff diff repository (or selected files)
+ export dump the header and diffs for one or more changesets
+ grep search for a pattern in specified files and revisions
+ heads show current repository heads
+ help show help for a given command or all commands
+ identify print information about the working copy
+ import import an ordered set of patches
+ incoming show new changesets found in source
+ init create a new repository in the given directory
+ locate locate files matching specific patterns
+ log show revision history of entire repository or files
+ manifest output the latest or given revision of the project manifest
+ merge Merge working directory with another revision
+ outgoing show changesets not found in destination
+ parents show the parents of the working dir or revision
+ paths show definition of symbolic path names
+ pull pull changes from the specified source
+ push push changes to the specified destination
+ recover roll back an interrupted transaction
+ remove remove the specified files on the next commit
+ rename rename files; equivalent of copy + remove
+ revert revert files or dirs to their states as of some revision
+ rollback roll back the last transaction in this repository
+ root print the root (top) of the current working dir
+ serve export the repository via HTTP
+ status show changed files in the working directory
+ tag add a tag for the current tip or a given revision
+ tags list repository tags
+ tip show the tip revision
+ unbundle apply a changegroup file
+ update update or merge working directory
+ verify verify the integrity of the repository
+ version output version and copyright information
+ add add the specified files on the next commit
+ annotate show changeset information per file line
+ archive create unversioned archive of a repository revision
+ backout reverse effect of earlier changeset
+ bundle create a changegroup file
+ cat output the latest or given revisions of files
+ clone make a copy of an existing repository
+ commit commit the specified files or all outstanding changes
+ copy mark files as copied for the next commit
+ diff diff repository (or selected files)
+ export dump the header and diffs for one or more changesets
+ grep search for a pattern in specified files and revisions
+ heads show current repository heads
+ help show help for a given command or all commands
+ identify print information about the working copy
+ import import an ordered set of patches
+ incoming show new changesets found in source
+ init create a new repository in the given directory
+ locate locate files matching specific patterns
+ log show revision history of entire repository or files
+ manifest output the latest or given revision of the project manifest
+ merge Merge working directory with another revision
+ outgoing show changesets not found in destination
+ parents show the parents of the working dir or revision
+ paths show definition of symbolic path names
+ pull pull changes from the specified source
+ push push changes to the specified destination
+ recover roll back an interrupted transaction
+ remove remove the specified files on the next commit
+ rename rename files; equivalent of copy + remove
+ revert revert files or dirs to their states as of some revision
+ rollback roll back the last transaction in this repository
+ root print the root (top) of the current working dir
+ serve export the repository via HTTP
+ status show changed files in the working directory
+ tag add a tag for the current tip or a given revision
+ tags list repository tags
+ tip show the tip revision
+ unbundle apply a changegroup file
+ update update or merge working directory
+ verify verify the integrity of the repository
+ version output version and copyright information
+hg add [OPTION]... [FILE]...
+
+add the specified files on the next commit
+
+ Schedule files to be version controlled and added to the repository.
+
+ The files will be added to the repository at the next commit.
+
+ If no names are given, add all files in the repository.
+
+options:
+
+ -I --include include names matching the given patterns
+ -X --exclude exclude names matching the given patterns
+hg add: option --skjdfks not recognized
+hg add [OPTION]... [FILE]...
+
+add the specified files on the next commit
+
+ Schedule files to be version controlled and added to the repository.
+
+ The files will be added to the repository at the next commit.
+
+ If no names are given, add all files in the repository.
+
+options:
+
+ -I --include include names matching the given patterns
+ -X --exclude exclude names matching the given patterns
+hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...
+
+diff repository (or selected files)
+
+ Show differences between revisions for the specified files.
+
+ Differences between files are shown using the unified diff format.
+
+ When two revision arguments are given, then changes are shown
+ between those revisions. If only one revision is specified then
+ that revision is compared to the working directory, and, when no
+ revisions are specified, the working directory files are compared
+ to its parent.
+
+ Without the -a option, diff will avoid generating diffs of files
+ it detects as binary. With -a, diff will generate a diff anyway,
+ probably with undesirable results.
+
+options:
+
+ -r --rev revision
+ -a --text treat all files as text
+ -p --show-function show which function each change is in
+ -w --ignore-all-space ignore white space when comparing lines
+ -I --include include names matching the given patterns
+ -X --exclude exclude names matching the given patterns
+hg status [OPTION]... [FILE]...
+
+show changed files in the working directory
+
+ Show changed files in the repository. If names are
+ given, only files that match are shown.
+
+ The codes used to show the status of files are:
+ M = modified
+ A = added
+ R = removed
+ ! = deleted, but still tracked
+ ? = not tracked
+ I = ignored (not shown by default)
+
+aliases: st
+
+options:
+
+ -m --modified show only modified files
+ -a --added show only added files
+ -r --removed show only removed files
+ -d --deleted show only deleted (but tracked) files
+ -u --unknown show only unknown (not tracked) files
+ -i --ignored show ignored files
+ -n --no-status hide status prefix
+ -0 --print0 end filenames with NUL, for use with xargs
+ -I --include include names matching the given patterns
+ -X --exclude exclude names matching the given patterns
+hg status [OPTION]... [FILE]...
+
+show changed files in the working directory
+hg: unknown command 'foo'
+Mercurial Distributed SCM
+
+basic commands (use "hg help" for the full list or option "-v" for details):
+
+ add add the specified files on the next commit
+ annotate show changeset information per file line
+ clone make a copy of an existing repository
+ commit commit the specified files or all outstanding changes
+ diff diff repository (or selected files)
+ export dump the header and diffs for one or more changesets
+ init create a new repository in the given directory
+ log show revision history of entire repository or files
+ parents show the parents of the working dir or revision
+ pull pull changes from the specified source
+ push push changes to the specified destination
+ remove remove the specified files on the next commit
+ revert revert files or dirs to their states as of some revision
+ serve export the repository via HTTP
+ status show changed files in the working directory
+ update update or merge working directory
+hg: unknown command 'skjdfks'
+Mercurial Distributed SCM
+
+basic commands (use "hg help" for the full list or option "-v" for details):
+
+ add add the specified files on the next commit
+ annotate show changeset information per file line
+ clone make a copy of an existing repository
+ commit commit the specified files or all outstanding changes
+ diff diff repository (or selected files)
+ export dump the header and diffs for one or more changesets
+ init create a new repository in the given directory
+ log show revision history of entire repository or files
+ parents show the parents of the working dir or revision
+ pull pull changes from the specified source
+ push push changes to the specified destination
+ remove remove the specified files on the next commit
+ revert revert files or dirs to their states as of some revision
+ serve export the repository via HTTP
+ status show changed files in the working directory
+ update update or merge working directory
new file mode 100755
--- /dev/null
+++ b/tests/test-hgignore
@@ -0,0 +1,45 @@
+#!/bin/sh
+
+hg init
+touch a.o
+touch a.c
+touch syntax
+mkdir dir
+touch dir/a.o
+touch dir/b.o
+touch dir/c.o
+
+hg add dir/a.o
+hg commit -m 0
+hg add dir/b.o
+
+echo "--" ; hg status
+
+echo "*.o" > .hgignore
+echo "--" ; hg status 2>&1 | sed -e 's/abort: .*\.hgignore:/abort: .hgignore:/'
+
+echo ".*\.o" > .hgignore
+echo "--" ; hg status
+
+# XXX: broken
+#echo "glob:**.o" > .hgignore
+#echo "--" ; hg status
+#
+#echo "glob:*.o" > .hgignore
+#echo "--" ; hg status
+
+echo "syntax: invalid" > .hgignore
+echo "--" ; hg status 2>&1 | sed -e 's/.*\.hgignore:/.hgignore:/'
+
+echo "syntax: glob" > .hgignore
+echo "*.o" >> .hgignore
+echo "--" ; hg status
+
+echo "relglob:syntax*" > .hgignore
+echo "--" ; hg status
+
+echo "relglob:*" > .hgignore
+echo "--" ; hg status
+
+cd dir
+echo "--" ; hg status .
new file mode 100644
--- /dev/null
+++ b/tests/test-hgignore.out
@@ -0,0 +1,36 @@
+--
+A dir/b.o
+? a.c
+? a.o
+? dir/c.o
+? syntax
+--
+abort: .hgignore: invalid pattern (relre): *.o
+--
+A dir/b.o
+? .hgignore
+? a.c
+? syntax
+--
+.hgignore: ignoring invalid syntax 'invalid'
+A dir/b.o
+? .hgignore
+? a.c
+? a.o
+? dir/c.o
+? syntax
+--
+A dir/b.o
+? .hgignore
+? a.c
+? syntax
+--
+A dir/b.o
+? .hgignore
+? a.c
+? a.o
+? dir/c.o
+--
+A dir/b.o
+--
+A b.o
new file mode 100755
--- /dev/null
+++ b/tests/test-hgrc
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+echo "invalid" > .hg/hgrc
+hg status 2>&1 |sed -e "s:/.*\(/t/.*\):...\1:"
new file mode 100644
--- /dev/null
+++ b/tests/test-hgrc.out
@@ -0,0 +1,4 @@
+abort: Failed to parse .../t/.hg/hgrc
+File contains no section headers.
+file: .../t/.hg/hgrc, line: 1
+'invalid\n'
new file mode 100755
--- /dev/null
+++ b/tests/test-hook
@@ -0,0 +1,186 @@
+#!/bin/sh
+
+# commit hooks can see env vars
+hg init a
+cd a
+echo "[hooks]" > .hg/hgrc
+echo 'commit = echo commit hook: n=$HG_NODE p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
+echo 'commit.b = echo commit hook b' >> .hg/hgrc
+echo 'precommit = echo precommit hook: p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
+echo 'pretxncommit = echo pretxncommit hook: n=$HG_NODE p1=$HG_PARENT1 p2=$HG_PARENT2; hg -q tip' >> .hg/hgrc
+echo a > a
+hg add a
+hg commit -m a -d "1000000 0"
+
+hg clone . ../b
+cd ../b
+
+# changegroup hooks can see env vars
+echo '[hooks]' > .hg/hgrc
+echo 'prechangegroup = echo prechangegroup hook' >> .hg/hgrc
+echo 'changegroup = echo changegroup hook: n=$HG_NODE' >> .hg/hgrc
+echo 'incoming = echo incoming hook: n=$HG_NODE' >> .hg/hgrc
+
+# pretxncommit and commit hooks can see both parents of merge
+cd ../a
+echo b >> a
+hg commit -m a1 -d "1 0"
+hg update -C 0
+echo b > b
+hg add b
+hg commit -m b -d '1 0'
+hg merge 1
+hg commit -m merge -d '2 0'
+
+cd ../b
+hg pull ../a
+
+# tag hooks can see env vars
+cd ../a
+echo 'pretag = echo pretag hook: t=$HG_TAG n=$HG_NODE l=$HG_LOCAL' >> .hg/hgrc
+echo 'tag = echo tag hook: t=$HG_TAG n=$HG_NODE l=$HG_LOCAL' >> .hg/hgrc
+hg tag -d '3 0' a
+hg tag -l la
+
+# pretag hook can forbid tagging
+echo 'pretag.forbid = echo pretag.forbid hook; exit 1' >> .hg/hgrc
+hg tag -d '4 0' fa
+hg tag -l fla
+
+# pretxncommit hook can see changeset, can roll back txn, changeset
+# no more there after
+echo 'pretxncommit.forbid = echo pretxncommit.forbid hook: tip=`hg -q tip`; exit 1' >> .hg/hgrc
+echo z > z
+hg add z
+hg -q tip
+hg commit -m 'fail' -d '4 0'
+hg -q tip
+
+# precommit hook can prevent commit
+echo 'precommit.forbid = echo precommit.forbid hook; exit 1' >> .hg/hgrc
+hg commit -m 'fail' -d '4 0'
+hg -q tip
+
+# preupdate hook can prevent update
+echo 'preupdate = echo preupdate hook: p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
+hg update 1
+
+# update hook
+echo 'update = echo update hook: p1=$HG_PARENT1 p2=$HG_PARENT2 err=$HG_ERROR' >> .hg/hgrc
+hg update
+
+# prechangegroup hook can prevent incoming changes
+cd ../b
+hg -q tip
+echo '[hooks]' > .hg/hgrc
+echo 'prechangegroup.forbid = echo prechangegroup.forbid hook; exit 1' >> .hg/hgrc
+hg pull ../a
+
+# pretxnchangegroup hook can see incoming changes, can roll back txn,
+# incoming changes no longer there after
+echo '[hooks]' > .hg/hgrc
+echo 'pretxnchangegroup.forbid = echo pretxnchangegroup.forbid hook: tip=`hg -q tip`; exit 1' >> .hg/hgrc
+hg pull ../a
+hg -q tip
+
+# outgoing hooks can see env vars
+rm .hg/hgrc
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing = echo preoutgoing hook: s=$HG_SOURCE' >> ../a/.hg/hgrc
+echo 'outgoing = echo outgoing hook: n=$HG_NODE s=$HG_SOURCE' >> ../a/.hg/hgrc
+hg pull ../a
+hg rollback
+
+# preoutgoing hook can prevent outgoing changes
+echo 'preoutgoing.forbid = echo preoutgoing.forbid hook; exit 1' >> ../a/.hg/hgrc
+hg pull ../a
+
+cat > hooktests.py <<EOF
+from mercurial import util
+
+uncallable = 0
+
+def printargs(args):
+ args.pop('ui', None)
+ args.pop('repo', None)
+ a = list(args.items())
+ a.sort()
+ print 'hook args:'
+ for k, v in a:
+ print ' ', k, v
+
+def passhook(**args):
+ printargs(args)
+
+def failhook(**args):
+ printargs(args)
+ return True
+
+class LocalException(Exception):
+ pass
+
+def raisehook(**args):
+ raise LocalException('exception from hook')
+
+def aborthook(**args):
+ raise util.Abort('raise abort from hook')
+
+def brokenhook(**args):
+ return 1 + {}
+
+class container:
+ unreachable = 1
+EOF
+
+echo '# test python hooks'
+PYTHONPATH="`pwd`:$PYTHONPATH"
+export PYTHONPATH
+
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
+hg pull ../a 2>&1 | grep 'raised an exception'
+
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
+hg pull ../a 2>&1 | grep 'raised an exception'
+
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
+hg pull ../a
+
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
+hg pull ../a
+
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
+hg pull ../a
+
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
+hg pull ../a
+
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
+hg pull ../a
+
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
+hg pull ../a
+
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
+hg pull ../a
+
+echo '[hooks]' > ../a/.hg/hgrc
+echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
+hg pull ../a
+
+echo '# make sure --traceback works'
+echo '[hooks]' > .hg/hgrc
+echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
+
+echo a >> a
+hg --traceback commit -A -m a 2>&1 | grep '^Traceback'
+
+exit 0
new file mode 100644
--- /dev/null
+++ b/tests/test-hook.out
@@ -0,0 +1,140 @@
+precommit hook: p1=0000000000000000000000000000000000000000 p2=
+pretxncommit hook: n=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p1=0000000000000000000000000000000000000000 p2=
+0:29b62aeb769f
+commit hook: n=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p1=0000000000000000000000000000000000000000 p2=
+commit hook b
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+precommit hook: p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
+pretxncommit hook: n=b702efe9688826e3a91283852b328b84dbf37bc2 p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
+1:b702efe96888
+commit hook: n=b702efe9688826e3a91283852b328b84dbf37bc2 p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
+commit hook b
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+precommit hook: p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
+pretxncommit hook: n=1324a5531bac09b329c3845d35ae6a7526874edb p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
+2:1324a5531bac
+commit hook: n=1324a5531bac09b329c3845d35ae6a7526874edb p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
+commit hook b
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+precommit hook: p1=1324a5531bac09b329c3845d35ae6a7526874edb p2=b702efe9688826e3a91283852b328b84dbf37bc2
+pretxncommit hook: n=4c52fb2e402287dd5dc052090682536c8406c321 p1=1324a5531bac09b329c3845d35ae6a7526874edb p2=b702efe9688826e3a91283852b328b84dbf37bc2
+3:4c52fb2e4022
+commit hook: n=4c52fb2e402287dd5dc052090682536c8406c321 p1=1324a5531bac09b329c3845d35ae6a7526874edb p2=b702efe9688826e3a91283852b328b84dbf37bc2
+commit hook b
+prechangegroup hook
+changegroup hook: n=b702efe9688826e3a91283852b328b84dbf37bc2
+incoming hook: n=b702efe9688826e3a91283852b328b84dbf37bc2
+incoming hook: n=1324a5531bac09b329c3845d35ae6a7526874edb
+incoming hook: n=4c52fb2e402287dd5dc052090682536c8406c321
+pulling from ../a
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 2 changes to 2 files
+(run 'hg update' to get a working copy)
+pretag hook: t=a n=4c52fb2e402287dd5dc052090682536c8406c321 l=0
+precommit hook: p1=4c52fb2e402287dd5dc052090682536c8406c321 p2=
+pretxncommit hook: n=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 p1=4c52fb2e402287dd5dc052090682536c8406c321 p2=
+4:4f92e785b90a
+commit hook: n=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 p1=4c52fb2e402287dd5dc052090682536c8406c321 p2=
+commit hook b
+tag hook: t=a n=4c52fb2e402287dd5dc052090682536c8406c321 l=0
+pretag hook: t=la n=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 l=1
+tag hook: t=la n=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 l=1
+pretag hook: t=fa n=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 l=0
+pretag.forbid hook
+abort: pretag.forbid hook exited with status 1
+pretag hook: t=fla n=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 l=1
+pretag.forbid hook
+abort: pretag.forbid hook exited with status 1
+4:4f92e785b90a
+precommit hook: p1=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 p2=
+pretxncommit hook: n=7792358308a2026661cea44f9d47c072813004cb p1=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 p2=
+5:7792358308a2
+pretxncommit.forbid hook: tip=5:7792358308a2
+abort: pretxncommit.forbid hook exited with status 1
+transaction abort!
+rollback completed
+4:4f92e785b90a
+precommit hook: p1=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 p2=
+precommit.forbid hook
+abort: precommit.forbid hook exited with status 1
+4:4f92e785b90a
+preupdate hook: p1=b702efe9688826e3a91283852b328b84dbf37bc2 p2=
+0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+preupdate hook: p1=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 p2=
+update hook: p1=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 p2= err=0
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+3:4c52fb2e4022
+prechangegroup.forbid hook
+pulling from ../a
+searching for changes
+abort: prechangegroup.forbid hook exited with status 1
+pretxnchangegroup.forbid hook: tip=4:4f92e785b90a
+pulling from ../a
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+abort: pretxnchangegroup.forbid hook exited with status 1
+transaction abort!
+rollback completed
+3:4c52fb2e4022
+preoutgoing hook: s=pull
+outgoing hook: n=4f92e785b90ae8995dfe156e39dd4fbc3b346a24 s=pull
+pulling from ../a
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+(run 'hg update' to get a working copy)
+rolling back last transaction
+preoutgoing hook: s=pull
+preoutgoing.forbid hook
+pulling from ../a
+searching for changes
+abort: preoutgoing.forbid hook exited with status 1
+# test python hooks
+error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
+error: preoutgoing.raise hook raised an exception: exception from hook
+pulling from ../a
+searching for changes
+error: preoutgoing.abort hook failed: raise abort from hook
+abort: raise abort from hook
+pulling from ../a
+searching for changes
+hook args:
+ hooktype preoutgoing
+ source pull
+abort: preoutgoing.fail hook failed
+pulling from ../a
+searching for changes
+abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
+pulling from ../a
+searching for changes
+abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
+pulling from ../a
+searching for changes
+abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
+pulling from ../a
+searching for changes
+abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
+pulling from ../a
+searching for changes
+abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
+pulling from ../a
+searching for changes
+hook args:
+ hooktype preoutgoing
+ source pull
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+(run 'hg update' to get a working copy)
+# make sure --traceback works
+Traceback (most recent call last):
new file mode 100755
--- /dev/null
+++ b/tests/test-http-proxy
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+hg init a
+cd a
+echo a > a
+hg ci -Ama -d '1123456789 0'
+hg serve -p 20059 -d --pid-file=hg.pid
+
+cd ..
+("$TESTDIR/tinyproxy.py" 20060 localhost >/dev/null 2>&1 </dev/null &
+echo $! > proxy.pid)
+sleep 2
+
+echo %% url for proxy
+http_proxy=http://localhost:20060/ hg --config http_proxy.always=True clone http://localhost:20059/ b
+
+echo %% host:port for proxy
+http_proxy=localhost:20060 hg clone --config http_proxy.always=True http://localhost:20059/ c
+
+echo %% proxy url with user name and password
+http_proxy=http://user:passwd@localhost:20060 hg clone --config http_proxy.always=True http://localhost:20059/ d
+
+echo %% url with user name and password
+http_proxy=http://user:passwd@localhost:20060 hg clone --config http_proxy.always=True http://user:passwd@localhost:20059/ e
+
+echo %% bad host:port for proxy
+http_proxy=localhost:20061 hg clone --config http_proxy.always=True http://localhost:20059/ f
+
+kill $(cat proxy.pid a/hg.pid)
+exit 0
new file mode 100644
--- /dev/null
+++ b/tests/test-http-proxy.out
@@ -0,0 +1,31 @@
+adding a
+%% url for proxy
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+%% host:port for proxy
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+%% proxy url with user name and password
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+%% url with user name and password
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+%% bad host:port for proxy
+abort: error: Connection refused
new file mode 100755
--- /dev/null
+++ b/tests/test-hup
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+hg init
+mkfifo p
+
+hg serve --stdio < p &
+P=$!
+(echo lock; echo addchangegroup; sleep 5) > p &
+Q=$!
+sleep 1
+kill -HUP $P
+wait
+ls .hg
+
+
+
new file mode 100644
--- /dev/null
+++ b/tests/test-hup.out
@@ -0,0 +1,9 @@
+0
+0
+adding changesets
+killed!
+transaction abort!
+rollback completed
+00changelog.i
+data
+journal.dirstate
new file mode 100755
--- /dev/null
+++ b/tests/test-incoming-outgoing
@@ -0,0 +1,46 @@
+#!/bin/sh
+
+mkdir test
+cd test
+hg init
+for i in 0 1 2 3 4 5 6 7 8; do
+ echo $i >> foo
+ hg commit -A -m $i -d "1000000 0"
+done
+hg verify
+hg serve -p 20059 -d --pid-file=hg.pid
+cd ..
+
+hg init new
+# http incoming
+http_proxy= hg -R new incoming http://localhost:20059/
+# local incoming
+hg -R new incoming test
+
+# test with --bundle
+http_proxy= hg -R new incoming --bundle test.hg http://localhost:20059/
+hg -R new incoming --bundle test2.hg test
+
+# test the resulting bundles
+hg init temp
+hg init temp2
+hg -R temp unbundle test.hg
+hg -R temp2 unbundle test2.hg
+hg -R temp tip
+hg -R temp2 tip
+
+rm -rf temp temp2 new
+
+# test outgoing
+hg clone test test-dev
+cd test-dev
+for i in 9 10 11 12 13; do
+ echo $i >> foo
+ hg commit -A -m $i -d "1000000 0"
+done
+hg verify
+cd ..
+hg -R test-dev outgoing test
+http_proxy= hg -R test-dev outgoing http://localhost:20059/
+
+kill `cat test/hg.pid`
new file mode 100644
--- /dev/null
+++ b/tests/test-incoming-outgoing.out
@@ -0,0 +1,272 @@
+adding foo
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 9 changesets, 9 total revisions
+changeset: 0:9cb21d99fe27
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0
+
+changeset: 1:d717f5dfad6a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1
+
+changeset: 2:c0d6b86da426
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+changeset: 3:dfacbd43b3fe
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 3
+
+changeset: 4:1f3a964b6022
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 4
+
+changeset: 5:c028bcc7a28a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 5
+
+changeset: 6:a0c0095f3389
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 6
+
+changeset: 7:d4be65f4e891
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 7
+
+changeset: 8:92b83e334ef8
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 8
+
+changeset: 0:9cb21d99fe27
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0
+
+changeset: 1:d717f5dfad6a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1
+
+changeset: 2:c0d6b86da426
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+changeset: 3:dfacbd43b3fe
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 3
+
+changeset: 4:1f3a964b6022
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 4
+
+changeset: 5:c028bcc7a28a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 5
+
+changeset: 6:a0c0095f3389
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 6
+
+changeset: 7:d4be65f4e891
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 7
+
+changeset: 8:92b83e334ef8
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 8
+
+changeset: 0:9cb21d99fe27
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0
+
+changeset: 1:d717f5dfad6a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1
+
+changeset: 2:c0d6b86da426
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+changeset: 3:dfacbd43b3fe
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 3
+
+changeset: 4:1f3a964b6022
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 4
+
+changeset: 5:c028bcc7a28a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 5
+
+changeset: 6:a0c0095f3389
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 6
+
+changeset: 7:d4be65f4e891
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 7
+
+changeset: 8:92b83e334ef8
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 8
+
+changeset: 0:9cb21d99fe27
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 0
+
+changeset: 1:d717f5dfad6a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1
+
+changeset: 2:c0d6b86da426
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+changeset: 3:dfacbd43b3fe
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 3
+
+changeset: 4:1f3a964b6022
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 4
+
+changeset: 5:c028bcc7a28a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 5
+
+changeset: 6:a0c0095f3389
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 6
+
+changeset: 7:d4be65f4e891
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 7
+
+changeset: 8:92b83e334ef8
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 8
+
+adding changesets
+adding manifests
+adding file changes
+added 9 changesets with 9 changes to 1 files
+(run 'hg update' to get a working copy)
+adding changesets
+adding manifests
+adding file changes
+added 9 changesets with 9 changes to 1 files
+(run 'hg update' to get a working copy)
+changeset: 8:92b83e334ef8
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 8
+
+changeset: 8:92b83e334ef8
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 8
+
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 14 changesets, 14 total revisions
+searching for changes
+changeset: 9:3741c3ad1096
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 9
+
+changeset: 10:de4143c8d9a5
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 10
+
+changeset: 11:0e1c188b9a7a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 11
+
+changeset: 12:251354d0fdd3
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 12
+
+changeset: 13:bdaadd969642
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 13
+
+searching for changes
+changeset: 9:3741c3ad1096
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 9
+
+changeset: 10:de4143c8d9a5
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 10
+
+changeset: 11:0e1c188b9a7a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 11
+
+changeset: 12:251354d0fdd3
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 12
+
+changeset: 13:bdaadd969642
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 13
+
new file mode 100755
--- /dev/null
+++ b/tests/test-locate
@@ -0,0 +1,20 @@
+#!/bin/sh
+#
+mkdir t
+cd t
+hg init
+echo 0 > a
+echo 0 > b
+hg ci -A -m m -d "1000000 0"
+touch nottracked
+hg locate a
+hg locate NONEXISTENT
+hg locate
+hg rm a
+hg ci -m m -d "1000000 0"
+hg locate a
+hg locate NONEXISTENT
+hg locate
+hg locate -r 0 a
+hg locate -r 0 NONEXISTENT
+hg locate -r 0
new file mode 100644
--- /dev/null
+++ b/tests/test-locate.out
@@ -0,0 +1,13 @@
+adding a
+adding b
+a
+NONEXISTENT: No such file or directory
+a
+b
+a: No such file or directory
+NONEXISTENT: No such file or directory
+b
+a
+NONEXISTENT: No such file in rev 14467d15ef43
+a
+b
new file mode 100755
--- /dev/null
+++ b/tests/test-lock-badness
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+hg init a
+echo a > a/a
+hg --cwd a ci -A -m a
+hg clone a b
+echo b > b/b
+hg --cwd b ci -A -m b
+chmod 100 a/.hg
+hg --cwd b push ../a
+chmod 700 a/.hg
new file mode 100644
--- /dev/null
+++ b/tests/test-lock-badness.out
@@ -0,0 +1,5 @@
+adding a
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+adding b
+pushing to ../a
+abort: could not lock repository ../a: Permission denied
new file mode 100755
--- /dev/null
+++ b/tests/test-merge-revert
@@ -0,0 +1,45 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+echo "added file1" > file1
+echo "added file2" > file2
+hg add file1 file2
+hg commit -m "added file1 and file2" -d "1000000 0" -u user
+echo "changed file1" >> file1
+hg commit -m "changed file1" -d "1000000 0" -u user
+hg -q log
+hg id
+hg update -C 0
+hg id
+echo "changed file1" >> file1
+hg id
+hg revert
+hg diff
+hg status
+hg id
+hg update
+hg diff
+hg status
+hg id
+hg update -C 0
+echo "changed file1" >> file1
+HGMERGE=merge hg update
+hg diff
+hg status
+hg id
+hg revert
+hg diff
+hg status
+hg id
+hg revert -r tip
+hg diff
+hg status
+hg id
+hg update -C
+hg diff
+hg status
+hg id
+cd ..; /bin/rm -rf t
+
new file mode 100644
--- /dev/null
+++ b/tests/test-merge-revert.out
@@ -0,0 +1,24 @@
+1:016807e6fdaf
+0:eb43f19ff115
+016807e6fdaf tip
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+eb43f19ff115
+eb43f19ff115+
+reverting file1
+? file1.orig
+eb43f19ff115
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+? file1.orig
+016807e6fdaf tip
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+merging file1
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+? file1.orig
+016807e6fdaf tip
+? file1.orig
+016807e6fdaf tip
+? file1.orig
+016807e6fdaf tip
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+? file1.orig
+016807e6fdaf tip
new file mode 100755
--- /dev/null
+++ b/tests/test-merge-revert2
@@ -0,0 +1,47 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+echo "added file1" > file1
+echo "another line of text" >> file1
+echo "added file2" > file2
+hg add file1 file2
+hg commit -m "added file1 and file2" -d "1000000 0" -u user
+echo "changed file1" >> file1
+hg commit -m "changed file1" -d "1000000 0" -u user
+hg -q log
+hg id
+hg update -C 0
+hg id
+echo "changed file1" >> file1
+hg id
+hg revert --no-backup
+hg diff
+hg status
+hg id
+hg update
+hg diff
+hg status
+hg id
+hg update -C 0
+echo "changed file1 different" >> file1
+HGMERGE=merge hg update
+hg diff | sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" -e "s/\(<<<<<<<\) .*/\1/" \
+ -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/" -e "s/\(>>>>>>>\) .*/\1/"
+hg status
+hg id
+hg revert --no-backup
+hg diff
+hg status
+hg id
+hg revert -r tip --no-backup
+hg diff
+hg status
+hg id
+hg update -C
+hg diff
+hg status
+hg id
+cd ..; /bin/rm -rf t
+
new file mode 100644
--- /dev/null
+++ b/tests/test-merge-revert2.out
@@ -0,0 +1,34 @@
+1:f248da0d4c3e
+0:9eca13a34789
+f248da0d4c3e tip
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+9eca13a34789
+9eca13a34789+
+reverting file1
+9eca13a34789
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+f248da0d4c3e tip
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+merge: warning: conflicts during merge
+merging file1
+merging file1 failed!
+0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+There are unresolved merges with locally modified files.
+diff -r f248da0d4c3e file1
+--- a/file1
++++ b/file1
+@@ -1,3 +1,7 @@ added file1
+ added file1
+ another line of text
++<<<<<<<
++changed file1 different
++=======
+ changed file1
++>>>>>>>
+M file1
+f248da0d4c3e+ tip
+reverting file1
+f248da0d4c3e tip
+f248da0d4c3e tip
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+f248da0d4c3e tip
new file mode 100755
--- /dev/null
+++ b/tests/test-merge1
@@ -0,0 +1,96 @@
+#!/bin/sh
+
+cat <<'EOF' > merge
+#!/bin/sh
+echo merging for `basename $1`
+EOF
+chmod +x merge
+
+mkdir t
+cd t
+hg init
+echo This is file a1 > a
+hg add a
+hg commit -m "commit #0" -d "1000000 0"
+echo This is file b1 > b
+hg add b
+hg commit -m "commit #1" -d "1000000 0"
+
+hg update 0
+echo This is file c1 > c
+hg add c
+hg commit -m "commit #2" -d "1000000 0"
+echo This is file b1 > b
+echo %% no merges expected
+env HGMERGE=../merge hg merge 1
+cd ..; /bin/rm -rf t
+
+mkdir t
+cd t
+hg init
+echo This is file a1 > a
+hg add a
+hg commit -m "commit #0" -d "1000000 0"
+echo This is file b1 > b
+hg add b
+hg commit -m "commit #1" -d "1000000 0"
+
+hg update 0
+echo This is file c1 > c
+hg add c
+hg commit -m "commit #2" -d "1000000 0"
+echo This is file b2 > b
+echo %% merge should fail
+env HGMERGE=../merge hg merge 1
+echo %% merge of b expected
+env HGMERGE=../merge hg merge -f 1
+cd ..; /bin/rm -rf t
+echo %%
+
+mkdir t
+cd t
+hg init
+echo This is file a1 > a
+hg add a
+hg commit -m "commit #0" -d "1000000 0"
+echo This is file b1 > b
+hg add b
+hg commit -m "commit #1" -d "1000000 0"
+echo This is file b22 > b
+hg commit -m "commit #2" -d "1000000 0"
+hg update 1
+echo This is file c1 > c
+hg add c
+hg commit -m "commit #3" -d "1000000 0"
+
+echo 'Contents of b should be "this is file b1"'
+cat b
+
+echo This is file b22 > b
+echo %% merge fails
+env HGMERGE=../merge hg merge 2
+echo %% merge expected!
+env HGMERGE=../merge hg merge -f 2
+cd ..; /bin/rm -rf t
+
+mkdir t
+cd t
+hg init
+echo This is file a1 > a
+hg add a
+hg commit -m "commit #0" -d "1000000 0"
+echo This is file b1 > b
+hg add b
+hg commit -m "commit #1" -d "1000000 0"
+echo This is file b22 > b
+hg commit -m "commit #2" -d "1000000 0"
+hg update 1
+echo This is file c1 > c
+hg add c
+hg commit -m "commit #3" -d "1000000 0"
+echo This is file b33 > b
+echo %% merge of b should fail
+env HGMERGE=../merge hg merge 2
+echo %% merge of b expected
+env HGMERGE=../merge hg merge -f 2
+cd ..; /bin/rm -rf t
new file mode 100644
--- /dev/null
+++ b/tests/test-merge1.out
@@ -0,0 +1,31 @@
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+%% no merges expected
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+%% merge should fail
+abort: 'b' already exists in the working dir and differs from remote
+%% merge of b expected
+merging for b
+merging b
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+%%
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+Contents of b should be "this is file b1"
+This is file b1
+%% merge fails
+abort: outstanding uncommitted changes
+%% merge expected!
+merging for b
+merging b
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+%% merge of b should fail
+abort: outstanding uncommitted changes
+%% merge of b expected
+merging for b
+merging b
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
new file mode 100755
--- /dev/null
+++ b/tests/test-merge2
@@ -0,0 +1,48 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+echo This is file a1 > a
+hg add a
+hg commit -m "commit #0" -d "1000000 0"
+echo This is file b1 > b
+hg add b
+hg commit -m "commit #1" -d "1000000 0"
+rm b
+hg update 0
+echo This is file b2 > b
+hg add b
+hg commit -m "commit #2" -d "1000000 0"
+cd ..; /bin/rm -rf t
+
+mkdir t
+cd t
+hg init
+echo This is file a1 > a
+hg add a
+hg commit -m "commit #0" -d "1000000 0"
+echo This is file b1 > b
+hg add b
+hg commit -m "commit #1" -d "1000000 0"
+rm b
+hg update 0
+echo This is file b2 > b
+hg commit -A -m "commit #2" -d "1000000 0"
+cd ..; /bin/rm -rf t
+
+mkdir t
+cd t
+hg init
+echo This is file a1 > a
+hg add a
+hg commit -m "commit #0" -d "1000000 0"
+echo This is file b1 > b
+hg add b
+hg commit -m "commit #1" -d "1000000 0"
+rm b
+hg remove b
+hg update 0
+echo This is file b2 > b
+hg commit -A -m "commit #2" -d "1000000 0"
+cd ..; /bin/rm -rf t
new file mode 100644
--- /dev/null
+++ b/tests/test-merge2.out
@@ -0,0 +1,5 @@
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+adding b
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+adding b
new file mode 100755
--- /dev/null
+++ b/tests/test-merge3
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+hg init
+echo This is file a1 > a
+hg add a
+hg commit -m "commit #0" -d "1000000 0"
+touch b
+hg add b
+rm b
+hg commit -A -m"comment #1" -d "1000000 0"
new file mode 100644
--- /dev/null
+++ b/tests/test-merge3.out
@@ -0,0 +1,2 @@
+removing b
+nothing changed
new file mode 100755
--- /dev/null
+++ b/tests/test-merge4
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+hg init
+echo This is file a1 > a
+hg add a
+hg commit -m "commit #0" -d "1000000 0"
+echo This is file b1 > b
+hg add b
+hg commit -m "commit #1" -d "1000000 0"
+hg update 0
+echo This is file c1 > c
+hg add c
+hg commit -m "commit #2" -d "1000000 0"
+hg merge 1
+rm b
+echo This is file c22 > c
+hg commit -m "commit #3" -d "1000000 0"
new file mode 100644
--- /dev/null
+++ b/tests/test-merge4.out
@@ -0,0 +1,3 @@
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
new file mode 100755
--- /dev/null
+++ b/tests/test-merge5
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+echo This is file a1 > a
+echo This is file b1 > b
+hg add a b
+hg commit -m "commit #0" -d "1000000 0"
+echo This is file b22 > b
+hg commit -m"comment #1" -d "1000000 0"
+hg update 0
+rm b
+hg commit -A -m"comment #2" -d "1000000 0"
+# in theory, we shouldn't need the "-y" below, but it prevents
+# this test from hanging when "hg update" erroneously prompts the
+# user for "keep or delete"
+hg update -y 1
+
+exit 0
new file mode 100644
--- /dev/null
+++ b/tests/test-merge5.out
@@ -0,0 +1,6 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+removing b
+this update spans a branch affecting the following files:
+ b
+aborting update spanning branches!
+(use 'hg merge' to merge across branches or 'hg update -C' to lose changes)
new file mode 100755
--- /dev/null
+++ b/tests/test-merge6
@@ -0,0 +1,46 @@
+#!/bin/sh
+
+cat <<'EOF' > merge
+#!/bin/sh
+echo merging for `basename $1`
+EOF
+chmod +x merge
+HGMERGE=./merge; export HGMERGE
+
+mkdir A1
+cd A1
+hg init
+echo This is file foo1 > foo
+echo This is file bar1 > bar
+hg add foo bar
+hg commit -m "commit text" -d "1000000 0"
+
+cd ..
+hg clone A1 B1
+
+cd A1
+rm bar
+hg remove bar
+hg commit -m "commit test" -d "1000000 0"
+
+cd ../B1
+echo This is file foo22 > foo
+hg commit -m "commit test" -d "1000000 0"
+
+cd ..
+hg clone A1 A2
+hg clone B1 B2
+
+cd A1
+hg pull ../B1
+hg merge
+hg commit -m "commit test" -d "1000000 0"
+echo bar should remain deleted.
+hg manifest
+
+cd ../B2
+hg pull ../A2
+hg merge
+hg commit -m "commit test" -d "1000000 0"
+echo bar should remain deleted.
+hg manifest
new file mode 100644
--- /dev/null
+++ b/tests/test-merge6.out
@@ -0,0 +1,25 @@
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pulling from ../B1
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+bar should remain deleted.
+f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644 foo
+pulling from ../A2
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 0 changes to 0 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+bar should remain deleted.
+f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644 foo
new file mode 100755
--- /dev/null
+++ b/tests/test-merge7
@@ -0,0 +1,66 @@
+#!/bin/sh
+
+# initial
+hg init test-a
+cd test-a
+cat >test.txt <<"EOF"
+1
+2
+3
+EOF
+hg add test.txt
+hg commit -m "Initial" -d "1000000 0"
+
+# clone
+cd ..
+hg clone test-a test-b
+
+# change test-a
+cd test-a
+cat >test.txt <<"EOF"
+one
+two
+three
+EOF
+hg commit -m "Numbers as words" -d "1000000 0"
+
+# change test-b
+cd ../test-b
+cat >test.txt <<"EOF"
+1
+2.5
+3
+EOF
+hg commit -m "2 -> 2.5" -d "1000000 0"
+
+# now pull and merge from test-a
+hg pull ../test-a
+HGMERGE=merge hg merge
+# resolve conflict
+cat >test.txt <<"EOF"
+one
+two-point-five
+three
+EOF
+rm -f *.orig
+hg commit -m "Merge 1" -d "1000000 0"
+
+# change test-a again
+cd ../test-a
+cat >test.txt <<"EOF"
+one
+two-point-one
+three
+EOF
+hg commit -m "two -> two-point-one" -d "1000000 0"
+
+# pull and merge from test-a again
+cd ../test-b
+hg pull ../test-a
+HGMERGE=merge hg merge --debug
+
+cat test.txt | sed "s% .*%%"
+
+hg debugindex .hg/data/test.txt.i
+
+hg log
new file mode 100644
--- /dev/null
+++ b/tests/test-merge7.out
@@ -0,0 +1,78 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pulling from ../test-a
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+merge: warning: conflicts during merge
+merging test.txt
+merging test.txt failed!
+0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+There are unresolved merges, you can redo the full merge using:
+ hg update -C 1
+ hg merge 2
+pulling from ../test-a
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+merge: warning: conflicts during merge
+resolving manifests
+ force False allow True moddirstate True linear False
+ ancestor 055d847dd401 local 2eded9ab0a5c remote 84cf5750dd20
+ test.txt versions differ, resolve
+merging test.txt
+resolving test.txt
+file test.txt: my fc3148072371 other d40249267ae3 ancestor 8fe46a3eb557
+merging test.txt failed!
+0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+There are unresolved merges, you can redo the full merge using:
+ hg update -C 3
+ hg merge 4
+one
+<<<<<<<
+two-point-five
+=======
+two-point-one
+>>>>>>>
+three
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 7 0 0 01365c4cca56 000000000000 000000000000
+ 1 7 9 1 1 7b013192566a 01365c4cca56 000000000000
+ 2 16 15 2 2 8fe46a3eb557 01365c4cca56 000000000000
+ 3 31 27 2 3 fc3148072371 7b013192566a 8fe46a3eb557
+ 4 58 25 4 4 d40249267ae3 8fe46a3eb557 000000000000
+changeset: 4:a070d41e8360
+tag: tip
+parent: 2:faaea63e63a9
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: two -> two-point-one
+
+changeset: 3:451c744aabcc
+parent: 1:e409be6afcc0
+parent: 2:faaea63e63a9
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: Merge 1
+
+changeset: 2:faaea63e63a9
+parent: 0:095c92b91f1a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: Numbers as words
+
+changeset: 1:e409be6afcc0
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2 -> 2.5
+
+changeset: 0:095c92b91f1a
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: Initial
+
new file mode 100755
--- /dev/null
+++ b/tests/test-nested-repo
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+hg init a
+cd a
+hg init b
+echo x > b/x
+echo '# should print nothing'
+hg st
+echo '# should print ? b/x'
+hg st b/x
+
+hg add b/x
+
+echo '# should print A b/x'
+hg st
+echo '# should forget b/x'
+hg revert
+echo '# should print nothing'
+hg st b
new file mode 100644
--- /dev/null
+++ b/tests/test-nested-repo.out
@@ -0,0 +1,8 @@
+# should print nothing
+# should print ? b/x
+? b/x
+# should print A b/x
+A b/x
+# should forget b/x
+forgetting b/x
+# should print nothing
new file mode 100755
--- /dev/null
+++ b/tests/test-notfound
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+hg init
+
+echo "Is there an error message when trying to diff non-existing files?"
+hg diff not found
+
+echo "Is there an error message when trying to add non-existing files?"
+hg add not found
new file mode 100644
--- /dev/null
+++ b/tests/test-notfound.out
@@ -0,0 +1,6 @@
+Is there an error message when trying to diff non-existing files?
+found: No such file or directory
+not: No such file or directory
+Is there an error message when trying to add non-existing files?
+found: No such file or directory
+not: No such file or directory
new file mode 100755
--- /dev/null
+++ b/tests/test-parseindex
@@ -0,0 +1,52 @@
+#!/bin/sh
+#
+# revlog.parseindex must be able to parse the index file even if
+# an index entry is split between two 64k blocks. The ideal test
+# would be to create an index file with inline data where
+# 64k < size < 64k + 64 (64k is the size of the read buffer, 64 is
+# the size of an index entry) and with an index entry starting right
+# before the 64k block boundary, and try to read it.
+#
+# We approximate that by reducing the read buffer to 1 byte.
+#
+
+hg init a
+cd a
+echo abc > foo
+hg add foo
+hg commit -m 'add foo' -d '1000000 0'
+
+echo >> foo
+hg commit -m 'change foo' -d '1000001 0'
+hg log -r 0:
+
+cat >> test.py << EOF
+from mercurial import changelog, util
+from mercurial.node import *
+
+class singlebyteread(object):
+ def __init__(self, real):
+ self.real = real
+
+ def read(self, size=-1):
+ if size == 65536:
+ size = 1
+ return self.real.read(size)
+
+ def __getattr__(self, key):
+ return getattr(self.real, key)
+
+def opener(*args):
+ o = util.opener(*args)
+ def wrapper(*a):
+ f = o(*a)
+ return singlebyteread(f)
+ return wrapper
+
+cl = changelog.changelog(opener('.hg'))
+print cl.count(), 'revisions:'
+for r in xrange(cl.count()):
+ print short(cl.node(r))
+EOF
+
+python test.py
new file mode 100644
--- /dev/null
+++ b/tests/test-parseindex.out
@@ -0,0 +1,14 @@
+changeset: 0:9c2cf2b35aa7
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: add foo
+
+changeset: 1:3756a9556b89
+tag: tip
+user: test
+date: Mon Jan 12 13:46:41 1970 +0000
+summary: change foo
+
+2 revisions:
+9c2cf2b35aa7
+3756a9556b89
new file mode 100755
--- /dev/null
+++ b/tests/test-permissions
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+hg init
+echo foo > a
+hg add a
+hg commit -m "1" -d "1000000 0"
+hg verify
+chmod -r .hg/data/a.i
+hg verify 2>/dev/null || echo verify failed
+chmod +r .hg/data/a.i
+hg verify 2>/dev/null || echo verify failed
+chmod -w .hg/data/a.i
+echo barber > a
+hg commit -m "2" -d "1000000 0" 2>/dev/null || echo commit failed
+
new file mode 100644
--- /dev/null
+++ b/tests/test-permissions.out
@@ -0,0 +1,16 @@
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+verify failed
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+commit failed
new file mode 100755
--- /dev/null
+++ b/tests/test-pull
@@ -0,0 +1,21 @@
+#!/bin/sh
+
+mkdir test
+cd test
+echo foo>foo
+hg init
+hg addremove
+hg commit -m 1
+hg verify
+hg serve -p 20059 -d --pid-file=hg.pid
+cd ..
+
+http_proxy= hg clone http://localhost:20059/ copy
+cd copy
+hg verify
+hg co
+cat foo
+hg manifest
+hg pull
+
+kill `cat ../test/hg.pid`
new file mode 100755
--- /dev/null
+++ b/tests/test-pull-permission
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+mkdir a
+cd a
+hg init
+echo foo > b
+hg add b
+hg ci -m "b" -d "1000000 0"
+
+chmod -w .hg
+
+cd ..
+
+hg clone a b
+
+chmod +w a/.hg # let test clean up
+
+cd b
+hg verify
new file mode 100644
--- /dev/null
+++ b/tests/test-pull-permission.out
@@ -0,0 +1,11 @@
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
new file mode 100755
--- /dev/null
+++ b/tests/test-pull-pull-corruption
@@ -0,0 +1,41 @@
+#!/bin/sh
+#
+# Corrupt an hg repo with two pulls.
+#
+
+# create one repo with a long history
+hg init source1
+cd source1
+touch foo
+hg add foo
+for i in 1 2 3 4 5 6 7 8 9 10; do
+ echo $i >> foo
+ hg ci -m $i
+done
+cd ..
+
+# create one repo with a shorter history
+hg clone -r 0 source1 source2
+cd source2
+echo a >> foo
+hg ci -m a
+cd ..
+
+# create a third repo to pull both other repos into it
+hg init corrupted
+cd corrupted
+# use a hook to make the second pull start while the first one is still running
+echo '[hooks]' >> .hg/hgrc
+echo 'prechangegroup = sleep 5' >> .hg/hgrc
+
+# start a pull...
+hg pull ../source1 &
+
+# ... and start another pull before the first one has finished
+sleep 1
+hg pull ../source2 2>/dev/null
+
+# see the result
+wait
+hg verify
+
new file mode 100644
--- /dev/null
+++ b/tests/test-pull-pull-corruption.out
@@ -0,0 +1,25 @@
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pulling from ../source2
+pulling from ../source1
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 10 changesets with 10 changes to 1 files
+(run 'hg update' to get a working copy)
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 11 changesets, 11 total revisions
new file mode 100755
--- /dev/null
+++ b/tests/test-pull-pull-corruption2
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# Corrupt an hg repo with two pulls.
+#
+
+# create one repo with a long history
+hg init source1
+cd source1
+touch foo
+hg add foo
+for i in 1 2 3 4 5 6 7 8 9 10; do
+ echo $i >> foo
+ hg ci -m $i
+done
+cd ..
+
+# create a third repo to pull both other repos into it
+hg init version2
+hg -R version2 pull source1 &
+sleep 1
+
+hg clone --pull -U version2 corrupted
+hg -R corrupted verify
+hg -R version2 verify
new file mode 100644
--- /dev/null
+++ b/tests/test-pull-pull-corruption2.out
@@ -0,0 +1,22 @@
+pulling from source1
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 10 changesets with 10 changes to 1 files
+(run 'hg update' to get a working copy)
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 10 changesets with 10 changes to 1 files
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 10 changesets, 10 total revisions
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 10 changesets, 10 total revisions
new file mode 100644
--- /dev/null
+++ b/tests/test-pull.out
@@ -0,0 +1,24 @@
+(the addremove command is deprecated; use add and remove --after instead)
+adding foo
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+foo
+2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo
+pulling from http://localhost:20059/
+searching for changes
+no changes found
new file mode 100755
--- /dev/null
+++ b/tests/test-push-hook-lock
@@ -0,0 +1,13 @@
+#!/bin/sh
+hg init 1
+echo '[ui]' >> 1/.hg/hgrc
+echo 'timeout = 10' >> 1/.hg/hgrc
+echo foo > 1/foo
+hg --cwd 1 ci -A -m foo
+hg clone 1 2
+hg clone 2 3
+echo '[hooks]' >> 2/.hg/hgrc
+echo 'changegroup.push = hg push -qf ../1' >> 2/.hg/hgrc
+echo bar >> 3/foo
+hg --cwd 3 ci -m bar
+hg --cwd 3 push ../2
new file mode 100644
--- /dev/null
+++ b/tests/test-push-hook-lock.out
@@ -0,0 +1,9 @@
+adding foo
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pushing to ../2
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
new file mode 100755
--- /dev/null
+++ b/tests/test-push-r
@@ -0,0 +1,61 @@
+#!/bin/sh
+
+hg init test
+cd test
+cat >>afile <<EOF
+0
+EOF
+hg add afile
+hg commit -m "0.0"
+cat >>afile <<EOF
+1
+EOF
+hg commit -m "0.1"
+cat >>afile <<EOF
+2
+EOF
+hg commit -m "0.2"
+cat >>afile <<EOF
+3
+EOF
+hg commit -m "0.3"
+hg update -C 0
+cat >>afile <<EOF
+1
+EOF
+hg commit -m "1.1"
+cat >>afile <<EOF
+2
+EOF
+hg commit -m "1.2"
+cat >fred <<EOF
+a line
+EOF
+cat >>afile <<EOF
+3
+EOF
+hg add fred
+hg commit -m "1.3"
+hg mv afile adifferentfile
+hg commit -m "1.3m"
+hg update -C 3
+hg mv afile anotherfile
+hg commit -m "0.3m"
+hg debugindex .hg/data/afile.i
+hg debugindex .hg/data/adifferentfile.i
+hg debugindex .hg/data/anotherfile.i
+hg debugindex .hg/data/fred.i
+hg debugindex .hg/00manifest.i
+hg verify
+cd ..
+for i in 0 1 2 3 4 5 6 7 8; do
+ mkdir test-"$i"
+ hg --cwd test-"$i" init
+ hg -R test push -r "$i" test-"$i"
+ cd test-"$i"
+ hg verify
+ cd ..
+done
+cd test-8
+hg pull ../test-7
+hg verify
new file mode 100644
--- /dev/null
+++ b/tests/test-push-r.out
@@ -0,0 +1,137 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 3 0 0 362fef284ce2 000000000000 000000000000
+ 1 3 5 1 1 125144f7e028 362fef284ce2 000000000000
+ 2 8 7 2 2 4c982badb186 125144f7e028 000000000000
+ 3 15 9 3 3 19b1fc555737 4c982badb186 000000000000
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 75 0 7 905359268f77 000000000000 000000000000
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 75 0 8 905359268f77 000000000000 000000000000
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 8 0 6 12ab3bcc5ea4 000000000000 000000000000
+ rev offset length base linkrev nodeid p1 p2
+ 0 0 48 0 0 43eadb1d2d06 000000000000 000000000000
+ 1 48 48 1 1 8b89697eba2c 43eadb1d2d06 000000000000
+ 2 96 48 2 2 626a32663c2f 8b89697eba2c 000000000000
+ 3 144 48 3 3 f54c32f13478 626a32663c2f 000000000000
+ 4 192 58 3 6 de68e904d169 626a32663c2f 000000000000
+ 5 250 68 3 7 3b45cc2ab868 de68e904d169 000000000000
+ 6 318 54 6 8 24d86153a002 f54c32f13478 000000000000
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+4 files, 9 changesets, 7 total revisions
+pushing to test-0
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+pushing to test-1
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 1 files
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 2 changesets, 2 total revisions
+pushing to test-2
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 1 files
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 3 changesets, 3 total revisions
+pushing to test-3
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 4 changes to 1 files
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 4 changesets, 4 total revisions
+pushing to test-4
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 1 files
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 2 changesets, 2 total revisions
+pushing to test-5
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 1 files
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 3 changesets, 3 total revisions
+pushing to test-6
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 5 changes to 2 files
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+2 files, 4 changesets, 5 total revisions
+pushing to test-7
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 6 changes to 3 files
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+3 files, 5 changesets, 6 total revisions
+pushing to test-8
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 5 changesets with 5 changes to 2 files
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+2 files, 5 changesets, 5 total revisions
+pulling from ../test-7
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 4 changesets with 2 changes to 3 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+4 files, 9 changesets, 7 total revisions
new file mode 100755
--- /dev/null
+++ b/tests/test-push-warn
@@ -0,0 +1,55 @@
+#!/bin/sh
+
+mkdir a
+cd a
+hg init
+echo foo > t1
+hg add t1
+hg commit -m "1" -d "1000000 0"
+
+cd ..
+hg clone a b
+
+cd a
+echo foo > t2
+hg add t2
+hg commit -m "2" -d "1000000 0"
+
+cd ../b
+echo foo > t3
+hg add t3
+hg commit -m "3" -d "1000000 0"
+
+hg push ../a
+hg pull ../a
+hg push ../a
+hg merge
+hg commit -m "4" -d "1000000 0"
+hg push ../a
+cd ..
+
+hg init c
+cd c
+for i in 0 1 2; do
+ echo $i >> foo
+ hg ci -Am $i -d "1000000 0"
+done
+cd ..
+
+hg clone c d
+cd d
+for i in 0 1; do
+ hg co -C $i
+ echo d-$i >> foo
+ hg ci -m d-$i -d "1000000 0"
+done
+
+HGMERGE=true hg merge 3
+hg ci -m c-d -d "1000000 0"
+
+hg push ../c
+hg push -r 2 ../c
+hg push -r 3 -r 4 ../c
+hg push -r 5 ../c
+
+exit 0
new file mode 100644
--- /dev/null
+++ b/tests/test-push-warn.out
@@ -0,0 +1,50 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pushing to ../a
+searching for changes
+abort: unsynced remote changes!
+(did you forget to sync? use push -f to force)
+pulling from ../a
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+pushing to ../a
+searching for changes
+abort: push creates new remote branches!
+(did you forget to merge? use push -f to force)
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+pushing to ../a
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 1 changes to 1 files
+adding foo
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+merging foo
+0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+pushing to ../c
+searching for changes
+abort: push creates new remote branches!
+(did you forget to merge? use push -f to force)
+pushing to ../c
+searching for changes
+no changes found
+pushing to ../c
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 2 changesets with 2 changes to 1 files (+2 heads)
+pushing to ../c
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
new file mode 100755
--- /dev/null
+++ b/tests/test-rawcommit1
@@ -0,0 +1,33 @@
+#!/bin/sh
+hg --debug init
+echo this is a1 > a
+hg add a
+hg commit -m0 -d "1000000 0"
+echo this is b1 > b
+hg add b
+hg commit -m1 -d "1000000 0"
+hg manifest 1
+echo this is c1 > c
+hg rawcommit -p 1 -d "1000000 0" -m2 c
+hg manifest 2
+hg parents
+rm b
+hg rawcommit -p 2 -d "1000000 0" -m3 b
+hg manifest 3
+hg parents
+echo this is a22 > a
+hg rawcommit -p 3 -d "1000000 0" -m4 a
+hg manifest 4
+hg parents
+echo this is c22 > c
+hg rawcommit -p 1 -d "1000000 0" -m5 c
+hg manifest 5
+hg parents
+# merge, but no files changed
+hg rawcommit -p 4 -p 5 -d "1000000 0" -m6
+hg manifest 6
+hg parents
+# no changes what-so-ever
+hg rawcommit -p 6 -d "1000000 0" -m7
+hg manifest 7
+hg parents
new file mode 100644
--- /dev/null
+++ b/tests/test-rawcommit1.out
@@ -0,0 +1,59 @@
+05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
+54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
+(the rawcommit command is deprecated)
+05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
+54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
+76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
+changeset: 2:e110db3db549
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+(the rawcommit command is deprecated)
+05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
+76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
+changeset: 3:0f9843914735
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 3
+
+(the rawcommit command is deprecated)
+d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
+76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
+changeset: 4:909a3d1d3ee1
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 4
+
+(the rawcommit command is deprecated)
+05f9e54f4c9b86b09099803d8b49a50edcb4eaab 644 a
+54837d97f2932a8194e69745a280a2c11e61ff9c 644 b
+3570202ceac2b52517df64ebd0a062cb0d8fe33a 644 c
+changeset: 4:909a3d1d3ee1
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 4
+
+(the rawcommit command is deprecated)
+d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
+76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
+changeset: 6:725fdd0728db
+tag: tip
+parent: 4:909a3d1d3ee1
+parent: 5:f56d4c64ab98
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 6
+
+(the rawcommit command is deprecated)
+d6e3c4976c13feb1728cd3ac851abaf7256a5c23 644 a
+76d5e637cbec1bcc04a5a3fa4bcc7d13f6847c00 644 c
+changeset: 7:2c11b55105cb
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 7
+
new file mode 100755
--- /dev/null
+++ b/tests/test-remove
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+hg init a
+cd a
+echo a > foo
+hg rm foo
+hg add foo
+hg commit -m 1 -d "1000000 0"
+hg remove
+rm foo
+hg remove foo
+hg revert
+rm foo
+hg remove --after
+hg commit -m 2 -d "1000000 0"
+hg export 0
+hg export 1
+hg log -p -r 0
+hg log -p -r 1
+
+echo a > a
+hg add a
+hg rm a
+hg rm -f a
+echo b > b
+hg ci -A -m 3 -d "1000001 0"
+echo c >> b
+hg rm b
+hg rm -f b
+
+cd ..
+hg clone a b
new file mode 100644
--- /dev/null
+++ b/tests/test-remove.out
@@ -0,0 +1,58 @@
+not removing foo: file is not managed
+abort: no files specified
+undeleting foo
+removing foo
+# HG changeset patch
+# User test
+# Date 1000000 0
+# Node ID 8ba83d44753d6259db5ce6524974dd1174e90f47
+# Parent 0000000000000000000000000000000000000000
+1
+
+diff -r 000000000000 -r 8ba83d44753d foo
+--- /dev/null Thu Jan 01 00:00:00 1970 +0000
++++ b/foo Mon Jan 12 13:46:40 1970 +0000
+@@ -0,0 +1,1 @@
++a
+# HG changeset patch
+# User test
+# Date 1000000 0
+# Node ID a1fce69c50d97881c5c014ab23f580f720c78678
+# Parent 8ba83d44753d6259db5ce6524974dd1174e90f47
+2
+
+diff -r 8ba83d44753d -r a1fce69c50d9 foo
+--- a/foo Mon Jan 12 13:46:40 1970 +0000
++++ /dev/null Thu Jan 01 00:00:00 1970 +0000
+@@ -1,1 +0,0 @@
+-a
+changeset: 0:8ba83d44753d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1
+
+diff -r 000000000000 -r 8ba83d44753d foo
+--- /dev/null Thu Jan 01 00:00:00 1970 +0000
++++ b/foo Mon Jan 12 13:46:40 1970 +0000
+@@ -0,0 +1,1 @@
++a
+
+
+changeset: 1:a1fce69c50d9
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+diff -r 8ba83d44753d -r a1fce69c50d9 foo
+--- a/foo Mon Jan 12 13:46:40 1970 +0000
++++ /dev/null Thu Jan 01 00:00:00 1970 +0000
+@@ -1,1 +0,0 @@
+-a
+
+
+not removing a: file has been marked for add (use -f to force removal)
+adding a
+adding b
+not removing b: file is modified (use -f to force removal)
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
new file mode 100755
--- /dev/null
+++ b/tests/test-rename
@@ -0,0 +1,211 @@
+#!/bin/sh
+
+hg init
+mkdir d1 d1/d11 d2
+echo d1/a > d1/a
+echo d1/ba > d1/ba
+echo d1/a1 > d1/d11/a1
+echo d1/b > d1/b
+echo d2/b > d2/b
+hg add d1/a d1/b d1/ba d1/d11/a1 d2/b
+hg commit -m "1" -d "1000000 0"
+
+echo "# rename a single file"
+hg rename d1/d11/a1 d2/c
+hg status
+hg update -C
+
+echo "# rename --after a single file"
+mv d1/d11/a1 d2/c
+hg rename --after d1/d11/a1 d2/c
+hg status
+hg update -C
+
+echo "# move a single file to an existing directory"
+hg rename d1/d11/a1 d2
+hg status
+hg update -C
+
+echo "# move --after a single file to an existing directory"
+mv d1/d11/a1 d2
+hg rename --after d1/d11/a1 d2
+hg status
+hg update -C
+
+echo "# rename a file using a relative path"
+(cd d1/d11; hg rename ../../d2/b e)
+hg status
+hg update -C
+
+echo "# rename --after a file using a relative path"
+(cd d1/d11; mv ../../d2/b e; hg rename --after ../../d2/b e)
+hg status
+hg update -C
+
+echo "# rename directory d1 as d3"
+hg rename d1/ d3
+hg status
+hg update -C
+
+echo "# rename --after directory d1 as d3"
+mv d1 d3
+hg rename --after d1 d3
+hg status
+hg update -C
+
+echo "# move a directory using a relative path"
+(cd d2; mkdir d3; hg rename ../d1/d11 d3)
+hg status
+hg update -C
+
+echo "# move --after a directory using a relative path"
+(cd d2; mkdir d3; mv ../d1/d11 d3; hg rename --after ../d1/d11 d3)
+hg status
+hg update -C
+
+echo "# move directory d1/d11 to an existing directory d2 (removes empty d1)"
+hg rename d1/d11/ d2
+hg status
+hg update -C
+
+echo "# move directories d1 and d2 to a new directory d3"
+mkdir d3
+hg rename d1 d2 d3
+hg status
+hg update -C
+
+echo "# move --after directories d1 and d2 to a new directory d3"
+mkdir d3
+mv d1 d2 d3
+hg rename --after d1 d2 d3
+hg status
+hg update -C
+
+echo "# move everything under directory d1 to existing directory d2, do not"
+echo "# overwrite existing files (d2/b)"
+hg rename d1/* d2
+hg status
+diff d1/b d2/b
+hg update -C
+
+echo "# attempt to move potentially more than one file into a non-existent"
+echo "# directory"
+hg rename 'glob:d1/**' dx
+
+echo "# move every file under d1 to d2/d21 (glob)"
+mkdir d2/d21
+hg rename 'glob:d1/**' d2/d21
+hg status
+hg update -C
+
+echo "# move --after some files under d1 to d2/d21 (glob)"
+mkdir d2/d21
+mv d1/a d1/d11/a1 d2/d21
+hg rename --after 'glob:d1/**' d2/d21
+hg status
+hg update -C
+
+echo "# move every file under d1 starting with an 'a' to d2/d21 (regexp)"
+mkdir d2/d21
+hg rename 're:d1/([^a][^/]*/)*a.*' d2/d21
+hg status
+hg update -C
+
+echo "# attempt to overwrite an existing file"
+echo "ca" > d1/ca
+hg rename d1/ba d1/ca
+hg status
+hg update -C
+
+echo "# forced overwrite of an existing file"
+echo "ca" > d1/ca
+hg rename --force d1/ba d1/ca
+hg status
+hg update -C
+
+echo "# replace a symlink with a file"
+ln -s ba d1/ca
+hg rename --force d1/ba d1/ca
+hg status
+hg update -C
+
+echo "# do not copy more than one source file to the same destination file"
+mkdir d3
+hg rename d1/* d2/* d3
+hg status
+hg update -C
+
+echo "# move a whole subtree with \"hg rename .\""
+mkdir d3
+(cd d1; hg rename . ../d3)
+hg status
+hg update -C
+
+echo "# move a whole subtree with \"hg rename --after .\""
+mkdir d3
+mv d1/* d3
+(cd d1; hg rename --after . ../d3)
+hg status
+hg update -C
+
+echo "# move the parent tree with \"hg rename ..\""
+(cd d1/d11; hg rename .. ../../d3)
+hg status
+hg update -C
+
+echo "# skip removed files"
+hg remove d1/b
+hg rename d1 d3
+hg status
+hg update -C
+
+echo "# transitive rename"
+hg rename d1/b d1/bb
+hg rename d1/bb d1/bc
+hg status
+hg update -C
+
+echo "# transitive rename --after"
+hg rename d1/b d1/bb
+mv d1/bb d1/bc
+hg rename --after d1/bb d1/bc
+hg status
+hg update -C
+
+echo "# idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)"
+hg rename d1/b d1/bb
+echo "some stuff added to d1/bb" >> d1/bb
+hg rename d1/bb d1/b
+hg status
+hg debugstate | grep copy
+hg update -C
+
+echo "# check illegal path components"
+
+hg rename d1/d11/a1 .hg/foo
+hg status
+hg rename d1/d11/a1 ../foo
+hg status
+
+mv d1/d11/a1 .hg/foo
+hg rename --after d1/d11/a1 .hg/foo
+hg status
+hg update -C
+rm .hg/foo
+
+hg rename d1/d11/a1 .hg
+hg status
+hg rename d1/d11/a1 ..
+hg status
+
+mv d1/d11/a1 .hg
+hg rename --after d1/d11/a1 .hg
+hg status
+hg update -C
+rm .hg/a1
+
+(cd d1/d11; hg rename ../../d2/b ../../.hg/foo)
+hg status
+(cd d1/d11; hg rename ../../d2/b ../../../foo)
+hg status
+
new file mode 100644
--- /dev/null
+++ b/tests/test-rename.out
@@ -0,0 +1,302 @@
+# rename a single file
+A d2/c
+R d1/d11/a1
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# rename --after a single file
+A d2/c
+R d1/d11/a1
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# move a single file to an existing directory
+A d2/a1
+R d1/d11/a1
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# move --after a single file to an existing directory
+A d2/a1
+R d1/d11/a1
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# rename a file using a relative path
+A d1/d11/e
+R d2/b
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# rename --after a file using a relative path
+A d1/d11/e
+R d2/b
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# rename directory d1 as d3
+copying d1/a to d3/a
+copying d1/b to d3/b
+copying d1/ba to d3/ba
+copying d1/d11/a1 to d3/d11/a1
+removing d1/a
+removing d1/b
+removing d1/ba
+removing d1/d11/a1
+A d3/a
+A d3/b
+A d3/ba
+A d3/d11/a1
+R d1/a
+R d1/b
+R d1/ba
+R d1/d11/a1
+4 files updated, 0 files merged, 4 files removed, 0 files unresolved
+# rename --after directory d1 as d3
+copying d1/a to d3/a
+copying d1/b to d3/b
+copying d1/ba to d3/ba
+copying d1/d11/a1 to d3/d11/a1
+removing d1/a
+removing d1/b
+removing d1/ba
+removing d1/d11/a1
+A d3/a
+A d3/b
+A d3/ba
+A d3/d11/a1
+R d1/a
+R d1/b
+R d1/ba
+R d1/d11/a1
+4 files updated, 0 files merged, 4 files removed, 0 files unresolved
+# move a directory using a relative path
+copying ../d1/d11/a1 to d3/d11/a1
+removing ../d1/d11/a1
+A d2/d3/d11/a1
+R d1/d11/a1
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# move --after a directory using a relative path
+copying ../d1/d11/a1 to d3/d11/a1
+removing ../d1/d11/a1
+A d2/d3/d11/a1
+R d1/d11/a1
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# move directory d1/d11 to an existing directory d2 (removes empty d1)
+copying d1/d11/a1 to d2/d11/a1
+removing d1/d11/a1
+A d2/d11/a1
+R d1/d11/a1
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# move directories d1 and d2 to a new directory d3
+copying d1/a to d3/d1/a
+copying d1/b to d3/d1/b
+copying d1/ba to d3/d1/ba
+copying d1/d11/a1 to d3/d1/d11/a1
+copying d2/b to d3/d2/b
+removing d1/a
+removing d1/b
+removing d1/ba
+removing d1/d11/a1
+removing d2/b
+A d3/d1/a
+A d3/d1/b
+A d3/d1/ba
+A d3/d1/d11/a1
+A d3/d2/b
+R d1/a
+R d1/b
+R d1/ba
+R d1/d11/a1
+R d2/b
+5 files updated, 0 files merged, 5 files removed, 0 files unresolved
+# move --after directories d1 and d2 to a new directory d3
+copying d1/a to d3/d1/a
+copying d1/b to d3/d1/b
+copying d1/ba to d3/d1/ba
+copying d1/d11/a1 to d3/d1/d11/a1
+copying d2/b to d3/d2/b
+removing d1/a
+removing d1/b
+removing d1/ba
+removing d1/d11/a1
+removing d2/b
+A d3/d1/a
+A d3/d1/b
+A d3/d1/ba
+A d3/d1/d11/a1
+A d3/d2/b
+R d1/a
+R d1/b
+R d1/ba
+R d1/d11/a1
+R d2/b
+5 files updated, 0 files merged, 5 files removed, 0 files unresolved
+# move everything under directory d1 to existing directory d2, do not
+# overwrite existing files (d2/b)
+d2/b: not overwriting - file exists
+copying d1/d11/a1 to d2/d11/a1
+removing d1/d11/a1
+A d2/a
+A d2/ba
+A d2/d11/a1
+R d1/a
+R d1/ba
+R d1/d11/a1
+1c1
+< d1/b
+---
+> d2/b
+3 files updated, 0 files merged, 3 files removed, 0 files unresolved
+# attempt to move potentially more than one file into a non-existent
+# directory
+abort: with multiple sources, destination must be an existing directory
+# move every file under d1 to d2/d21 (glob)
+copying d1/a to d2/d21/a
+copying d1/b to d2/d21/b
+copying d1/ba to d2/d21/ba
+copying d1/d11/a1 to d2/d21/a1
+removing d1/a
+removing d1/b
+removing d1/ba
+removing d1/d11/a1
+A d2/d21/a
+A d2/d21/a1
+A d2/d21/b
+A d2/d21/ba
+R d1/a
+R d1/b
+R d1/ba
+R d1/d11/a1
+4 files updated, 0 files merged, 4 files removed, 0 files unresolved
+# move --after some files under d1 to d2/d21 (glob)
+copying d1/a to d2/d21/a
+copying d1/d11/a1 to d2/d21/a1
+removing d1/a
+removing d1/d11/a1
+A d2/d21/a
+A d2/d21/a1
+R d1/a
+R d1/d11/a1
+2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+# move every file under d1 starting with an 'a' to d2/d21 (regexp)
+copying d1/a to d2/d21/a
+copying d1/d11/a1 to d2/d21/a1
+removing d1/a
+removing d1/d11/a1
+A d2/d21/a
+A d2/d21/a1
+R d1/a
+R d1/d11/a1
+2 files updated, 0 files merged, 2 files removed, 0 files unresolved
+# attempt to overwrite an existing file
+d1/ca: not overwriting - file exists
+? d1/ca
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+# forced overwrite of an existing file
+A d1/ca
+R d1/ba
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# replace a symlink with a file
+A d1/ca
+R d1/ba
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# do not copy more than one source file to the same destination file
+copying d1/d11/a1 to d3/d11/a1
+d3/b: not overwriting - d2/b collides with d1/b
+removing d1/d11/a1
+A d3/a
+A d3/b
+A d3/ba
+A d3/d11/a1
+R d1/a
+R d1/b
+R d1/ba
+R d1/d11/a1
+4 files updated, 0 files merged, 4 files removed, 0 files unresolved
+# move a whole subtree with "hg rename ."
+copying a to ../d3/d1/a
+copying b to ../d3/d1/b
+copying ba to ../d3/d1/ba
+copying d11/a1 to ../d3/d1/d11/a1
+removing a
+removing b
+removing ba
+removing d11/a1
+A d3/d1/a
+A d3/d1/b
+A d3/d1/ba
+A d3/d1/d11/a1
+R d1/a
+R d1/b
+R d1/ba
+R d1/d11/a1
+4 files updated, 0 files merged, 4 files removed, 0 files unresolved
+# move a whole subtree with "hg rename --after ."
+copying a to ../d3/a
+copying b to ../d3/b
+copying ba to ../d3/ba
+copying d11/a1 to ../d3/d11/a1
+removing a
+removing b
+removing ba
+removing d11/a1
+A d3/a
+A d3/b
+A d3/ba
+A d3/d11/a1
+R d1/a
+R d1/b
+R d1/ba
+R d1/d11/a1
+4 files updated, 0 files merged, 4 files removed, 0 files unresolved
+# move the parent tree with "hg rename .."
+copying ../a to ../../d3/a
+copying ../b to ../../d3/b
+copying ../ba to ../../d3/ba
+copying a1 to ../../d3/d11/a1
+removing ../a
+removing ../b
+removing ../ba
+removing a1
+A d3/a
+A d3/b
+A d3/ba
+A d3/d11/a1
+R d1/a
+R d1/b
+R d1/ba
+R d1/d11/a1
+4 files updated, 0 files merged, 4 files removed, 0 files unresolved
+# skip removed files
+copying d1/a to d3/a
+copying d1/ba to d3/ba
+copying d1/d11/a1 to d3/d11/a1
+removing d1/a
+removing d1/ba
+removing d1/d11/a1
+A d3/a
+A d3/ba
+A d3/d11/a1
+R d1/a
+R d1/b
+R d1/ba
+R d1/d11/a1
+4 files updated, 0 files merged, 3 files removed, 0 files unresolved
+# transitive rename
+A d1/bc
+R d1/b
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# transitive rename --after
+A d1/bc
+R d1/b
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+# idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)
+M d1/b
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+# check illegal path components
+abort: path contains illegal component: .hg/foo
+
+abort: ../foo not under root
+abort: path contains illegal component: .hg/foo
+
+! d1/d11/a1
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+abort: path contains illegal component: .hg/a1
+
+abort: ../a1 not under root
+abort: path contains illegal component: .hg/a1
+
+! d1/d11/a1
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+abort: path contains illegal component: .hg/foo
+
+abort: ../../../foo not under root
new file mode 100755
--- /dev/null
+++ b/tests/test-revert
@@ -0,0 +1,83 @@
+#!/bin/sh
+
+hg init
+echo 123 > a
+echo 123 > c
+echo 123 > e
+hg add a c e
+hg commit -m "first" -d "1000000 0" a c e
+echo 123 > b
+echo %% should show b unknown
+hg status
+echo 12 > c
+echo %% should show b unknown and c modified
+hg status
+hg add b
+echo %% should show b added and c modified
+hg status
+hg rm a
+echo %% should show a removed, b added and c modified
+hg status
+hg revert a
+echo %% should show b added, copy saved, and c modified
+hg status
+hg revert b
+echo %% should show b unknown, and c modified
+hg status
+hg revert --no-backup c
+echo %% should show unknown: b
+hg status
+echo %% should show a b c e
+ls
+echo %% should verbosely save backup to e.orig
+echo z > e
+hg revert -v
+echo %% should say no changes needed
+hg revert a
+echo %% should say file not managed
+echo q > q
+hg revert q
+rm q
+echo %% should say file not found
+hg revert notfound
+hg rm a
+hg commit -m "second" -d "1000000 0"
+echo z > z
+hg add z
+hg st
+echo %% should add a, forget z
+hg revert -r0
+echo %% should forget a
+hg revert -rtip
+rm -f a *.orig
+echo %% should silently add a
+hg revert -r0 a
+hg st a
+
+hg update -C
+chmod +x c
+hg revert
+echo %% should print non-executable
+test -x c || echo non-executable
+
+chmod +x c
+hg commit -d '1000001 0' -m exe
+
+chmod -x c
+hg revert
+echo %% should print executable
+test -x c && echo executable
+
+echo %% issue 241
+hg init a
+cd a
+echo a >> a
+hg commit -A -d '1 0' -m a
+echo a >> a
+hg commit -d '2 0' -m a
+hg update 0
+mkdir b
+echo b > b/b
+hg revert -rtip
+
+true
new file mode 100755
--- /dev/null
+++ b/tests/test-revert-unknown
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+hg init
+touch unknown
+
+touch a
+hg add a
+hg ci -m "1" -d "1000000 0"
+
+touch b
+hg add b
+hg ci -m "2" -d "1000000 0"
+
+echo %% Should show unknown
+hg status
+hg revert -r 0
+echo %% Should show unknown and b removed
+hg status
+echo %% Should show a and unknown
+ls
new file mode 100644
--- /dev/null
+++ b/tests/test-revert-unknown.out
@@ -0,0 +1,9 @@
+%% Should show unknown
+? unknown
+removing b
+%% Should show unknown and b removed
+R b
+? unknown
+%% Should show a and unknown
+a
+unknown
new file mode 100644
--- /dev/null
+++ b/tests/test-revert.out
@@ -0,0 +1,56 @@
+%% should show b unknown
+? b
+%% should show b unknown and c modified
+M c
+? b
+%% should show b added and c modified
+M c
+A b
+%% should show a removed, b added and c modified
+M c
+A b
+R a
+%% should show b added, copy saved, and c modified
+M c
+A b
+%% should show b unknown, and c modified
+M c
+? b
+%% should show unknown: b
+? b
+%% should show a b c e
+a
+b
+c
+e
+%% should verbosely save backup to e.orig
+saving current version of e as e.orig
+reverting e
+resolving manifests
+getting e
+%% should say no changes needed
+no changes needed to a
+%% should say file not managed
+file not managed: q
+%% should say file not found
+notfound: No such file in rev 095eacd0c0d7
+A z
+? b
+? e.orig
+%% should add a, forget z
+adding a
+forgetting z
+%% should forget a
+forgetting a
+%% should silently add a
+A a
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+reverting c
+%% should print non-executable
+non-executable
+reverting c
+%% should print executable
+executable
+%% issue 241
+adding a
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
new file mode 100755
--- /dev/null
+++ b/tests/test-ro-message
@@ -0,0 +1,17 @@
+#!/bin/sh
+HG=hg
+"$HG" init
+mkdir b
+echo 'Bouncy' >b/bouncy
+echo 'tricycle' >b/vehicle
+"$HG" add b/bouncy
+"$HG" add b/vehicle
+"$HG" commit -m 'Adding bouncy'
+echo 'bouncy' >>b/bouncy
+"$HG" commit -m 'Making it bouncier'
+"$HG" update -C 0
+echo 'stationary' >>b/vehicle
+"$HG" commit -m 'Clarifying the vehicle.'
+"$HG" update -C 1
+chmod a-w b/vehicle
+"$HG" merge 2 2>&1 | sed 's|^\(.*[ ]\).*/\([^/]*/[^/]*/[^/]*\)$|\1\2|g'
new file mode 100644
--- /dev/null
+++ b/tests/test-ro-message.out
@@ -0,0 +1,3 @@
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+abort: Permission denied - test-ro-message/b/vehicle
new file mode 100755
--- /dev/null
+++ b/tests/test-rollback
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+echo a > a
+hg add a
+hg commit -m "test" -d "1000000 0"
+hg verify
+hg parents
+hg status
+hg rollback
+hg verify
+hg parents
+hg status
new file mode 100644
--- /dev/null
+++ b/tests/test-rollback.out
@@ -0,0 +1,18 @@
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+changeset: 0:0acdaf898367
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: test
+
+rolling back last transaction
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+0 files, 0 changesets, 0 total revisions
+A a
new file mode 100755
--- /dev/null
+++ b/tests/test-simple-update
@@ -0,0 +1,24 @@
+#!/bin/sh
+
+set -e
+
+mkdir test
+cd test
+echo foo>foo
+hg init
+hg addremove
+hg commit -m "1"
+hg verify
+
+hg clone . ../branch
+cd ../branch
+hg co
+echo bar>>foo
+hg commit -m "2"
+
+cd ../test
+hg pull ../branch
+hg verify
+hg co
+cat foo
+hg manifest
new file mode 100644
--- /dev/null
+++ b/tests/test-simple-update.out
@@ -0,0 +1,25 @@
+(the addremove command is deprecated; use add and remove --after instead)
+adding foo
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+pulling from ../branch
+searching for changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+(run 'hg update' to get a working copy)
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 2 changesets, 2 total revisions
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+foo
+bar
+6f4310b00b9a147241b071a60c28a650827fb03d 644 foo
new file mode 100755
--- /dev/null
+++ b/tests/test-ssh
@@ -0,0 +1,70 @@
+#!/bin/sh
+
+# This test tries to exercise the ssh functionality with a dummy script
+
+cat <<'EOF' > dummyssh
+#!/bin/sh
+# this attempts to deal with relative pathnames
+cd `dirname $0`
+
+# check for proper args
+if [ $1 != "user@dummy" ] ; then
+ exit -1
+fi
+
+# check that we're in the right directory
+if [ ! -x dummyssh ] ; then
+ exit -1
+fi
+
+echo Got arguments 1:$1 2:$2 3:$3 4:$4 5:$5 >> dummylog
+$2
+EOF
+chmod +x dummyssh
+
+echo "# creating 'remote'"
+hg init remote
+cd remote
+echo this > foo
+hg ci -A -m "init" -d "1000000 0" foo
+
+cd ..
+
+echo "# clone remote"
+hg clone -e ./dummyssh ssh://user@dummy/remote local
+
+echo "# verify"
+cd local
+hg verify
+
+echo "# empty default pull"
+hg paths
+hg pull -e ../dummyssh
+
+echo "# local change"
+echo bleah > foo
+hg ci -m "add" -d "1000000 0"
+
+echo "# updating rc"
+echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
+echo "[ui]" >> .hg/hgrc
+echo "ssh = ../dummyssh" >> .hg/hgrc
+
+echo "# find outgoing"
+hg out ssh://user@dummy/remote
+
+echo "# find incoming on the remote side"
+hg incoming -R ../remote -e ../dummyssh ssh://user@dummy/local
+
+echo "# push"
+hg push
+
+cd ../remote
+
+echo "# check remote tip"
+hg tip
+hg verify
+hg cat foo
+
+cd ..
+cat dummylog
new file mode 100644
--- /dev/null
+++ b/tests/test-ssh.out
@@ -0,0 +1,62 @@
+# creating 'remote'
+# clone remote
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+# verify
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+# empty default pull
+default = ssh://user@dummy/remote
+pulling from ssh://user@dummy/remote
+searching for changes
+no changes found
+# local change
+# updating rc
+# find outgoing
+searching for changes
+changeset: 1:c54836a570be
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: add
+
+# find incoming on the remote side
+searching for changes
+changeset: 1:c54836a570be
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: add
+
+# push
+pushing to ssh://user@dummy/remote
+searching for changes
+remote: adding changesets
+remote: adding manifests
+remote: adding file changes
+remote: added 1 changesets with 1 changes to 1 files
+# check remote tip
+changeset: 1:c54836a570be
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: add
+
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 2 changesets, 2 total revisions
+bleah
+Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
+Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
+Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
+Got arguments 1:user@dummy 2:hg -R local serve --stdio 3: 4: 5:
+Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
new file mode 100755
--- /dev/null
+++ b/tests/test-static-http
@@ -0,0 +1,41 @@
+#!/bin/sh
+
+http_proxy= hg clone old-http://localhost:20059/ copy
+echo $?
+ls copy 2>/dev/null || echo copy: No such file or directory
+
+# This server doesn't do range requests so it's basically only good for
+# one pull
+cat > dumb.py <<EOF
+import BaseHTTPServer, SimpleHTTPServer, signal
+
+def run(server_class=BaseHTTPServer.HTTPServer,
+ handler_class=SimpleHTTPServer.SimpleHTTPRequestHandler):
+ server_address = ('localhost', 20059)
+ httpd = server_class(server_address, handler_class)
+ httpd.serve_forever()
+
+signal.signal(signal.SIGTERM, lambda x: sys.exit(0))
+run()
+EOF
+
+python dumb.py 2>/dev/null &
+
+mkdir remote
+cd remote
+hg init
+echo foo > bar
+hg add bar
+hg commit -m"test" -d "1000000 0"
+hg tip
+
+cd ..
+
+http_proxy= hg clone old-http://localhost:20059/remote local
+
+cd local
+hg verify
+cat bar
+http_proxy= hg pull
+
+kill $!
new file mode 100644
--- /dev/null
+++ b/tests/test-static-http.out
@@ -0,0 +1,24 @@
+abort: Connection refused
+255
+copy: No such file or directory
+changeset: 0:53e17d176ae6
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: test
+
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+checking changesets
+checking manifests
+crosschecking files in changesets and manifests
+checking files
+1 files, 1 changesets, 1 total revisions
+foo
+pulling from old-http://localhost:20059/remote
+searching for changes
+no changes found
new file mode 100755
--- /dev/null
+++ b/tests/test-status
@@ -0,0 +1,34 @@
+#!/bin/sh
+
+hg init repo1
+cd repo1
+mkdir a b a/1 b/1 b/2
+touch in_root a/in_a b/in_b a/1/in_a_1 b/1/in_b_1 b/2/in_b_2
+echo "hg status in repo root:"
+hg status
+echo "hg status . in repo root:"
+hg status .
+for dir in a b a/1 b/1 b/2; do
+ echo "hg status in $dir:"
+ hg status --cwd "$dir"
+ echo "hg status . in $dir:"
+ hg status --cwd "$dir" .
+ echo "hg status .. in $dir:"
+ hg status --cwd "$dir" ..
+done
+cd ..
+
+hg init repo2
+cd repo2
+touch modified removed deleted ignored
+echo "ignored" > .hgignore
+hg ci -A -m 'initial checkin' -d "1000000 0"
+sleep 1 # make sure mtime is changed
+touch modified added unknown ignored
+hg add added
+hg remove removed
+rm deleted
+echo "hg status:"
+hg status
+echo "hg status modified added removed deleted unknown never-existed ignored:"
+hg status modified added removed deleted unknown never-existed ignored
new file mode 100644
--- /dev/null
+++ b/tests/test-status.out
@@ -0,0 +1,103 @@
+hg status in repo root:
+? a/1/in_a_1
+? a/in_a
+? b/1/in_b_1
+? b/2/in_b_2
+? b/in_b
+? in_root
+hg status . in repo root:
+? a/1/in_a_1
+? a/in_a
+? b/1/in_b_1
+? b/2/in_b_2
+? b/in_b
+? in_root
+hg status in a:
+? a/1/in_a_1
+? a/in_a
+? b/1/in_b_1
+? b/2/in_b_2
+? b/in_b
+? in_root
+hg status . in a:
+? 1/in_a_1
+? in_a
+hg status .. in a:
+? 1/in_a_1
+? in_a
+? ../b/1/in_b_1
+? ../b/2/in_b_2
+? ../b/in_b
+? ../in_root
+hg status in b:
+? a/1/in_a_1
+? a/in_a
+? b/1/in_b_1
+? b/2/in_b_2
+? b/in_b
+? in_root
+hg status . in b:
+? 1/in_b_1
+? 2/in_b_2
+? in_b
+hg status .. in b:
+? ../a/1/in_a_1
+? ../a/in_a
+? 1/in_b_1
+? 2/in_b_2
+? in_b
+? ../in_root
+hg status in a/1:
+? a/1/in_a_1
+? a/in_a
+? b/1/in_b_1
+? b/2/in_b_2
+? b/in_b
+? in_root
+hg status . in a/1:
+? in_a_1
+hg status .. in a/1:
+? in_a_1
+? ../in_a
+hg status in b/1:
+? a/1/in_a_1
+? a/in_a
+? b/1/in_b_1
+? b/2/in_b_2
+? b/in_b
+? in_root
+hg status . in b/1:
+? in_b_1
+hg status .. in b/1:
+? in_b_1
+? ../2/in_b_2
+? ../in_b
+hg status in b/2:
+? a/1/in_a_1
+? a/in_a
+? b/1/in_b_1
+? b/2/in_b_2
+? b/in_b
+? in_root
+hg status . in b/2:
+? in_b_2
+hg status .. in b/2:
+? ../1/in_b_1
+? in_b_2
+? ../in_b
+adding .hgignore
+adding deleted
+adding modified
+adding removed
+hg status:
+A added
+R removed
+! deleted
+? unknown
+hg status modified added removed deleted unknown never-existed ignored:
+never-existed: No such file or directory
+A added
+R removed
+! deleted
+? ignored
+? unknown
new file mode 100755
--- /dev/null
+++ b/tests/test-symlinks
@@ -0,0 +1,57 @@
+#!/bin/sh
+#Test bug regarding symlinks that showed up in hg 0.7
+#Author: Matthew Elder <sseses@gmail.com>
+
+#make and initialize repo
+hg init test; cd test;
+
+#make a file and a symlink
+touch foo; ln -s foo bar;
+
+#import with addremove -- symlink walking should _not_ screwup.
+hg addremove
+
+#commit -- the symlink should _not_ appear added to dir state
+hg commit -m 'initial'
+
+#add a new file so hg will let me commit again
+touch bomb
+
+#again, symlink should _not_ show up on dir state
+hg addremove
+
+#Assert screamed here before, should go by without consequence
+hg commit -m 'is there a bug?'
+
+cd .. ; rm -rf test
+hg init test; cd test;
+
+mkdir dir
+touch a.c dir/a.o dir/b.o
+# test what happens if we want to trick hg
+hg commit -A -m 0
+echo "relglob:*.o" > .hgignore
+rm a.c
+rm dir/a.o
+rm dir/b.o
+mkdir dir/a.o
+ln -sf nonexist dir/b.o
+mkfifo a.c
+# it should show a.c, dir/a.o and dir/b.o deleted
+hg status
+hg status a.c
+
+echo '# test absolute path through symlink outside repo'
+cd ..
+p=`pwd`
+hg init x
+ln -s x y
+cd x
+touch f
+hg add f
+hg status $p/y/f
+
+echo '# try symlink outside repo to file inside'
+ln -s x/f ../z
+# this should fail
+hg status ../z && { echo hg mistakenly exited with status 0; exit 1; } || :
new file mode 100644
--- /dev/null
+++ b/tests/test-symlinks.out
@@ -0,0 +1,17 @@
+(the addremove command is deprecated; use add and remove --after instead)
+adding foo
+(the addremove command is deprecated; use add and remove --after instead)
+adding bomb
+adding a.c
+adding dir/a.o
+adding dir/b.o
+! a.c
+! dir/a.o
+! dir/b.o
+? .hgignore
+a.c: unsupported file type (type is fifo)
+! a.c
+# test absolute path through symlink outside repo
+A f
+# try symlink outside repo to file inside
+abort: ../z not under root
new file mode 100755
--- /dev/null
+++ b/tests/test-tag
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+hg init
+echo a > a
+hg add a
+hg commit -m "test" -d "1000000 0"
+hg history
+hg tag -d "1000000 0" "bleah"
+hg history
+
+echo foo >> .hgtags
+hg tag -d "1000000 0" "bleah2" || echo "failed"
+hg tag -d "1000000 0" -r 0 "bleah2" 1 || echo "failed"
+
+hg revert .hgtags
+hg tag -d "1000000 0" -r 0 "bleah0"
+hg tag -l -d "1000000 0" "bleah1" 1
+
+cat .hgtags
+cat .hg/localtags
+
+hg tag -l 'xx
+newline'
+hg tag -l 'xx:xx'
+true
new file mode 100644
--- /dev/null
+++ b/tests/test-tag.out
@@ -0,0 +1,29 @@
+changeset: 0:0acdaf898367
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: test
+
+changeset: 1:c5c60883086f
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: Added tag bleah for changeset 0acdaf8983679e0aac16e811534eb49d7ee1f2b4
+
+changeset: 0:0acdaf898367
+tag: bleah
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: test
+
+abort: working copy of .hgtags is changed (please commit .hgtags manually)
+failed
+use of 'hg tag NAME [REV]' is deprecated, please use 'hg tag [-r REV] NAME' instead
+abort: use only one form to specify the revision
+failed
+use of 'hg tag NAME [REV]' is deprecated, please use 'hg tag [-r REV] NAME' instead
+0acdaf8983679e0aac16e811534eb49d7ee1f2b4 bleah
+0acdaf8983679e0aac16e811534eb49d7ee1f2b4 bleah0
+c5c60883086f5526bd3e36814b94a73a4e75e172 bleah1
+abort: '\n' cannot be used in a tag name
+abort: ':' cannot be used in a tag name
new file mode 100755
--- /dev/null
+++ b/tests/test-tags
@@ -0,0 +1,62 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+hg id
+echo a > a
+hg add a
+hg commit -m "test" -d "1000000 0"
+hg co
+hg identify
+T=`hg tip -v | head -n 1 | cut -d : -f 3`
+echo "$T first" > .hgtags
+cat .hgtags
+hg add .hgtags
+hg commit -m "add tags" -d "1000000 0"
+hg tags
+hg identify
+echo bb > a
+hg status
+hg identify
+hg co first
+hg id
+hg -v id
+hg status
+echo 1 > b
+hg add b
+hg commit -m "branch" -d "1000000 0"
+hg id
+hg merge 1
+hg id
+hg status
+
+hg commit -m "merge" -d "1000000 0"
+
+# create fake head, make sure tag not visible afterwards
+cp .hgtags tags
+hg tag -d "1000000 0" last
+hg rm .hgtags
+hg commit -m "remove" -d "1000000 0"
+
+mv tags .hgtags
+hg add .hgtags
+hg commit -m "readd" -d "1000000 0"
+
+hg tags
+
+# invalid tags
+echo "spam" >> .hgtags
+echo >> .hgtags
+echo "foo bar" >> .hgtags
+echo "$T invalid" | sed "s/..../a5a5/" >> .hg/localtags
+hg commit -m "tags" -d "1000000 0"
+
+# report tag parse error on other head
+hg up 3
+echo 'x y' >> .hgtags
+hg commit -m "head" -d "1000000 0"
+
+hg tags
+hg tip
+
new file mode 100644
--- /dev/null
+++ b/tests/test-tags.out
@@ -0,0 +1,41 @@
+unknown
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+0acdaf898367 tip
+0acdaf8983679e0aac16e811534eb49d7ee1f2b4 first
+tip 1:8a3ca90d111dc784e6575d373105be12570e8776
+first 0:0acdaf8983679e0aac16e811534eb49d7ee1f2b4
+8a3ca90d111d tip
+M a
+8a3ca90d111d+ tip
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+0acdaf898367+ first
+0acdaf8983679e0aac16e811534eb49d7ee1f2b4+ first
+M a
+8216907a933d tip
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+8216907a933d+8a3ca90d111d+ tip
+M .hgtags
+tip 6:c6af9d771a81bb9c7f267ec03491224a9f8ba1cd
+first 0:0acdaf8983679e0aac16e811534eb49d7ee1f2b4
+.hgtags (rev 7:39bba1bbbc4c), line 2: cannot parse entry
+.hgtags (rev 7:39bba1bbbc4c), line 4: node 'foo' is not well formed
+localtags, line 1: tag 'invalid' refers to unknown node
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+.hgtags (rev 7:39bba1bbbc4c), line 2: cannot parse entry
+.hgtags (rev 7:39bba1bbbc4c), line 4: node 'foo' is not well formed
+.hgtags (rev 8:4ca6f1b1a68c), line 2: node 'x' is not well formed
+localtags, line 1: tag 'invalid' refers to unknown node
+tip 8:4ca6f1b1a68c77be687a03aaeb1614671ba59b20
+first 0:0acdaf8983679e0aac16e811534eb49d7ee1f2b4
+changeset: 8:4ca6f1b1a68c
+.hgtags (rev 7:39bba1bbbc4c), line 2: cannot parse entry
+.hgtags (rev 7:39bba1bbbc4c), line 4: node 'foo' is not well formed
+.hgtags (rev 8:4ca6f1b1a68c), line 2: node 'x' is not well formed
+localtags, line 1: tag 'invalid' refers to unknown node
+tag: tip
+parent: 3:b2ef3841386b
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: head
+
new file mode 100755
--- /dev/null
+++ b/tests/test-unrelated-pull
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+mkdir a
+cd a
+hg init
+echo 123 > a
+hg add a
+hg commit -m "a" -u a -d "1000000 0"
+
+cd ..
+mkdir b
+cd b
+hg init
+echo 321 > b
+hg add b
+hg commit -m "b" -u b -d "1000000 0"
+
+hg pull ../a
+hg pull -f ../a
+hg heads
new file mode 100644
--- /dev/null
+++ b/tests/test-unrelated-pull.out
@@ -0,0 +1,22 @@
+pulling from ../a
+searching for changes
+abort: repository is unrelated
+pulling from ../a
+searching for changes
+warning: repository is unrelated
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files (+1 heads)
+(run 'hg heads' to see heads, 'hg merge' to merge)
+changeset: 1:bdcee5d51fa6
+tag: tip
+user: a
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: a
+
+changeset: 0:f155ba1aa5ba
+user: b
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: b
+
new file mode 100755
--- /dev/null
+++ b/tests/test-up-local-change
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+set -e
+mkdir r1
+cd r1
+hg init
+echo a > a
+hg addremove
+hg commit -m "1" -d "1000000 0"
+
+hg clone . ../r2
+cd ../r2
+hg up
+echo abc > a
+hg diff | sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
+ -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/"
+
+cd ../r1
+echo b > b
+echo a2 > a
+hg addremove
+hg commit -m "2" -d "1000000 0"
+
+cd ../r2
+hg -q pull ../r1
+hg status
+hg parents
+hg --debug up
+hg parents
+hg --debug up 0
+hg parents
+hg --debug merge || echo failed
+hg parents
+hg --debug up
+hg parents
+hg -v history
+hg diff | sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
+ -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/"
+
+# create a second head
+cd ../r1
+hg up 0
+echo b2 > b
+echo a3 > a
+hg addremove
+hg commit -m "3" -d "1000000 0"
+
+cd ../r2
+hg -q pull ../r1
+hg status
+hg parents
+hg --debug up || echo failed
+hg --debug merge || echo failed
+hg --debug merge -f
+hg parents
+hg diff | sed -e "s/\(+++ [a-zA-Z0-9_/.-]*\).*/\1/" \
+ -e "s/\(--- [a-zA-Z0-9_/.-]*\).*/\1/"
+
+# test a local add
+cd ..
+hg init a
+hg init b
+echo a > a/a
+echo a > b/a
+hg --cwd a commit -A -m a
+cd b
+hg add a
+hg pull -u ../a
+hg st
new file mode 100644
--- /dev/null
+++ b/tests/test-up-local-change.out
@@ -0,0 +1,152 @@
+(the addremove command is deprecated; use add and remove --after instead)
+adding a
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+diff -r 33aaa84a386b a
+--- a/a
++++ b/a
+@@ -1,1 +1,1 @@ a
+-a
++abc
+(the addremove command is deprecated; use add and remove --after instead)
+adding b
+M a
+changeset: 0:33aaa84a386b
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1
+
+resolving manifests
+ force None allow None moddirstate True linear True
+ ancestor a0c8bcbbb45c local a0c8bcbbb45c remote 1165e8bd193e
+ a versions differ, resolve
+remote created b
+getting b
+merging a
+resolving a
+file a: my b789fdd96dc2 other d730145abbf9 ancestor b789fdd96dc2
+1 files updated, 1 files merged, 0 files removed, 0 files unresolved
+changeset: 1:802f095af299
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+resolving manifests
+ force None allow None moddirstate True linear True
+ ancestor a0c8bcbbb45c local 1165e8bd193e remote a0c8bcbbb45c
+remote deleted b
+removing b
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+changeset: 0:33aaa84a386b
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1
+
+abort: there is nothing to merge, just use 'hg update'
+failed
+changeset: 0:33aaa84a386b
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 1
+
+resolving manifests
+ force None allow None moddirstate True linear True
+ ancestor a0c8bcbbb45c local a0c8bcbbb45c remote 1165e8bd193e
+ a versions differ, resolve
+remote created b
+getting b
+merging a
+resolving a
+file a: my b789fdd96dc2 other d730145abbf9 ancestor b789fdd96dc2
+1 files updated, 1 files merged, 0 files removed, 0 files unresolved
+changeset: 1:802f095af299
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+changeset: 1:802f095af299cde27a85b2f056aef3829870956c
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+files: a b
+description:
+2
+
+
+changeset: 0:33aaa84a386bd609094aeb21a97c09436c482ef1
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+files: a
+description:
+1
+
+
+diff -r 802f095af299 a
+--- a/a
++++ b/a
+@@ -1,1 +1,1 @@ a2
+-a2
++abc
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+(the addremove command is deprecated; use add and remove --after instead)
+adding b
+M a
+changeset: 1:802f095af299
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+resolving manifests
+ force None allow None moddirstate True linear False
+ ancestor a0c8bcbbb45c local 1165e8bd193e remote 4096f2872392
+ a versions differ, resolve
+ b versions differ, resolve
+this update spans a branch affecting the following files:
+ a (resolve)
+ b (resolve)
+aborting update spanning branches!
+(use 'hg merge' to merge across branches or 'hg update -C' to lose changes)
+failed
+abort: outstanding uncommitted changes
+failed
+resolving manifests
+ force False allow True moddirstate True linear False
+ ancestor a0c8bcbbb45c local 1165e8bd193e remote 4096f2872392
+ a versions differ, resolve
+ b versions differ, resolve
+merging a
+resolving a
+file a: my d730145abbf9 other 13e0d5f949fa ancestor b789fdd96dc2
+merging b
+resolving b
+file b: my 1e88685f5dde other 61de8c7723ca ancestor 000000000000
+0 files updated, 2 files merged, 0 files removed, 0 files unresolved
+(branch merge, don't forget to commit)
+changeset: 1:802f095af299
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 2
+
+changeset: 2:030602aee63d
+tag: tip
+parent: 0:33aaa84a386b
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: 3
+
+diff -r 802f095af299 a
+--- a/a
++++ b/a
+@@ -1,1 +1,1 @@ a2
+-a2
++abc
+adding a
+pulling from ../a
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
new file mode 100755
--- /dev/null
+++ b/tests/test-update-reverse
@@ -0,0 +1,34 @@
+#!/bin/sh
+
+hg init
+touch a
+hg add a
+hg commit -m "Added a" -d "1000000 0"
+
+touch main
+hg add main
+hg commit -m "Added main" -d "1000000 0"
+hg checkout 0
+
+echo Main should be gone
+ls
+
+touch side1
+hg add side1
+hg commit -m "Added side1" -d "1000000 0"
+touch side2
+hg add side2
+hg commit -m "Added side2" -d "1000000 0"
+
+hg log
+
+echo Should have two heads, side2 and main
+hg heads
+
+echo Should show "a side1 side2"
+ls
+
+hg update --debug -C 1
+echo Should only show "a main"
+ls
+
new file mode 100644
--- /dev/null
+++ b/tests/test-update-reverse.out
@@ -0,0 +1,54 @@
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+Main should be gone
+a
+changeset: 3:ded32b0db104
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: Added side2
+
+changeset: 2:92a816cea698
+parent: 0:537353581d3d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: Added side1
+
+changeset: 1:221226fb2bd8
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: Added main
+
+changeset: 0:537353581d3d
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: Added a
+
+Should have two heads, side2 and main
+changeset: 3:ded32b0db104
+tag: tip
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: Added side2
+
+changeset: 1:221226fb2bd8
+user: test
+date: Mon Jan 12 13:46:40 1970 +0000
+summary: Added main
+
+Should show a side1 side2
+a
+side1
+side2
+resolving manifests
+ force 1 allow None moddirstate True linear False
+ ancestor 8515d4bfda76 local 1c0f48f8ece6 remote 0594b9004bae
+remote deleted side2, clobbering
+remote deleted side1, clobbering
+remote created main
+getting main
+removing side1
+removing side2
+1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+Should only show a main
+a
+main
new file mode 100755
--- /dev/null
+++ b/tests/test-walk
@@ -0,0 +1,61 @@
+#!/bin/sh
+
+mkdir t
+cd t
+hg init
+mkdir -p beans
+for b in kidney navy turtle borlotti black pinto; do
+ echo $b > beans/$b
+done
+mkdir -p mammals/Procyonidae
+for m in cacomistle coatimundi raccoon; do
+ echo $m > mammals/Procyonidae/$m
+done
+echo skunk > mammals/skunk
+echo fennel > fennel
+echo fenugreek > fenugreek
+echo fiddlehead > fiddlehead
+echo glob:glob > glob:glob
+hg addremove
+hg commit -m "commit #0" -d "1000000 0"
+hg debugwalk
+cd mammals
+hg debugwalk .
+hg debugwalk Procyonidae
+cd Procyonidae
+hg debugwalk .
+hg debugwalk ..
+cd ..
+hg debugwalk ../beans
+hg debugwalk .
+hg debugwalk .hg
+hg debugwalk ../.hg
+cd ..
+hg debugwalk -Ibeans
+hg debugwalk 'glob:mammals/../beans/b*'
+hg debugwalk '-X*/Procyonidae' mammals
+hg debugwalk path:mammals
+hg debugwalk ..
+hg debugwalk beans/../..
+hg debugwalk .hg
+hg debugwalk beans/../.hg
+hg debugwalk beans/../.hg/data
+hg debugwalk beans/.hg
+# Don't know how to test absolute paths without always getting a false
+# error.
+#hg debugwalk `pwd`/beans
+#hg debugwalk `pwd`/..
+hg debugwalk glob:\*
+hg debugwalk 're:.*[kb]$'
+hg debugwalk path:beans/black
+hg debugwalk beans 'glob:beans/*'
+hg debugwalk 'glob:j*'
+hg debugwalk NOEXIST
+mkfifo fifo
+hg debugwalk fifo
+rm fenugreek
+hg debugwalk fenugreek
+hg rm fenugreek
+hg debugwalk fenugreek
+touch new
+hg debugwalk new
new file mode 100644
--- /dev/null
+++ b/tests/test-walk.out
@@ -0,0 +1,98 @@
+(the addremove command is deprecated; use add and remove --after instead)
+adding beans/black
+adding beans/borlotti
+adding beans/kidney
+adding beans/navy
+adding beans/pinto
+adding beans/turtle
+adding fennel
+adding fenugreek
+adding fiddlehead
+adding glob:glob
+adding mammals/Procyonidae/cacomistle
+adding mammals/Procyonidae/coatimundi
+adding mammals/Procyonidae/raccoon
+adding mammals/skunk
+f beans/black beans/black
+f beans/borlotti beans/borlotti
+f beans/kidney beans/kidney
+f beans/navy beans/navy
+f beans/pinto beans/pinto
+f beans/turtle beans/turtle
+f fennel fennel
+f fenugreek fenugreek
+f fiddlehead fiddlehead
+f glob:glob glob:glob
+f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
+f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
+f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
+f mammals/skunk mammals/skunk
+f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
+f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
+f mammals/Procyonidae/raccoon Procyonidae/raccoon
+f mammals/skunk skunk
+f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
+f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
+f mammals/Procyonidae/raccoon Procyonidae/raccoon
+f mammals/Procyonidae/cacomistle cacomistle
+f mammals/Procyonidae/coatimundi coatimundi
+f mammals/Procyonidae/raccoon raccoon
+f mammals/Procyonidae/cacomistle cacomistle
+f mammals/Procyonidae/coatimundi coatimundi
+f mammals/Procyonidae/raccoon raccoon
+f mammals/skunk ../skunk
+f beans/black ../beans/black
+f beans/borlotti ../beans/borlotti
+f beans/kidney ../beans/kidney
+f beans/navy ../beans/navy
+f beans/pinto ../beans/pinto
+f beans/turtle ../beans/turtle
+f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
+f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
+f mammals/Procyonidae/raccoon Procyonidae/raccoon
+f mammals/skunk skunk
+.hg: No such file or directory
+abort: path contains illegal component: .hg
+
+f beans/black beans/black
+f beans/borlotti beans/borlotti
+f beans/kidney beans/kidney
+f beans/navy beans/navy
+f beans/pinto beans/pinto
+f beans/turtle beans/turtle
+f beans/black beans/black
+f beans/borlotti beans/borlotti
+f mammals/skunk mammals/skunk
+f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
+f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
+f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
+f mammals/skunk mammals/skunk
+abort: .. not under root
+abort: beans/../.. not under root
+abort: path contains illegal component: .hg
+
+abort: path contains illegal component: .hg
+
+abort: path contains illegal component: .hg/data
+
+beans/.hg: No such file or directory
+f fennel fennel
+f fenugreek fenugreek
+f fiddlehead fiddlehead
+f glob:glob glob:glob
+f beans/black beans/black
+f fenugreek fenugreek
+f glob:glob glob:glob
+f mammals/skunk mammals/skunk
+f beans/black beans/black
+f beans/black beans/black
+f beans/borlotti beans/borlotti
+f beans/kidney beans/kidney
+f beans/navy beans/navy
+f beans/pinto beans/pinto
+f beans/turtle beans/turtle
+NOEXIST: No such file or directory
+fifo: unsupported file type (type is fifo)
+m fenugreek fenugreek exact
+m fenugreek fenugreek exact
+f new new exact
new file mode 100755
--- /dev/null
+++ b/tests/tinyproxy.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+
+__doc__ = """Tiny HTTP Proxy.
+
+This module implements GET, HEAD, POST, PUT and DELETE methods
+on BaseHTTPServer, and behaves as an HTTP proxy. The CONNECT
+method is also implemented experimentally, but has not been
+tested yet.
+
+Any help will be greatly appreciated. SUZUKI Hisao
+"""
+
+__version__ = "0.2.1"
+
+import BaseHTTPServer, select, socket, SocketServer, urlparse
+
+class ProxyHandler (BaseHTTPServer.BaseHTTPRequestHandler):
+ __base = BaseHTTPServer.BaseHTTPRequestHandler
+ __base_handle = __base.handle
+
+ server_version = "TinyHTTPProxy/" + __version__
+ rbufsize = 0 # self.rfile Be unbuffered
+
+ def handle(self):
+ (ip, port) = self.client_address
+ if hasattr(self, 'allowed_clients') and ip not in self.allowed_clients:
+ self.raw_requestline = self.rfile.readline()
+ if self.parse_request(): self.send_error(403)
+ else:
+ self.__base_handle()
+
+ def _connect_to(self, netloc, soc):
+ i = netloc.find(':')
+ if i >= 0:
+ host_port = netloc[:i], int(netloc[i+1:])
+ else:
+ host_port = netloc, 80
+ print "\t" "connect to %s:%d" % host_port
+ try: soc.connect(host_port)
+ except socket.error, arg:
+ try: msg = arg[1]
+ except: msg = arg
+ self.send_error(404, msg)
+ return 0
+ return 1
+
+ def do_CONNECT(self):
+ soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ try:
+ if self._connect_to(self.path, soc):
+ self.log_request(200)
+ self.wfile.write(self.protocol_version +
+ " 200 Connection established\r\n")
+ self.wfile.write("Proxy-agent: %s\r\n" % self.version_string())
+ self.wfile.write("\r\n")
+ self._read_write(soc, 300)
+ finally:
+ print "\t" "bye"
+ soc.close()
+ self.connection.close()
+
+ def do_GET(self):
+ (scm, netloc, path, params, query, fragment) = urlparse.urlparse(
+ self.path, 'http')
+ if scm != 'http' or fragment or not netloc:
+ self.send_error(400, "bad url %s" % self.path)
+ return
+ soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ try:
+ if self._connect_to(netloc, soc):
+ self.log_request()
+ soc.send("%s %s %s\r\n" % (
+ self.command,
+ urlparse.urlunparse(('', '', path, params, query, '')),
+ self.request_version))
+ self.headers['Connection'] = 'close'
+ del self.headers['Proxy-Connection']
+ for key_val in self.headers.items():
+ soc.send("%s: %s\r\n" % key_val)
+ soc.send("\r\n")
+ self._read_write(soc)
+ finally:
+ print "\t" "bye"
+ soc.close()
+ self.connection.close()
+
+ def _read_write(self, soc, max_idling=20):
+ iw = [self.connection, soc]
+ ow = []
+ count = 0
+ while 1:
+ count += 1
+ (ins, _, exs) = select.select(iw, ow, iw, 3)
+ if exs: break
+ if ins:
+ for i in ins:
+ if i is soc:
+ out = self.connection
+ else:
+ out = soc
+ data = i.recv(8192)
+ if data:
+ out.send(data)
+ count = 0
+ else:
+ print "\t" "idle", count
+ if count == max_idling: break
+
+ do_HEAD = do_GET
+ do_POST = do_GET
+ do_PUT = do_GET
+ do_DELETE=do_GET
+
+class ThreadingHTTPServer (SocketServer.ThreadingMixIn,
+ BaseHTTPServer.HTTPServer): pass
+
+if __name__ == '__main__':
+ from sys import argv
+ if argv[1:] and argv[1] in ('-h', '--help'):
+ print argv[0], "[port [allowed_client_name ...]]"
+ else:
+ if argv[2:]:
+ allowed = []
+ for name in argv[2:]:
+ client = socket.gethostbyname(name)
+ allowed.append(client)
+ print "Accept: %s (%s)" % (client, name)
+ ProxyHandler.allowed_clients = allowed
+ del argv[2:]
+ else:
+ print "Any clients will be served..."
+ BaseHTTPServer.test(ProxyHandler, ThreadingHTTPServer)