Mercurial > hg > mercurial-crew-with-dirclash
comparison mercurial/localrepo.py @ 3044:fcadf7a32425
Merge with mpm
author | Josef "Jeff" Sipek <jeffpc@josefsipek.net> |
---|---|
date | Sun, 03 Sep 2006 06:06:02 -0400 |
parents | eef469259745 |
children | aebc3f64b20f |
comparison
equal
deleted
inserted
replaced
3043:2a4d4aecb2b4 | 3044:fcadf7a32425 |
---|---|
1 # localrepo.py - read/write repository class for mercurial | 1 # localrepo.py - read/write repository class for mercurial |
2 # | 2 # |
3 # Copyright 2005 Matt Mackall <mpm@selenic.com> | 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
4 # | 4 # |
5 # This software may be used and distributed according to the terms | 5 # This software may be used and distributed according to the terms |
6 # of the GNU General Public License, incorporated herein by reference. | 6 # of the GNU General Public License, incorporated herein by reference. |
7 | 7 |
8 from node import * | 8 from node import * |
167 r = runhook(hname, cmd) or r | 167 r = runhook(hname, cmd) or r |
168 return r | 168 return r |
169 | 169 |
170 tag_disallowed = ':\r\n' | 170 tag_disallowed = ':\r\n' |
171 | 171 |
172 def tag(self, name, node, local=False, message=None, user=None, date=None): | 172 def tag(self, name, node, message, local, user, date): |
173 '''tag a revision with a symbolic name. | 173 '''tag a revision with a symbolic name. |
174 | 174 |
175 if local is True, the tag is stored in a per-repository file. | 175 if local is True, the tag is stored in a per-repository file. |
176 otherwise, it is stored in the .hgtags file, and a new | 176 otherwise, it is stored in the .hgtags file, and a new |
177 changeset is committed with the change. | 177 changeset is committed with the change. |
189 | 189 |
190 for c in self.tag_disallowed: | 190 for c in self.tag_disallowed: |
191 if c in name: | 191 if c in name: |
192 raise util.Abort(_('%r cannot be used in a tag name') % c) | 192 raise util.Abort(_('%r cannot be used in a tag name') % c) |
193 | 193 |
194 self.hook('pretag', throw=True, node=node, tag=name, local=local) | 194 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local) |
195 | 195 |
196 if local: | 196 if local: |
197 self.opener('localtags', 'a').write('%s %s\n' % (node, name)) | 197 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name)) |
198 self.hook('tag', node=node, tag=name, local=local) | 198 self.hook('tag', node=hex(node), tag=name, local=local) |
199 return | 199 return |
200 | 200 |
201 for x in self.changes(): | 201 for x in self.status()[:5]: |
202 if '.hgtags' in x: | 202 if '.hgtags' in x: |
203 raise util.Abort(_('working copy of .hgtags is changed ' | 203 raise util.Abort(_('working copy of .hgtags is changed ' |
204 '(please commit .hgtags manually)')) | 204 '(please commit .hgtags manually)')) |
205 | 205 |
206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name)) | 206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name)) |
207 if self.dirstate.state('.hgtags') == '?': | 207 if self.dirstate.state('.hgtags') == '?': |
208 self.add(['.hgtags']) | 208 self.add(['.hgtags']) |
209 | 209 |
210 if not message: | |
211 message = _('Added tag %s for changeset %s') % (name, node) | |
212 | |
213 self.commit(['.hgtags'], message, user, date) | 210 self.commit(['.hgtags'], message, user, date) |
214 self.hook('tag', node=node, tag=name, local=local) | 211 self.hook('tag', node=hex(node), tag=name, local=local) |
215 | 212 |
216 def tags(self): | 213 def tags(self): |
217 '''return a mapping of tag to node''' | 214 '''return a mapping of tag to node''' |
218 if not self.tagscache: | 215 if not self.tagscache: |
219 self.tagscache = {} | 216 self.tagscache = {} |
290 | 287 |
291 def lookup(self, key): | 288 def lookup(self, key): |
292 try: | 289 try: |
293 return self.tags()[key] | 290 return self.tags()[key] |
294 except KeyError: | 291 except KeyError: |
292 if key == '.': | |
293 key = self.dirstate.parents()[0] | |
294 if key == nullid: | |
295 raise repo.RepoError(_("no revision checked out")) | |
295 try: | 296 try: |
296 return self.changelog.lookup(key) | 297 return self.changelog.lookup(key) |
297 except: | 298 except: |
298 raise repo.RepoError(_("unknown revision '%s'") % key) | 299 raise repo.RepoError(_("unknown revision '%s'") % key) |
299 | 300 |
464 orig_parent = self.dirstate.parents()[0] or nullid | 465 orig_parent = self.dirstate.parents()[0] or nullid |
465 p1 = p1 or self.dirstate.parents()[0] or nullid | 466 p1 = p1 or self.dirstate.parents()[0] or nullid |
466 p2 = p2 or self.dirstate.parents()[1] or nullid | 467 p2 = p2 or self.dirstate.parents()[1] or nullid |
467 c1 = self.changelog.read(p1) | 468 c1 = self.changelog.read(p1) |
468 c2 = self.changelog.read(p2) | 469 c2 = self.changelog.read(p2) |
469 m1 = self.manifest.read(c1[0]) | 470 m1 = self.manifest.read(c1[0]).copy() |
470 mf1 = self.manifest.readflags(c1[0]) | |
471 m2 = self.manifest.read(c2[0]) | 471 m2 = self.manifest.read(c2[0]) |
472 changed = [] | 472 changed = [] |
473 | 473 |
474 if orig_parent == p1: | 474 if orig_parent == p1: |
475 update_dirstate = 1 | 475 update_dirstate = 1 |
478 | 478 |
479 if not wlock: | 479 if not wlock: |
480 wlock = self.wlock() | 480 wlock = self.wlock() |
481 l = self.lock() | 481 l = self.lock() |
482 tr = self.transaction() | 482 tr = self.transaction() |
483 mm = m1.copy() | |
484 mfm = mf1.copy() | |
485 linkrev = self.changelog.count() | 483 linkrev = self.changelog.count() |
486 for f in files: | 484 for f in files: |
487 try: | 485 try: |
488 t = self.wread(f) | 486 t = self.wread(f) |
489 tm = util.is_exec(self.wjoin(f), mfm.get(f, False)) | 487 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f))) |
490 r = self.file(f) | 488 r = self.file(f) |
491 mfm[f] = tm | |
492 | 489 |
493 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2) | 490 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2) |
494 if entry: | 491 if entry: |
495 mm[f] = entry | 492 m1[f] = entry |
496 continue | 493 continue |
497 | 494 |
498 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2) | 495 m1[f] = r.add(t, {}, tr, linkrev, fp1, fp2) |
499 changed.append(f) | 496 changed.append(f) |
500 if update_dirstate: | 497 if update_dirstate: |
501 self.dirstate.update([f], "n") | 498 self.dirstate.update([f], "n") |
502 except IOError: | 499 except IOError: |
503 try: | 500 try: |
504 del mm[f] | 501 del m1[f] |
505 del mfm[f] | |
506 if update_dirstate: | 502 if update_dirstate: |
507 self.dirstate.forget([f]) | 503 self.dirstate.forget([f]) |
508 except: | 504 except: |
509 # deleted from p2? | 505 # deleted from p2? |
510 pass | 506 pass |
511 | 507 |
512 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0]) | 508 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0]) |
513 user = user or self.ui.username() | 509 user = user or self.ui.username() |
514 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date) | 510 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date) |
515 tr.close() | 511 tr.close() |
516 if update_dirstate: | 512 if update_dirstate: |
517 self.dirstate.setparents(n, nullid) | 513 self.dirstate.setparents(n, nullid) |
531 elif s == 'r': | 527 elif s == 'r': |
532 remove.append(f) | 528 remove.append(f) |
533 else: | 529 else: |
534 self.ui.warn(_("%s not tracked!\n") % f) | 530 self.ui.warn(_("%s not tracked!\n") % f) |
535 else: | 531 else: |
536 modified, added, removed, deleted, unknown = self.changes(match=match) | 532 modified, added, removed, deleted, unknown = self.status(match=match)[:5] |
537 commit = modified + added | 533 commit = modified + added |
538 remove = removed | 534 remove = removed |
539 | 535 |
540 p1, p2 = self.dirstate.parents() | 536 p1, p2 = self.dirstate.parents() |
541 c1 = self.changelog.read(p1) | 537 c1 = self.changelog.read(p1) |
542 c2 = self.changelog.read(p2) | 538 c2 = self.changelog.read(p2) |
543 m1 = self.manifest.read(c1[0]) | 539 m1 = self.manifest.read(c1[0]).copy() |
544 mf1 = self.manifest.readflags(c1[0]) | |
545 m2 = self.manifest.read(c2[0]) | 540 m2 = self.manifest.read(c2[0]) |
546 | 541 |
547 if not commit and not remove and not force and p2 == nullid: | 542 if not commit and not remove and not force and p2 == nullid: |
548 self.ui.status(_("nothing changed\n")) | 543 self.ui.status(_("nothing changed\n")) |
549 return None | 544 return None |
565 linkrev = self.changelog.count() | 560 linkrev = self.changelog.count() |
566 commit.sort() | 561 commit.sort() |
567 for f in commit: | 562 for f in commit: |
568 self.ui.note(f + "\n") | 563 self.ui.note(f + "\n") |
569 try: | 564 try: |
570 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False)) | 565 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f))) |
571 t = self.wread(f) | 566 t = self.wread(f) |
572 except IOError: | 567 except IOError: |
573 self.ui.warn(_("trouble committing %s!\n") % f) | 568 self.ui.warn(_("trouble committing %s!\n") % f) |
574 raise | 569 raise |
575 | 570 |
592 # remember what we've added so that we can later calculate | 587 # remember what we've added so that we can later calculate |
593 # the files to pull from a set of changesets | 588 # the files to pull from a set of changesets |
594 changed.append(f) | 589 changed.append(f) |
595 | 590 |
596 # update manifest | 591 # update manifest |
597 m1 = m1.copy() | |
598 m1.update(new) | 592 m1.update(new) |
599 for f in remove: | 593 for f in remove: |
600 if f in m1: | 594 if f in m1: |
601 del m1[f] | 595 del m1[f] |
602 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0], | 596 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], |
603 (new, remove)) | 597 (new, remove)) |
604 | 598 |
605 # add changeset | 599 # add changeset |
606 new = new.keys() | 600 new = new.keys() |
607 new.sort() | 601 new.sort() |
669 If node2 is None, compare node1 with working directory. | 663 If node2 is None, compare node1 with working directory. |
670 """ | 664 """ |
671 | 665 |
672 def fcmp(fn, mf): | 666 def fcmp(fn, mf): |
673 t1 = self.wread(fn) | 667 t1 = self.wread(fn) |
674 t2 = self.file(fn).read(mf.get(fn, nullid)) | 668 return self.file(fn).cmp(mf.get(fn, nullid), t1) |
675 return cmp(t1, t2) | |
676 | 669 |
677 def mfmatches(node): | 670 def mfmatches(node): |
678 change = self.changelog.read(node) | 671 change = self.changelog.read(node) |
679 mf = dict(self.manifest.read(change[0])) | 672 mf = dict(self.manifest.read(change[0])) |
680 for fn in mf.keys(): | 673 for fn in mf.keys(): |
712 # do a full compare of any files that might have changed | 705 # do a full compare of any files that might have changed |
713 mf2 = mfmatches(self.dirstate.parents()[0]) | 706 mf2 = mfmatches(self.dirstate.parents()[0]) |
714 for f in lookup: | 707 for f in lookup: |
715 if fcmp(f, mf2): | 708 if fcmp(f, mf2): |
716 modified.append(f) | 709 modified.append(f) |
717 elif wlock is not None: | 710 else: |
718 self.dirstate.update([f], "n") | 711 clean.append(f) |
712 if wlock is not None: | |
713 self.dirstate.update([f], "n") | |
719 else: | 714 else: |
720 # we are comparing working dir against non-parent | 715 # we are comparing working dir against non-parent |
721 # generate a pseudo-manifest for the working dir | 716 # generate a pseudo-manifest for the working dir |
722 mf2 = mfmatches(self.dirstate.parents()[0]) | 717 mf2 = mfmatches(self.dirstate.parents()[0]) |
723 for f in lookup + modified + added: | 718 for f in lookup + modified + added: |
751 | 746 |
752 # sort and return results: | 747 # sort and return results: |
753 for l in modified, added, removed, deleted, unknown, ignored, clean: | 748 for l in modified, added, removed, deleted, unknown, ignored, clean: |
754 l.sort() | 749 l.sort() |
755 return (modified, added, removed, deleted, unknown, ignored, clean) | 750 return (modified, added, removed, deleted, unknown, ignored, clean) |
756 | |
757 def changes(self, node1=None, node2=None, files=[], match=util.always, | |
758 wlock=None, list_ignored=False, list_clean=False): | |
759 '''DEPRECATED - use status instead''' | |
760 marduit = self.status(node1, node2, files, match, wlock, | |
761 list_ignored, list_clean) | |
762 if list_ignored: | |
763 return marduit[:-1] | |
764 else: | |
765 return marduit[:-2] | |
766 | 751 |
767 def add(self, list, wlock=None): | 752 def add(self, list, wlock=None): |
768 if not wlock: | 753 if not wlock: |
769 wlock = self.wlock() | 754 wlock = self.wlock() |
770 for f in list: | 755 for f in list: |
810 self.dirstate.update([f], "r") | 795 self.dirstate.update([f], "r") |
811 | 796 |
812 def undelete(self, list, wlock=None): | 797 def undelete(self, list, wlock=None): |
813 p = self.dirstate.parents()[0] | 798 p = self.dirstate.parents()[0] |
814 mn = self.changelog.read(p)[0] | 799 mn = self.changelog.read(p)[0] |
815 mf = self.manifest.readflags(mn) | |
816 m = self.manifest.read(mn) | 800 m = self.manifest.read(mn) |
817 if not wlock: | 801 if not wlock: |
818 wlock = self.wlock() | 802 wlock = self.wlock() |
819 for f in list: | 803 for f in list: |
820 if self.dirstate.state(f) not in "r": | 804 if self.dirstate.state(f) not in "r": |
821 self.ui.warn("%s not removed!\n" % f) | 805 self.ui.warn("%s not removed!\n" % f) |
822 else: | 806 else: |
823 t = self.file(f).read(m[f]) | 807 t = self.file(f).read(m[f]) |
824 self.wwrite(f, t) | 808 self.wwrite(f, t) |
825 util.set_exec(self.wjoin(f), mf[f]) | 809 util.set_exec(self.wjoin(f), m.execf(f)) |
826 self.dirstate.update([f], "n") | 810 self.dirstate.update([f], "n") |
827 | 811 |
828 def copy(self, source, dest, wlock=None): | 812 def copy(self, source, dest, wlock=None): |
829 p = self.wjoin(dest) | 813 p = self.wjoin(dest) |
830 if not os.path.exists(p): | 814 if not os.path.exists(p): |
1116 if force: | 1100 if force: |
1117 self.ui.warn(_("warning: repository is unrelated\n")) | 1101 self.ui.warn(_("warning: repository is unrelated\n")) |
1118 else: | 1102 else: |
1119 raise util.Abort(_("repository is unrelated")) | 1103 raise util.Abort(_("repository is unrelated")) |
1120 | 1104 |
1121 self.ui.note(_("found new changesets starting at ") + | 1105 self.ui.debug(_("found new changesets starting at ") + |
1122 " ".join([short(f) for f in fetch]) + "\n") | 1106 " ".join([short(f) for f in fetch]) + "\n") |
1123 | 1107 |
1124 self.ui.debug(_("%d total queries\n") % reqcnt) | 1108 self.ui.debug(_("%d total queries\n") % reqcnt) |
1125 | 1109 |
1126 return fetch.keys() | 1110 return fetch.keys() |
1171 if heads: | 1155 if heads: |
1172 return subset, updated_heads.keys() | 1156 return subset, updated_heads.keys() |
1173 else: | 1157 else: |
1174 return subset | 1158 return subset |
1175 | 1159 |
1176 def pull(self, remote, heads=None, force=False): | 1160 def pull(self, remote, heads=None, force=False, lock=None): |
1177 l = self.lock() | 1161 mylock = False |
1178 | 1162 if not lock: |
1179 fetch = self.findincoming(remote, force=force) | 1163 lock = self.lock() |
1180 if fetch == [nullid]: | 1164 mylock = True |
1181 self.ui.status(_("requesting all changes\n")) | 1165 |
1182 | 1166 try: |
1183 if not fetch: | 1167 fetch = self.findincoming(remote, force=force) |
1184 self.ui.status(_("no changes found\n")) | 1168 if fetch == [nullid]: |
1185 return 0 | 1169 self.ui.status(_("requesting all changes\n")) |
1186 | 1170 |
1187 if heads is None: | 1171 if not fetch: |
1188 cg = remote.changegroup(fetch, 'pull') | 1172 self.ui.status(_("no changes found\n")) |
1189 else: | 1173 return 0 |
1190 cg = remote.changegroupsubset(fetch, heads, 'pull') | 1174 |
1191 return self.addchangegroup(cg, 'pull', remote.url()) | 1175 if heads is None: |
1176 cg = remote.changegroup(fetch, 'pull') | |
1177 else: | |
1178 cg = remote.changegroupsubset(fetch, heads, 'pull') | |
1179 return self.addchangegroup(cg, 'pull', remote.url()) | |
1180 finally: | |
1181 if mylock: | |
1182 lock.release() | |
1192 | 1183 |
1193 def push(self, remote, force=False, revs=None): | 1184 def push(self, remote, force=False, revs=None): |
1194 # there are two ways to push to remote repo: | 1185 # there are two ways to push to remote repo: |
1195 # | 1186 # |
1196 # addchangegroup assumes local user can lock remote | 1187 # addchangegroup assumes local user can lock remote |
1691 self.hook("incoming", node=hex(self.changelog.node(i)), | 1682 self.hook("incoming", node=hex(self.changelog.node(i)), |
1692 source=srctype, url=url) | 1683 source=srctype, url=url) |
1693 | 1684 |
1694 return newheads - oldheads + 1 | 1685 return newheads - oldheads + 1 |
1695 | 1686 |
1696 def update(self, node, allow=False, force=False, choose=None, | |
1697 moddirstate=True, forcemerge=False, wlock=None, show_stats=True): | |
1698 pl = self.dirstate.parents() | |
1699 if not force and pl[1] != nullid: | |
1700 raise util.Abort(_("outstanding uncommitted merges")) | |
1701 | |
1702 err = False | |
1703 | |
1704 p1, p2 = pl[0], node | |
1705 pa = self.changelog.ancestor(p1, p2) | |
1706 m1n = self.changelog.read(p1)[0] | |
1707 m2n = self.changelog.read(p2)[0] | |
1708 man = self.manifest.ancestor(m1n, m2n) | |
1709 m1 = self.manifest.read(m1n) | |
1710 mf1 = self.manifest.readflags(m1n) | |
1711 m2 = self.manifest.read(m2n).copy() | |
1712 mf2 = self.manifest.readflags(m2n) | |
1713 ma = self.manifest.read(man) | |
1714 mfa = self.manifest.readflags(man) | |
1715 | |
1716 modified, added, removed, deleted, unknown = self.changes() | |
1717 | |
1718 # is this a jump, or a merge? i.e. is there a linear path | |
1719 # from p1 to p2? | |
1720 linear_path = (pa == p1 or pa == p2) | |
1721 | |
1722 if allow and linear_path: | |
1723 raise util.Abort(_("there is nothing to merge, just use " | |
1724 "'hg update' or look at 'hg heads'")) | |
1725 if allow and not forcemerge: | |
1726 if modified or added or removed: | |
1727 raise util.Abort(_("outstanding uncommitted changes")) | |
1728 | |
1729 if not forcemerge and not force: | |
1730 for f in unknown: | |
1731 if f in m2: | |
1732 t1 = self.wread(f) | |
1733 t2 = self.file(f).read(m2[f]) | |
1734 if cmp(t1, t2) != 0: | |
1735 raise util.Abort(_("'%s' already exists in the working" | |
1736 " dir and differs from remote") % f) | |
1737 | |
1738 # resolve the manifest to determine which files | |
1739 # we care about merging | |
1740 self.ui.note(_("resolving manifests\n")) | |
1741 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") % | |
1742 (force, allow, moddirstate, linear_path)) | |
1743 self.ui.debug(_(" ancestor %s local %s remote %s\n") % | |
1744 (short(man), short(m1n), short(m2n))) | |
1745 | |
1746 merge = {} | |
1747 get = {} | |
1748 remove = [] | |
1749 | |
1750 # construct a working dir manifest | |
1751 mw = m1.copy() | |
1752 mfw = mf1.copy() | |
1753 umap = dict.fromkeys(unknown) | |
1754 | |
1755 for f in added + modified + unknown: | |
1756 mw[f] = "" | |
1757 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False)) | |
1758 | |
1759 if moddirstate and not wlock: | |
1760 wlock = self.wlock() | |
1761 | |
1762 for f in deleted + removed: | |
1763 if f in mw: | |
1764 del mw[f] | |
1765 | |
1766 # If we're jumping between revisions (as opposed to merging), | |
1767 # and if neither the working directory nor the target rev has | |
1768 # the file, then we need to remove it from the dirstate, to | |
1769 # prevent the dirstate from listing the file when it is no | |
1770 # longer in the manifest. | |
1771 if moddirstate and linear_path and f not in m2: | |
1772 self.dirstate.forget((f,)) | |
1773 | |
1774 # Compare manifests | |
1775 for f, n in mw.iteritems(): | |
1776 if choose and not choose(f): | |
1777 continue | |
1778 if f in m2: | |
1779 s = 0 | |
1780 | |
1781 # is the wfile new since m1, and match m2? | |
1782 if f not in m1: | |
1783 t1 = self.wread(f) | |
1784 t2 = self.file(f).read(m2[f]) | |
1785 if cmp(t1, t2) == 0: | |
1786 n = m2[f] | |
1787 del t1, t2 | |
1788 | |
1789 # are files different? | |
1790 if n != m2[f]: | |
1791 a = ma.get(f, nullid) | |
1792 # are both different from the ancestor? | |
1793 if n != a and m2[f] != a: | |
1794 self.ui.debug(_(" %s versions differ, resolve\n") % f) | |
1795 # merge executable bits | |
1796 # "if we changed or they changed, change in merge" | |
1797 a, b, c = mfa.get(f, 0), mfw[f], mf2[f] | |
1798 mode = ((a^b) | (a^c)) ^ a | |
1799 merge[f] = (m1.get(f, nullid), m2[f], mode) | |
1800 s = 1 | |
1801 # are we clobbering? | |
1802 # is remote's version newer? | |
1803 # or are we going back in time? | |
1804 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]): | |
1805 self.ui.debug(_(" remote %s is newer, get\n") % f) | |
1806 get[f] = m2[f] | |
1807 s = 1 | |
1808 elif f in umap or f in added: | |
1809 # this unknown file is the same as the checkout | |
1810 # we need to reset the dirstate if the file was added | |
1811 get[f] = m2[f] | |
1812 | |
1813 if not s and mfw[f] != mf2[f]: | |
1814 if force: | |
1815 self.ui.debug(_(" updating permissions for %s\n") % f) | |
1816 util.set_exec(self.wjoin(f), mf2[f]) | |
1817 else: | |
1818 a, b, c = mfa.get(f, 0), mfw[f], mf2[f] | |
1819 mode = ((a^b) | (a^c)) ^ a | |
1820 if mode != b: | |
1821 self.ui.debug(_(" updating permissions for %s\n") | |
1822 % f) | |
1823 util.set_exec(self.wjoin(f), mode) | |
1824 del m2[f] | |
1825 elif f in ma: | |
1826 if n != ma[f]: | |
1827 r = _("d") | |
1828 if not force and (linear_path or allow): | |
1829 r = self.ui.prompt( | |
1830 (_(" local changed %s which remote deleted\n") % f) + | |
1831 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) | |
1832 if r == _("d"): | |
1833 remove.append(f) | |
1834 else: | |
1835 self.ui.debug(_("other deleted %s\n") % f) | |
1836 remove.append(f) # other deleted it | |
1837 else: | |
1838 # file is created on branch or in working directory | |
1839 if force and f not in umap: | |
1840 self.ui.debug(_("remote deleted %s, clobbering\n") % f) | |
1841 remove.append(f) | |
1842 elif n == m1.get(f, nullid): # same as parent | |
1843 if p2 == pa: # going backwards? | |
1844 self.ui.debug(_("remote deleted %s\n") % f) | |
1845 remove.append(f) | |
1846 else: | |
1847 self.ui.debug(_("local modified %s, keeping\n") % f) | |
1848 else: | |
1849 self.ui.debug(_("working dir created %s, keeping\n") % f) | |
1850 | |
1851 for f, n in m2.iteritems(): | |
1852 if choose and not choose(f): | |
1853 continue | |
1854 if f[0] == "/": | |
1855 continue | |
1856 if f in ma and n != ma[f]: | |
1857 r = _("k") | |
1858 if not force and (linear_path or allow): | |
1859 r = self.ui.prompt( | |
1860 (_("remote changed %s which local deleted\n") % f) + | |
1861 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) | |
1862 if r == _("k"): | |
1863 get[f] = n | |
1864 elif f not in ma: | |
1865 self.ui.debug(_("remote created %s\n") % f) | |
1866 get[f] = n | |
1867 else: | |
1868 if force or p2 == pa: # going backwards? | |
1869 self.ui.debug(_("local deleted %s, recreating\n") % f) | |
1870 get[f] = n | |
1871 else: | |
1872 self.ui.debug(_("local deleted %s\n") % f) | |
1873 | |
1874 del mw, m1, m2, ma | |
1875 | |
1876 if force: | |
1877 for f in merge: | |
1878 get[f] = merge[f][1] | |
1879 merge = {} | |
1880 | |
1881 if linear_path or force: | |
1882 # we don't need to do any magic, just jump to the new rev | |
1883 branch_merge = False | |
1884 p1, p2 = p2, nullid | |
1885 else: | |
1886 if not allow: | |
1887 self.ui.status(_("this update spans a branch" | |
1888 " affecting the following files:\n")) | |
1889 fl = merge.keys() + get.keys() | |
1890 fl.sort() | |
1891 for f in fl: | |
1892 cf = "" | |
1893 if f in merge: | |
1894 cf = _(" (resolve)") | |
1895 self.ui.status(" %s%s\n" % (f, cf)) | |
1896 self.ui.warn(_("aborting update spanning branches!\n")) | |
1897 self.ui.status(_("(use 'hg merge' to merge across branches" | |
1898 " or 'hg update -C' to lose changes)\n")) | |
1899 return 1 | |
1900 branch_merge = True | |
1901 | |
1902 xp1 = hex(p1) | |
1903 xp2 = hex(p2) | |
1904 if p2 == nullid: xxp2 = '' | |
1905 else: xxp2 = xp2 | |
1906 | |
1907 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2) | |
1908 | |
1909 # get the files we don't need to change | |
1910 files = get.keys() | |
1911 files.sort() | |
1912 for f in files: | |
1913 if f[0] == "/": | |
1914 continue | |
1915 self.ui.note(_("getting %s\n") % f) | |
1916 t = self.file(f).read(get[f]) | |
1917 self.wwrite(f, t) | |
1918 util.set_exec(self.wjoin(f), mf2[f]) | |
1919 if moddirstate: | |
1920 if branch_merge: | |
1921 self.dirstate.update([f], 'n', st_mtime=-1) | |
1922 else: | |
1923 self.dirstate.update([f], 'n') | |
1924 | |
1925 # merge the tricky bits | |
1926 failedmerge = [] | |
1927 files = merge.keys() | |
1928 files.sort() | |
1929 for f in files: | |
1930 self.ui.status(_("merging %s\n") % f) | |
1931 my, other, flag = merge[f] | |
1932 ret = self.merge3(f, my, other, xp1, xp2) | |
1933 if ret: | |
1934 err = True | |
1935 failedmerge.append(f) | |
1936 util.set_exec(self.wjoin(f), flag) | |
1937 if moddirstate: | |
1938 if branch_merge: | |
1939 # We've done a branch merge, mark this file as merged | |
1940 # so that we properly record the merger later | |
1941 self.dirstate.update([f], 'm') | |
1942 else: | |
1943 # We've update-merged a locally modified file, so | |
1944 # we set the dirstate to emulate a normal checkout | |
1945 # of that file some time in the past. Thus our | |
1946 # merge will appear as a normal local file | |
1947 # modification. | |
1948 f_len = len(self.file(f).read(other)) | |
1949 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1) | |
1950 | |
1951 remove.sort() | |
1952 for f in remove: | |
1953 self.ui.note(_("removing %s\n") % f) | |
1954 util.audit_path(f) | |
1955 try: | |
1956 util.unlink(self.wjoin(f)) | |
1957 except OSError, inst: | |
1958 if inst.errno != errno.ENOENT: | |
1959 self.ui.warn(_("update failed to remove %s: %s!\n") % | |
1960 (f, inst.strerror)) | |
1961 if moddirstate: | |
1962 if branch_merge: | |
1963 self.dirstate.update(remove, 'r') | |
1964 else: | |
1965 self.dirstate.forget(remove) | |
1966 | |
1967 if moddirstate: | |
1968 self.dirstate.setparents(p1, p2) | |
1969 | |
1970 if show_stats: | |
1971 stats = ((len(get), _("updated")), | |
1972 (len(merge) - len(failedmerge), _("merged")), | |
1973 (len(remove), _("removed")), | |
1974 (len(failedmerge), _("unresolved"))) | |
1975 note = ", ".join([_("%d files %s") % s for s in stats]) | |
1976 self.ui.status("%s\n" % note) | |
1977 if moddirstate: | |
1978 if branch_merge: | |
1979 if failedmerge: | |
1980 self.ui.status(_("There are unresolved merges," | |
1981 " you can redo the full merge using:\n" | |
1982 " hg update -C %s\n" | |
1983 " hg merge %s\n" | |
1984 % (self.changelog.rev(p1), | |
1985 self.changelog.rev(p2)))) | |
1986 else: | |
1987 self.ui.status(_("(branch merge, don't forget to commit)\n")) | |
1988 elif failedmerge: | |
1989 self.ui.status(_("There are unresolved merges with" | |
1990 " locally modified files.\n")) | |
1991 | |
1992 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err)) | |
1993 return err | |
1994 | |
1995 def merge3(self, fn, my, other, p1, p2): | |
1996 """perform a 3-way merge in the working directory""" | |
1997 | |
1998 def temp(prefix, node): | |
1999 pre = "%s~%s." % (os.path.basename(fn), prefix) | |
2000 (fd, name) = tempfile.mkstemp(prefix=pre) | |
2001 f = os.fdopen(fd, "wb") | |
2002 self.wwrite(fn, fl.read(node), f) | |
2003 f.close() | |
2004 return name | |
2005 | |
2006 fl = self.file(fn) | |
2007 base = fl.ancestor(my, other) | |
2008 a = self.wjoin(fn) | |
2009 b = temp("base", base) | |
2010 c = temp("other", other) | |
2011 | |
2012 self.ui.note(_("resolving %s\n") % fn) | |
2013 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") % | |
2014 (fn, short(my), short(other), short(base))) | |
2015 | |
2016 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge") | |
2017 or "hgmerge") | |
2018 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root, | |
2019 environ={'HG_FILE': fn, | |
2020 'HG_MY_NODE': p1, | |
2021 'HG_OTHER_NODE': p2, | |
2022 'HG_FILE_MY_NODE': hex(my), | |
2023 'HG_FILE_OTHER_NODE': hex(other), | |
2024 'HG_FILE_BASE_NODE': hex(base)}) | |
2025 if r: | |
2026 self.ui.warn(_("merging %s failed!\n") % fn) | |
2027 | |
2028 os.unlink(b) | |
2029 os.unlink(c) | |
2030 return r | |
2031 | |
2032 def verify(self): | |
2033 filelinkrevs = {} | |
2034 filenodes = {} | |
2035 changesets = revisions = files = 0 | |
2036 errors = [0] | |
2037 warnings = [0] | |
2038 neededmanifests = {} | |
2039 | |
2040 def err(msg): | |
2041 self.ui.warn(msg + "\n") | |
2042 errors[0] += 1 | |
2043 | |
2044 def warn(msg): | |
2045 self.ui.warn(msg + "\n") | |
2046 warnings[0] += 1 | |
2047 | |
2048 def checksize(obj, name): | |
2049 d = obj.checksize() | |
2050 if d[0]: | |
2051 err(_("%s data length off by %d bytes") % (name, d[0])) | |
2052 if d[1]: | |
2053 err(_("%s index contains %d extra bytes") % (name, d[1])) | |
2054 | |
2055 def checkversion(obj, name): | |
2056 if obj.version != revlog.REVLOGV0: | |
2057 if not revlogv1: | |
2058 warn(_("warning: `%s' uses revlog format 1") % name) | |
2059 elif revlogv1: | |
2060 warn(_("warning: `%s' uses revlog format 0") % name) | |
2061 | |
2062 revlogv1 = self.revlogversion != revlog.REVLOGV0 | |
2063 if self.ui.verbose or revlogv1 != self.revlogv1: | |
2064 self.ui.status(_("repository uses revlog format %d\n") % | |
2065 (revlogv1 and 1 or 0)) | |
2066 | |
2067 seen = {} | |
2068 self.ui.status(_("checking changesets\n")) | |
2069 checksize(self.changelog, "changelog") | |
2070 | |
2071 for i in range(self.changelog.count()): | |
2072 changesets += 1 | |
2073 n = self.changelog.node(i) | |
2074 l = self.changelog.linkrev(n) | |
2075 if l != i: | |
2076 err(_("incorrect link (%d) for changeset revision %d") %(l, i)) | |
2077 if n in seen: | |
2078 err(_("duplicate changeset at revision %d") % i) | |
2079 seen[n] = 1 | |
2080 | |
2081 for p in self.changelog.parents(n): | |
2082 if p not in self.changelog.nodemap: | |
2083 err(_("changeset %s has unknown parent %s") % | |
2084 (short(n), short(p))) | |
2085 try: | |
2086 changes = self.changelog.read(n) | |
2087 except KeyboardInterrupt: | |
2088 self.ui.warn(_("interrupted")) | |
2089 raise | |
2090 except Exception, inst: | |
2091 err(_("unpacking changeset %s: %s") % (short(n), inst)) | |
2092 continue | |
2093 | |
2094 neededmanifests[changes[0]] = n | |
2095 | |
2096 for f in changes[3]: | |
2097 filelinkrevs.setdefault(f, []).append(i) | |
2098 | |
2099 seen = {} | |
2100 self.ui.status(_("checking manifests\n")) | |
2101 checkversion(self.manifest, "manifest") | |
2102 checksize(self.manifest, "manifest") | |
2103 | |
2104 for i in range(self.manifest.count()): | |
2105 n = self.manifest.node(i) | |
2106 l = self.manifest.linkrev(n) | |
2107 | |
2108 if l < 0 or l >= self.changelog.count(): | |
2109 err(_("bad manifest link (%d) at revision %d") % (l, i)) | |
2110 | |
2111 if n in neededmanifests: | |
2112 del neededmanifests[n] | |
2113 | |
2114 if n in seen: | |
2115 err(_("duplicate manifest at revision %d") % i) | |
2116 | |
2117 seen[n] = 1 | |
2118 | |
2119 for p in self.manifest.parents(n): | |
2120 if p not in self.manifest.nodemap: | |
2121 err(_("manifest %s has unknown parent %s") % | |
2122 (short(n), short(p))) | |
2123 | |
2124 try: | |
2125 delta = mdiff.patchtext(self.manifest.delta(n)) | |
2126 except KeyboardInterrupt: | |
2127 self.ui.warn(_("interrupted")) | |
2128 raise | |
2129 except Exception, inst: | |
2130 err(_("unpacking manifest %s: %s") % (short(n), inst)) | |
2131 continue | |
2132 | |
2133 try: | |
2134 ff = [ l.split('\0') for l in delta.splitlines() ] | |
2135 for f, fn in ff: | |
2136 filenodes.setdefault(f, {})[bin(fn[:40])] = 1 | |
2137 except (ValueError, TypeError), inst: | |
2138 err(_("broken delta in manifest %s: %s") % (short(n), inst)) | |
2139 | |
2140 self.ui.status(_("crosschecking files in changesets and manifests\n")) | |
2141 | |
2142 for m, c in neededmanifests.items(): | |
2143 err(_("Changeset %s refers to unknown manifest %s") % | |
2144 (short(m), short(c))) | |
2145 del neededmanifests | |
2146 | |
2147 for f in filenodes: | |
2148 if f not in filelinkrevs: | |
2149 err(_("file %s in manifest but not in changesets") % f) | |
2150 | |
2151 for f in filelinkrevs: | |
2152 if f not in filenodes: | |
2153 err(_("file %s in changeset but not in manifest") % f) | |
2154 | |
2155 self.ui.status(_("checking files\n")) | |
2156 ff = filenodes.keys() | |
2157 ff.sort() | |
2158 for f in ff: | |
2159 if f == "/dev/null": | |
2160 continue | |
2161 files += 1 | |
2162 if not f: | |
2163 err(_("file without name in manifest %s") % short(n)) | |
2164 continue | |
2165 fl = self.file(f) | |
2166 checkversion(fl, f) | |
2167 checksize(fl, f) | |
2168 | |
2169 nodes = {nullid: 1} | |
2170 seen = {} | |
2171 for i in range(fl.count()): | |
2172 revisions += 1 | |
2173 n = fl.node(i) | |
2174 | |
2175 if n in seen: | |
2176 err(_("%s: duplicate revision %d") % (f, i)) | |
2177 if n not in filenodes[f]: | |
2178 err(_("%s: %d:%s not in manifests") % (f, i, short(n))) | |
2179 else: | |
2180 del filenodes[f][n] | |
2181 | |
2182 flr = fl.linkrev(n) | |
2183 if flr not in filelinkrevs.get(f, []): | |
2184 err(_("%s:%s points to unexpected changeset %d") | |
2185 % (f, short(n), flr)) | |
2186 else: | |
2187 filelinkrevs[f].remove(flr) | |
2188 | |
2189 # verify contents | |
2190 try: | |
2191 t = fl.read(n) | |
2192 except KeyboardInterrupt: | |
2193 self.ui.warn(_("interrupted")) | |
2194 raise | |
2195 except Exception, inst: | |
2196 err(_("unpacking file %s %s: %s") % (f, short(n), inst)) | |
2197 | |
2198 # verify parents | |
2199 (p1, p2) = fl.parents(n) | |
2200 if p1 not in nodes: | |
2201 err(_("file %s:%s unknown parent 1 %s") % | |
2202 (f, short(n), short(p1))) | |
2203 if p2 not in nodes: | |
2204 err(_("file %s:%s unknown parent 2 %s") % | |
2205 (f, short(n), short(p1))) | |
2206 nodes[n] = 1 | |
2207 | |
2208 # cross-check | |
2209 for node in filenodes[f]: | |
2210 err(_("node %s in manifests not in %s") % (hex(node), f)) | |
2211 | |
2212 self.ui.status(_("%d files, %d changesets, %d total revisions\n") % | |
2213 (files, changesets, revisions)) | |
2214 | |
2215 if warnings[0]: | |
2216 self.ui.warn(_("%d warnings encountered!\n") % warnings[0]) | |
2217 if errors[0]: | |
2218 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0]) | |
2219 return 1 | |
2220 | 1687 |
2221 def stream_in(self, remote): | 1688 def stream_in(self, remote): |
2222 fp = remote.stream_out() | 1689 fp = remote.stream_out() |
2223 resp = int(fp.readline()) | 1690 resp = int(fp.readline()) |
2224 if resp != 0: | 1691 if resp != 0: |
2240 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') % | 1707 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') % |
2241 (util.bytecount(total_bytes), elapsed, | 1708 (util.bytecount(total_bytes), elapsed, |
2242 util.bytecount(total_bytes / elapsed))) | 1709 util.bytecount(total_bytes / elapsed))) |
2243 self.reload() | 1710 self.reload() |
2244 return len(self.heads()) + 1 | 1711 return len(self.heads()) + 1 |
2245 | 1712 |
2246 def clone(self, remote, heads=[], stream=False): | 1713 def clone(self, remote, heads=[], stream=False): |
2247 '''clone remote repository. | 1714 '''clone remote repository. |
2248 | 1715 |
2249 keyword arguments: | 1716 keyword arguments: |
2250 heads: list of revs to clone (forces use of pull) | 1717 heads: list of revs to clone (forces use of pull) |
2269 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo")) | 1736 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo")) |
2270 util.rename(os.path.join(p, "journal.dirstate"), | 1737 util.rename(os.path.join(p, "journal.dirstate"), |
2271 os.path.join(p, "undo.dirstate")) | 1738 os.path.join(p, "undo.dirstate")) |
2272 return a | 1739 return a |
2273 | 1740 |
def instance(ui, path, create):
    """Open (or create) the local repository at *path*.

    Any leading 'file' scheme is stripped from *path* before the
    repository object is constructed.
    """
    local_path = util.drop_scheme('file', path)
    return localrepository(ui, local_path, create)
1743 | |
def islocal(path):
    """Report whether *path* denotes a local repository.

    This module only ever handles local repositories, so the answer
    is unconditionally True; *path* is ignored.
    """
    return True