comparison mercurial/revlog.py @ 1678:b345cc4c22c0

reverting 11d12bd6e1dcd9610fa26e97d25e7ad553e8ffa5: passing local everywhere violates the layering
author Benoit Boissinot <benoit.boissinot@ens-lyon.org>
date Thu, 15 Dec 2005 18:04:05 +0100
parents 11d12bd6e1dc
children c21b54f7f7b8
diff -r 11d12bd6e1dc -r b345cc4c22c0 mercurial/revlog.py
--- a/mercurial/revlog.py
+++ b/mercurial/revlog.py
@@ -175,22 +175,21 @@
     Both pieces of the revlog are written to in an append-only
     fashion, which means we never need to rewrite a file to insert or
     remove data, and can use some simple techniques to avoid the need
     for locking while reading.
     """
-    def __init__(self, opener, indexfile, datafile, local=True):
+    def __init__(self, opener, indexfile, datafile):
         """
         create a revlog object

         opener is a function that abstracts the file opening operation
         and can be used to implement COW semantics or the like.
         """
         self.indexfile = indexfile
         self.datafile = datafile
         self.opener = opener
         self.cache = None
-        self.local = local # XXX only needed because statichttp

         try:
             i = self.opener(self.indexfile).read()
         except IOError, inst:
             if inst.errno != errno.ENOENT:
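With the local flag gone, the opener callable passed to __init__ is again the only place that knows how the index and data files are actually read. A minimal sketch of that contract, assuming a plain filesystem store; make_opener and storepath are illustrative names, not part of this patch:

import os

def make_opener(storepath):
    # the revlog only ever sees this callable, so it does not need to know
    # whether the store is local or served over static HTTP
    def opener(path, mode="rb"):
        return open(os.path.join(storepath, path), mode)
    return opener

# r = revlog(make_opener("/repo/.hg"), "00changelog.i", "00changelog.d")

A static-http repository can hand in an opener of the same shape that fetches the same relative paths over HTTP, which is why both the local parameter and the statichttp special case in group() can go away.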
@@ -649,120 +648,110 @@
                 gy = y.next()
             else:
                 #print "next x"
                 gx = x.next()

-    def group(self, nodelist, lookup, infocollect=None):
+    def group(self, nodelist, lookup, infocollect = None):
         """calculate a delta group

         Given a list of changeset revs, return a set of deltas and
         metadata corresponding to nodes. the first delta is
         parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
         have this parent as it has all history before these
         changesets. parent is parent[0]
         """
         revs = [self.rev(n) for n in nodelist]
+        needed = dict.fromkeys(revs, 1)

         # if we don't have any revisions touched by these changesets, bail
         if not revs:
             yield struct.pack(">l", 0)
             return

         # add the parent of the first rev
         p = self.parents(self.node(revs[0]))[0]
         revs.insert(0, self.rev(p))

-        if self.local:
-            mm = self.opener(self.datafile)
-            def chunk(r):
-                o = self.start(r)
-                l = self.length(r)
-                mm.seek(o)
-                return decompress(mm.read(l))
-        else:
-            # XXX: statichttp workaround
-            needed = dict.fromkeys(revs[1:], 1)
-            # for each delta that isn't contiguous in the log, we need to
-            # reconstruct the base, reconstruct the result, and then
-            # calculate the delta. We also need to do this where we've
-            # stored a full version and not a delta
-            for i in xrange(0, len(revs) - 1):
-                a, b = revs[i], revs[i + 1]
-                if a + 1 != b or self.base(b) == b:
-                    for j in xrange(self.base(a), a + 1):
-                        needed[j] = 1
-                    for j in xrange(self.base(b), b + 1):
-                        needed[j] = 1
-
-            # calculate spans to retrieve from datafile
-            needed = needed.keys()
-            needed.sort()
-            spans = []
-            oo = -1
-            ol = 0
-            for n in needed:
-                if n < 0: continue
-                o = self.start(n)
-                l = self.length(n)
-                if oo + ol == o: # can we merge with the previous?
-                    nl = spans[-1][2]
-                    nl.append((n, l))
-                    ol += l
-                    spans[-1] = (oo, ol, nl)
-                else:
-                    oo = o
-                    ol = l
-                    spans.append((oo, ol, [(n, l)]))
-
-            # read spans in, divide up chunks
-            chunks = {}
-            for span in spans:
-                # we reopen the file for each span to make http happy for now
-                f = self.opener(self.datafile)
-                f.seek(span[0])
-                data = f.read(span[1])
-
-                # divide up the span
-                pos = 0
-                for r, l in span[2]:
-                    chunks[r] = decompress(data[pos: pos + l])
-                    pos += l
-            def chunk(r):
-                return chunks[r]
+        # for each delta that isn't contiguous in the log, we need to
+        # reconstruct the base, reconstruct the result, and then
+        # calculate the delta. We also need to do this where we've
+        # stored a full version and not a delta
+        for i in xrange(0, len(revs) - 1):
+            a, b = revs[i], revs[i + 1]
+            if a + 1 != b or self.base(b) == b:
+                for j in xrange(self.base(a), a + 1):
+                    needed[j] = 1
+                for j in xrange(self.base(b), b + 1):
+                    needed[j] = 1
+
+        # calculate spans to retrieve from datafile
+        needed = needed.keys()
+        needed.sort()
+        spans = []
+        oo = -1
+        ol = 0
+        for n in needed:
+            if n < 0: continue
+            o = self.start(n)
+            l = self.length(n)
+            if oo + ol == o: # can we merge with the previous?
+                nl = spans[-1][2]
+                nl.append((n, l))
+                ol += l
+                spans[-1] = (oo, ol, nl)
+            else:
+                oo = o
+                ol = l
+                spans.append((oo, ol, [(n, l)]))
+
+        # read spans in, divide up chunks
+        chunks = {}
+        for span in spans:
+            # we reopen the file for each span to make http happy for now
+            f = self.opener(self.datafile)
+            f.seek(span[0])
+            data = f.read(span[1])
+
+            # divide up the span
+            pos = 0
+            for r, l in span[2]:
+                chunks[r] = decompress(data[pos: pos + l])
+                pos += l

         # helper to reconstruct intermediate versions
         def construct(text, base, rev):
-            bins = [chunk(r) for r in xrange(base + 1, rev + 1)]
+            bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
             return mdiff.patches(text, bins)

         # build deltas
+        deltas = []
         for d in xrange(0, len(revs) - 1):
             a, b = revs[d], revs[d + 1]
             n = self.node(b)

             if infocollect is not None:
                 infocollect(n)

             # do we need to construct a new delta?
             if a + 1 != b or self.base(b) == b:
                 if a >= 0:
                     base = self.base(a)
-                    ta = chunk(self.base(a))
+                    ta = chunks[self.base(a)]
                     ta = construct(ta, base, a)
                 else:
                     ta = ""

                 base = self.base(b)
                 if a > base:
                     base = a
                     tb = ta
                 else:
-                    tb = chunk(self.base(b))
+                    tb = chunks[self.base(b)]
                     tb = construct(tb, base, b)
                 d = self.diff(ta, tb)
             else:
-                d = chunk(b)
+                d = chunks[b]

             p = self.parents(n)
             meta = n + p[0] + p[1] + lookup(n)
             l = struct.pack(">l", len(meta) + len(d) + 4)
             yield l
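The restored group() path always works out which revisions it needs, sorts them, and coalesces reads that are adjacent in the data file into single spans before going through the opener, so a remote opener serves one read per span rather than one per revision. A standalone sketch of that coalescing step with made-up offsets; coalesce is an illustrative helper, not a function in revlog.py:

def coalesce(reads):
    # reads is a sorted list of (rev, offset, length) triples; merge entries
    # that sit back to back in the data file into one span, mirroring the
    # span-building loop restored above
    spans = []
    oo, ol = -1, 0
    for rev, o, l in reads:
        if oo + ol == o:  # adjacent to the previous span: extend it
            nl = spans[-1][2]
            nl.append((rev, l))
            ol += l
            spans[-1] = (oo, ol, nl)
        else:             # gap: start a new span
            oo, ol = o, l
            spans.append((oo, ol, [(rev, l)]))
    return spans

# made-up offsets: three adjacent chunks plus one after a gap become two reads
print(coalesce([(0, 0, 10), (1, 10, 7), (2, 17, 3), (5, 40, 8)]))
# [(0, 20, [(0, 10), (1, 7), (2, 3)]), (40, 8, [(5, 8)])]

Each span is then fetched with a single opener call and split back into per-revision chunks, which is what lets the same code serve both local and static-http repositories.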