comparison mercurial/revlog.py @ 1538:482b4efdf013

Merge with upstream
author Thomas Arendsen Hein <thomas@intevation.de>
date Sun, 13 Nov 2005 02:08:39 +0100
parents 7ae0ce7a3dc4
children ccb9b62de892
--- mercurial/revlog.py@1537:583b3696d24d
+++ mercurial/revlog.py@1538:482b4efdf013
@@ -29,19 +29,19 @@
     s.update(text)
     return s.digest()
 
 def compress(text):
     """ generate a possibly-compressed representation of text """
-    if not text: return text
+    if not text: return ("", text)
     if len(text) < 44:
-        if text[0] == '\0': return text
-        return 'u' + text
+        if text[0] == '\0': return ("", text)
+        return ('u', text)
     bin = zlib.compress(text)
     if len(bin) > len(text):
-        if text[0] == '\0': return text
-        return 'u' + text
-    return bin
+        if text[0] == '\0': return ("", text)
+        return ('u', text)
+    return ("", bin)
 
 def decompress(bin):
     """ decompress the given input """
     if not bin: return bin
     t = bin[0]
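compress() now returns a (prefix, data) pair instead of one pre-joined string: the prefix is 'u' when the text is stored literally and empty when it is passed through or zlib-compressed, and joining the two pieces reproduces the old return value exactly. A minimal round-trip sketch of that contract (undo() and the sample values are invented here, not part of the changeset; Python 2 strings, as in the rest of the file):

    import zlib

    def undo(prefix, data):
        # inverse of the (prefix, data) contract, for illustration only
        if prefix == 'u':
            return data                      # stored literally
        if data[:1] == 'x':                  # zlib stream; empty or NUL-led text falls through
            return zlib.decompress(data)
        return data

    text = "the quick brown fox jumps over the lazy dog " * 4
    pair = ("", zlib.compress(text))         # what compress() yields for compressible text
    assert undo(*pair) == text
    assert undo('u', "short text") == "short text"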
@@ -68,10 +68,13 @@
         self.l = len(data)/self.s
         self.index = [None] * self.l
         self.map = {nullid: -1}
         self.all = 0
         self.revlog = revlog
+
+    def trunc(self, pos):
+        self.l = pos/self.s
 
     def load(self, pos=None):
         if self.all: return
         if pos is not None:
             block = pos / 1000
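The new lazyparser.trunc() just recomputes self.l, the number of whole index entries that precede byte offset pos; self.s is the size of one packed index entry. A quick check of that arithmetic (the indexformat string below is assumed for illustration; the real one is defined elsewhere in this file):

    import struct

    indexformat = ">4l20s20s20s"        # assumed 7-field entry layout
    s = struct.calcsize(indexformat)    # bytes per packed index entry
    pos = 5 * s                         # truncate just before the sixth entry
    assert pos / s == 5                 # trunc() keeps entries 0..4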
@@ -102,12 +105,16 @@
             pos += len(self.p.index)
         self.p.load(pos)
         return self.p.index[pos]
     def __getitem__(self, pos):
         return self.p.index[pos] or self.load(pos)
+    def __delitem__(self, pos):
+        del self.p.index[pos]
     def append(self, e):
         self.p.index.append(e)
+    def trunc(self, pos):
+        self.p.trunc(pos)
 
 class lazymap:
     """a lazy version of the node map"""
     def __init__(self, parser):
         self.p = parser
@@ -138,10 +145,12 @@
                 return self.p.map[key]
             except KeyError:
                 raise KeyError("node " + hex(key))
     def __setitem__(self, key, val):
         self.p.map[key] = val
+    def __delitem__(self, key):
+        del self.p.map[key]
 
 class RevlogError(Exception): pass
 
 class revlog:
     """
@@ -541,34 +550,39 @@
         base = self.base(t)
         start = self.start(base)
         end = self.end(t)
         if not d:
             prev = self.revision(self.tip())
-            d = self.diff(prev, text)
+            d = self.diff(prev, str(text))
         data = compress(d)
-        dist = end - start + len(data)
+        l = len(data[1]) + len(data[0])
+        dist = end - start + l
 
         # full versions are inserted when the needed deltas
         # become comparable to the uncompressed text
         if not n or dist > len(text) * 2:
             data = compress(text)
+            l = len(data[1]) + len(data[0])
             base = n
         else:
             base = self.base(t)
 
         offset = 0
         if t >= 0:
             offset = self.end(t)
 
-        e = (offset, len(data), base, link, p1, p2, node)
+        e = (offset, l, base, link, p1, p2, node)
 
         self.index.append(e)
         self.nodemap[node] = n
         entry = struct.pack(indexformat, *e)
 
         transaction.add(self.datafile, e[0])
-        self.opener(self.datafile, "a").write(data)
+        f = self.opener(self.datafile, "a")
+        if data[0]:
+            f.write(data[0])
+        f.write(data[1])
         transaction.add(self.indexfile, n * len(entry))
         self.opener(self.indexfile, "a").write(entry)
 
         self.cache = (node, n, text)
         return node
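addrevision now records l, the combined length of the prefix and payload returned by compress(), in the index entry, and writes the two pieces to the data file separately; the bytes on disk match what the old single write of the joined string produced. A small self-contained check of that equivalence, with io.BytesIO standing in for the data file and the sample text invented here:

    import io, zlib

    payload = zlib.compress("a revision body\n" * 20)
    data = ("", payload)                 # shape of the new compress() result
    l = len(data[1]) + len(data[0])      # length stored in the index entry

    f = io.BytesIO()
    if data[0]:                          # mirrors the new write path
        f.write(data[0])
    f.write(data[1])
    assert f.getvalue() == data[0] + data[1]
    assert len(f.getvalue()) == l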
@@ -799,11 +813,12 @@
             # version is not the one we have a delta against. We use
             # the size of the previous full rev as a proxy for the
             # current size.
 
             if chain == prev:
-                cdelta = compress(delta)
+                tempd = compress(delta)
+                cdelta = tempd[0] + tempd[1]
 
             if chain != prev or (end - start + len(cdelta)) > measure * 2:
                 # flush our writes here so we can read it in revision
                 dfh.flush()
                 ifh.flush()
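addgroup still needs the delta as a single string for the size check against measure * 2, so it glues the pair returned by compress() back together; the join does not change the byte count. For illustration, with a made-up sample value:

    tempd = ('u', 'an uncompressed delta payload')   # sample compress() result
    cdelta = tempd[0] + tempd[1]
    assert len(cdelta) == len(tempd[0]) + len(tempd[1])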
@@ -826,10 +841,40 @@
 
         dfh.close()
         ifh.close()
         return node
 
+    def strip(self, rev, minlink):
+        if self.count() == 0 or rev >= self.count():
+            return
+
+        # When stripping away a revision, we need to make sure it
+        # does not actually belong to an older changeset.
+        # The minlink parameter defines the oldest revision
+        # we're allowed to strip away.
+        while minlink > self.index[rev][3]:
+            rev += 1
+            if rev >= self.count():
+                return
+
+        # first truncate the files on disk
+        end = self.start(rev)
+        self.opener(self.datafile, "a").truncate(end)
+        end = rev * struct.calcsize(indexformat)
+        self.opener(self.indexfile, "a").truncate(end)
+
+        # then reset internal state in memory to forget those revisions
+        self.cache = None
+        for p in self.index[rev:]:
+            del self.nodemap[p[6]]
+        del self.index[rev:]
+
+        # truncating the lazyindex also truncates the lazymap.
+        if isinstance(self.index, lazyindex):
+            self.index.trunc(end)
+
+
     def checksize(self):
         expected = 0
         if self.count():
             expected = self.end(self.count() - 1)
         try:
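The new strip() first skips forward past any revision whose linkrev (field 3 of an index entry) is older than minlink, so revisions still referenced by older changesets survive; it then truncates both files on disk and drops the stripped tail from the in-memory index and nodemap. A toy model of the in-memory bookkeeping, with shortened made-up entries standing in for the real 7-tuples (linkrev kept at position 3, node moved to the last field):

    # (offset, length, base, linkrev, node)
    index = [(0, 10, 0, 0, "n0"), (10, 8, 0, 1, "n1"), (18, 12, 0, 2, "n2")]
    nodemap = {"n0": 0, "n1": 1, "n2": 2}

    rev, minlink = 1, 2
    while minlink > index[rev][3]:       # don't strip revs linked to older changesets
        rev += 1

    for p in index[rev:]:                # forget the stripped nodes...
        del nodemap[p[-1]]
    del index[rev:]                      # ...and the stripped entries

    assert index == [(0, 10, 0, 0, "n0"), (10, 8, 0, 1, "n1")]
    assert nodemap == {"n0": 0, "n1": 1}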