mercurial/revlog.py
changeset 4982:9672e3c42b0c
parent    4981:e7131935fbb3
child     4983:4dbcfc6e359e
diff -r e7131935fbb3 -r 9672e3c42b0c mercurial/revlog.py
--- a/mercurial/revlog.py
+++ b/mercurial/revlog.py
@@ -458,17 +458,18 @@
                                   % (self.indexfile, flags >> 16))
         else:
             raise RevlogError(_("index %s unknown format %d")
                               % (self.indexfile, fmt))
         self.version = v
+        self._inline = v & REVLOGNGINLINEDATA
         self.nodemap = {nullid: nullrev}
         self.index = []
         self._io = revlogio()
         if self.version == REVLOGV0:
             self._io = revlogoldio()
         if i:
-            self.index, self.nodemap = self._io.parseindex(f, st, self._inline())
+            self.index, self.nodemap = self._io.parseindex(f, st, self._inline)
         # add the magic null revision at -1
         self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
 
     def _loadindex(self, start, end):
         """load a block of indexes all at once from the lazy parser"""
@@ -486,12 +487,10 @@
         """loads the map from the lazy parser"""
         if isinstance(self.nodemap, lazymap):
             self.nodemap.p.loadmap()
             self.nodemap = self.nodemap.p.map
 
-    def _inline(self):
-        return self.version & REVLOGNGINLINEDATA
     def tip(self):
         return self.node(len(self.index) - 2)
     def count(self):
         return len(self.index) - 1
 
@@ -842,11 +841,11 @@
         """apply a list of patches to a string"""
         return mdiff.patches(t, pl)
 
     def chunk(self, rev, df=None, cachelen=4096):
         start, length = self.start(rev), self.length(rev)
-        inline = self._inline()
+        inline = self._inline
         if inline:
             start += (rev + 1) * self._io.size
         end = start + length
         def loadcache(df):
             cache_length = max(cachelen, length) # 4k
@@ -897,11 +896,11 @@
         # look up what we need to read
         text = None
         rev = self.rev(node)
         base = self.base(rev)
 
-        if self._inline():
+        if self._inline:
             # we probably have the whole chunk cached
             df = None
         else:
             df = self.opener(self.datafile)
 
@@ -927,11 +926,11 @@
 
         self.cache = (node, rev, text)
         return text
 
     def checkinlinesize(self, tr, fp=None):
-        if not self._inline():
+        if not self._inline:
             return
         if not fp:
             fp = self.opener(self.indexfile, 'r')
             fp.seek(0, 2)
         size = fp.tell()
@@ -956,10 +955,11 @@
             df.write(d)
         fp.close()
         df.close()
         fp = self.opener(self.indexfile, 'w', atomictemp=True)
         self.version &= ~(REVLOGNGINLINEDATA)
+        self._inline = False
         if self.count():
             x = self.index[0]
             e = struct.pack(indexformatng, *x)[4:]
             l = struct.pack(versionformat, self.version)
             fp.write(l)
@@ -985,11 +985,11 @@
         link - the linkrev data to add
         p1, p2 - the parent nodeids of the revision
         d - an optional precomputed delta
         """
         dfh = None
-        if not self._inline():
+        if not self._inline:
             dfh = self.opener(self.datafile, "a")
         ifh = self.opener(self.indexfile, "a+")
         return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
 
     def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
@@ -1028,11 +1028,11 @@
         else:
             entry = struct.pack(indexformatng, *e)
             if not curr:
                 entry = struct.pack(versionformat, self.version) + entry[4:]
 
-        if not self._inline():
+        if not self._inline:
             transaction.add(self.datafile, offset)
             transaction.add(self.indexfile, curr * len(entry))
             if data[0]:
                 dfh.write(data[0])
             dfh.write(data[1])
@@ -1116,11 +1116,11 @@
             end = self.end(t)
 
         ifh = self.opener(self.indexfile, "a+")
         ifh.seek(0, 2)
         transaction.add(self.indexfile, ifh.tell(), self.count())
-        if self._inline():
+        if self._inline:
             dfh = None
         else:
             transaction.add(self.datafile, end)
             dfh = self.opener(self.datafile, "a")
 
@@ -1165,11 +1165,11 @@
                 ifh.flush()
                 text = self.revision(chain)
                 text = self.patches(text, [delta])
                 chk = self._addrevision(text, transaction, link, p1, p2, None,
                                         ifh, dfh)
-                if not dfh and not self._inline():
+                if not dfh and not self._inline:
                     # addrevision switched from inline to conventional
                     # reopen the index
                     dfh = self.opener(self.datafile, "a")
                     ifh = self.opener(self.indexfile, "a")
                 if chk != node:
@@ -1178,15 +1178,15 @@
             else:
                 e = (offset_type(end, 0), len(cdelta), textlen, base,
                      link, self.rev(p1), self.rev(p2), node)
                 self.index.insert(-1, e)
                 self.nodemap[node] = r
-                if self._inline():
+                if self._inline:
                     ifh.write(struct.pack(indexformatng, *e))
                     ifh.write(cdelta)
                     self.checkinlinesize(transaction, ifh)
-                    if not self._inline():
+                    if not self._inline:
                         dfh = self.opener(self.datafile, "a")
                         ifh = self.opener(self.indexfile, "a")
                 else:
                     if self.version == REVLOGV0:
                         e = (end, len(cdelta), base, link, p1, p2, node)
@@ -1219,11 +1219,11 @@
             if rev >= self.count():
                 return
 
         # first truncate the files on disk
         end = self.start(rev)
-        if not self._inline():
+        if not self._inline:
             df = self.opener(self.datafile, "a")
             df.truncate(end)
             end = rev * self._io.size
         else:
             end += rev * self._io.size
@@ -1259,11 +1259,11 @@
             f.seek(0, 2)
             actual = f.tell()
             s = self._io.size
             i = actual / s
             di = actual - (i * s)
-            if self._inline():
+            if self._inline:
                 databytes = 0
                 for r in xrange(self.count()):
                     databytes += self.length(r)
                 dd = 0
                 di = actual - self.count() * s - databytes
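
Taken together, the hunks above turn revlog's _inline() accessor into a plain attribute: the flag is computed once in the constructor from the version word, read directly at every former call site, and cleared explicitly when checkinlinesize() moves data out of the inline index. The sketch below illustrates that pattern in isolation; it is a simplified stand-in, not the real revlog class, and the names InlineFlagBefore, InlineFlagAfter and move_data_external are invented for illustration. REVLOGNGINLINEDATA is the real flag constant; the value used here, 1 << 16, is what revlog.py of this era defines.

# Illustrative sketch only -- not the actual mercurial/revlog.py code.
REVLOGNGINLINEDATA = 1 << 16   # inline-data flag bit, as defined in revlog.py

class InlineFlagBefore(object):
    """Old style: the flag is recomputed on every call."""
    def __init__(self, version):
        self.version = version

    def _inline(self):
        # callers write self._inline()
        return self.version & REVLOGNGINLINEDATA

class InlineFlagAfter(object):
    """New style: the flag is computed once and read as an attribute."""
    def __init__(self, version):
        self.version = version
        # callers now write self._inline (no call)
        self._inline = version & REVLOGNGINLINEDATA

    def move_data_external(self):
        # mirrors checkinlinesize(): once data leaves the index file,
        # both the version flag and the cached attribute are cleared
        self.version &= ~REVLOGNGINLINEDATA
        self._inline = False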