141 if len(c) < 1: raise KeyError("No match found") |
141 if len(c) < 1: raise KeyError("No match found") |
142 return c[0] |
142 return c[0] |
143 |
143 |
144 return None |
144 return None |
145 |
145 |
def revisions(self, list):
    """Yield the full text of each revision node named in *list*.

    Performs one independent lookup per node; fetching contiguous
    spans of revisions in bulk would be an obvious optimization,
    but this deliberately stays simple for now.
    """
    for n in list:
        yield self.revision(n)
|
151 |
|
def diff(self, a, b):
    """Return a binary delta that transforms text *a* into text *b*.

    Thin wrapper delegating to the mdiff module.
    """
    delta = mdiff.textdiff(a, b)
    return delta
154 |
148 |
def patches(self, t, pl):
    """Apply the sequence of deltas *pl* to base text *t* and return the result.

    Thin wrapper delegating to the mdiff module.
    """
    result = mdiff.patches(t, pl)
    return result
270 elif bn in amap: |
264 elif bn in amap: |
271 return bn |
265 return bn |
272 |
266 |
273 return nullid |
267 return nullid |
274 |
268 |
def mergedag(self, other, transaction, linkseq, accumulate = None):
    """combine the nodes from other's DAG into ours

    other       -- another revlog-like object to pull nodes from
    transaction -- transaction wrapping the addrevision writes
    linkseq     -- iterator yielding a link revision for each added node
                   (consumed via .next(); Python 2 iterator protocol)
    accumulate  -- optional callback invoked with each new revision's text

    Returns (old_tip, new_tip) so the caller can resolve any heads
    left unmerged.
    """
    old = self.tip()
    i = self.count()
    l = []

    # merge the other revision log into our DAG:
    # collect every node we don't already have, together with its
    # parents, and pre-assign it the next local revision number.
    for r in range(other.count()):
        id = other.node(r)
        if id not in self.nodemap:
            (xn, yn) = other.parents(id)
            l.append((id, xn, yn))
            self.nodemap[id] = i
            i += 1

    # merge node data for new nodes ("date" was a typo)
    # NOTE(review): other.revisions() yields lazily, so texts are
    # fetched one at a time as we add them.
    r = other.revisions([e[0] for e in l])
    for e in l:
        t = r.next()
        if accumulate: accumulate(t)
        # e = (node, parent1, parent2); link revision comes from linkseq
        self.addrevision(t, transaction, linkseq.next(), e[1], e[2])

    # return the unmerged heads for later resolving
    return (old, self.tip())
|
299 |
|
300 def group(self, linkmap): |
269 def group(self, linkmap): |
301 # given a list of changeset revs, return a set of deltas and |
270 # given a list of changeset revs, return a set of deltas and |
302 # metadata corresponding to nodes the first delta is |
271 # metadata corresponding to nodes. the first delta is |
303 # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to |
272 # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to |
304 # have this parent as it has all history before these |
273 # have this parent as it has all history before these |
305 # changesets. parent is parent[0] |
274 # changesets. parent is parent[0] |
306 |
275 |
307 revs = [] |
276 revs = [] |
438 # loop through our set of deltas |
407 # loop through our set of deltas |
439 pos = 0 |
408 pos = 0 |
440 while pos < len(data): |
409 while pos < len(data): |
441 l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s", |
410 l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s", |
442 data[pos:pos+84]) |
411 data[pos:pos+84]) |
|
412 link = linkmapper(cs) |
443 if node in self.nodemap: |
413 if node in self.nodemap: |
444 raise "already have %s" % hex(node[:4]) |
414 raise "already have %s" % hex(node[:4]) |
445 link = linkmapper(cs) |
|
446 delta = data[pos + 84:pos + l] |
415 delta = data[pos + 84:pos + l] |
447 pos += l |
416 pos += l |
448 |
417 |
449 # full versions are inserted when the needed deltas become |
418 # full versions are inserted when the needed deltas become |
450 # comparable to the uncompressed text or when the previous |
419 # comparable to the uncompressed text or when the previous |