Merge with crew
Bryan O'Sullivan
r6151:8bc4fe42 merge default
@@ -967,7 +967,7 @@ def walkchangerevs(ui, repo, pats, chang
     if follow:
         defrange = '%s:0' % repo.changectx().rev()
     else:
-        defrange = 'tip:0'
+        defrange = '-1:0'
     revs = revrange(repo, opts['rev'] or [defrange])
     wanted = {}
     slowpath = anypats or opts.get('removed')
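In the walkchangerevs hunk above, the default revision range when --follow is not given changes from 'tip:0' to '-1:0'; both mean "newest revision down to revision 0", with -1 counted from the end of the changelog. The sketch below is only an illustration of that kind of range expansion, not Mercurial's revrange; expand_range and numrevs are made-up names.

    def expand_range(spec, numrevs):
        """Toy expansion of a 'START:STOP' revision range (approximation only).

        Negative numbers count from the end of the changelog, so -1 is the
        most recent revision, which is how '-1:0' works as a default range.
        """
        startstr, stopstr = spec.split(':')
        start, stop = int(startstr), int(stopstr)
        if start < 0:
            start += numrevs          # -1 -> last revision
        if stop < 0:
            stop += numrevs
        step = 1 if start <= stop else -1
        return list(range(start, stop + step, step))

    # With ten revisions (0..9), the default '-1:0' walks newest to oldest.
    print(expand_range('-1:0', 10))   # [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]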
@@ -1072,19 +1072,19 @@ def grep(ui, repo, pattern, *pats, **opt
         if st == 'window':
             matches.clear()
         elif st == 'add':
-            mf = repo.changectx(rev).manifest()
+            ctx = repo.changectx(rev)
             matches[rev] = {}
             for fn in fns:
                 if fn in skip:
                     continue
                 try:
-                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
+                    grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
                     fstate.setdefault(fn, [])
                     if follow:
-                        copied = getfile(fn).renamed(mf[fn])
+                        copied = getfile(fn).renamed(ctx.filenode(fn))
                         if copied:
                             copies.setdefault(rev, {})[fn] = copied[0]
-                except KeyError:
+                except revlog.LookupError:
                     pass
         elif st == 'iter':
             states = matches[rev].items()
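The grep hunk swaps a manifest lookup (mf[fn]) for changectx.filenode(fn) and widens the except clause to match: a manifest behaves like a dict and raises KeyError for a file missing from that revision, while filenode() reports the miss as revlog.LookupError. A hedged sketch of the two lookup styles, assuming a Mercurial repo object of that era:

    from mercurial import revlog

    def filenode_old_style(repo, rev, fn):
        # dict-style manifest access, as in the removed line
        mf = repo.changectx(rev).manifest()
        return mf[fn]                       # missing file -> KeyError

    def filenode_new_style(repo, rev, fn):
        # context helper, as in the added line
        ctx = repo.changectx(rev)
        return ctx.filenode(fn)             # missing file -> revlog.LookupError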
@@ -199,35 +199,38 @@ class hgweb(object):
                         req.form['node'] = [fn[:-len(ext)]]
                         req.form['type'] = [type_]
 
-        # actually process the request
+        # process this if it's a protocol request
 
+        cmd = req.form.get('cmd', [''])[0]
+        if cmd in protocol.__all__:
+            method = getattr(protocol, cmd)
+            method(self, req)
+            return
+
+        # process the web interface request
+
         try:
 
-            cmd = req.form.get('cmd', [''])[0]
-            if cmd in protocol.__all__:
-                method = getattr(protocol, cmd)
-                method(self, req)
-            else:
-                tmpl = self.templater(req)
-                ctype = tmpl('mimetype', encoding=self.encoding)
-                ctype = templater.stringify(ctype)
-
-                if cmd == '':
-                    req.form['cmd'] = [tmpl.cache['default']]
-                    cmd = req.form['cmd'][0]
-
-                if cmd not in webcommands.__all__:
-                    msg = 'No such method: %s' % cmd
-                    raise ErrorResponse(HTTP_BAD_REQUEST, msg)
-                elif cmd == 'file' and 'raw' in req.form.get('style', []):
-                    self.ctype = ctype
-                    content = webcommands.rawfile(self, req, tmpl)
-                else:
-                    content = getattr(webcommands, cmd)(self, req, tmpl)
-                req.respond(HTTP_OK, ctype)
-
-                req.write(content)
-                del tmpl
+            tmpl = self.templater(req)
+            ctype = tmpl('mimetype', encoding=self.encoding)
+            ctype = templater.stringify(ctype)
+
+            if cmd == '':
+                req.form['cmd'] = [tmpl.cache['default']]
+                cmd = req.form['cmd'][0]
+
+            if cmd not in webcommands.__all__:
+                msg = 'No such method: %s' % cmd
+                raise ErrorResponse(HTTP_BAD_REQUEST, msg)
+            elif cmd == 'file' and 'raw' in req.form.get('style', []):
+                self.ctype = ctype
+                content = webcommands.rawfile(self, req, tmpl)
+            else:
+                content = getattr(webcommands, cmd)(self, req, tmpl)
+            req.respond(HTTP_OK, ctype)
+
+            req.write(content)
+            del tmpl
 
         except revlog.LookupError, err:
             req.respond(HTTP_NOT_FOUND, ctype)
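The hgweb hunk moves wire-protocol dispatch out of the templated try block: protocol commands are answered first and return immediately, so only web-interface requests pay for templater and content-type setup. A minimal, self-contained sketch of that early-return pattern (toy command tables, not hgweb's real protocol and webcommands modules):

    # Hypothetical stand-ins for the protocol and web command modules.
    PROTOCOL_COMMANDS = {
        'heads': lambda req: 'heads over the wire',
    }
    WEB_COMMANDS = {
        'summary': lambda req, tmpl: tmpl % 'summary page',
    }

    def handle_request(req):
        cmd = req.get('cmd', '')
        if cmd in PROTOCOL_COMMANDS:
            # protocol requests never touch the templater
            return PROTOCOL_COMMANDS[cmd](req)

        tmpl = '<html>%s</html>'            # stand-in for templater setup
        return WEB_COMMANDS[cmd](req, tmpl)

    print(handle_request({'cmd': 'heads'}))     # protocol path, early return
    print(handle_request({'cmd': 'summary'}))   # web interface path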
@@ -9,18 +9,6 @@
 import changegroup, os
 from node import *
 
-def _limitheads(cl, stoprev):
-    """return the list of all revs >= stoprev that have no children"""
-    seen = {}
-    heads = []
-
-    for r in xrange(cl.count() - 1, stoprev - 1, -1):
-        if r not in seen:
-            heads.append(r)
-        for p in cl.parentrevs(r):
-            seen[p] = 1
-    return heads
-
 def _bundle(repo, bases, heads, node, suffix, extranodes=None):
     """create a bundle with the specified revisions as a backup"""
     cg = repo.changegroupsubset(bases, heads, 'strip', extranodes)
@@ -87,42 +75,39 @@ def strip(ui, repo, node, backup="all"):
     pp = cl.parents(node)
     striprev = cl.rev(node)
 
-    # save is a list of all the branches we are truncating away
-    # that we actually want to keep. changegroup will be used
-    # to preserve them and add them back after the truncate
-    saveheads = []
-    savebases = {}
-
-    heads = [cl.node(r) for r in _limitheads(cl, striprev)]
-    seen = {}
+    # Some revisions with rev > striprev may not be descendants of striprev.
+    # We have to find these revisions and put them in a bundle, so that
+    # we can restore them after the truncations.
+    # To create the bundle we use repo.changegroupsubset which requires
+    # the list of heads and bases of the set of interesting revisions.
+    # (head = revision in the set that has no descendant in the set;
+    #  base = revision in the set that has no ancestor in the set)
+    tostrip = {striprev: 1}
+    saveheads = {}
+    savebases = []
+    for r in xrange(striprev + 1, cl.count()):
+        parents = cl.parentrevs(r)
+        if parents[0] in tostrip or parents[1] in tostrip:
+            # r is a descendant of striprev
+            tostrip[r] = 1
+            # if this is a merge and one of the parents does not descend
+            # from striprev, mark that parent as a savehead.
+            if parents[1] != nullrev:
+                for p in parents:
+                    if p not in tostrip and p > striprev:
+                        saveheads[p] = 1
+        else:
+            # if no parents of this revision will be stripped, mark it as
+            # a savebase
+            if parents[0] < striprev and parents[1] < striprev:
+                savebases.append(cl.node(r))
 
-    # search through all the heads, finding those where the revision
-    # we want to strip away is an ancestor. Also look for merges
-    # that might be turned into new heads by the strip.
-    while heads:
-        h = heads.pop()
-        n = h
-        while True:
-            seen[n] = 1
-            pp = cl.parents(n)
-            if pp[1] != nullid:
-                for p in pp:
-                    if cl.rev(p) > striprev and p not in seen:
-                        heads.append(p)
-            if pp[0] == nullid:
-                break
-            if cl.rev(pp[0]) < striprev:
-                break
-            n = pp[0]
-            if n == node:
-                break
-        r = cl.reachable(h, node)
-        if node not in r:
-            saveheads.append(h)
-        for x in r:
-            if cl.rev(x) > striprev:
-                savebases[x] = 1
-
+            for p in parents:
+                if p in saveheads:
+                    del saveheads[p]
+            saveheads[r] = 1
+
+    saveheads = [cl.node(r) for r in saveheads]
     files = _collectfiles(repo, striprev)
 
     extranodes = _collectextranodes(repo, files, striprev)
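The rewritten strip logic above replaces the old head-walking loop with one forward pass over the revisions above striprev, classifying each: descendants of striprev go into tostrip, everything else is kept, and saveheads/savebases record the boundary of the kept set so repo.changegroupsubset can bundle it. The following self-contained walk-through runs the same loop on a toy DAG (revision numbers stand in for nodes; it is an illustration, not repair.py itself):

    nullrev = -1

    # rev: (p1, p2).  Stripping rev 2; rev 4 merges the stripped branch with
    # rev 3, and rev 5 continues an unrelated, kept branch.
    parentrevs = {0: (nullrev, nullrev),
                  1: (0, nullrev),
                  2: (1, nullrev),       # revision to strip
                  3: (1, nullrev),
                  4: (3, 2),             # merge pulling in a stripped parent
                  5: (3, nullrev)}

    striprev = 2
    tostrip = {striprev: 1}
    saveheads = {}
    savebases = []
    for r in range(striprev + 1, len(parentrevs)):
        parents = parentrevs[r]
        if parents[0] in tostrip or parents[1] in tostrip:
            tostrip[r] = 1
            if parents[1] != nullrev:
                for p in parents:
                    if p not in tostrip and p > striprev:
                        saveheads[p] = 1
        else:
            if parents[0] < striprev and parents[1] < striprev:
                savebases.append(r)
            for p in parents:
                if p in saveheads:
                    del saveheads[p]
            saveheads[r] = 1

    print(sorted(tostrip))    # [2, 4]  revisions the strip will remove
    print(sorted(saveheads))  # [5]     saved revisions with no saved descendant
    print(savebases)          # [3]     saved revisions with no saved ancestor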
@@ -131,7 +116,7 @@ def strip(ui, repo, node, backup="all"):
     if backup == "all":
         _bundle(repo, [node], cl.heads(), node, 'backup')
     if saveheads or extranodes:
-        chgrpfile = _bundle(repo, savebases.keys(), saveheads, node, 'temp',
+        chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
                             extranodes)
 
     cl.strip(striprev)
@@ -933,19 +933,19 @@ class revlog(object):
             raise RevlogError(_('incompatible revision flag %x') %
                               (self.index[rev][0] & 0xFFFF))
 
-        if self._inline:
-            # we probably have the whole chunk cached
-            df = None
-        else:
-            df = self.opener(self.datafile)
+        df = None
 
         # do we have useful data cached?
         if self._cache and self._cache[1] >= base and self._cache[1] < rev:
             base = self._cache[1]
             text = str(self._cache[2])
             self._loadindex(base, rev + 1)
+            if not self._inline and rev > base + 1:
+                df = self.opener(self.datafile)
         else:
             self._loadindex(base, rev + 1)
+            if not self._inline and rev > base:
+                df = self.opener(self.datafile)
             text = self.chunk(base, df=df)
 
         bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
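The revlog hunk stops opening the data file unconditionally and instead opens it inside whichever branch actually needs it: never for inline revlogs, and in the cached case only when more than one chunk past the cached revision remains to be read. The toy helper below (made-up name, no real I/O) just mirrors that branch structure to show when a shared file handle would be created:

    def needs_datafile(inline, cached_rev, base, rev):
        """Mirror the new branch structure: return True when the code above
        would call self.opener(self.datafile)."""
        if inline:
            return False                      # data lives inside the index file
        if cached_rev is not None and base <= cached_rev < rev:
            # cached text replaces the base chunk; only deltas after it remain
            return rev > cached_rev + 1
        return rev > base                     # base chunk plus at least one delta

    print(needs_datafile(True,  None, 3, 9))   # False: inline revlog
    print(needs_datafile(False, 7,    3, 8))   # False: single delta after cache
    print(needs_datafile(False, 7,    3, 9))   # True:  several deltas to read
    print(needs_datafile(False, None, 3, 9))   # True:  no cached text at all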
@@ -56,6 +56,17 @@ Convert a foreign SCM repository to a Me
     subdirectory into the root of the repository, use '.' as the path to
     rename to.
 
+    The splicemap is a file that allows insertion of synthetic
+    history, letting you specify the parents of a revision. This is
+    useful if you want to e.g. give a Subversion merge two parents, or
+    graft two disconnected series of history together. Each entry
+    contains a key, followed by a space, followed by one or two
+    values, separated by spaces. The key is the revision ID in the
+    source revision control system whose parents should be modified
+    (same format as a key in .hg/shamap). The values are the revision
+    IDs (in either the source or destination revision control system)
+    that should be used as the new parents for that node.
+
     Back end options:
 
     --config convert.hg.clonebranches=False (boolean)
@@ -81,6 +92,7 @@ options:
     --filemap     remap file names using contents of file
 -r --rev          import up to target revision REV
 -s --source-type  source repository type
+    --splicemap   splice synthesized history into place
     --datesort    try to sort changesets by date
 
 use "hg -v help convert" to show global options
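The new help text above documents the splicemap file format: one entry per line, a key (the source revision ID, in the same form as a .hg/shamap key) followed by one or two parent revision IDs, all separated by spaces. A hypothetical example file, with made-up 40-character hashes, might look like this:

    3f0de5e83b5e8a918cfbb56a95066552de168dd5 1cd979da44b090e5a1e6eee92f187b3c04eeee3e
    8e7e7a847c3fd3b5cee2e69a4d7a2e20b4a4c304 1cd979da44b090e5a1e6eee92f187b3c04eeee3e 0d2164f0ce0d8f1d6f94351eba04b794909be66c

The first line gives its changeset a single replacement parent; the second turns its changeset into a merge by listing two parents.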