@@ -0,0 +1,92 b'' | |||
|
1 | import sys, textwrap | |
|
2 | # import from the live mercurial repo | |
|
3 | sys.path.insert(0, "..") | |
|
4 | from mercurial.commands import table, globalopts | |
|
5 | from mercurial.i18n import gettext as _ | |
|
6 | ||
|
7 | def get_desc(docstr): | |
|
8 | if not docstr: | |
|
9 | return "", "" | |
|
10 | # sanitize | |
|
11 | docstr = docstr.strip("\n") | |
|
12 | docstr = docstr.rstrip() | |
|
13 | shortdesc = docstr.splitlines()[0].strip() | |
|
14 | ||
|
15 | i = docstr.find("\n") | |
|
16 | if i != -1: | |
|
17 | desc = docstr[i+2:] | |
|
18 | else: | |
|
19 | desc = " %s" % shortdesc | |
|
20 | return (shortdesc, desc) | |
|
21 | ||
|
22 | def get_opts(opts): | |
|
23 | for shortopt, longopt, default, desc in opts: | |
|
24 | allopts = [] | |
|
25 | if shortopt: | |
|
26 | allopts.append("-%s" % shortopt) | |
|
27 | if longopt: | |
|
28 | allopts.append("--%s" % longopt) | |
|
29 | desc += default and _(" (default: %s)") % default or "" | |
|
30 | yield(", ".join(allopts), desc) | |
|
31 | ||
|
32 | def get_cmd(cmd): | |
|
33 | d = {} | |
|
34 | attr = table[cmd] | |
|
35 | cmds = cmd.lstrip("^").split("|") | |
|
36 | ||
|
37 | d['synopsis'] = attr[2] | |
|
38 | d['cmd'] = cmds[0] | |
|
39 | d['aliases'] = cmd.split("|")[1:] | |
|
40 | d['desc'] = get_desc(attr[0].__doc__) | |
|
41 | d['opts'] = list(get_opts(attr[1])) | |
|
42 | return d | |
|
43 | ||
|
44 | ||
|
45 | def show_doc(ui): | |
|
46 | def bold(s, text=""): | |
|
47 | ui.write("%s\n%s\n%s\n" % (s, "="*len(s), text)) | |
|
48 | def underlined(s, text=""): | |
|
49 | ui.write("%s\n%s\n%s\n" % (s, "-"*len(s), text)) | |
|
50 | ||
|
51 | # print options | |
|
52 | underlined(_("OPTIONS")) | |
|
53 | for optstr, desc in get_opts(globalopts): | |
|
54 | ui.write("%s::\n %s\n\n" % (optstr, desc)) | |
|
55 | ||
|
56 | # print cmds | |
|
57 | underlined(_("COMMANDS")) | |
|
58 | h = {} | |
|
59 | for c, attr in table.items(): | |
|
60 | f = c.split("|")[0] | |
|
61 | f = f.lstrip("^") | |
|
62 | h[f] = c | |
|
63 | cmds = h.keys() | |
|
64 | cmds.sort() | |
|
65 | ||
|
66 | for f in cmds: | |
|
67 | if f.startswith("debug"): continue | |
|
68 | d = get_cmd(h[f]) | |
|
69 | # synopsis | |
|
70 | ui.write("%s::\n" % d['synopsis'].replace("hg ","", 1)) | |
|
71 | # description | |
|
72 | ui.write("%s\n\n" % d['desc'][1]) | |
|
73 | # options | |
|
74 | opt_output = list(d['opts']) | |
|
75 | if opt_output: | |
|
76 | opts_len = max([len(line[0]) for line in opt_output]) | |
|
77 | ui.write(_(" options:\n")) | |
|
78 | for optstr, desc in opt_output: | |
|
79 | if desc: | |
|
80 | s = "%-*s %s" % (opts_len, optstr, desc) | |
|
81 | else: | |
|
82 | s = optstr | |
|
83 | s = textwrap.fill(s, initial_indent=4 * " ", | |
|
84 | subsequent_indent=(6 + opts_len) * " ") | |
|
85 | ui.write("%s\n" % s) | |
|
86 | ui.write("\n") | |
|
87 | # aliases | |
|
88 | if d['aliases']: | |
|
89 | ui.write(_(" aliases: %s\n\n") % " ".join(d['aliases'])) | |
|
90 | ||
|
91 | if __name__ == "__main__": | |
|
92 | show_doc(sys.stdout) |
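For reference, a minimal sketch of how the helpers above behave (Python 2, like the script itself). It assumes the file is saved as gendoc.py next to a mercurial checkout, as the sys.path tweak at the top implies; the docstring used here is invented for the example:

    import sys
    sys.path.insert(0, "..")                  # same layout assumption as the script itself
    from gendoc import get_desc, show_doc     # assumes the file above is gendoc.py

    docstr = """annotate the specified files

    show changeset responsible for each line
    """
    short, desc = get_desc(docstr)
    print short      # -> "annotate the specified files"
    print desc       # -> the indented long description

    # show_doc() only needs an object with a write() method, so any
    # file-like object works (the script itself passes sys.stdout)
    show_doc(sys.stdout)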
@@ -0,0 +1,1308 b'' | |||
|
1 | #!/usr/bin/env python | |
|
2 | # queue.py - patch queues for mercurial | |
|
3 | # | |
|
4 | # Copyright 2005 Chris Mason <mason@suse.com> | |
|
5 | # | |
|
6 | # This software may be used and distributed according to the terms | |
|
7 | # of the GNU General Public License, incorporated herein by reference. | |
|
8 | ||
|
9 | from mercurial.demandload import * | |
|
10 | demandload(globals(), "os sys re struct traceback errno bz2") | |
|
11 | from mercurial.i18n import gettext as _ | |
|
12 | from mercurial import ui, hg, revlog, commands, util | |
|
13 | ||
|
14 | versionstr = "0.45" | |
|
15 | ||
|
16 | repomap = {} | |
|
17 | ||
|
18 | class queue: | |
|
19 | def __init__(self, ui, path, patchdir=None): | |
|
20 | self.opener = util.opener(path) | |
|
21 | self.basepath = path | |
|
22 | if patchdir: | |
|
23 | self.path = patchdir | |
|
24 | else: | |
|
25 | self.path = os.path.join(path, "patches") | |
|
26 | self.ui = ui | |
|
27 | self.applied = [] | |
|
28 | self.full_series = [] | |
|
29 | self.applied_dirty = 0 | |
|
30 | self.series_dirty = 0 | |
|
31 | self.series_path = os.path.join(self.path, "series") | |
|
32 | self.status_path = os.path.join(self.path, "status") | |
|
33 | ||
|
34 | s = self.series_path | |
|
35 | if os.path.exists(s): | |
|
36 | self.full_series = self.opener(s).read().splitlines() | |
|
37 | self.read_series(self.full_series) | |
|
38 | ||
|
39 | s = self.status_path | |
|
40 | if os.path.exists(s): | |
|
41 | self.applied = self.opener(s).read().splitlines() | |
|
42 | ||
|
43 | def find_series(self, patch): | |
|
44 | pre = re.compile("(\s*)([^#]+)") | |
|
45 | index = 0 | |
|
46 | for l in self.full_series: | |
|
47 | m = pre.match(l) | |
|
48 | if m: | |
|
49 | s = m.group(2) | |
|
50 | s = s.rstrip() | |
|
51 | if s == patch: | |
|
52 | return index | |
|
53 | index += 1 | |
|
54 | return None | |
|
55 | ||
|
56 | def read_series(self, list): | |
|
57 | def matcher(list): | |
|
58 | pre = re.compile("(\s*)([^#]+)") | |
|
59 | for l in list: | |
|
60 | m = pre.match(l) | |
|
61 | if m: | |
|
62 | s = m.group(2) | |
|
63 | s = s.rstrip() | |
|
64 | if len(s) > 0: | |
|
65 | yield s | |
|
66 | self.series = [] | |
|
67 | self.series = [ x for x in matcher(list) ] | |
|
68 | ||
|
69 | def save_dirty(self): | |
|
70 | if self.applied_dirty: | |
|
71 | if len(self.applied) > 0: | |
|
72 | nl = "\n" | |
|
73 | else: | |
|
74 | nl = "" | |
|
75 | f = self.opener(self.status_path, "w") | |
|
76 | f.write("\n".join(self.applied) + nl) | |
|
77 | if self.series_dirty: | |
|
78 | if len(self.full_series) > 0: | |
|
79 | nl = "\n" | |
|
80 | else: | |
|
81 | nl = "" | |
|
82 | f = self.opener(self.series_path, "w") | |
|
83 | f.write("\n".join(self.full_series) + nl) | |
|
84 | ||
|
85 | def readheaders(self, patch): | |
|
86 | def eatdiff(lines): | |
|
87 | while lines: | |
|
88 | l = lines[-1] | |
|
89 | if (l.startswith("diff -") or | |
|
90 | l.startswith("Index:") or | |
|
91 | l.startswith("===========")): | |
|
92 | del lines[-1] | |
|
93 | else: | |
|
94 | break | |
|
95 | def eatempty(lines): | |
|
96 | while lines: | |
|
97 | l = lines[-1] | |
|
98 | if re.match('\s*$', l): | |
|
99 | del lines[-1] | |
|
100 | else: | |
|
101 | break | |
|
102 | ||
|
103 | pf = os.path.join(self.path, patch) | |
|
104 | message = [] | |
|
105 | comments = [] | |
|
106 | user = None | |
|
107 | format = None | |
|
108 | subject = None | |
|
109 | diffstart = 0 | |
|
110 | ||
|
111 | for line in file(pf): | |
|
112 | line = line.rstrip() | |
|
113 | if diffstart: | |
|
114 | if line.startswith('+++ '): | |
|
115 | diffstart = 2 | |
|
116 | break | |
|
117 | if line.startswith("--- "): | |
|
118 | diffstart = 1 | |
|
119 | continue | |
|
120 | elif format == "hgpatch": | |
|
121 | # parse values when importing the result of an hg export | |
|
122 | if line.startswith("# User "): | |
|
123 | user = line[7:] | |
|
124 | elif not line.startswith("# ") and line: | |
|
125 | message.append(line) | |
|
126 | format = None | |
|
127 | elif line == '# HG changeset patch': | |
|
128 | format = "hgpatch" | |
|
129 | elif (format != "tagdone" and (line.startswith("Subject: ") or | |
|
130 | line.startswith("subject: "))): | |
|
131 | subject = line[9:] | |
|
132 | format = "tag" | |
|
133 | elif (format != "tagdone" and (line.startswith("From: ") or | |
|
134 | line.startswith("from: "))): | |
|
135 | user = line[6:] | |
|
136 | format = "tag" | |
|
137 | elif format == "tag" and line == "": | |
|
138 | # when looking for tags (subject: from: etc) they | |
|
139 | # end once you find a blank line in the source | |
|
140 | format = "tagdone" | |
|
141 | else: | |
|
142 | message.append(line) | |
|
143 | comments.append(line) | |
|
144 | ||
|
145 | eatdiff(message) | |
|
146 | eatdiff(comments) | |
|
147 | eatempty(message) | |
|
148 | eatempty(comments) | |
|
149 | ||
|
150 | # make sure message isn't empty | |
|
151 | if format and format.startswith("tag") and subject: | |
|
152 | message.insert(0, "") | |
|
153 | message.insert(0, subject) | |
|
154 | return (message, comments, user, diffstart > 1) | |
|
155 | ||
|
156 | def mergeone(self, repo, mergeq, head, patch, rev, wlock): | |
|
157 | # first try just applying the patch | |
|
158 | (err, n) = self.apply(repo, [ patch ], update_status=False, | |
|
159 | strict=True, merge=rev, wlock=wlock) | |
|
160 | ||
|
161 | if err == 0: | |
|
162 | return (err, n) | |
|
163 | ||
|
164 | if n is None: | |
|
165 | self.ui.warn("apply failed for patch %s\n" % patch) | |
|
166 | sys.exit(1) | |
|
167 | ||
|
168 | self.ui.warn("patch didn't work out, merging %s\n" % patch) | |
|
169 | ||
|
170 | # apply failed, strip away that rev and merge. | |
|
171 | repo.update(head, allow=False, force=True, wlock=wlock) | |
|
172 | self.strip(repo, n, update=False, backup='strip', wlock=wlock) | |
|
173 | ||
|
174 | c = repo.changelog.read(rev) | |
|
175 | ret = repo.update(rev, allow=True, wlock=wlock) | |
|
176 | if ret: | |
|
177 | self.ui.warn("update returned %d\n" % ret) | |
|
178 | sys.exit(1) | |
|
179 | n = repo.commit(None, c[4], c[1], force=1, wlock=wlock) | |
|
180 | if n == None: | |
|
181 | self.ui.warn("repo commit failed\n") | |
|
182 | sys.exit(1) | |
|
183 | try: | |
|
184 | message, comments, user, patchfound = mergeq.readheaders(patch) | |
|
185 | except: | |
|
186 | self.ui.warn("Unable to read %s\n" % patch) | |
|
187 | sys.exit(1) | |
|
188 | ||
|
189 | patchf = self.opener(os.path.join(self.path, patch), "w") | |
|
190 | if comments: | |
|
191 | comments = "\n".join(comments) + '\n\n' | |
|
192 | patchf.write(comments) | |
|
193 | commands.dodiff(patchf, self.ui, repo, head, n) | |
|
194 | patchf.close() | |
|
195 | return (0, n) | |
|
196 | ||
|
197 | def qparents(self, repo, rev=None): | |
|
198 | if rev is None: | |
|
199 | (p1, p2) = repo.dirstate.parents() | |
|
200 | if p2 == revlog.nullid: | |
|
201 | return p1 | |
|
202 | if len(self.applied) == 0: | |
|
203 | return None | |
|
204 | (top, patch) = self.applied[-1].split(':') | |
|
205 | top = revlog.bin(top) | |
|
206 | return top | |
|
207 | pp = repo.changelog.parents(rev) | |
|
208 | if pp[1] != revlog.nullid: | |
|
209 | arevs = [ x.split(':')[0] for x in self.applied ] | |
|
210 | p0 = revlog.hex(pp[0]) | |
|
211 | p1 = revlog.hex(pp[1]) | |
|
212 | if p0 in arevs: | |
|
213 | return pp[0] | |
|
214 | if p1 in arevs: | |
|
215 | return pp[1] | |
|
216 | return None | |
|
217 | return pp[0] | |
|
218 | ||
|
219 | def mergepatch(self, repo, mergeq, series, wlock): | |
|
220 | if len(self.applied) == 0: | |
|
221 | # each of the patches merged in will have two parents. This | |
|
222 | # can confuse the qrefresh, qdiff, and strip code because it | |
|
223 | # needs to know which parent is actually in the patch queue. | |
|
224 | # so, we insert a merge marker with only one parent. This way | |
|
225 | # the first patch in the queue is never a merge patch | |
|
226 | # | |
|
227 | pname = ".hg.patches.merge.marker" | |
|
228 | n = repo.commit(None, '[mq]: merge marker', user=None, force=1, | |
|
229 | wlock=wlock) | |
|
230 | self.applied.append(revlog.hex(n) + ":" + pname) | |
|
231 | self.applied_dirty = 1 | |
|
232 | ||
|
233 | head = self.qparents(repo) | |
|
234 | ||
|
235 | for patch in series: | |
|
236 | patch = mergeq.lookup(patch) | |
|
237 | if not patch: | |
|
238 | self.ui.warn("patch %s does not exist\n" % patch) | |
|
239 | return (1, None) | |
|
240 | ||
|
241 | info = mergeq.isapplied(patch) | |
|
242 | if not info: | |
|
243 | self.ui.warn("patch %s is not applied\n" % patch) | |
|
244 | return (1, None) | |
|
245 | rev = revlog.bin(info[1]) | |
|
246 | (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock) | |
|
247 | if head: | |
|
248 | self.applied.append(revlog.hex(head) + ":" + patch) | |
|
249 | self.applied_dirty = 1 | |
|
250 | if err: | |
|
251 | return (err, head) | |
|
252 | return (0, head) | |
|
253 | ||
|
254 | def apply(self, repo, series, list=False, update_status=True, | |
|
255 | strict=False, patchdir=None, merge=None, wlock=None): | |
|
256 | # TODO unify with commands.py | |
|
257 | if not patchdir: | |
|
258 | patchdir = self.path | |
|
259 | pwd = os.getcwd() | |
|
260 | os.chdir(repo.root) | |
|
261 | err = 0 | |
|
262 | if not wlock: | |
|
263 | wlock = repo.wlock() | |
|
264 | lock = repo.lock() | |
|
265 | tr = repo.transaction() | |
|
266 | n = None | |
|
267 | for patch in series: | |
|
268 | self.ui.warn("applying %s\n" % patch) | |
|
269 | pf = os.path.join(patchdir, patch) | |
|
270 | ||
|
271 | try: | |
|
272 | message, comments, user, patchfound = self.readheaders(patch) | |
|
273 | except: | |
|
274 | self.ui.warn("Unable to read %s\n" % pf) | |
|
275 | err = 1 | |
|
276 | break | |
|
277 | ||
|
278 | if not message: | |
|
279 | message = "imported patch %s\n" % patch | |
|
280 | else: | |
|
281 | if list: | |
|
282 | message.append("\nimported patch %s" % patch) | |
|
283 | message = '\n'.join(message) | |
|
284 | ||
|
285 | try: | |
|
286 | f = os.popen("patch -p1 --no-backup-if-mismatch < '%s'" % (pf)) | |
|
287 | except: | |
|
288 | self.ui.warn("patch failed, unable to continue (try -v)\n") | |
|
289 | err = 1 | |
|
290 | break | |
|
291 | files = [] | |
|
292 | fuzz = False | |
|
293 | for l in f: | |
|
294 | l = l.rstrip('\r\n') | |
|
295 | if self.ui.verbose: | |
|
296 | self.ui.warn(l + "\n") | |
|
297 | if l[:14] == 'patching file ': | |
|
298 | pf = os.path.normpath(l[14:]) | |
|
299 | # when patch finds a space in the file name, it puts | |
|
300 | # single quotes around the filename. strip them off | |
|
301 | if pf[0] == "'" and pf[-1] == "'": | |
|
302 | pf = pf[1:-1] | |
|
303 | if pf not in files: | |
|
304 | files.append(pf) | |
|
305 | printed_file = False | |
|
306 | file_str = l | |
|
307 | elif l.find('with fuzz') >= 0: | |
|
308 | if not printed_file: | |
|
309 | self.ui.warn(file_str + '\n') | |
|
310 | printed_file = True | |
|
311 | self.ui.warn(l + '\n') | |
|
312 | fuzz = True | |
|
313 | elif l.find('saving rejects to file') >= 0: | |
|
314 | self.ui.warn(l + '\n') | |
|
315 | elif l.find('FAILED') >= 0: | |
|
316 | if not printed_file: | |
|
317 | self.ui.warn(file_str + '\n') | |
|
318 | printed_file = True | |
|
319 | self.ui.warn(l + '\n') | |
|
320 | patcherr = f.close() | |
|
321 | ||
|
322 | if merge and len(files) > 0: | |
|
323 | # Mark as merged and update dirstate parent info | |
|
324 | repo.dirstate.update(repo.dirstate.filterfiles(files), 'm') | |
|
325 | p1, p2 = repo.dirstate.parents() | |
|
326 | repo.dirstate.setparents(p1, merge) | |
|
327 | if len(files) > 0: | |
|
328 | commands.addremove_lock(self.ui, repo, files, | |
|
329 | opts={}, wlock=wlock) | |
|
330 | n = repo.commit(files, message, user, force=1, lock=lock, | |
|
331 | wlock=wlock) | |
|
332 | ||
|
333 | if n == None: | |
|
334 | self.ui.warn("repo commit failed\n") | |
|
335 | sys.exit(1) | |
|
336 | ||
|
337 | if update_status: | |
|
338 | self.applied.append(revlog.hex(n) + ":" + patch) | |
|
339 | ||
|
340 | if patcherr: | |
|
341 | if not patchfound: | |
|
342 | self.ui.warn("patch %s is empty\n" % patch) | |
|
343 | err = 0 | |
|
344 | else: | |
|
345 | self.ui.warn("patch failed, rejects left in working dir\n") | |
|
346 | err = 1 | |
|
347 | break | |
|
348 | ||
|
349 | if fuzz and strict: | |
|
350 | self.ui.warn("fuzz found when applying patch, stopping\n") | |
|
351 | err = 1 | |
|
352 | break | |
|
353 | tr.close() | |
|
354 | os.chdir(pwd) | |
|
355 | return (err, n) | |
|
356 | ||
|
357 | def delete(self, repo, patch): | |
|
358 | patch = self.lookup(patch) | |
|
359 | info = self.isapplied(patch) | |
|
360 | if info: | |
|
361 | self.ui.warn("cannot delete applied patch %s\n" % patch) | |
|
362 | sys.exit(1) | |
|
363 | if patch not in self.series: | |
|
364 | self.ui.warn("patch %s not in series file\n" % patch) | |
|
365 | sys.exit(1) | |
|
366 | i = self.find_series(patch) | |
|
367 | del self.full_series[i] | |
|
368 | self.read_series(self.full_series) | |
|
369 | self.series_dirty = 1 | |
|
370 | ||
|
371 | def check_toppatch(self, repo): | |
|
372 | if len(self.applied) > 0: | |
|
373 | (top, patch) = self.applied[-1].split(':') | |
|
374 | top = revlog.bin(top) | |
|
375 | pp = repo.dirstate.parents() | |
|
376 | if top not in pp: | |
|
377 | self.ui.warn("queue top not at dirstate parents. top %s dirstate %s %s\n" % (revlog.short(top), revlog.short(pp[0]), revlog.short(pp[1]))) | |
|
378 | sys.exit(1) | |
|
379 | return top | |
|
380 | return None | |
|
381 | def check_localchanges(self, repo): | |
|
382 | (c, a, r, d, u) = repo.changes(None, None) | |
|
383 | if c or a or d or r: | |
|
384 | self.ui.write("Local changes found, refresh first\n") | |
|
385 | sys.exit(1) | |
|
386 | def new(self, repo, patch, msg=None, force=None): | |
|
387 | if not force: | |
|
388 | self.check_localchanges(repo) | |
|
389 | self.check_toppatch(repo) | |
|
390 | wlock = repo.wlock() | |
|
391 | insert = self.series_end() | |
|
392 | if msg: | |
|
393 | n = repo.commit([], "[mq]: %s" % msg, force=True, wlock=wlock) | |
|
394 | else: | |
|
395 | n = repo.commit([], | |
|
396 | "New patch: %s" % patch, force=True, wlock=wlock) | |
|
397 | if n == None: | |
|
398 | self.ui.warn("repo commit failed\n") | |
|
399 | sys.exit(1) | |
|
400 | self.full_series[insert:insert] = [patch] | |
|
401 | self.applied.append(revlog.hex(n) + ":" + patch) | |
|
402 | self.read_series(self.full_series) | |
|
403 | self.series_dirty = 1 | |
|
404 | self.applied_dirty = 1 | |
|
405 | p = self.opener(os.path.join(self.path, patch), "w") | |
|
406 | if msg: | |
|
407 | msg = msg + "\n" | |
|
408 | p.write(msg) | |
|
409 | p.close() | |
|
410 | wlock = None | |
|
411 | r = self.qrepo() | |
|
412 | if r: r.add([patch]) | |
|
413 | ||
|
414 | def strip(self, repo, rev, update=True, backup="all", wlock=None): | |
|
415 | def limitheads(chlog, stop): | |
|
416 | """return the list of all nodes that have no children""" | |
|
417 | p = {} | |
|
418 | h = [] | |
|
419 | stoprev = 0 | |
|
420 | if stop in chlog.nodemap: | |
|
421 | stoprev = chlog.rev(stop) | |
|
422 | ||
|
423 | for r in range(chlog.count() - 1, -1, -1): | |
|
424 | n = chlog.node(r) | |
|
425 | if n not in p: | |
|
426 | h.append(n) | |
|
427 | if n == stop: | |
|
428 | break | |
|
429 | if r < stoprev: | |
|
430 | break | |
|
431 | for pn in chlog.parents(n): | |
|
432 | p[pn] = 1 | |
|
433 | return h | |
|
434 | ||
|
435 | def bundle(cg): | |
|
436 | backupdir = repo.join("strip-backup") | |
|
437 | if not os.path.isdir(backupdir): | |
|
438 | os.mkdir(backupdir) | |
|
439 | name = os.path.join(backupdir, "%s" % revlog.short(rev)) | |
|
440 | name = savename(name) | |
|
441 | self.ui.warn("saving bundle to %s\n" % name) | |
|
442 | # TODO, exclusive open | |
|
443 | f = open(name, "wb") | |
|
444 | try: | |
|
445 | f.write("HG10") | |
|
446 | z = bz2.BZ2Compressor(9) | |
|
447 | while 1: | |
|
448 | chunk = cg.read(4096) | |
|
449 | if not chunk: | |
|
450 | break | |
|
451 | f.write(z.compress(chunk)) | |
|
452 | f.write(z.flush()) | |
|
453 | except: | |
|
454 | os.unlink(name) | |
|
455 | raise | |
|
456 | f.close() | |
|
457 | return name | |
|
458 | ||
|
459 | def stripall(rev, revnum): | |
|
460 | cl = repo.changelog | |
|
461 | c = cl.read(rev) | |
|
462 | mm = repo.manifest.read(c[0]) | |
|
463 | seen = {} | |
|
464 | ||
|
465 | for x in xrange(revnum, cl.count()): | |
|
466 | c = cl.read(cl.node(x)) | |
|
467 | for f in c[3]: | |
|
468 | if f in seen: | |
|
469 | continue | |
|
470 | seen[f] = 1 | |
|
471 | if f in mm: | |
|
472 | filerev = mm[f] | |
|
473 | else: | |
|
474 | filerev = 0 | |
|
475 | seen[f] = filerev | |
|
476 | # we go in two steps here so the strip loop happens in a | |
|
477 | # sensible order. When stripping many files, this helps keep | |
|
478 | # our disk access patterns under control. | |
|
479 | list = seen.keys() | |
|
480 | list.sort() | |
|
481 | for f in list: | |
|
482 | ff = repo.file(f) | |
|
483 | filerev = seen[f] | |
|
484 | if filerev != 0: | |
|
485 | if filerev in ff.nodemap: | |
|
486 | filerev = ff.rev(filerev) | |
|
487 | else: | |
|
488 | filerev = 0 | |
|
489 | ff.strip(filerev, revnum) | |
|
490 | ||
|
491 | if not wlock: | |
|
492 | wlock = repo.wlock() | |
|
493 | lock = repo.lock() | |
|
494 | chlog = repo.changelog | |
|
495 | # TODO delete the undo files, and handle undo of merge sets | |
|
496 | pp = chlog.parents(rev) | |
|
497 | revnum = chlog.rev(rev) | |
|
498 | ||
|
499 | if update: | |
|
500 | urev = self.qparents(repo, rev) | |
|
501 | repo.update(urev, allow=False, force=True, wlock=wlock) | |
|
502 | repo.dirstate.write() | |
|
503 | ||
|
504 | # save is a list of all the branches we are truncating away | |
|
505 | # that we actually want to keep. changegroup will be used | |
|
506 | # to preserve them and add them back after the truncate | |
|
507 | saveheads = [] | |
|
508 | savebases = {} | |
|
509 | ||
|
510 | tip = chlog.tip() | |
|
511 | heads = limitheads(chlog, rev) | |
|
512 | seen = {} | |
|
513 | ||
|
514 | # search through all the heads, finding those where the revision | |
|
515 | # we want to strip away is an ancestor. Also look for merges | |
|
516 | # that might be turned into new heads by the strip. | |
|
517 | while heads: | |
|
518 | h = heads.pop() | |
|
519 | n = h | |
|
520 | while True: | |
|
521 | seen[n] = 1 | |
|
522 | pp = chlog.parents(n) | |
|
523 | if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum: | |
|
524 | if pp[1] not in seen: | |
|
525 | heads.append(pp[1]) | |
|
526 | if pp[0] == revlog.nullid: | |
|
527 | break | |
|
528 | if chlog.rev(pp[0]) < revnum: | |
|
529 | break | |
|
530 | n = pp[0] | |
|
531 | if n == rev: | |
|
532 | break | |
|
533 | r = chlog.reachable(h, rev) | |
|
534 | if rev not in r: | |
|
535 | saveheads.append(h) | |
|
536 | for x in r: | |
|
537 | if chlog.rev(x) > revnum: | |
|
538 | savebases[x] = 1 | |
|
539 | ||
|
540 | # create a changegroup for all the branches we need to keep | |
|
541 | if backup == "all": | |
|
542 | backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip') | |
|
543 | bundle(backupch) | |
|
544 | if saveheads: | |
|
545 | backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip') | |
|
546 | chgrpfile = bundle(backupch) | |
|
547 | ||
|
548 | stripall(rev, revnum) | |
|
549 | ||
|
550 | change = chlog.read(rev) | |
|
551 | repo.manifest.strip(repo.manifest.rev(change[0]), revnum) | |
|
552 | chlog.strip(revnum, revnum) | |
|
553 | if saveheads: | |
|
554 | self.ui.status("adding branch\n") | |
|
555 | commands.unbundle(self.ui, repo, chgrpfile, update=False) | |
|
556 | if backup != "strip": | |
|
557 | os.unlink(chgrpfile) | |
|
558 | ||
|
559 | def isapplied(self, patch): | |
|
560 | """returns (index, rev, patch)""" | |
|
561 | for i in xrange(len(self.applied)): | |
|
562 | p = self.applied[i] | |
|
563 | a = p.split(':') | |
|
564 | if a[1] == patch: | |
|
565 | return (i, a[0], a[1]) | |
|
566 | return None | |
|
567 | ||
|
568 | def lookup(self, patch): | |
|
569 | if patch == None: | |
|
570 | return None | |
|
571 | if patch in self.series: | |
|
572 | return patch | |
|
573 | if not os.path.isfile(os.path.join(self.path, patch)): | |
|
574 | try: | |
|
575 | sno = int(patch) | |
|
576 | except(ValueError, OverflowError): | |
|
577 | self.ui.warn("patch %s not in series\n" % patch) | |
|
578 | sys.exit(1) | |
|
579 | if sno >= len(self.series): | |
|
580 | self.ui.warn("patch number %d is out of range\n" % sno) | |
|
581 | sys.exit(1) | |
|
582 | patch = self.series[sno] | |
|
583 | else: | |
|
584 | self.ui.warn("patch %s not in series\n" % patch) | |
|
585 | sys.exit(1) | |
|
586 | return patch | |
|
587 | ||
|
588 | def push(self, repo, patch=None, force=False, list=False, | |
|
589 | mergeq=None, wlock=None): | |
|
590 | if not wlock: | |
|
591 | wlock = repo.wlock() | |
|
592 | patch = self.lookup(patch) | |
|
593 | if patch and self.isapplied(patch): | |
|
594 | self.ui.warn("patch %s is already applied\n" % patch) | |
|
595 | sys.exit(1) | |
|
596 | if self.series_end() == len(self.series): | |
|
597 | self.ui.warn("File series fully applied\n") | |
|
598 | sys.exit(1) | |
|
599 | if not force: | |
|
600 | self.check_localchanges(repo) | |
|
601 | ||
|
602 | self.applied_dirty = 1 | |
|
603 | start = self.series_end() | |
|
604 | if start > 0: | |
|
605 | self.check_toppatch(repo) | |
|
606 | if not patch: | |
|
607 | patch = self.series[start] | |
|
608 | end = start + 1 | |
|
609 | else: | |
|
610 | end = self.series.index(patch, start) + 1 | |
|
611 | s = self.series[start:end] | |
|
612 | if mergeq: | |
|
613 | ret = self.mergepatch(repo, mergeq, s, wlock) | |
|
614 | else: | |
|
615 | ret = self.apply(repo, s, list, wlock=wlock) | |
|
616 | top = self.applied[-1].split(':')[1] | |
|
617 | if ret[0]: | |
|
618 | self.ui.write("Errors during apply, please fix and refresh %s\n" % | |
|
619 | top) | |
|
620 | else: | |
|
621 | self.ui.write("Now at: %s\n" % top) | |
|
622 | return ret[0] | |
|
623 | ||
|
624 | def pop(self, repo, patch=None, force=False, update=True, wlock=None): | |
|
625 | def getfile(f, rev): | |
|
626 | t = repo.file(f).read(rev) | |
|
627 | try: | |
|
628 | repo.wfile(f, "w").write(t) | |
|
629 | except IOError: | |
|
630 | os.makedirs(os.path.dirname(repo.wjoin(f))) | |
|
631 | repo.wfile(f, "w").write(t) | |
|
632 | ||
|
633 | if not wlock: | |
|
634 | wlock = repo.wlock() | |
|
635 | if patch: | |
|
636 | # index, rev, patch | |
|
637 | info = self.isapplied(patch) | |
|
638 | if not info: | |
|
639 | patch = self.lookup(patch) | |
|
640 | info = self.isapplied(patch) | |
|
641 | if not info: | |
|
642 | self.ui.warn("patch %s is not applied\n" % patch) | |
|
643 | sys.exit(1) | |
|
644 | if len(self.applied) == 0: | |
|
645 | self.ui.warn("No patches applied\n") | |
|
646 | sys.exit(1) | |
|
647 | ||
|
648 | if not update: | |
|
649 | parents = repo.dirstate.parents() | |
|
650 | rr = [ revlog.bin(x.split(':')[0]) for x in self.applied ] | |
|
651 | for p in parents: | |
|
652 | if p in rr: | |
|
653 | self.ui.warn("qpop: forcing dirstate update\n") | |
|
654 | update = True | |
|
655 | ||
|
656 | if not force and update: | |
|
657 | self.check_localchanges(repo) | |
|
658 | ||
|
659 | self.applied_dirty = 1 | |
|
660 | end = len(self.applied) | |
|
661 | if not patch: | |
|
662 | info = [len(self.applied) - 1] + self.applied[-1].split(':') | |
|
663 | start = info[0] | |
|
664 | rev = revlog.bin(info[1]) | |
|
665 | ||
|
666 | # we know there are no local changes, so we can make a simplified | |
|
667 | # form of hg.update. | |
|
668 | if update: | |
|
669 | top = self.check_toppatch(repo) | |
|
670 | qp = self.qparents(repo, rev) | |
|
671 | changes = repo.changelog.read(qp) | |
|
672 | mf1 = repo.manifest.readflags(changes[0]) | |
|
673 | mmap = repo.manifest.read(changes[0]) | |
|
674 | (c, a, r, d, u) = repo.changes(qp, top) | |
|
675 | if d: | |
|
676 | raise util.Abort("deletions found between repo revs") | |
|
677 | for f in c: | |
|
678 | getfile(f, mmap[f]) | |
|
679 | for f in r: | |
|
680 | getfile(f, mmap[f]) | |
|
681 | util.set_exec(repo.wjoin(f), mf1[f]) | |
|
682 | repo.dirstate.update(c + r, 'n') | |
|
683 | for f in a: | |
|
684 | try: os.unlink(repo.wjoin(f)) | |
|
685 | except: raise | |
|
686 | try: os.removedirs(os.path.dirname(repo.wjoin(f))) | |
|
687 | except: pass | |
|
688 | if a: | |
|
689 | repo.dirstate.forget(a) | |
|
690 | repo.dirstate.setparents(qp, revlog.nullid) | |
|
691 | self.strip(repo, rev, update=False, backup='strip', wlock=wlock) | |
|
692 | del self.applied[start:end] | |
|
693 | if len(self.applied): | |
|
694 | self.ui.write("Now at: %s\n" % self.applied[-1].split(':')[1]) | |
|
695 | else: | |
|
696 | self.ui.write("Patch queue now empty\n") | |
|
697 | ||
|
698 | def diff(self, repo, files): | |
|
699 | top = self.check_toppatch(repo) | |
|
700 | if not top: | |
|
701 | self.ui.write("No patches applied\n") | |
|
702 | return | |
|
703 | qp = self.qparents(repo, top) | |
|
704 | commands.dodiff(sys.stdout, self.ui, repo, qp, None, files) | |
|
705 | ||
|
706 | def refresh(self, repo, short=False): | |
|
707 | if len(self.applied) == 0: | |
|
708 | self.ui.write("No patches applied\n") | |
|
709 | return | |
|
710 | wlock = repo.wlock() | |
|
711 | self.check_toppatch(repo) | |
|
712 | qp = self.qparents(repo) | |
|
713 | (top, patch) = self.applied[-1].split(':') | |
|
714 | top = revlog.bin(top) | |
|
715 | cparents = repo.changelog.parents(top) | |
|
716 | patchparent = self.qparents(repo, top) | |
|
717 | message, comments, user, patchfound = self.readheaders(patch) | |
|
718 | ||
|
719 | patchf = self.opener(os.path.join(self.path, patch), "w") | |
|
720 | if comments: | |
|
721 | comments = "\n".join(comments) + '\n\n' | |
|
722 | patchf.write(comments) | |
|
723 | ||
|
724 | tip = repo.changelog.tip() | |
|
725 | if top == tip: | |
|
726 | # if the top of our patch queue is also the tip, there is an | |
|
727 | # optimization here. We update the dirstate in place and strip | |
|
728 | # off the tip commit. Then just commit the current directory | |
|
729 | # tree. We can also send repo.commit the list of files | |
|
730 | # changed to speed up the diff | |
|
731 | # | |
|
732 | # in short mode, we only diff the files included in the | |
|
733 | # patch already | |
|
734 | # | |
|
735 | # this should really read: | |
|
736 | #(cc, dd, aa, aa2, uu) = repo.changes(tip, patchparent) | |
|
737 | # but we do it backwards to take advantage of manifest/chlog | |
|
738 | # caching against the next repo.changes call | |
|
739 | # | |
|
740 | (cc, aa, dd, aa2, uu) = repo.changes(patchparent, tip) | |
|
741 | if short: | |
|
742 | filelist = cc + aa + dd | |
|
743 | else: | |
|
744 | filelist = None | |
|
745 | (c, a, r, d, u) = repo.changes(None, None, filelist) | |
|
746 | ||
|
747 | # we might end up with files that were added between tip and | |
|
748 | # the dirstate parent, but then changed in the local dirstate. | |
|
749 | # in this case, we want them to only show up in the added section | |
|
750 | for x in c: | |
|
751 | if x not in aa: | |
|
752 | cc.append(x) | |
|
753 | # we might end up with files added by the local dirstate that | |
|
754 | # were deleted by the patch. In this case, they should only | |
|
755 | # show up in the changed section. | |
|
756 | for x in a: | |
|
757 | if x in dd: | |
|
758 | del dd[dd.index(x)] | |
|
759 | cc.append(x) | |
|
760 | else: | |
|
761 | aa.append(x) | |
|
762 | # make sure any files deleted in the local dirstate | |
|
763 | # are not in the add or change column of the patch | |
|
764 | forget = [] | |
|
765 | for x in d + r: | |
|
766 | if x in aa: | |
|
767 | del aa[aa.index(x)] | |
|
768 | forget.append(x) | |
|
769 | continue | |
|
770 | elif x in cc: | |
|
771 | del cc[cc.index(x)] | |
|
772 | dd.append(x) | |
|
773 | ||
|
774 | c = list(util.unique(cc)) | |
|
775 | r = list(util.unique(dd)) | |
|
776 | a = list(util.unique(aa)) | |
|
777 | filelist = list(util.unique(c + r + a )) | |
|
778 | commands.dodiff(patchf, self.ui, repo, patchparent, None, | |
|
779 | filelist, changes=(c, a, r, [], u)) | |
|
780 | patchf.close() | |
|
781 | ||
|
782 | changes = repo.changelog.read(tip) | |
|
783 | repo.dirstate.setparents(*cparents) | |
|
784 | repo.dirstate.update(a, 'a') | |
|
785 | repo.dirstate.update(r, 'r') | |
|
786 | repo.dirstate.update(c, 'n') | |
|
787 | repo.dirstate.forget(forget) | |
|
788 | ||
|
789 | if not message: | |
|
790 | message = "patch queue: %s\n" % patch | |
|
791 | else: | |
|
792 | message = "\n".join(message) | |
|
793 | self.strip(repo, top, update=False, backup='strip', wlock=wlock) | |
|
794 | n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock) | |
|
795 | self.applied[-1] = revlog.hex(n) + ':' + patch | |
|
796 | self.applied_dirty = 1 | |
|
797 | else: | |
|
798 | commands.dodiff(patchf, self.ui, repo, patchparent, None) | |
|
799 | patchf.close() | |
|
800 | self.pop(repo, force=True, wlock=wlock) | |
|
801 | self.push(repo, force=True, wlock=wlock) | |
|
802 | ||
|
803 | def init(self, repo, create=False): | |
|
804 | if os.path.isdir(self.path): | |
|
805 | raise util.Abort("patch queue directory already exists") | |
|
806 | os.mkdir(self.path) | |
|
807 | if create: | |
|
808 | return self.qrepo(create=True) | |
|
809 | ||
|
810 | def unapplied(self, repo, patch=None): | |
|
811 | if patch and patch not in self.series: | |
|
812 | self.ui.warn("%s not in the series file\n" % patch) | |
|
813 | sys.exit(1) | |
|
814 | if not patch: | |
|
815 | start = self.series_end() | |
|
816 | else: | |
|
817 | start = self.series.index(patch) + 1 | |
|
818 | for p in self.series[start:]: | |
|
819 | self.ui.write("%s\n" % p) | |
|
820 | ||
|
821 | def qseries(self, repo, missing=None): | |
|
822 | start = self.series_end() | |
|
823 | if not missing: | |
|
824 | for p in self.series[:start]: | |
|
825 | if self.ui.verbose: | |
|
826 | self.ui.write("%d A " % self.series.index(p)) | |
|
827 | self.ui.write("%s\n" % p) | |
|
828 | for p in self.series[start:]: | |
|
829 | if self.ui.verbose: | |
|
830 | self.ui.write("%d U " % self.series.index(p)) | |
|
831 | self.ui.write("%s\n" % p) | |
|
832 | else: | |
|
833 | list = [] | |
|
834 | for root, dirs, files in os.walk(self.path): | |
|
835 | d = root[len(self.path) + 1:] | |
|
836 | for f in files: | |
|
837 | fl = os.path.join(d, f) | |
|
838 | if (fl not in self.series and fl != "status" and | |
|
839 | fl != "series" and not fl.startswith('.')): | |
|
840 | list.append(fl) | |
|
841 | list.sort() | |
|
842 | if list: | |
|
843 | for x in list: | |
|
844 | if self.ui.verbose: | |
|
845 | self.ui.write("D ") | |
|
846 | self.ui.write("%s\n" % x) | |
|
847 | ||
|
848 | def issaveline(self, l): | |
|
849 | name = l.split(':')[1] | |
|
850 | if name == '.hg.patches.save.line': | |
|
851 | return True | |
|
852 | ||
|
853 | def qrepo(self, create=False): | |
|
854 | if create or os.path.isdir(os.path.join(self.path, ".hg")): | |
|
855 | return hg.repository(ui=self.ui, path=self.path, create=create) | |
|
856 | ||
|
857 | def restore(self, repo, rev, delete=None, qupdate=None): | |
|
858 | c = repo.changelog.read(rev) | |
|
859 | desc = c[4].strip() | |
|
860 | lines = desc.splitlines() | |
|
861 | i = 0 | |
|
862 | datastart = None | |
|
863 | series = [] | |
|
864 | applied = [] | |
|
865 | qpp = None | |
|
866 | for i in xrange(0, len(lines)): | |
|
867 | if lines[i] == 'Patch Data:': | |
|
868 | datastart = i + 1 | |
|
869 | elif lines[i].startswith('Dirstate:'): | |
|
870 | l = lines[i].rstrip() | |
|
871 | l = l[10:].split(' ') | |
|
872 | qpp = [ hg.bin(x) for x in l ] | |
|
873 | elif datastart != None: | |
|
874 | l = lines[i].rstrip() | |
|
875 | index = l.index(':') | |
|
876 | id = l[:index] | |
|
877 | file = l[index + 1:] | |
|
878 | if id: | |
|
879 | applied.append(l) | |
|
880 | series.append(file) | |
|
881 | if datastart == None: | |
|
882 | self.ui.warn("No saved patch data found\n") | |
|
883 | return 1 | |
|
884 | self.ui.warn("restoring status: %s\n" % lines[0]) | |
|
885 | self.full_series = series | |
|
886 | self.applied = applied | |
|
887 | self.read_series(self.full_series) | |
|
888 | self.series_dirty = 1 | |
|
889 | self.applied_dirty = 1 | |
|
890 | heads = repo.changelog.heads() | |
|
891 | if delete: | |
|
892 | if rev not in heads: | |
|
893 | self.ui.warn("save entry has children, leaving it alone\n") | |
|
894 | else: | |
|
895 | self.ui.warn("removing save entry %s\n" % hg.short(rev)) | |
|
896 | pp = repo.dirstate.parents() | |
|
897 | if rev in pp: | |
|
898 | update = True | |
|
899 | else: | |
|
900 | update = False | |
|
901 | self.strip(repo, rev, update=update, backup='strip') | |
|
902 | if qpp: | |
|
903 | self.ui.warn("saved queue repository parents: %s %s\n" % | |
|
904 | (hg.short(qpp[0]), hg.short(qpp[1]))) | |
|
905 | if qupdate: | |
|
906 | print "queue directory updating" | |
|
907 | r = self.qrepo() | |
|
908 | if not r: | |
|
909 | self.ui.warn("Unable to load queue repository\n") | |
|
910 | return 1 | |
|
911 | r.update(qpp[0], allow=False, force=True) | |
|
912 | ||
|
913 | def save(self, repo, msg=None): | |
|
914 | if len(self.applied) == 0: | |
|
915 | self.ui.warn("save: no patches applied, exiting\n") | |
|
916 | return 1 | |
|
917 | if self.issaveline(self.applied[-1]): | |
|
918 | self.ui.warn("status is already saved\n") | |
|
919 | return 1 | |
|
920 | ||
|
921 | ar = [ ':' + x for x in self.full_series ] | |
|
922 | if not msg: | |
|
923 | msg = "hg patches saved state" | |
|
924 | else: | |
|
925 | msg = "hg patches: " + msg.rstrip('\r\n') | |
|
926 | r = self.qrepo() | |
|
927 | if r: | |
|
928 | pp = r.dirstate.parents() | |
|
929 | msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1])) | |
|
930 | msg += "\n\nPatch Data:\n" | |
|
931 | text = msg + "\n".join(self.applied) + '\n' + (ar and "\n".join(ar) | |
|
932 | + '\n' or "") | |
|
933 | n = repo.commit(None, text, user=None, force=1) | |
|
934 | if not n: | |
|
935 | self.ui.warn("repo commit failed\n") | |
|
936 | return 1 | |
|
937 | self.applied.append(revlog.hex(n) + ":" + '.hg.patches.save.line') | |
|
938 | self.applied_dirty = 1 | |
|
939 | ||
|
940 | def series_end(self): | |
|
941 | end = 0 | |
|
942 | if len(self.applied) > 0: | |
|
943 | (top, p) = self.applied[-1].split(':') | |
|
944 | try: | |
|
945 | end = self.series.index(p) | |
|
946 | except ValueError: | |
|
947 | return 0 | |
|
948 | return end + 1 | |
|
949 | return end | |
|
950 | ||
|
951 | def qapplied(self, repo, patch=None): | |
|
952 | if patch and patch not in self.series: | |
|
953 | self.ui.warn("%s not in the series file\n" % patch) | |
|
954 | sys.exit(1) | |
|
955 | if not patch: | |
|
956 | end = len(self.applied) | |
|
957 | else: | |
|
958 | end = self.series.index(patch) + 1 | |
|
959 | for x in xrange(end): | |
|
960 | p = self.appliedname(x) | |
|
961 | self.ui.write("%s\n" % p) | |
|
962 | ||
|
963 | def appliedname(self, index): | |
|
964 | p = self.applied[index] | |
|
965 | if not self.ui.verbose: | |
|
966 | p = p.split(':')[1] | |
|
967 | return p | |
|
968 | ||
|
969 | def top(self, repo): | |
|
970 | if len(self.applied): | |
|
971 | p = self.appliedname(-1) | |
|
972 | self.ui.write(p + '\n') | |
|
973 | else: | |
|
974 | self.ui.write("No patches applied\n") | |
|
975 | ||
|
976 | def next(self, repo): | |
|
977 | end = self.series_end() | |
|
978 | if end == len(self.series): | |
|
979 | self.ui.write("All patches applied\n") | |
|
980 | else: | |
|
981 | self.ui.write(self.series[end] + '\n') | |
|
982 | ||
|
983 | def prev(self, repo): | |
|
984 | if len(self.applied) > 1: | |
|
985 | p = self.appliedname(-2) | |
|
986 | self.ui.write(p + '\n') | |
|
987 | elif len(self.applied) == 1: | |
|
988 | self.ui.write("Only one patch applied\n") | |
|
989 | else: | |
|
990 | self.ui.write("No patches applied\n") | |
|
991 | ||
|
992 | def qimport(self, repo, files, patch=None, existing=None, force=None): | |
|
993 | if len(files) > 1 and patch: | |
|
994 | self.ui.warn("-n option not valid when importing multiple files\n") | |
|
995 | sys.exit(1) | |
|
996 | i = 0 | |
|
997 | for filename in files: | |
|
998 | if existing: | |
|
999 | if not patch: | |
|
1000 | patch = filename | |
|
1001 | if not os.path.isfile(os.path.join(self.path, patch)): | |
|
1002 | self.ui.warn("patch %s does not exist\n" % patch) | |
|
1003 | sys.exit(1) | |
|
1004 | else: | |
|
1005 | try: | |
|
1006 | text = file(filename).read() | |
|
1007 | except IOError: | |
|
1008 | self.ui.warn("Unable to read %s\n" % patch) | |
|
1009 | sys.exit(1) | |
|
1010 | if not patch: | |
|
1011 | patch = os.path.split(filename)[1] | |
|
1012 | if not force and os.path.isfile(os.path.join(self.path, patch)): | |
|
1013 | self.ui.warn("patch %s already exists\n" % patch) | |
|
1014 | sys.exit(1) | |
|
1015 | patchf = self.opener(os.path.join(self.path, patch), "w") | |
|
1016 | patchf.write(text) | |
|
1017 | if patch in self.series: | |
|
1018 | self.ui.warn("patch %s is already in the series file\n" % patch) | |
|
1019 | sys.exit(1) | |
|
1020 | index = self.series_end() + i | |
|
1021 | self.full_series[index:index] = [patch] | |
|
1022 | self.read_series(self.full_series) | |
|
1023 | self.ui.warn("adding %s to series file\n" % patch) | |
|
1024 | i += 1 | |
|
1025 | patch = None | |
|
1026 | self.series_dirty = 1 | |
|
1027 | ||
|
1028 | def delete(ui, repo, patch, **opts): | |
|
1029 | """remove a patch from the series file""" | |
|
1030 | q = repomap[repo] | |
|
1031 | q.delete(repo, patch) | |
|
1032 | q.save_dirty() | |
|
1033 | return 0 | |
|
1034 | ||
|
1035 | def applied(ui, repo, patch=None, **opts): | |
|
1036 | """print the patches already applied""" | |
|
1037 | repomap[repo].qapplied(repo, patch) | |
|
1038 | return 0 | |
|
1039 | ||
|
1040 | def unapplied(ui, repo, patch=None, **opts): | |
|
1041 | """print the patches not yet applied""" | |
|
1042 | repomap[repo].unapplied(repo, patch) | |
|
1043 | return 0 | |
|
1044 | ||
|
1045 | def qimport(ui, repo, *filename, **opts): | |
|
1046 | """import a patch""" | |
|
1047 | q = repomap[repo] | |
|
1048 | q.qimport(repo, filename, patch=opts['name'], | |
|
1049 | existing=opts['existing'], force=opts['force']) | |
|
1050 | q.save_dirty() | |
|
1051 | return 0 | |
|
1052 | ||
|
1053 | def init(ui, repo, **opts): | |
|
1054 | """init a new queue repository""" | |
|
1055 | q = repomap[repo] | |
|
1056 | r = q.init(repo, create=opts['create_repo']) | |
|
1057 | q.save_dirty() | |
|
1058 | if r: | |
|
1059 | fp = r.wopener('.hgignore', 'w') | |
|
1060 | print >> fp, 'syntax: glob' | |
|
1061 | print >> fp, 'status' | |
|
1062 | fp.close() | |
|
1063 | r.wopener('series', 'w').close() | |
|
1064 | r.add(['.hgignore', 'series']) | |
|
1065 | return 0 | |
|
1066 | ||
|
1067 | def commit(ui, repo, *pats, **opts): | |
|
1068 | q = repomap[repo] | |
|
1069 | r = q.qrepo() | |
|
1070 | if not r: raise util.Abort('no queue repository') | |
|
1071 | commands.commit(r.ui, r, *pats, **opts) | |
|
1072 | ||
|
1073 | def series(ui, repo, **opts): | |
|
1074 | """print the entire series file""" | |
|
1075 | repomap[repo].qseries(repo, missing=opts['missing']) | |
|
1076 | return 0 | |
|
1077 | ||
|
1078 | def top(ui, repo, **opts): | |
|
1079 | """print the name of the current patch""" | |
|
1080 | repomap[repo].top(repo) | |
|
1081 | return 0 | |
|
1082 | ||
|
1083 | def next(ui, repo, **opts): | |
|
1084 | """print the name of the next patch""" | |
|
1085 | repomap[repo].next(repo) | |
|
1086 | return 0 | |
|
1087 | ||
|
1088 | def prev(ui, repo, **opts): | |
|
1089 | """print the name of the previous patch""" | |
|
1090 | repomap[repo].prev(repo) | |
|
1091 | return 0 | |
|
1092 | ||
|
1093 | def new(ui, repo, patch, **opts): | |
|
1094 | """create a new patch""" | |
|
1095 | q = repomap[repo] | |
|
1096 | q.new(repo, patch, msg=opts['message'], force=opts['force']) | |
|
1097 | q.save_dirty() | |
|
1098 | return 0 | |
|
1099 | ||
|
1100 | def refresh(ui, repo, **opts): | |
|
1101 | """update the current patch""" | |
|
1102 | q = repomap[repo] | |
|
1103 | q.refresh(repo, short=opts['short']) | |
|
1104 | q.save_dirty() | |
|
1105 | return 0 | |
|
1106 | ||
|
1107 | def diff(ui, repo, *files, **opts): | |
|
1108 | """diff of the current patch""" | |
|
1109 | repomap[repo].diff(repo, files) | |
|
1110 | return 0 | |
|
1111 | ||
|
1112 | def lastsavename(path): | |
|
1113 | (dir, base) = os.path.split(path) | |
|
1114 | names = os.listdir(dir) | |
|
1115 | namere = re.compile("%s.([0-9]+)" % base) | |
|
1116 | max = None | |
|
1117 | maxname = None | |
|
1118 | for f in names: | |
|
1119 | m = namere.match(f) | |
|
1120 | if m: | |
|
1121 | index = int(m.group(1)) | |
|
1122 | if max == None or index > max: | |
|
1123 | max = index | |
|
1124 | maxname = f | |
|
1125 | if maxname: | |
|
1126 | return (os.path.join(dir, maxname), max) | |
|
1127 | return (None, None) | |
|
1128 | ||
|
1129 | def savename(path): | |
|
1130 | (last, index) = lastsavename(path) | |
|
1131 | if last is None: | |
|
1132 | index = 0 | |
|
1133 | newpath = path + ".%d" % (index + 1) | |
|
1134 | return newpath | |
|
1135 | ||
|
1136 | def push(ui, repo, patch=None, **opts): | |
|
1137 | """push the next patch onto the stack""" | |
|
1138 | q = repomap[repo] | |
|
1139 | mergeq = None | |
|
1140 | ||
|
1141 | if opts['all']: | |
|
1142 | patch = q.series[-1] | |
|
1143 | if opts['merge']: | |
|
1144 | if opts['name']: | |
|
1145 | newpath = opts['name'] | |
|
1146 | else: | |
|
1147 | newpath, i = lastsavename(q.path) | |
|
1148 | if not newpath: | |
|
1149 | ui.warn("no saved queues found, please use -n\n") | |
|
1150 | return 1 | |
|
1151 | mergeq = queue(ui, repo.join(""), newpath) | |
|
1152 | ui.warn("merging with queue at: %s\n" % mergeq.path) | |
|
1153 | ret = q.push(repo, patch, force=opts['force'], list=opts['list'], | |
|
1154 | mergeq=mergeq) | |
|
1155 | q.save_dirty() | |
|
1156 | return ret | |
|
1157 | ||
|
1158 | def pop(ui, repo, patch=None, **opts): | |
|
1159 | """pop the current patch off the stack""" | |
|
1160 | localupdate = True | |
|
1161 | if opts['name']: | |
|
1162 | q = queue(ui, repo.join(""), repo.join(opts['name'])) | |
|
1163 | ui.warn('using patch queue: %s\n' % q.path) | |
|
1164 | localupdate = False | |
|
1165 | else: | |
|
1166 | q = repomap[repo] | |
|
1167 | if opts['all'] and len(q.applied) > 0: | |
|
1168 | patch = q.applied[0].split(':')[1] | |
|
1169 | q.pop(repo, patch, force=opts['force'], update=localupdate) | |
|
1170 | q.save_dirty() | |
|
1171 | return 0 | |
|
1172 | ||
|
1173 | def restore(ui, repo, rev, **opts): | |
|
1174 | """restore the queue state saved by a rev""" | |
|
1175 | rev = repo.lookup(rev) | |
|
1176 | q = repomap[repo] | |
|
1177 | q.restore(repo, rev, delete=opts['delete'], | |
|
1178 | qupdate=opts['update']) | |
|
1179 | q.save_dirty() | |
|
1180 | return 0 | |
|
1181 | ||
|
1182 | def save(ui, repo, **opts): | |
|
1183 | """save current queue state""" | |
|
1184 | q = repomap[repo] | |
|
1185 | ret = q.save(repo, msg=opts['message']) | |
|
1186 | if ret: | |
|
1187 | return ret | |
|
1188 | q.save_dirty() | |
|
1189 | if opts['copy']: | |
|
1190 | path = q.path | |
|
1191 | if opts['name']: | |
|
1192 | newpath = os.path.join(q.basepath, opts['name']) | |
|
1193 | if os.path.exists(newpath): | |
|
1194 | if not os.path.isdir(newpath): | |
|
1195 | ui.warn("destination %s exists and is not a directory\n" % | |
|
1196 | newpath) | |
|
1197 | sys.exit(1) | |
|
1198 | if not opts['force']: | |
|
1199 | ui.warn("destination %s exists, use -f to force\n" % | |
|
1200 | newpath) | |
|
1201 | sys.exit(1) | |
|
1202 | else: | |
|
1203 | newpath = savename(path) | |
|
1204 | ui.warn("copy %s to %s\n" % (path, newpath)) | |
|
1205 | util.copyfiles(path, newpath) | |
|
1206 | if opts['empty']: | |
|
1207 | try: | |
|
1208 | os.unlink(q.status_path) | |
|
1209 | except: | |
|
1210 | pass | |
|
1211 | return 0 | |
|
1212 | ||
|
1213 | def strip(ui, repo, rev, **opts): | |
|
1214 | """strip a revision and all later revs on the same branch""" | |
|
1215 | rev = repo.lookup(rev) | |
|
1216 | backup = 'all' | |
|
1217 | if opts['backup']: | |
|
1218 | backup = 'strip' | |
|
1219 | elif opts['nobackup']: | |
|
1220 | backup = 'none' | |
|
1221 | repomap[repo].strip(repo, rev, backup=backup) | |
|
1222 | return 0 | |
|
1223 | ||
|
1224 | def version(ui, q=None): | |
|
1225 | """print the version number""" | |
|
1226 | ui.write("mq version %s\n" % versionstr) | |
|
1227 | return 0 | |
|
1228 | ||
|
1229 | def reposetup(ui, repo): | |
|
1230 | repomap[repo] = queue(ui, repo.join("")) | |
|
1231 | ||
|
1232 | cmdtable = { | |
|
1233 | "qapplied": (applied, [], 'hg qapplied [patch]'), | |
|
1234 | "qcommit|qci": | |
|
1235 | (commit, | |
|
1236 | [('A', 'addremove', None, _('run addremove during commit')), | |
|
1237 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
1238 | ('X', 'exclude', [], _('exclude names matching the given patterns')), | |
|
1239 | ('m', 'message', '', _('use <text> as commit message')), | |
|
1240 | ('l', 'logfile', '', _('read the commit message from <file>')), | |
|
1241 | ('d', 'date', '', _('record datecode as commit date')), | |
|
1242 | ('u', 'user', '', _('record user as committer'))], | |
|
1243 | 'hg qcommit [options] [files]'), | |
|
1244 | "^qdiff": (diff, [], 'hg qdiff [files]'), | |
|
1245 | "qdelete": (delete, [], 'hg qdelete [patch]'), | |
|
1246 | "^qimport": | |
|
1247 | (qimport, | |
|
1248 | [('e', 'existing', None, 'import file in patch dir'), | |
|
1249 | ('n', 'name', '', 'patch file name'), | |
|
1250 | ('f', 'force', None, 'overwrite existing files')], | |
|
1251 | 'hg qimport'), | |
|
1252 | "^qinit": | |
|
1253 | (init, | |
|
1254 | [('c', 'create-repo', None, 'create patch repository')], | |
|
1255 | 'hg [-c] qinit'), | |
|
1256 | "qnew": | |
|
1257 | (new, | |
|
1258 | [('m', 'message', '', 'commit message'), | |
|
1259 | ('f', 'force', None, 'force')], | |
|
1260 | 'hg qnew [-m message] patch'), | |
|
1261 | "qnext": (next, [], 'hg qnext'), | |
|
1262 | "qprev": (prev, [], 'hg qprev'), | |
|
1263 | "^qpop": | |
|
1264 | (pop, | |
|
1265 | [('a', 'all', None, 'pop all patches'), | |
|
1266 | ('n', 'name', '', 'queue name to pop'), | |
|
1267 | ('f', 'force', None, 'forget any local changes')], | |
|
1268 | 'hg qpop [options] [patch/index]'), | |
|
1269 | "^qpush": | |
|
1270 | (push, | |
|
1271 | [('f', 'force', None, 'apply if the patch has rejects'), | |
|
1272 | ('l', 'list', None, 'list patch name in commit text'), | |
|
1273 | ('a', 'all', None, 'apply all patches'), | |
|
1274 | ('m', 'merge', None, 'merge from another queue'), | |
|
1275 | ('n', 'name', '', 'merge queue name')], | |
|
1276 | 'hg qpush [options] [patch/index]'), | |
|
1277 | "^qrefresh": | |
|
1278 | (refresh, | |
|
1279 | [('s', 'short', None, 'short refresh')], | |
|
1280 | 'hg qrefresh'), | |
|
1281 | "qrestore": | |
|
1282 | (restore, | |
|
1283 | [('d', 'delete', None, 'delete save entry'), | |
|
1284 | ('u', 'update', None, 'update queue working dir')], | |
|
1285 | 'hg qrestore rev'), | |
|
1286 | "qsave": | |
|
1287 | (save, | |
|
1288 | [('m', 'message', '', 'commit message'), | |
|
1289 | ('c', 'copy', None, 'copy patch directory'), | |
|
1290 | ('n', 'name', '', 'copy directory name'), | |
|
1291 | ('e', 'empty', None, 'clear queue status file'), | |
|
1292 | ('f', 'force', None, 'force copy')], | |
|
1293 | 'hg qsave'), | |
|
1294 | "qseries": | |
|
1295 | (series, | |
|
1296 | [('m', 'missing', None, 'print patches not in series')], | |
|
1297 | 'hg qseries'), | |
|
1298 | "^strip": | |
|
1299 | (strip, | |
|
1300 | [('f', 'force', None, 'force multi-head removal'), | |
|
1301 | ('b', 'backup', None, 'bundle unrelated changesets'), | |
|
1302 | ('n', 'nobackup', None, 'no backups')], | |
|
1303 | 'hg strip rev'), | |
|
1304 | "qtop": (top, [], 'hg qtop'), | |
|
1305 | "qunapplied": (unapplied, [], 'hg qunapplied [patch]'), | |
|
1306 | "qversion": (version, [], 'hg qversion') | |
|
1307 | } | |
|
1308 |
|
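As a point of reference for the queue class above, a small standalone sketch (not mq's own API; the file names and node value are invented) of the two bookkeeping files it keeps in the patch directory: series lists one patch name per line, with "#" starting a comment (see read_series), and status records one hexnode:patchname line per applied patch (see apply and isapplied):

    import re

    def parse_series(lines):
        # mirrors queue.read_series(): keep the part before any '#' comment,
        # drop trailing whitespace, skip blank lines
        pre = re.compile(r"(\s*)([^#]+)")
        out = []
        for l in lines:
            m = pre.match(l)
            if m:
                s = m.group(2).rstrip()
                if s:
                    out.append(s)
        return out

    print parse_series(["patch-a.diff", "# a comment", "patch-b.diff  # note", ""])
    # -> ['patch-a.diff', 'patch-b.diff']

    # each status line pairs an applied patch with the changeset it created
    node, name = "0123456789abcdef0123456789abcdef01234567:patch-a.diff".split(":", 1)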
1 | NO CONTENT: new file 100644, binary diff hidden |
@@ -0,0 +1,48 b'' | |||
|
1 | body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; } | |
|
2 | a { color:#0000cc; } | |
|
3 | a:hover, a:visited, a:active { color:#880000; } | |
|
4 | div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; } | |
|
5 | div.page_header a:visited { color:#0000cc; } | |
|
6 | div.page_header a:hover { color:#880000; } | |
|
7 | div.page_nav { padding:8px; } | |
|
8 | div.page_nav a:visited { color:#0000cc; } | |
|
9 | div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px} | |
|
10 | div.page_footer { height:17px; padding:4px 8px; background-color: #d9d8d1; } | |
|
11 | div.page_footer_text { float:left; color:#555555; font-style:italic; } | |
|
12 | div.page_body { padding:8px; } | |
|
13 | div.title, a.title { | |
|
14 | display:block; padding:6px 8px; | |
|
15 | font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000; | |
|
16 | } | |
|
17 | a.title:hover { background-color: #d9d8d1; } | |
|
18 | div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; } | |
|
19 | div.log_body { padding:8px 8px 8px 150px; } | |
|
20 | span.age { position:relative; float:left; width:142px; font-style:italic; } | |
|
21 | div.log_link { | |
|
22 | padding:0px 8px; | |
|
23 | font-size:10px; font-family:sans-serif; font-style:normal; | |
|
24 | position:relative; float:left; width:136px; | |
|
25 | } | |
|
26 | div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; } | |
|
27 | a.list { text-decoration:none; color:#000000; } | |
|
28 | a.list:hover { text-decoration:underline; color:#880000; } | |
|
29 | table { padding:8px 4px; } | |
|
30 | th { padding:2px 5px; font-size:12px; text-align:left; } | |
|
31 | tr.light:hover, .parity0:hover { background-color:#edece6; } | |
|
32 | tr.dark, .parity1 { background-color:#f6f6f0; } | |
|
33 | tr.dark:hover, .parity1:hover { background-color:#edece6; } | |
|
34 | td { padding:2px 5px; font-size:12px; vertical-align:top; } | |
|
35 | td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; } | |
|
36 | div.pre { font-family:monospace; font-size:12px; white-space:pre; } | |
|
37 | div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; } | |
|
38 | div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; } | |
|
39 | div.search { margin:4px 8px; position:absolute; top:56px; right:12px } | |
|
40 | .linenr { color:#999999; text-decoration:none } | |
|
41 | a.rss_logo { | |
|
42 | float:right; padding:3px 0px; width:35px; line-height:10px; | |
|
43 | border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e; | |
|
44 | color:#ffffff; background-color:#ff6600; | |
|
45 | font-weight:bold; font-family:sans-serif; font-size:10px; | |
|
46 | text-align:center; text-decoration:none; | |
|
47 | } | |
|
48 | a.rss_logo:hover { background-color:#ee5500; } |
@@ -0,0 +1,70 b'' | |||
|
1 | a { text-decoration:none; } | |
|
2 | .parity0 { background-color: #dddddd; } | |
|
3 | .parity1 { background-color: #eeeeee; } | |
|
4 | .lineno { width: 60px; color: #aaaaaa; font-size: smaller; | |
|
5 | text-align: right; padding-right:1em; } | |
|
6 | .plusline { color: green; } | |
|
7 | .minusline { color: red; } | |
|
8 | .atline { color: purple; } | |
|
9 | .annotate { font-size: smaller; text-align: right; padding-right: 1em; } | |
|
10 | .buttons a { | |
|
11 | background-color: #666666; | |
|
12 | padding: 2pt; | |
|
13 | color: white; | |
|
14 | font-family: sans; | |
|
15 | font-weight: bold; | |
|
16 | } | |
|
17 | .navigate a { | |
|
18 | background-color: #ccc; | |
|
19 | padding: 2pt; | |
|
20 | font-family: sans; | |
|
21 | color: black; | |
|
22 | } | |
|
23 | ||
|
24 | .metatag { | |
|
25 | background-color: #888888; | |
|
26 | color: white; | |
|
27 | text-align: right; | |
|
28 | } | |
|
29 | ||
|
30 | /* Common */ | |
|
31 | pre { margin: 0; } | |
|
32 | ||
|
33 | .logo { | |
|
34 | background-color: #333; | |
|
35 | padding: 4pt; | |
|
36 | margin: 8pt 0 8pt 8pt; | |
|
37 | font-family: sans; | |
|
38 | font-size: 60%; | |
|
39 | color: white; | |
|
40 | float: right; | |
|
41 | clear: right; | |
|
42 | text-align: left; | |
|
43 | } | |
|
44 | ||
|
45 | .logo a { | |
|
46 | font-weight: bold; | |
|
47 | font-size: 150%; | |
|
48 | color: #999; | |
|
49 | } | |
|
50 | ||
|
51 | /* Changelog entries */ | |
|
52 | .changelogEntry { width: 100%; } | |
|
53 | .changelogEntry th { font-weight: normal; text-align: right; vertical-align: top; } | |
|
54 | .changelogEntry th.age, .changelogEntry th.firstline { font-weight: bold; } | |
|
55 | .changelogEntry th.firstline { text-align: left; width: inherit; } | |
|
56 | ||
|
57 | /* Tag entries */ | |
|
58 | #tagEntries { list-style: none; margin: 0; padding: 0; } | |
|
59 | #tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; } | |
|
60 | #tagEntries .tagEntry span.node { font-family: monospace; } | |
|
61 | ||
|
62 | /* Changeset entry */ | |
|
63 | #changesetEntry { } | |
|
64 | #changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } | |
|
65 | #changesetEntry th.files, #changesetEntry th.description { vertical-align: top; } | |
|
66 | ||
|
67 | /* File diff view */ | |
|
68 | #filediffEntry { } | |
|
69 | #filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } | |
|
70 |
@@ -0,0 +1,6 b'' | |||
|
1 | <item> | |
|
2 | <title>#tag|escape#</title> | |
|
3 | <link>#url#?cs=#node|short#</link> | |
|
4 | <description><![CDATA[#tag|strip|escape|addbreaks#]]></description> | |
|
5 | <pubDate>#date|rfc822date#</pubDate> | |
|
6 | </item> |
@@ -0,0 +1,6 b'' | |||
|
1 | #header# | |
|
2 | <title>#repo|escape#: tags </title> | |
|
3 | <description>#repo|escape# tag history</description> | |
|
4 | #entriesnotip%tagentry# | |
|
5 | </channel> | |
|
6 | </rss> |
@@ -0,0 +1,32 b'' | |||
|
1 | #!/bin/sh | |
|
2 | # | |
|
3 | # Corrupt an hg repo with a pull started during an aborted commit | |
|
4 | # | |
|
5 | ||
|
6 | # Create two repos, so that one of them can pull from the other one. | |
|
7 | hg init source | |
|
8 | cd source | |
|
9 | touch foo | |
|
10 | hg add foo | |
|
11 | hg ci -m 'add foo' | |
|
12 | hg clone . ../corrupted | |
|
13 | echo >> foo | |
|
14 | hg ci -m 'change foo' | |
|
15 | ||
|
16 | # Add a hook to wait 5 seconds and then abort the commit | |
|
17 | cd ../corrupted | |
|
18 | echo '[hooks]' >> .hg/hgrc | |
|
19 | echo 'pretxncommit = sleep 5; exit 1' >> .hg/hgrc | |
|
20 | ||
|
21 | # start a commit... | |
|
22 | touch bar | |
|
23 | hg add bar | |
|
24 | hg ci -m 'add bar' & | |
|
25 | ||
|
26 | # ... and start a pull while the commit is still running | |
|
27 | sleep 1 | |
|
28 | hg pull ../source 2>/dev/null | |
|
29 | ||
|
30 | # see what happened | |
|
31 | wait | |
|
32 | hg verify |
@@ -0,0 +1,15 b'' | |||
|
1 | pulling from ../source | |
|
2 | abort: pretxncommit hook exited with status 1 | |
|
3 | transaction abort! | |
|
4 | rollback completed | |
|
5 | searching for changes | |
|
6 | adding changesets | |
|
7 | adding manifests | |
|
8 | adding file changes | |
|
9 | added 1 changesets with 1 changes to 1 files | |
|
10 | (run 'hg update' to get a working copy) | |
|
11 | checking changesets | |
|
12 | checking manifests | |
|
13 | crosschecking files in changesets and manifests | |
|
14 | checking files | |
|
15 | 1 files, 2 changesets, 2 total revisions |
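Editorial note on the expected output above: the pretxncommit hook makes the commit abort and roll back, yet the pull that started in the middle still adds its changeset and "hg verify" reports a consistent repository, because the pull has to wait for the repository lock instead of writing into a half-finished transaction. As a rough illustration of that kind of timed lock wait (a sketch only -- this is not Mercurial's lock module, and the file name and helper names are made up):

    import os, time

    def acquire_lock(path, timeout=600):
        # Keep trying to create the lock file exclusively; give up after
        # `timeout` seconds.  A negative timeout waits forever, matching the
        # hgrc "timeout" description further down in this series.
        start = time.time()
        while True:
            try:
                fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
                os.close(fd)
                return
            except OSError:
                if timeout >= 0 and time.time() - start > timeout:
                    raise RuntimeError("timed out waiting for %s" % path)
                time.sleep(1)

    def release_lock(path):
        os.unlink(path)

Whichever process loses the race simply waits and retries, which is the behaviour the two corruption tests in this series exercise.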
@@ -0,0 +1,41 b'' | |||
|
1 | #!/bin/sh | |
|
2 | # | |
|
3 | # Corrupt an hg repo with two pulls. | |
|
4 | # | |
|
5 | ||
|
6 | # create one repo with a long history | |
|
7 | hg init source1 | |
|
8 | cd source1 | |
|
9 | touch foo | |
|
10 | hg add foo | |
|
11 | for i in 1 2 3 4 5 6 7 8 9 10; do | |
|
12 | echo $i >> foo | |
|
13 | hg ci -m $i | |
|
14 | done | |
|
15 | cd .. | |
|
16 | ||
|
17 | # create one repo with a shorter history | |
|
18 | hg clone -r 0 source1 source2 | |
|
19 | cd source2 | |
|
20 | echo a >> foo | |
|
21 | hg ci -m a | |
|
22 | cd .. | |
|
23 | ||
|
24 | # create a third repo to pull both other repos into it | |
|
25 | hg init corrupted | |
|
26 | cd corrupted | |
|
27 | # use a hook to make the second pull start while the first one is still running | |
|
28 | echo '[hooks]' >> .hg/hgrc | |
|
29 | echo 'prechangegroup = sleep 5' >> .hg/hgrc | |
|
30 | ||
|
31 | # start a pull... | |
|
32 | hg pull ../source1 & | |
|
33 | ||
|
34 | # ... and start another pull before the first one has finished | |
|
35 | sleep 1 | |
|
36 | hg pull ../source2 2>/dev/null | |
|
37 | ||
|
38 | # see the result | |
|
39 | wait | |
|
40 | hg verify | |
|
41 |
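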
@@ -0,0 +1,24 b'' | |||
|
1 | requesting all changes | |
|
2 | adding changesets | |
|
3 | adding manifests | |
|
4 | adding file changes | |
|
5 | added 1 changesets with 1 changes to 1 files | |
|
6 | pulling from ../source2 | |
|
7 | pulling from ../source1 | |
|
8 | requesting all changes | |
|
9 | adding changesets | |
|
10 | adding manifests | |
|
11 | adding file changes | |
|
12 | added 10 changesets with 10 changes to 1 files | |
|
13 | (run 'hg update' to get a working copy) | |
|
14 | searching for changes | |
|
15 | adding changesets | |
|
16 | adding manifests | |
|
17 | adding file changes | |
|
18 | added 1 changesets with 1 changes to 1 files (+1 heads) | |
|
19 | (run 'hg update' to get a working copy) | |
|
20 | checking changesets | |
|
21 | checking manifests | |
|
22 | crosschecking files in changesets and manifests | |
|
23 | checking files | |
|
24 | 1 files, 11 changesets, 11 total revisions |
@@ -0,0 +1,61 b'' | |||
|
1 | #!/bin/bash | |
|
2 | ||
|
3 | hg init test | |
|
4 | cd test | |
|
5 | cat >>afile <<EOF | |
|
6 | 0 | |
|
7 | EOF | |
|
8 | hg add afile | |
|
9 | hg commit -m "0.0" | |
|
10 | cat >>afile <<EOF | |
|
11 | 1 | |
|
12 | EOF | |
|
13 | hg commit -m "0.1" | |
|
14 | cat >>afile <<EOF | |
|
15 | 2 | |
|
16 | EOF | |
|
17 | hg commit -m "0.2" | |
|
18 | cat >>afile <<EOF | |
|
19 | 3 | |
|
20 | EOF | |
|
21 | hg commit -m "0.3" | |
|
22 | hg update -C 0 | |
|
23 | cat >>afile <<EOF | |
|
24 | 1 | |
|
25 | EOF | |
|
26 | hg commit -m "1.1" | |
|
27 | cat >>afile <<EOF | |
|
28 | 2 | |
|
29 | EOF | |
|
30 | hg commit -m "1.2" | |
|
31 | cat >fred <<EOF | |
|
32 | a line | |
|
33 | EOF | |
|
34 | cat >>afile <<EOF | |
|
35 | 3 | |
|
36 | EOF | |
|
37 | hg add fred | |
|
38 | hg commit -m "1.3" | |
|
39 | hg mv afile adifferentfile | |
|
40 | hg commit -m "1.3m" | |
|
41 | hg update -C 3 | |
|
42 | hg mv afile anotherfile | |
|
43 | hg commit -m "0.3m" | |
|
44 | hg debugindex .hg/data/afile.i | |
|
45 | hg debugindex .hg/data/adifferentfile.i | |
|
46 | hg debugindex .hg/data/anotherfile.i | |
|
47 | hg debugindex .hg/data/fred.i | |
|
48 | hg debugindex .hg/00manifest.i | |
|
49 | hg verify | |
|
50 | cd .. | |
|
51 | for i in 0 1 2 3 4 5 6 7 8; do | |
|
52 | mkdir test-"$i" | |
|
53 | hg --cwd test-"$i" init | |
|
54 | hg -R test push -r "$i" test-"$i" | |
|
55 | cd test-"$i" | |
|
56 | hg verify | |
|
57 | cd .. | |
|
58 | done | |
|
59 | cd test-8 | |
|
60 | hg pull ../test-7 | |
|
61 | hg verify |
@@ -0,0 +1,135 b'' | |||
|
1 | rev offset length base linkrev nodeid p1 p2 | |
|
2 | 0 0 3 0 0 362fef284ce2 000000000000 000000000000 | |
|
3 | 1 3 5 1 1 125144f7e028 362fef284ce2 000000000000 | |
|
4 | 2 8 7 2 2 4c982badb186 125144f7e028 000000000000 | |
|
5 | 3 15 9 3 3 19b1fc555737 4c982badb186 000000000000 | |
|
6 | rev offset length base linkrev nodeid p1 p2 | |
|
7 | 0 0 75 0 7 905359268f77 000000000000 000000000000 | |
|
8 | rev offset length base linkrev nodeid p1 p2 | |
|
9 | 0 0 75 0 8 905359268f77 000000000000 000000000000 | |
|
10 | rev offset length base linkrev nodeid p1 p2 | |
|
11 | 0 0 8 0 6 12ab3bcc5ea4 000000000000 000000000000 | |
|
12 | rev offset length base linkrev nodeid p1 p2 | |
|
13 | 0 0 48 0 0 43eadb1d2d06 000000000000 000000000000 | |
|
14 | 1 48 48 1 1 8b89697eba2c 43eadb1d2d06 000000000000 | |
|
15 | 2 96 48 2 2 626a32663c2f 8b89697eba2c 000000000000 | |
|
16 | 3 144 48 3 3 f54c32f13478 626a32663c2f 000000000000 | |
|
17 | 4 192 58 3 6 de68e904d169 626a32663c2f 000000000000 | |
|
18 | 5 250 68 3 7 3b45cc2ab868 de68e904d169 000000000000 | |
|
19 | 6 318 54 6 8 24d86153a002 f54c32f13478 000000000000 | |
|
20 | checking changesets | |
|
21 | checking manifests | |
|
22 | crosschecking files in changesets and manifests | |
|
23 | checking files | |
|
24 | 4 files, 9 changesets, 7 total revisions | |
|
25 | pushing to test-0 | |
|
26 | searching for changes | |
|
27 | adding changesets | |
|
28 | adding manifests | |
|
29 | adding file changes | |
|
30 | added 1 changesets with 1 changes to 1 files | |
|
31 | checking changesets | |
|
32 | checking manifests | |
|
33 | crosschecking files in changesets and manifests | |
|
34 | checking files | |
|
35 | 1 files, 1 changesets, 1 total revisions | |
|
36 | pushing to test-1 | |
|
37 | searching for changes | |
|
38 | adding changesets | |
|
39 | adding manifests | |
|
40 | adding file changes | |
|
41 | added 2 changesets with 2 changes to 1 files | |
|
42 | checking changesets | |
|
43 | checking manifests | |
|
44 | crosschecking files in changesets and manifests | |
|
45 | checking files | |
|
46 | 1 files, 2 changesets, 2 total revisions | |
|
47 | pushing to test-2 | |
|
48 | searching for changes | |
|
49 | adding changesets | |
|
50 | adding manifests | |
|
51 | adding file changes | |
|
52 | added 3 changesets with 3 changes to 1 files | |
|
53 | checking changesets | |
|
54 | checking manifests | |
|
55 | crosschecking files in changesets and manifests | |
|
56 | checking files | |
|
57 | 1 files, 3 changesets, 3 total revisions | |
|
58 | pushing to test-3 | |
|
59 | searching for changes | |
|
60 | adding changesets | |
|
61 | adding manifests | |
|
62 | adding file changes | |
|
63 | added 4 changesets with 4 changes to 1 files | |
|
64 | checking changesets | |
|
65 | checking manifests | |
|
66 | crosschecking files in changesets and manifests | |
|
67 | checking files | |
|
68 | 1 files, 4 changesets, 4 total revisions | |
|
69 | pushing to test-4 | |
|
70 | searching for changes | |
|
71 | adding changesets | |
|
72 | adding manifests | |
|
73 | adding file changes | |
|
74 | added 2 changesets with 2 changes to 1 files | |
|
75 | checking changesets | |
|
76 | checking manifests | |
|
77 | crosschecking files in changesets and manifests | |
|
78 | checking files | |
|
79 | 1 files, 2 changesets, 2 total revisions | |
|
80 | pushing to test-5 | |
|
81 | searching for changes | |
|
82 | adding changesets | |
|
83 | adding manifests | |
|
84 | adding file changes | |
|
85 | added 3 changesets with 3 changes to 1 files | |
|
86 | checking changesets | |
|
87 | checking manifests | |
|
88 | crosschecking files in changesets and manifests | |
|
89 | checking files | |
|
90 | 1 files, 3 changesets, 3 total revisions | |
|
91 | pushing to test-6 | |
|
92 | searching for changes | |
|
93 | adding changesets | |
|
94 | adding manifests | |
|
95 | adding file changes | |
|
96 | added 4 changesets with 5 changes to 2 files | |
|
97 | checking changesets | |
|
98 | checking manifests | |
|
99 | crosschecking files in changesets and manifests | |
|
100 | checking files | |
|
101 | 2 files, 4 changesets, 5 total revisions | |
|
102 | pushing to test-7 | |
|
103 | searching for changes | |
|
104 | adding changesets | |
|
105 | adding manifests | |
|
106 | adding file changes | |
|
107 | added 5 changesets with 6 changes to 3 files | |
|
108 | checking changesets | |
|
109 | checking manifests | |
|
110 | crosschecking files in changesets and manifests | |
|
111 | checking files | |
|
112 | 3 files, 5 changesets, 6 total revisions | |
|
113 | pushing to test-8 | |
|
114 | searching for changes | |
|
115 | adding changesets | |
|
116 | adding manifests | |
|
117 | adding file changes | |
|
118 | added 5 changesets with 5 changes to 2 files | |
|
119 | checking changesets | |
|
120 | checking manifests | |
|
121 | crosschecking files in changesets and manifests | |
|
122 | checking files | |
|
123 | 2 files, 5 changesets, 5 total revisions | |
|
124 | pulling from ../test-7 | |
|
125 | searching for changes | |
|
126 | adding changesets | |
|
127 | adding manifests | |
|
128 | adding file changes | |
|
129 | added 4 changesets with 2 changes to 3 files (+1 heads) | |
|
130 | (run 'hg update' to get a working copy) | |
|
131 | checking changesets | |
|
132 | checking manifests | |
|
133 | crosschecking files in changesets and manifests | |
|
134 | checking files | |
|
135 | 4 files, 9 changesets, 7 total revisions |
@@ -12,6 +12,7 b' tests/*.err' | |||
|
12 | 12 | build |
|
13 | 13 | dist |
|
14 | 14 | doc/*.[0-9] |
|
15 | doc/*.[0-9].gendoc.txt | |
|
15 | 16 | doc/*.[0-9].{x,ht}ml |
|
16 | 17 | MANIFEST |
|
17 | 18 | patches |
@@ -3,23 +3,26 b' shopt -s extglob' | |||
|
3 | 3 | _hg_command_list() |
|
4 | 4 | { |
|
5 | 5 | "$hg" --debug help 2>/dev/null | \ |
|
6 | awk 'function command_line(line) { | |
|
7 | gsub(/,/, "", line) | |
|
8 | gsub(/:.*/, "", line) | |
|
9 | split(line, aliases) | |
|
6 | awk -F', ' '/^list of commands:/ {commands=1} | |
|
7 | commands==1 && /^ [^ ]/ { | |
|
8 | line = substr($0, 2) | |
|
9 | colon = index(line, ":") | |
|
10 | if (colon > 0) | |
|
11 | line = substr(line, 1, colon-1) | |
|
12 | n = split(line, aliases) | |
|
10 | 13 | command = aliases[1] |
|
11 | delete aliases[1] | |
|
14 | if (index(command, "debug") == 1) { | |
|
15 | for (i=1; i<=n; i++) | |
|
16 | debug[j++] = aliases[i] | |
|
17 | next | |
|
18 | } | |
|
12 | 19 | print command |
|
13 | for (i in aliases) | |
|
20 | for (i=2; i<=n; i++) | |
|
14 | 21 | if (index(command, aliases[i]) != 1) |
|
15 | 22 | print aliases[i] |
|
16 | 23 | } |
|
17 | /^list of commands:/ {commands=1} | |
|
18 | commands && /^ debug/ {a[i++] = $0; next;} | |
|
19 | commands && /^ [^ ]/ {command_line($0)} | |
|
20 | 24 | /^global options:/ {exit 0} |
|
21 |
END {for (i in |
|
|
22 | ||
|
25 | END {for (i in debug) print debug[i]}' | |
|
23 | 26 | } |
|
24 | 27 | |
|
25 | 28 | _hg_option_list() |
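For readers who do not speak awk: the rewritten program splits each line of "hg --debug help" on ', ', cuts the text after the first ':', treats the first name as the command, holds back anything starting with "debug", and prints the held-back names after the normal commands. The same parsing restated in Python, purely as an illustration (it is not part of the patch, and it assumes the same help output format the awk expects):

    def commands_from_help(lines):
        # Mirror of the awk above: collect command names and aliases from
        # "hg --debug help" output, listing debug* commands last.
        names, debug, in_list = [], [], False
        for line in lines:
            line = line.rstrip('\n')
            if line.startswith('list of commands:'):
                in_list = True
            elif line.startswith('global options:'):
                break
            elif in_list and line[:1] == ' ' and line[1:2] != ' ':
                entry = line[1:].split(':')[0]
                aliases = entry.split(', ')
                if aliases[0].startswith('debug'):
                    debug.extend(aliases)
                else:
                    names.append(aliases[0])
                    names.extend(a for a in aliases[1:]
                                 if not aliases[0].startswith(a))
        return names + debug

Feeding it the lines of "hg --debug help" would yield the same list the completion function builds.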
@@ -187,7 +187,7 b' class bisect(object):' | |||
|
187 | 187 | check_clean(self.ui, self.repo) |
|
188 | 188 | rev = self.next() |
|
189 | 189 | self.ui.write("Now testing %s\n" % hg.hex(rev)) |
|
190 | return self.repo.update(rev, | |
|
190 | return self.repo.update(rev, force=True) | |
|
191 | 191 | |
|
192 | 192 | def good(self, rev): |
|
193 | 193 | self.goodrevs.append(rev) |
@@ -232,7 +232,7 b' def test(ui, repo, rev):' | |||
|
232 | 232 | b.good(new_rev) |
|
233 | 233 | ui.write("it is good\n") |
|
234 | 234 | anc = b.ancestors() |
|
235 | repo.update(new_rev, | |
|
235 | repo.update(new_rev, force=True) | |
|
236 | 236 | for v in anc: |
|
237 | 237 | if v != rev: |
|
238 | 238 | ui.warn("fail to found cset! :(\n") |
@@ -8,6 +8,12 b' man: $(MAN)' | |||
|
8 | 8 | |
|
9 | 9 | html: $(HTML) |
|
10 | 10 | |
|
11 | hg.1.txt: hg.1.gendoc.txt | |
|
12 | touch hg.1.txt | |
|
13 | ||
|
14 | hg.1.gendoc.txt: ../mercurial/commands.py | |
|
15 | python gendoc.py > $@ | |
|
16 | ||
|
11 | 17 | %: %.xml |
|
12 | 18 | xmlto man $*.xml |
|
13 | 19 |
@@ -14,42 +14,6 b' DESCRIPTION' | |||
|
14 | 14 | ----------- |
|
15 | 15 | The hg(1) command provides a command line interface to the Mercurial system. |
|
16 | 16 | |
|
17 | OPTIONS | |
|
18 | ------- | |
|
19 | ||
|
20 | -R, --repository:: | |
|
21 | repository root directory | |
|
22 | ||
|
23 | --cwd:: | |
|
24 | change working directory | |
|
25 | ||
|
26 | -y, --noninteractive:: | |
|
27 | do not prompt, assume 'yes' for any required answers | |
|
28 | ||
|
29 | -q, --quiet:: | |
|
30 | suppress output | |
|
31 | ||
|
32 | -v, --verbose:: | |
|
33 | enable additional output | |
|
34 | ||
|
35 | --debug:: | |
|
36 | enable debugging output | |
|
37 | ||
|
38 | --traceback:: | |
|
39 | print traceback on exception | |
|
40 | ||
|
41 | --time:: | |
|
42 | time how long the command takes | |
|
43 | ||
|
44 | --profile:: | |
|
45 | print command execution profile | |
|
46 | ||
|
47 | --version:: | |
|
48 | output version information and exit | |
|
49 | ||
|
50 | -h, --help:: | |
|
51 | display help and exit | |
|
52 | ||
|
53 | 17 | COMMAND ELEMENTS |
|
54 | 18 | ---------------- |
|
55 | 19 | |
@@ -70,586 +34,8 b' repository path::' | |||
|
70 | 34 | fast and the old-http:// protocol which is much slower but does not |
|
71 | 35 | require a special server on the web host. |
|
72 | 36 | |
|
73 | COMMANDS | |
|
74 | -------- | |
|
75 | 37 | |
|
76 | add [options] [files ...]:: | |
|
77 | Schedule files to be version controlled and added to the repository. | |
|
78 | ||
|
79 | The files will be added to the repository at the next commit. | |
|
80 | ||
|
81 | If no names are given, add all files in the current directory and | |
|
82 | its subdirectories. | |
|
83 | ||
|
84 | addremove [options] [files ...]:: | |
|
85 | Add all new files and remove all missing files from the repository. | |
|
86 | ||
|
87 | New files are ignored if they match any of the patterns in .hgignore. As | |
|
88 | with add, these changes take effect at the next commit. | |
|
89 | ||
|
90 | annotate [-r <rev> -u -n -c -d] [files ...]:: | |
|
91 | List changes in files, showing the revision id responsible for each line | |
|
92 | ||
|
93 | This command is useful to discover who did a change or when a change took | |
|
94 | place. | |
|
95 | ||
|
96 | Without the -a option, annotate will avoid processing files it | |
|
97 | detects as binary. With -a, annotate will generate an annotation | |
|
98 | anyway, probably with undesirable results. | |
|
99 | ||
|
100 | options: | |
|
101 | -a, --text treat all files as text | |
|
102 | -I, --include <pat> include names matching the given patterns | |
|
103 | -X, --exclude <pat> exclude names matching the given patterns | |
|
104 | -r, --revision <rev> annotate the specified revision | |
|
105 | -u, --user list the author | |
|
106 | -d, --date list the commit date | |
|
107 | -c, --changeset list the changeset | |
|
108 | -n, --number list the revision number (default) | |
|
109 | ||
|
110 | bundle <file> <other>:: | |
|
111 | (EXPERIMENTAL) | |
|
112 | ||
|
113 | Generate a compressed changegroup file collecting all changesets | |
|
114 | not found in the other repository. | |
|
115 | ||
|
116 | This file can then be transferred using conventional means and | |
|
117 | applied to another repository with the unbundle command. This is | |
|
118 | useful when native push and pull are not available or when | |
|
119 | exporting an entire repository is undesirable. The standard file | |
|
120 | extension is ".hg". | |
|
121 | ||
|
122 | Unlike import/export, this exactly preserves all changeset | |
|
123 | contents including permissions, rename data, and revision history. | |
|
124 | ||
|
125 | cat [options] <file ...>:: | |
|
126 | Print the specified files as they were at the given revision. | |
|
127 | If no revision is given then the tip is used. | |
|
128 | ||
|
129 | Output may be to a file, in which case the name of the file is | |
|
130 | given using a format string. The formatting rules are the same as | |
|
131 | for the export command, with the following additions: | |
|
132 | ||
|
133 | %s basename of file being printed | |
|
134 | %d dirname of file being printed, or '.' if in repo root | |
|
135 | %p root-relative path name of file being printed | |
|
136 | ||
|
137 | options: | |
|
138 | -I, --include <pat> include names matching the given patterns | |
|
139 | -X, --exclude <pat> exclude names matching the given patterns | |
|
140 | -o, --output <filespec> print output to file with formatted name | |
|
141 | -r, --rev <rev> print the given revision | |
|
142 | ||
|
143 | clone [options] <source> [dest]:: | |
|
144 | Create a copy of an existing repository in a new directory. | |
|
145 | ||
|
146 | If no destination directory name is specified, it defaults to the | |
|
147 | basename of the source. | |
|
148 | ||
|
149 | The location of the source is added to the new repository's | |
|
150 | .hg/hgrc file, as the default to be used for future pulls. | |
|
151 | ||
|
152 | For efficiency, hardlinks are used for cloning whenever the source | |
|
153 | and destination are on the same filesystem. Some filesystems, | |
|
154 | such as AFS, implement hardlinking incorrectly, but do not report | |
|
155 | errors. In these cases, use the --pull option to avoid | |
|
156 | hardlinking. | |
|
157 | ||
|
158 | See pull for valid source format details. | |
|
159 | ||
|
160 | options: | |
|
161 | -U, --noupdate do not update the new working directory | |
|
162 | --pull use pull protocol to copy metadata | |
|
163 | -e, --ssh specify ssh command to use | |
|
164 | --remotecmd specify hg command to run on the remote side | |
|
165 | ||
|
166 | commit [options] [files...]:: | |
|
167 | Commit changes to the given files into the repository. | |
|
168 | ||
|
169 | If a list of files is omitted, all changes reported by "hg status" | |
|
170 | from the root of the repository will be commited. | |
|
171 | ||
|
172 | The HGEDITOR or EDITOR environment variables are used to start an | |
|
173 | editor to add a commit comment. | |
|
174 | ||
|
175 | Options: | |
|
176 | ||
|
177 | -A, --addremove run addremove during commit | |
|
178 | -I, --include <pat> include names matching the given patterns | |
|
179 | -X, --exclude <pat> exclude names matching the given patterns | |
|
180 | -m, --message <text> use <text> as commit message | |
|
181 | -l, --logfile <file> read the commit message from <file> | |
|
182 | -d, --date <datecode> record datecode as commit date | |
|
183 | -u, --user <user> record user as commiter | |
|
184 | ||
|
185 | aliases: ci | |
|
186 | ||
|
187 | copy <source ...> <dest>:: | |
|
188 | Mark dest as having copies of source files. If dest is a | |
|
189 | directory, copies are put in that directory. If dest is a file, | |
|
190 | there can only be one source. | |
|
191 | ||
|
192 | By default, this command copies the contents of files as they | |
|
193 | stand in the working directory. If invoked with --after, the | |
|
194 | operation is recorded, but no copying is performed. | |
|
195 | ||
|
196 | This command takes effect in the next commit. | |
|
197 | ||
|
198 | NOTE: This command should be treated as experimental. While it | |
|
199 | should properly record copied files, this information is not yet | |
|
200 | fully used by merge, nor fully reported by log. | |
|
201 | ||
|
202 | Options: | |
|
203 | -A, --after record a copy that has already occurred | |
|
204 | -I, --include <pat> include names matching the given patterns | |
|
205 | -X, --exclude <pat> exclude names matching the given patterns | |
|
206 | -f, --force forcibly copy over an existing managed file | |
|
207 | ||
|
208 | aliases: cp | |
|
209 | ||
|
210 | diff [-a] [-r revision] [-r revision] [files ...]:: | |
|
211 | Show differences between revisions for the specified files. | |
|
212 | ||
|
213 | Differences between files are shown using the unified diff format. | |
|
214 | ||
|
215 | When two revision arguments are given, then changes are shown | |
|
216 | between those revisions. If only one revision is specified then | |
|
217 | that revision is compared to the working directory, and, when no | |
|
218 | revisions are specified, the working directory files are compared | |
|
219 | to its parent. | |
|
220 | ||
|
221 | Without the -a option, diff will avoid generating diffs of files | |
|
222 | it detects as binary. With -a, diff will generate a diff anyway, | |
|
223 | probably with undesirable results. | |
|
224 | ||
|
225 | options: | |
|
226 | -a, --text treat all files as text | |
|
227 | -I, --include <pat> include names matching the given patterns | |
|
228 | -p, --show-function show which function each change is in | |
|
229 | -X, --exclude <pat> exclude names matching the given patterns | |
|
230 | -w, --ignore-all-space ignore white space when comparing lines | |
|
231 | ||
|
232 | export [-o filespec] [revision] ...:: | |
|
233 | Print the changeset header and diffs for one or more revisions. | |
|
234 | ||
|
235 | The information shown in the changeset header is: author, | |
|
236 | changeset hash, parent and commit comment. | |
|
237 | ||
|
238 | Output may be to a file, in which case the name of the file is | |
|
239 | given using a format string. The formatting rules are as follows: | |
|
240 | ||
|
241 | %% literal "%" character | |
|
242 | %H changeset hash (40 bytes of hexadecimal) | |
|
243 | %N number of patches being generated | |
|
244 | %R changeset revision number | |
|
245 | %b basename of the exporting repository | |
|
246 | %h short-form changeset hash (12 bytes of hexadecimal) | |
|
247 | %n zero-padded sequence number, starting at 1 | |
|
248 | %r zero-padded changeset revision number | |
|
249 | ||
|
250 | Without the -a option, export will avoid generating diffs of files | |
|
251 | it detects as binary. With -a, export will generate a diff anyway, | |
|
252 | probably with undesirable results. | |
|
253 | ||
|
254 | options: | |
|
255 | -a, --text treat all files as text | |
|
256 | -o, --output <filespec> print output to file with formatted name | |
|
257 | ||
|
258 | forget [options] [files]:: | |
|
259 | Undo an 'hg add' scheduled for the next commit. | |
|
260 | ||
|
261 | options: | |
|
262 | -I, --include <pat> include names matching the given patterns | |
|
263 | -X, --exclude <pat> exclude names matching the given patterns | |
|
264 | ||
|
265 | grep [options] pattern [files]:: | |
|
266 | Search revisions of files for a regular expression. | |
|
267 | ||
|
268 | This command behaves differently than Unix grep. It only accepts | |
|
269 | Python/Perl regexps. It searches repository history, not the | |
|
270 | working directory. It always prints the revision number in which | |
|
271 | a match appears. | |
|
272 | ||
|
273 | By default, grep only prints output for the first revision of a | |
|
274 | file in which it finds a match. To get it to print every revision | |
|
275 | that contains a change in match status ("-" for a match that | |
|
276 | becomes a non-match, or "+" for a non-match that becomes a match), | |
|
277 | use the --all flag. | |
|
278 | ||
|
279 | options: | |
|
280 | -0, --print0 end fields with NUL | |
|
281 | -I, --include <pat> include names matching the given patterns | |
|
282 | -X, --exclude <pat> exclude names matching the given patterns | |
|
283 | --all print all revisions that match | |
|
284 | -i, --ignore-case ignore case when matching | |
|
285 | -l, --files-with-matches print only filenames and revs that match | |
|
286 | -n, --line-number print matching line numbers | |
|
287 | -r <rev>, --rev <rev> search in given revision range | |
|
288 | -u, --user print user who committed change | |
|
289 | ||
|
290 | heads:: | |
|
291 | Show all repository head changesets. | |
|
292 | ||
|
293 | Repository "heads" are changesets that don't have children | |
|
294 | changesets. They are where development generally takes place and | |
|
295 | are the usual targets for update and merge operations. | |
|
296 | ||
|
297 | identify:: | |
|
298 | Print a short summary of the current state of the repo. | |
|
299 | ||
|
300 | This summary identifies the repository state using one or two parent | |
|
301 | hash identifiers, followed by a "+" if there are uncommitted changes | |
|
302 | in the working directory, followed by a list of tags for this revision. | |
|
303 | ||
|
304 | aliases: id | |
|
305 | ||
|
306 | import [-p <n> -b <base> -f] <patches>:: | |
|
307 | Import a list of patches and commit them individually. | |
|
308 | ||
|
309 | If there are outstanding changes in the working directory, import | |
|
310 | will abort unless given the -f flag. | |
|
311 | ||
|
312 | If a patch looks like a mail message (its first line starts with | |
|
313 | "From " or looks like an RFC822 header), it will not be applied | |
|
314 | unless the -f option is used. The importer neither parses nor | |
|
315 | discards mail headers, so use -f only to override the "mailness" | |
|
316 | safety check, not to import a real mail message. | |
|
317 | ||
|
318 | options: | |
|
319 | -p, --strip <n> directory strip option for patch. This has the same | |
|
320 | meaning as the corresponding patch option | |
|
321 | -b <path> base directory to read patches from | |
|
322 | -f, --force skip check for outstanding uncommitted changes | |
|
323 | ||
|
324 | aliases: patch | |
|
325 | ||
|
326 | incoming [-p] [source]:: | |
|
327 | Show new changesets found in the specified repo or the default | |
|
328 | pull repo. These are the changesets that would be pulled if a pull | |
|
329 | was requested. | |
|
330 | ||
|
331 | Currently only local repositories are supported. | |
|
332 | ||
|
333 | options: | |
|
334 | -p, --patch show patch | |
|
335 | ||
|
336 | aliases: in | |
|
337 | ||
|
338 | init [dest]:: | |
|
339 | Initialize a new repository in the given directory. If the given | |
|
340 | directory does not exist, it is created. | |
|
341 | ||
|
342 | If no directory is given, the current directory is used. | |
|
343 | ||
|
344 | locate [options] [files]:: | |
|
345 | Print all files under Mercurial control whose names match the | |
|
346 | given patterns. | |
|
347 | ||
|
348 | This command searches the current directory and its | |
|
349 | subdirectories. To search an entire repository, move to the root | |
|
350 | of the repository. | |
|
351 | ||
|
352 | If no patterns are given to match, this command prints all file | |
|
353 | names. | |
|
354 | ||
|
355 | If you want to feed the output of this command into the "xargs" | |
|
356 | command, use the "-0" option to both this command and "xargs". | |
|
357 | This will avoid the problem of "xargs" treating single filenames | |
|
358 | that contain white space as multiple filenames. | |
|
359 | ||
|
360 | options: | |
|
361 | ||
|
362 | -0, --print0 end filenames with NUL, for use with xargs | |
|
363 | -f, --fullpath print complete paths from the filesystem root | |
|
364 | -I, --include <pat> include names matching the given patterns | |
|
365 | -r, --rev <rev> search the repository as it stood at rev | |
|
366 | -X, --exclude <pat> exclude names matching the given patterns | |
|
367 | ||
|
368 | log [-r revision ...] [-p] [files]:: | |
|
369 | Print the revision history of the specified files or the entire project. | |
|
370 | ||
|
371 | By default this command outputs: changeset id and hash, tags, | |
|
372 | parents, user, date and time, and a summary for each commit. The | |
|
373 | -v switch adds some more detail, such as changed files, manifest | |
|
374 | hashes or message signatures. | |
|
375 | ||
|
376 | options: | |
|
377 | -I, --include <pat> include names matching the given patterns | |
|
378 | -X, --exclude <pat> exclude names matching the given patterns | |
|
379 | -r, --rev <A> show the specified revision or range | |
|
380 | -p, --patch show patch | |
|
381 | ||
|
382 | aliases: history | |
|
383 | ||
|
384 | manifest [revision]:: | |
|
385 | Print a list of version controlled files for the given revision. | |
|
386 | ||
|
387 | The manifest is the list of files being version controlled. If no revision | |
|
388 | is given then the tip is used. | |
|
389 | ||
|
390 | outgoing [-p] [dest]:: | |
|
391 | Show changesets not found in the specified destination repo or the | |
|
392 | default push repo. These are the changesets that would be pushed | |
|
393 | if a push was requested. | |
|
394 | ||
|
395 | See pull for valid source format details. | |
|
396 | ||
|
397 | options: | |
|
398 | -p, --patch show patch | |
|
399 | ||
|
400 | aliases: out | |
|
401 | ||
|
402 | parents:: | |
|
403 | Print the working directory's parent revisions. | |
|
404 | ||
|
405 | paths [NAME]:: | |
|
406 | Show definition of symbolic path name NAME. If no name is given, show | |
|
407 | definition of available names. | |
|
408 | ||
|
409 | Path names are defined in the [paths] section of /etc/mercurial/hgrc | |
|
410 | and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too. | |
|
411 | ||
|
412 | pull <repository path>:: | |
|
413 | Pull changes from a remote repository to a local one. | |
|
414 | ||
|
415 | This finds all changes from the repository at the specified path | |
|
416 | or URL and adds them to the local repository. By default, this | |
|
417 | does not update the copy of the project in the working directory. | |
|
418 | ||
|
419 | Valid URLs are of the form: | |
|
420 | ||
|
421 | local/filesystem/path | |
|
422 | http://[user@]host[:port][/path] | |
|
423 | https://[user@]host[:port][/path] | |
|
424 | ssh://[user@]host[:port][/path] | |
|
425 | ||
|
426 | SSH requires an accessible shell account on the destination machine | |
|
427 | and a copy of hg in the remote path. With SSH, paths are relative | |
|
428 | to the remote user's home directory by default; use two slashes at | |
|
429 | the start of a path to specify it as relative to the filesystem root. | |
|
430 | ||
|
431 | options: | |
|
432 | -u, --update update the working directory to tip after pull | |
|
433 | -e, --ssh specify ssh command to use | |
|
434 | --remotecmd specify hg command to run on the remote side | |
|
435 | ||
|
436 | push <destination>:: | |
|
437 | Push changes from the local repository to the given destination. | |
|
438 | ||
|
439 | This is the symmetrical operation for pull. It helps to move | |
|
440 | changes from the current repository to a different one. If the | |
|
441 | destination is local this is identical to a pull in that directory | |
|
442 | from the current one. | |
|
443 | ||
|
444 | By default, push will refuse to run if it detects the result would | |
|
445 | increase the number of remote heads. This generally indicates the | |
|
446 | the client has forgotten to sync and merge before pushing. | |
|
447 | ||
|
448 | Valid URLs are of the form: | |
|
449 | ||
|
450 | local/filesystem/path | |
|
451 | ssh://[user@]host[:port][/path] | |
|
452 | ||
|
453 | SSH requires an accessible shell account on the destination | |
|
454 | machine and a copy of hg in the remote path. | |
|
455 | ||
|
456 | options: | |
|
457 | ||
|
458 | -f, --force force update | |
|
459 | -e, --ssh specify ssh command to use | |
|
460 | --remotecmd specify hg command to run on the remote side | |
|
461 | ||
|
462 | rawcommit [-p -d -u -F -m -l]:: | |
|
463 | Lowlevel commit, for use in helper scripts. (DEPRECATED) | |
|
464 | ||
|
465 | This command is not intended to be used by normal users, as it is | |
|
466 | primarily useful for importing from other SCMs. | |
|
467 | ||
|
468 | This command is now deprecated and will be removed in a future | |
|
469 | release, please use debugsetparents and commit instead. | |
|
470 | ||
|
471 | recover:: | |
|
472 | Recover from an interrupted commit or pull. | |
|
473 | ||
|
474 | This command tries to fix the repository status after an interrupted | |
|
475 | operation. It should only be necessary when Mercurial suggests it. | |
|
476 | ||
|
477 | remove [options] [files ...]:: | |
|
478 | Schedule the indicated files for removal from the repository. | |
|
479 | ||
|
480 | This command schedules the files to be removed at the next commit. | |
|
481 | This only removes files from the current branch, not from the | |
|
482 | entire project history. If the files still exist in the working | |
|
483 | directory, they will be deleted from it. | |
|
484 | ||
|
485 | aliases: rm | |
|
486 | ||
|
487 | rename <source ...> <dest>:: | |
|
488 | Mark dest as copies of sources; mark sources for deletion. If | |
|
489 | dest is a directory, copies are put in that directory. If dest is | |
|
490 | a file, there can only be one source. | |
|
491 | ||
|
492 | By default, this command copies the contents of files as they | |
|
493 | stand in the working directory. If invoked with --after, the | |
|
494 | operation is recorded, but no copying is performed. | |
|
495 | ||
|
496 | This command takes effect in the next commit. | |
|
497 | ||
|
498 | NOTE: This command should be treated as experimental. While it | |
|
499 | should properly record rename files, this information is not yet | |
|
500 | fully used by merge, nor fully reported by log. | |
|
501 | ||
|
502 | Options: | |
|
503 | -A, --after record a rename that has already occurred | |
|
504 | -f, --force forcibly copy over an existing managed file | |
|
505 | ||
|
506 | aliases: mv | |
|
507 | ||
|
508 | revert [names ...]:: | |
|
509 | The revert command has two modes of operation. | |
|
510 | ||
|
511 | In its default mode, it reverts any uncommitted modifications made | |
|
512 | to the named files or directories. This restores the contents of | |
|
513 | the affected files to an unmodified state. | |
|
514 | ||
|
515 | Using the -r option, it reverts the given files or directories to | |
|
516 | their state as of an earlier revision. This can be helpful to "roll | |
|
517 | back" some or all of a change that should not have been committed. | |
|
518 | ||
|
519 | Revert modifies the working directory. It does not commit any | |
|
520 | changes, or change the parent of the current working directory. | |
|
521 | ||
|
522 | If a file has been deleted, it is recreated. If the executable | |
|
523 | mode of a file was changed, it is reset. | |
|
524 | ||
|
525 | If a directory is given, all files in that directory and its | |
|
526 | subdirectories are reverted. | |
|
527 | ||
|
528 | If no arguments are given, all files in the current directory and | |
|
529 | its subdirectories are reverted. | |
|
530 | ||
|
531 | options: | |
|
532 | -r, --rev <rev> revision to revert to | |
|
533 | -n, --nonrecursive do not recurse into subdirectories | |
|
534 | ||
|
535 | root:: | |
|
536 | Print the root directory of the current repository. | |
|
537 | ||
|
538 | serve [options]:: | |
|
539 | Start a local HTTP repository browser and pull server. | |
|
540 | ||
|
541 | By default, the server logs accesses to stdout and errors to | |
|
542 | stderr. Use the "-A" and "-E" options to log to files. | |
|
543 | ||
|
544 | options: | |
|
545 | -A, --accesslog <file> name of access log file to write to | |
|
546 | -E, --errorlog <file> name of error log file to write to | |
|
547 | -a, --address <addr> address to use | |
|
548 | -p, --port <n> port to use (default: 8000) | |
|
549 | -n, --name <name> name to show in web pages (default: working dir) | |
|
550 | -t, --templatedir <path> web templates to use | |
|
551 | -6, --ipv6 use IPv6 in addition to IPv4 | |
|
552 | ||
|
553 | status [options] [files]:: | |
|
554 | Show changed files in the working directory. If no names are | |
|
555 | given, all files are shown. Otherwise, only files matching the | |
|
556 | given names are shown. | |
|
557 | ||
|
558 | The codes used to show the status of files are: | |
|
559 | ||
|
560 | M = changed | |
|
561 | A = added | |
|
562 | R = removed | |
|
563 | ? = not tracked | |
|
564 | ||
|
565 | options: | |
|
566 | ||
|
567 | -m, --modified show only modified files | |
|
568 | -a, --added show only added files | |
|
569 | -r, --removed show only removed files | |
|
570 | -u, --unknown show only unknown (not tracked) files | |
|
571 | -n, --no-status hide status prefix | |
|
572 | -0, --print0 end filenames with NUL, for use with xargs | |
|
573 | -I, --include <pat> include names matching the given patterns | |
|
574 | -X, --exclude <pat> exclude names matching the given patterns | |
|
575 | ||
|
576 | tag [-l -m <text> -d <datecode> -u <user>] <name> [revision]:: | |
|
577 | Name a particular revision using <name>. | |
|
578 | ||
|
579 | Tags are used to name particular revisions of the repository and are | |
|
580 | very useful to compare different revision, to go back to significant | |
|
581 | earlier versions or to mark branch points as releases, etc. | |
|
582 | ||
|
583 | If no revision is given, the tip is used. | |
|
584 | ||
|
585 | To facilitate version control, distribution, and merging of tags, | |
|
586 | they are stored as a file named ".hgtags" which is managed | |
|
587 | similarly to other project files and can be hand-edited if | |
|
588 | necessary. | |
|
589 | ||
|
590 | options: | |
|
591 | -l, --local make the tag local | |
|
592 | -m, --message <text> message for tag commit log entry | |
|
593 | -d, --date <datecode> datecode for commit | |
|
594 | -u, --user <user> user for commit | |
|
595 | ||
|
596 | Note: Local tags are not version-controlled or distributed and are | |
|
597 | stored in the .hg/localtags file. If there exists a local tag and | |
|
598 | a public tag with the same name, local tag is used. | |
|
599 | ||
|
600 | tags:: | |
|
601 | List the repository tags. | |
|
602 | ||
|
603 | This lists both regular and local tags. | |
|
604 | ||
|
605 | tip [-p]:: | |
|
606 | Show the tip revision. | |
|
607 | ||
|
608 | options: | |
|
609 | -p, --patch show patch | |
|
610 | ||
|
611 | unbundle <file>:: | |
|
612 | (EXPERIMENTAL) | |
|
613 | ||
|
614 | Apply a compressed changegroup file generated by the bundle | |
|
615 | command. | |
|
616 | ||
|
617 | undo:: | |
|
618 | Undo the last commit or pull transaction. | |
|
619 | ||
|
620 | Roll back the last pull or commit transaction on the | |
|
621 | repository, restoring the project to its earlier state. | |
|
622 | ||
|
623 | This command should be used with care. There is only one level of | |
|
624 | undo and there is no redo. | |
|
625 | ||
|
626 | This command is not intended for use on public repositories. Once | |
|
627 | a change is visible for pull by other users, undoing it locally is | |
|
628 | ineffective. | |
|
629 | ||
|
630 | update [-m -C] [revision]:: | |
|
631 | Update the working directory to the specified revision. | |
|
632 | ||
|
633 | By default, update will refuse to run if doing so would require | |
|
634 | merging or discarding local changes. | |
|
635 | ||
|
636 | With the -m option, a merge will be performed. | |
|
637 | ||
|
638 | With the -C option, local changes will be lost. | |
|
639 | ||
|
640 | options: | |
|
641 | -m, --merge allow merging of branches | |
|
642 | -C, --clean overwrite locally modified files | |
|
643 | ||
|
644 | aliases: up checkout co | |
|
645 | ||
|
646 | verify:: | |
|
647 | Verify the integrity of the current repository. | |
|
648 | ||
|
649 | This will perform an extensive check of the repository's | |
|
650 | integrity, validating the hashes and checksums of each entry in | |
|
651 | the changelog, manifest, and tracked files, as well as the | |
|
652 | integrity of their crosslinks and indices. | |
|
38 | include::hg.1.gendoc.txt[] | |
|
653 | 39 | |
|
654 | 40 | FILE NAME PATTERNS |
|
655 | 41 | ------------------ |
@@ -247,6 +247,9 b' ui::' | |||
|
247 | 247 | remote command to use for clone/push/pull operations. Default is 'hg'. |
|
248 | 248 | ssh;; |
|
249 | 249 | command to use for SSH connections. Default is 'ssh'. |
|
250 | timeout;; | |
|
251 | The timeout used when a lock is held (in seconds), a negative value | |
|
252 | means no timeout. Default is 600. | |
|
250 | 253 | username;; |
|
251 | 254 | The committer of a changeset created when running "commit". |
|
252 | 255 | Typically a person's name and email address, e.g. "Fred Widget |
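For example, putting the following in an hgrc makes Mercurial give up on a held lock after thirty seconds instead of the default ten minutes; a value of -1 would wait forever:

    [ui]
    timeout = 30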
@@ -49,20 +49,11 b'' | |||
|
49 | 49 | # to = recipient1, recipient2, ... |
|
50 | 50 | # cc = cc1, cc2, ... |
|
51 | 51 | |
|
52 | from email.MIMEMultipart import MIMEMultipart | |
|
53 | from email.MIMEText import MIMEText | |
|
54 | from email.Utils import parseaddr | |
|
55 | from mercurial import commands | |
|
56 | from mercurial import hg | |
|
57 | from mercurial import ui | |
|
52 | from mercurial.demandload import * | |
|
53 | demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils | |
|
54 | mercurial:commands,hg,ui | |
|
55 | os popen2 smtplib socket sys tempfile time''') | |
|
58 | 56 | from mercurial.i18n import gettext as _ |
|
59 | import os | |
|
60 | import popen2 | |
|
61 | import smtplib | |
|
62 | import socket | |
|
63 | import sys | |
|
64 | import tempfile | |
|
65 | import time | |
|
66 | 57 | |
|
67 | 58 | try: |
|
68 | 59 | # readline gives raw_input editing capabilities, but is not |
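With the imports routed through demandload, the email and smtplib modules are only loaded when first used, which matters for a command that usually exits without sending mail; it is also why the call sites below now spell out the fully qualified names (email.MIMEText.MIMEText and friends). A toy version of the idea -- this is not mercurial.demandload, just an illustration of a lazily imported module object:

    class lazymodule(object):
        # Import the named module on first attribute access only.
        def __init__(self, name):
            self._name = name
            self._module = None
        def __getattr__(self, attr):
            if self._module is None:
                self._module = __import__(self._name, {}, {}, ['x'])
            return getattr(self._module, attr)

    smtplib = lazymodule('smtplib')   # nothing imported yet
    SMTP = smtplib.SMTP               # first access triggers the real import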
@@ -149,7 +140,7 b' def patchbomb(ui, repo, *revs, **opts):' | |||
|
149 | 140 | if opts['diffstat']: |
|
150 | 141 | body += cdiffstat('\n'.join(desc), patch) + '\n\n' |
|
151 | 142 | body += '\n'.join(patch) |
|
152 | msg = MIMEText(body) | |
|
143 | msg = email.MIMEText.MIMEText(body) | |
|
153 | 144 | subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip()) |
|
154 | 145 | if subj.endswith('.'): subj = subj[:-1] |
|
155 | 146 | msg['Subject'] = subj |
@@ -194,7 +185,7 b' def patchbomb(ui, repo, *revs, **opts):' | |||
|
194 | 185 | sender = (opts['from'] or ui.config('patchbomb', 'from') or |
|
195 | 186 | prompt('From', ui.username())) |
|
196 | 187 | |
|
197 | msg = MIMEMultipart() | |
|
188 | msg = email.MIMEMultipart.MIMEMultipart() | |
|
198 | 189 | msg['Subject'] = '[PATCH 0 of %d] %s' % ( |
|
199 | 190 | len(patches), |
|
200 | 191 | opts['subject'] or |
@@ -217,13 +208,13 b' def patchbomb(ui, repo, *revs, **opts):' | |||
|
217 | 208 | if l == '.': break |
|
218 | 209 | body.append(l) |
|
219 | 210 | |
|
220 | msg.attach(MIMEText('\n'.join(body) + '\n')) | |
|
211 | msg.attach(email.MIMEText.MIMEText('\n'.join(body) + '\n')) | |
|
221 | 212 | |
|
222 | 213 | ui.write('\n') |
|
223 | 214 | |
|
224 | 215 | if opts['diffstat']: |
|
225 | 216 | d = cdiffstat(_('Final summary:\n'), jumbo) |
|
226 | if d: msg.attach(MIMEText(d)) | |
|
217 | if d: msg.attach(email.MIMEText.MIMEText(d)) | |
|
227 | 218 | |
|
228 | 219 | msgs.insert(0, msg) |
|
229 | 220 | |
@@ -241,7 +232,7 b' def patchbomb(ui, repo, *revs, **opts):' | |||
|
241 | 232 | s.login(username, password) |
|
242 | 233 | parent = None |
|
243 | 234 | tz = time.strftime('%z') |
|
244 | sender_addr = parseaddr(sender)[1] | |
|
235 | sender_addr = email.Utils.parseaddr(sender)[1] | |
|
245 | 236 | for m in msgs: |
|
246 | 237 | try: |
|
247 | 238 | m['Message-Id'] = genmsgid(m['X-Mercurial-Node']) |
@@ -17,28 +17,32 b' fi' | |||
|
17 | 17 | |
|
18 | 18 | # find decent versions of our utilities, insisting on the GNU versions where we |
|
19 | 19 | # need to |
|
20 | MERGE=merge | |
|
21 | DIFF3=gdiff3 | |
|
22 | DIFF=gdiff | |
|
23 | PATCH=gpatch | |
|
20 | MERGE="merge" | |
|
21 | DIFF3="gdiff3" | |
|
22 | DIFF="gdiff" | |
|
23 | PATCH="gpatch" | |
|
24 | 24 | |
|
25 | type $MERGE >/dev/null 2>&1 || MERGE= | |
|
26 | type $DIFF3 >/dev/null 2>&1 || DIFF3=diff3 | |
|
27 | type $DIFF >/dev/null 2>&1 || DIFF=diff | |
|
28 | type $PATCH >/dev/null 2>&1 || PATCH=patch | |
|
25 | type "$MERGE" >/dev/null 2>&1 || MERGE= | |
|
26 | type "$DIFF3" >/dev/null 2>&1 || DIFF3="diff3" | |
|
29 | 27 | $DIFF3 --version >/dev/null 2>&1 || DIFF3= |
|
28 | type "$DIFF" >/dev/null 2>&1 || DIFF="diff" | |
|
29 | type "$DIFF" >/dev/null 2>&1 || DIFF= | |
|
30 | type "$PATCH" >/dev/null 2>&1 || PATCH="patch" | |
|
31 | type "$PATCH" >/dev/null 2>&1 || PATCH= | |
|
30 | 32 | |
|
31 | 33 | # find optional visual utilities |
|
32 | FILEMERGE=/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge | |
|
33 | KDIFF3=kdiff3 | |
|
34 | TKDIFF=tkdiff | |
|
34 | FILEMERGE="/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge" | |
|
35 | KDIFF3="kdiff3" | |
|
36 | TKDIFF="tkdiff" | |
|
37 | MELD="meld" | |
|
35 | 38 | |
|
36 | type $FILEMERGE >/dev/null 2>&1 || FILEMERGE= | |
|
37 | type $KDIFF3 >/dev/null 2>&1 || KDIFF3= | |
|
38 | type $TKDIFF >/dev/null 2>&1 || TKDIFF= | |
|
39 | type "$FILEMERGE" >/dev/null 2>&1 || FILEMERGE= | |
|
40 | type "$KDIFF3" >/dev/null 2>&1 || KDIFF3= | |
|
41 | type "$TKDIFF" >/dev/null 2>&1 || TKDIFF= | |
|
42 | type "$MELD" >/dev/null 2>&1 || MELD= | |
|
39 | 43 | |
|
40 | 44 | # random part of names |
|
41 |
RAND="$RANDOM |
|
|
45 | RAND="$RANDOM$RANDOM" | |
|
42 | 46 | |
|
43 | 47 | # temporary directory for diff+patch merge |
|
44 | 48 | HGTMP="${TMPDIR-/tmp}/hgmerge.$RAND" |
@@ -68,6 +72,19 b' failure() {' | |||
|
68 | 72 | exit 1 |
|
69 | 73 | } |
|
70 | 74 | |
|
75 | # Ask if the merge was successful | |
|
76 | ask_if_merged() { | |
|
77 | while true; do | |
|
78 | echo "$LOCAL seems unchanged." | |
|
79 | echo "Was the merge successful? [y/n]" | |
|
80 | read answer | |
|
81 | case "$answer" in | |
|
82 | y*|Y*) success;; | |
|
83 | n*|N*) failure;; | |
|
84 | esac | |
|
85 | done | |
|
86 | } | |
|
87 | ||
|
71 | 88 | # Clean up when interrupted |
|
72 | 89 | trap "failure" 1 2 3 6 15 # HUP INT QUIT ABRT TERM |
|
73 | 90 | |
@@ -76,18 +93,16 b' mv "$LOCAL" "$BACKUP"' | |||
|
76 | 93 | cp "$BACKUP" "$LOCAL" |
|
77 | 94 | |
|
78 | 95 | # Attempt to do a non-interactive merge |
|
79 | if [ -n "$MERGE" ]; then | |
|
80 | $MERGE "$LOCAL" "$BASE" "$OTHER" 2> /dev/null && success | |
|
81 | cp "$BACKUP" "$LOCAL" | |
|
82 | elif [ -n "$DIFF3" ]; then | |
|
83 |
|
|
|
84 | $DIFF3 -m "$BACKUP" "$BASE" "$OTHER" > "$LOCAL" && success | |
|
85 |
if [ $? - |
|
|
86 |
echo " |
|
|
87 | cp "$BACKUP" "$LOCAL" | |
|
96 | if [ -n "$MERGE" -o -n "$DIFF3" ]; then | |
|
97 | if [ -n "$MERGE" ]; then | |
|
98 | $MERGE "$LOCAL" "$BASE" "$OTHER" 2> /dev/null && success | |
|
99 | elif [ -n "$DIFF3" ]; then | |
|
100 | $DIFF3 -m "$BACKUP" "$BASE" "$OTHER" > "$LOCAL" && success | |
|
101 | fi | |
|
102 | if [ $? -gt 1 ]; then | |
|
103 | echo "automatic merge failed! Exiting." 1>&2 | |
|
88 | 104 | failure |
|
89 | 105 | fi |
|
90 | cp "$BACKUP" "$LOCAL" | |
|
91 | 106 | fi |
|
92 | 107 | |
|
93 | 108 | # on MacOS X try FileMerge.app, shipped with Apple's developer tools |
@@ -97,71 +112,66 b' if [ -n "$FILEMERGE" ]; then' | |||
|
97 | 112 | # filemerge prefers the right by default |
|
98 | 113 | $FILEMERGE -left "$OTHER" -right "$LOCAL" -ancestor "$BASE" -merge "$LOCAL" |
|
99 | 114 | [ $? -ne 0 ] && echo "FileMerge failed to launch" && failure |
|
100 |
|
|
|
101 | then | |
|
102 | success | |
|
103 | else | |
|
104 | echo "$LOCAL seems unchanged. Was the merge successful?" | |
|
105 | select answer in yes no | |
|
106 | do | |
|
107 | test "$answer" == "yes" && success || failure | |
|
108 | done | |
|
109 | fi | |
|
110 | failure | |
|
115 | test "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged | |
|
111 | 116 | fi |
|
112 | 117 | |
|
113 | 118 | if [ -n "$DISPLAY" ]; then |
|
114 | 119 | # try using kdiff3, which is fairly nice |
|
115 | 120 | if [ -n "$KDIFF3" ]; then |
|
116 |
|
|
|
117 |
|
|
|
121 | $KDIFF3 --auto "$BASE" "$BACKUP" "$OTHER" -o "$LOCAL" || failure | |
|
122 | success | |
|
118 | 123 | fi |
|
119 | 124 | |
|
120 | 125 | # try using tkdiff, which is a bit less sophisticated |
|
121 | 126 | if [ -n "$TKDIFF" ]; then |
|
122 |
|
|
|
123 |
|
|
|
127 | $TKDIFF "$BACKUP" "$OTHER" -a "$BASE" -o "$LOCAL" || failure | |
|
128 | success | |
|
129 | fi | |
|
130 | ||
|
131 | if [ -n "$MELD" ]; then | |
|
132 | cp "$BACKUP" "$CHGTEST" | |
|
133 | # protect our feet - meld allows us to save to the left file | |
|
134 | cp "$BACKUP" "$LOCAL.tmp.$RAND" | |
|
135 | # Meld doesn't have automatic merging, so to reduce intervention | |
|
136 | # use the file with conflicts | |
|
137 | $MELD "$LOCAL.tmp.$RAND" "$LOCAL" "$OTHER" || failure | |
|
138 | # Also it doesn't return good error code | |
|
139 | test "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged | |
|
124 | 140 | fi |
|
125 | 141 | fi |
|
126 | 142 | |
|
127 | 143 | # Attempt to do a merge with $EDITOR |
|
128 | if [ -n "$MERGE" ]; then | |
|
129 | echo "conflicts detected in $LOCAL" | |
|
130 | $MERGE "$LOCAL" "$BASE" "$OTHER" 2>/dev/null || $EDITOR "$LOCAL" | |
|
131 | success | |
|
132 | fi | |
|
133 | ||
|
134 | if [ -n "$DIFF3" ]; then | |
|
144 | if [ -n "$MERGE" -o -n "$DIFF3" ]; then | |
|
135 | 145 | echo "conflicts detected in $LOCAL" |
|
136 | $DIFF3 -m "$BACKUP" "$BASE" "$OTHER" > "$LOCAL" || { | |
|
137 | case $? in | |
|
138 | 1) | |
|
139 | $EDITOR "$LOCAL" ;; | |
|
140 | 2) echo "$DIFF3 failed! Exiting." 1>&2 | |
|
141 | cp "$BACKUP" "$LOCAL" | |
|
142 | failure ;; | |
|
143 | esac | |
|
144 | success | |
|
145 | } | |
|
146 | cp "$BACKUP" "$CHGTEST" | |
|
147 | $EDITOR "$LOCAL" || failure | |
|
148 | # Some editors do not return meaningful error codes | |
|
149 | # Do not take any chances | |
|
150 | test "$LOCAL" -nt "$CHGTEST" && success || ask_if_merged | |
|
146 | 151 | fi |
|
147 | 152 | |
|
148 | 153 | # attempt to manually merge with diff and patch |
|
149 | 154 | if [ -n "$DIFF" -a -n "$PATCH" ]; then |
|
150 | 155 | |
|
151 | 156 | (umask 077 && mkdir "$HGTMP") || { |
|
152 |
|
|
|
153 |
|
|
|
157 | echo "Could not create temporary directory $HGTMP" 1>&2 | |
|
158 | failure | |
|
154 | 159 | } |
|
155 | 160 | |
|
156 | 161 | $DIFF -u "$BASE" "$OTHER" > "$HGTMP/diff" || : |
|
157 | 162 | if $PATCH "$LOCAL" < "$HGTMP/diff"; then |
|
158 |
|
|
|
163 | success | |
|
159 | 164 | else |
|
160 |
|
|
|
161 |
|
|
|
165 | # If rejects are empty after using the editor, merge was ok | |
|
166 | $EDITOR "$LOCAL" "$LOCAL.rej" || failure | |
|
167 | test -s "$LOCAL.rej" || success | |
|
162 | 168 | fi |
|
163 | 169 | failure |
|
164 | 170 | fi |
|
165 | 171 | |
|
166 | echo "hgmerge: unable to find merge, tkdiff, kdiff3, or diff+patch!" | |
|
172 | echo | |
|
173 | echo "hgmerge: unable to find any merge utility!" | |
|
174 | echo "supported programs:" | |
|
175 | echo "merge, FileMerge, tkdiff, kdiff3, meld, diff+patch" | |
|
176 | echo | |
|
167 | 177 | failure |
@@ -8,10 +8,21 b' cgitb.enable()' | |||
|
8 | 8 | # sys.path.insert(0, "/path/to/python/lib") # if not a system-wide install |
|
9 | 9 | from mercurial import hgweb |
|
10 | 10 | |
|
11 | # The config file looks like this | |
|
11 | # The config file looks like this. You can have paths to individual | |
|
12 | # repos, collections of repos in a directory tree, or both. | |
|
13 | # | |
|
12 | 14 | # [paths] |
|
13 | 15 | # virtual/path = /real/path |
|
14 | 16 | # virtual/path = /real/path |
|
17 | # | |
|
18 | # [collections] | |
|
19 | # /prefix/to/strip/off = /root/of/tree/full/of/repos | |
|
20 | # | |
|
21 | # collections example: say directory tree /foo contains repos /foo/bar, | |
|
22 | # /foo/quux/baz. Give this config section: | |
|
23 | # [collections] | |
|
24 | # /foo = /foo | |
|
25 | # Then repos will list as bar and quux/baz. | |
|
15 | 26 | |
|
16 | 27 | # Alternatively you can pass a list of ('virtual/path', '/real/path') tuples |
|
17 | 28 | # or use a dictionary with entries like 'virtual/path': '/real/path' |
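As the comment above notes, the same layout can be given directly in the script instead of a config file. A minimal sketch, assuming the hgwebdir entry point this CGI script constructs further down (not shown in this hunk) and made-up repository paths:

    from mercurial import hgweb

    # virtual path -> real path, equivalent to the [paths]/[collections] example
    h = hgweb.hgwebdir({'bar': '/foo/bar',
                        'quux/baz': '/foo/quux/baz'})
    h.run()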
@@ -17,6 +17,10 b'' | |||
|
17 | 17 | #define inline |
|
18 | 18 | #endif |
|
19 | 19 | |
|
20 | #ifdef __SUNPRO_C | |
|
21 | # define inline | |
|
22 | #endif | |
|
23 | ||
|
20 | 24 | #ifdef _WIN32 |
|
21 | 25 | #ifdef _MSC_VER |
|
22 | 26 | #define inline __inline |
@@ -82,6 +82,21 b' def walkchangerevs(ui, repo, pats, opts)' | |||
|
82 | 82 | "iter", rev, None: in-order traversal of the revs earlier iterated |
|
83 | 83 | over with "add" - use to display data''' |
|
84 | 84 | |
|
85 | def increasing_windows(start, end, windowsize=8, sizelimit=512): | |
|
86 | if start < end: | |
|
87 | while start < end: | |
|
88 | yield start, min(windowsize, end-start) | |
|
89 | start += windowsize | |
|
90 | if windowsize < sizelimit: | |
|
91 | windowsize *= 2 | |
|
92 | else: | |
|
93 | while start > end: | |
|
94 | yield start, min(windowsize, start-end-1) | |
|
95 | start -= windowsize | |
|
96 | if windowsize < sizelimit: | |
|
97 | windowsize *= 2 | |
|
98 | ||
|
99 | ||
|
85 | 100 | files, matchfn, anypats = matchpats(repo, pats, opts) |
|
86 | 101 | |
|
87 | 102 | if repo.changelog.count() == 0: |
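The helper added above replaces the fixed 300-revision window (removed in the next hunk) with windows that start at 8 revisions and double up to sizelimit, so callers such as log can produce their first output quickly while still batching work over long histories. Copying the generator out of the hunk and running it standalone shows the shape of the windows it yields:

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end - start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start - end - 1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

    print(list(increasing_windows(0, 100)))
    # [(0, 8), (8, 16), (24, 32), (56, 44)]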
@@ -90,7 +105,6 b' def walkchangerevs(ui, repo, pats, opts)' | |||
|
90 | 105 | revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0'])) |
|
91 | 106 | wanted = {} |
|
92 | 107 | slowpath = anypats |
|
93 | window = 300 | |
|
94 | 108 | fncache = {} |
|
95 | 109 | |
|
96 | 110 | chcache = {} |
@@ -106,17 +120,17 b' def walkchangerevs(ui, repo, pats, opts)' | |||
|
106 | 120 | if not slowpath: |
|
107 | 121 | # Only files, no patterns. Check the history of each file. |
|
108 | 122 | def filerevgen(filelog): |
|
109 |
for i in |
|
|
123 | for i, window in increasing_windows(filelog.count()-1, -1): | |
|
110 | 124 | revs = [] |
|
111 |
for j in xrange( |
|
|
125 | for j in xrange(i - window, i + 1): | |
|
112 | 126 | revs.append(filelog.linkrev(filelog.node(j))) |
|
113 | 127 | revs.reverse() |
|
114 | 128 | for rev in revs: |
|
115 | 129 | yield rev |
|
116 | 130 | |
|
117 | 131 | minrev, maxrev = min(revs), max(revs) |
|
118 | for file in files: | |
|
119 | filelog = repo.file(file) | |
|
132 | for file_ in files: | |
|
133 | filelog = repo.file(file_) | |
|
120 | 134 | # A zero count may be a directory or deleted file, so |
|
121 | 135 | # try to find matching entries on the slow path. |
|
122 | 136 | if filelog.count() == 0: |
@@ -127,13 +141,13 b' def walkchangerevs(ui, repo, pats, opts)' | |||
|
127 | 141 | if rev < minrev: |
|
128 | 142 | break |
|
129 | 143 | fncache.setdefault(rev, []) |
|
130 | fncache[rev].append(file) | |
|
144 | fncache[rev].append(file_) | |
|
131 | 145 | wanted[rev] = 1 |
|
132 | 146 | if slowpath: |
|
133 | 147 | # The slow path checks files modified in every changeset. |
|
134 | 148 | def changerevgen(): |
|
135 |
for i in |
|
|
136 |
for j in xrange( |
|
|
149 | for i, window in increasing_windows(repo.changelog.count()-1, -1): | |
|
150 | for j in xrange(i - window, i + 1): | |
|
137 | 151 | yield j, getchange(j)[3] |
|
138 | 152 | |
|
139 | 153 | for rev, changefiles in changerevgen(): |
@@ -143,9 +157,9 b' def walkchangerevs(ui, repo, pats, opts)' | |||
|
143 | 157 | wanted[rev] = 1 |
|
144 | 158 | |
|
145 | 159 | def iterate(): |
|
146 |
for i in |
|
|
160 | for i, window in increasing_windows(0, len(revs)): | |
|
147 | 161 | yield 'window', revs[0] < revs[-1], revs[-1] |
|
148 |
nrevs = [rev for rev in revs[i: |
|
|
162 | nrevs = [rev for rev in revs[i:i+window] | |
|
149 | 163 | if rev in wanted] |
|
150 | 164 | srevs = list(nrevs) |
|
151 | 165 | srevs.sort() |
@@ -262,6 +276,14 b' def make_file(repo, r, pat, node=None,' | |||
|
262 | 276 | |
|
263 | 277 | def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always, |
|
264 | 278 | changes=None, text=False, opts={}): |
|
279 | if not node1: | |
|
280 | node1 = repo.dirstate.parents()[0] | |
|
281 | # reading the data for node1 early allows it to play nicely | |
|
282 | # with repo.changes and the revlog cache. | |
|
283 | change = repo.changelog.read(node1) | |
|
284 | mmap = repo.manifest.read(change[0]) | |
|
285 | date1 = util.datestr(change[2]) | |
|
286 | ||
|
265 | 287 | if not changes: |
|
266 | 288 | changes = repo.changes(node1, node2, files, match=match) |
|
267 | 289 | modified, added, removed, deleted, unknown = changes |
@@ -280,8 +302,6 b' def dodiff(fp, ui, repo, node1, node2, f' | |||
|
280 | 302 | return repo.file(f).read(mmap2[f]) |
|
281 | 303 | else: |
|
282 | 304 | date2 = util.datestr() |
|
283 | if not node1: | |
|
284 | node1 = repo.dirstate.parents()[0] | |
|
285 | 305 | def read(f): |
|
286 | 306 | return repo.wread(f) |
|
287 | 307 | |
@@ -291,10 +311,6 b' def dodiff(fp, ui, repo, node1, node2, f' | |||
|
291 | 311 | hexfunc = ui.verbose and hex or short |
|
292 | 312 | r = [hexfunc(node) for node in [node1, node2] if node] |
|
293 | 313 | |
|
294 | change = repo.changelog.read(node1) | |
|
295 | mmap = repo.manifest.read(change[0]) | |
|
296 | date1 = util.datestr(change[2]) | |
|
297 | ||
|
298 | 314 | diffopts = ui.diffopts() |
|
299 | 315 | showfunc = opts.get('show_function') or diffopts['showfunc'] |
|
300 | 316 | ignorews = opts.get('ignore_all_space') or diffopts['ignorews'] |
@@ -447,7 +463,6 b' def help_(ui, cmd=None, with_version=Fal' | |||
|
447 | 463 | f = f.lstrip("^") |
|
448 | 464 | if not ui.debugflag and f.startswith("debug"): |
|
449 | 465 | continue |
|
450 | d = "" | |
|
451 | 466 | doc = e[0].__doc__ |
|
452 | 467 | if not doc: |
|
453 | 468 | doc = _("(No help text available)") |
@@ -681,6 +696,8 b' def clone(ui, source, dest=None, **opts)' | |||
|
681 | 696 | such as AFS, implement hardlinking incorrectly, but do not report |
|
682 | 697 | errors. In these cases, use the --pull option to avoid |
|
683 | 698 | hardlinking. |
|
699 | ||
|
700 | See pull for valid source format details. | |
|
684 | 701 | """ |
|
685 | 702 | if dest is None: |
|
686 | 703 | dest = os.path.basename(os.path.normpath(source)) |
@@ -725,8 +742,8 b' def clone(ui, source, dest=None, **opts)' | |||
|
725 | 742 | # can end up with extra data in the cloned revlogs that's |
|
726 | 743 | # not pointed to by changesets, thus causing verify to |
|
727 | 744 | # fail |
|
728 | l1 = lock.lock(os.path.join(source, ".hg", "lock")) | |
|
729 |            except  |

745 | l1 = other.lock() | |
|
746 | except lock.LockException: | |
|
730 | 747 | copy = False |
|
731 | 748 | |
|
732 | 749 | if copy: |
@@ -808,7 +825,8 b' def commit(ui, repo, *pats, **opts):' | |||
|
808 | 825 | except ValueError, inst: |
|
809 | 826 | raise util.Abort(str(inst)) |
|
810 | 827 | |
|
811 | def docopy(ui, repo, pats, opts): | |
|
828 | def docopy(ui, repo, pats, opts, wlock): | |
|
829 | # called with the repo lock held | |
|
812 | 830 | cwd = repo.getcwd() |
|
813 | 831 | errors = 0 |
|
814 | 832 | copied = [] |
@@ -818,14 +836,19 b' def docopy(ui, repo, pats, opts):' | |||
|
818 | 836 | reasons = {'?': _('is not managed'), |
|
819 | 837 | 'a': _('has been marked for add'), |
|
820 | 838 | 'r': _('has been marked for remove')} |
|
821 |  |

839 | state = repo.dirstate.state(abs) | |
|
840 | reason = reasons.get(state) | |
|
822 | 841 | if reason: |
|
842 | if state == 'a': | |
|
843 | origsrc = repo.dirstate.copied(abs) | |
|
844 | if origsrc is not None: | |
|
845 | return origsrc | |
|
823 | 846 | if exact: |
|
824 | 847 | ui.warn(_('%s: not copying - file %s\n') % (rel, reason)) |
|
825 | 848 | else: |
|
826 |                return  |

827 | ||
|
828 | def copy(abssrc, relsrc, target, exact): | |
|
849 | return abs | |
|
850 | ||
|
851 | def copy(origsrc, abssrc, relsrc, target, exact): | |
|
829 | 852 | abstarget = util.canonpath(repo.root, cwd, target) |
|
830 | 853 | reltarget = util.pathto(cwd, abstarget) |
|
831 | 854 | prevsrc = targets.get(abstarget) |
@@ -849,8 +872,16 b' def docopy(ui, repo, pats, opts):' | |||
|
849 | 872 | if not os.path.isdir(targetdir): |
|
850 | 873 | os.makedirs(targetdir) |
|
851 | 874 | try: |
|
852 | shutil.copyfile(relsrc, reltarget) | |
|
853 | shutil.copymode(relsrc, reltarget) | |
|
875 | restore = repo.dirstate.state(abstarget) == 'r' | |
|
876 | if restore: | |
|
877 | repo.undelete([abstarget], wlock) | |
|
878 | try: | |
|
879 | shutil.copyfile(relsrc, reltarget) | |
|
880 | shutil.copymode(relsrc, reltarget) | |
|
881 | restore = False | |
|
882 | finally: | |
|
883 | if restore: | |
|
884 | repo.remove([abstarget], wlock) | |
|
854 | 885 | except shutil.Error, inst: |
|
855 | 886 | raise util.Abort(str(inst)) |
|
856 | 887 | except IOError, inst: |
@@ -864,7 +895,8 b' def docopy(ui, repo, pats, opts):' | |||
|
864 | 895 | if ui.verbose or not exact: |
|
865 | 896 | ui.status(_('copying %s to %s\n') % (relsrc, reltarget)) |
|
866 | 897 | targets[abstarget] = abssrc |
|
867 | repo.copy(abssrc, abstarget) | |
|
898 | if abstarget != origsrc: | |
|
899 | repo.copy(origsrc, abstarget, wlock) | |
|
868 | 900 | copied.append((abssrc, relsrc, exact)) |
|
869 | 901 | |
|
870 | 902 | def targetpathfn(pat, dest, srcs): |
@@ -938,8 +970,9 b' def docopy(ui, repo, pats, opts):' | |||
|
938 | 970 | for pat in pats: |
|
939 | 971 | srcs = [] |
|
940 | 972 | for tag, abssrc, relsrc, exact in walk(repo, [pat], opts): |
|
941 |  |

942 | srcs.append((abssrc, relsrc, exact)) | |
|
973 | origsrc = okaytocopy(abssrc, relsrc, exact) | |
|
974 | if origsrc: | |
|
975 | srcs.append((origsrc, abssrc, relsrc, exact)) | |
|
943 | 976 | if not srcs: |
|
944 | 977 | continue |
|
945 | 978 | copylist.append((tfn(pat, dest, srcs), srcs)) |
@@ -947,8 +980,8 b' def docopy(ui, repo, pats, opts):' | |||
|
947 | 980 | raise util.Abort(_('no files to copy')) |
|
948 | 981 | |
|
949 | 982 | for targetpath, srcs in copylist: |
|
950 | for abssrc, relsrc, exact in srcs: | |
|
951 | copy(abssrc, relsrc, targetpath(abssrc), exact) | |
|
983 | for origsrc, abssrc, relsrc, exact in srcs: | |
|
984 | copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact) | |
|
952 | 985 | |
|
953 | 986 | if errors: |
|
954 | 987 | ui.warn(_('(consider using --after)\n')) |
@@ -971,15 +1004,32 b' def copy(ui, repo, *pats, **opts):' | |||
|
971 | 1004 | should properly record copied files, this information is not yet |
|
972 | 1005 | fully used by merge, nor fully reported by log. |
|
973 | 1006 | """ |
|
974 | errs, copied = docopy(ui, repo, pats, opts) | |
|
1007 | try: | |
|
1008 | wlock = repo.wlock(0) | |
|
1009 | errs, copied = docopy(ui, repo, pats, opts, wlock) | |
|
1010 | except lock.LockHeld, inst: | |
|
1011 | ui.warn(_("repository lock held by %s\n") % inst.args[0]) | |
|
1012 | errs = 1 | |
|
975 | 1013 | return errs |
|
976 | 1014 | |
|
977 | 1015 | def debugancestor(ui, index, rev1, rev2): |
|
978 | 1016 | """find the ancestor revision of two revisions in a given index""" |
|
979 | r = revlog.revlog(util.opener(os.getcwd()), index, "") | |
|
1017 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "") | |
|
980 | 1018 | a = r.ancestor(r.lookup(rev1), r.lookup(rev2)) |
|
981 | 1019 | ui.write("%d:%s\n" % (r.rev(a), hex(a))) |
|
982 | 1020 | |
|
1021 | def debugrebuildstate(ui, repo, rev=None): | |
|
1022 | """rebuild the dirstate as it would look like for the given revision""" | |
|
1023 | if not rev: | |
|
1024 | rev = repo.changelog.tip() | |
|
1025 | else: | |
|
1026 | rev = repo.lookup(rev) | |
|
1027 | change = repo.changelog.read(rev) | |
|
1028 | n = change[0] | |
|
1029 | files = repo.manifest.readflags(n) | |
|
1030 | wlock = repo.wlock() | |
|
1031 | repo.dirstate.rebuild(rev, files.iteritems()) | |
|
1032 | ||
|
983 | 1033 | def debugcheckstate(ui, repo): |
|
984 | 1034 | """validate the correctness of the current dirstate""" |
|
985 | 1035 | parent1, parent2 = repo.dirstate.parents() |
@@ -1050,7 +1100,8 b' def debugstate(ui, repo):' | |||
|
1050 | 1100 | |
|
1051 | 1101 | def debugdata(ui, file_, rev): |
|
1052 | 1102 | """dump the contents of an data file revision""" |
|
1053 |    r = revlog.revlog(util.opener(os.getcwd()),  |

1103 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), | |
|
1104 | file_[:-2] + ".i", file_) | |
|
1054 | 1105 | try: |
|
1055 | 1106 | ui.write(r.revision(r.lookup(rev))) |
|
1056 | 1107 | except KeyError: |
@@ -1058,7 +1109,7 b' def debugdata(ui, file_, rev):' | |||
|
1058 | 1109 | |
|
1059 | 1110 | def debugindex(ui, file_): |
|
1060 | 1111 | """dump the contents of an index file""" |
|
1061 | r = revlog.revlog(util.opener(os.getcwd()), file_, "") | |
|
1112 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "") | |
|
1062 | 1113 | ui.write(" rev offset length base linkrev" + |
|
1063 | 1114 | " nodeid p1 p2\n") |
|
1064 | 1115 | for i in range(r.count()): |
@@ -1069,7 +1120,7 b' def debugindex(ui, file_):' | |||
|
1069 | 1120 | |
|
1070 | 1121 | def debugindexdot(ui, file_): |
|
1071 | 1122 | """dump an index DAG as a .dot file""" |
|
1072 | r = revlog.revlog(util.opener(os.getcwd()), file_, "") | |
|
1123 | r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "") | |
|
1073 | 1124 | ui.write("digraph G {\n") |
|
1074 | 1125 | for i in range(r.count()): |
|
1075 | 1126 | e = r.index[i] |
@@ -1284,6 +1335,7 b' def grep(ui, repo, pattern, *pats, **opt' | |||
|
1284 | 1335 | s = linestate(line, lnum, cstart, cend) |
|
1285 | 1336 | m[s] = s |
|
1286 | 1337 | |
|
1338 | # FIXME: prev isn't used, why ? | |
|
1287 | 1339 | prev = {} |
|
1288 | 1340 | ucache = {} |
|
1289 | 1341 | def display(fn, rev, states, prevstates): |
@@ -1593,7 +1645,19 b' def log(ui, repo, *pats, **opts):' | |||
|
1593 | 1645 | self.write(*args) |
|
1594 | 1646 | def __getattr__(self, key): |
|
1595 | 1647 | return getattr(self.ui, key) |
|
1648 | ||
|
1596 | 1649 | changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts) |
|
1650 | ||
|
1651 | if opts['limit']: | |
|
1652 | try: | |
|
1653 | limit = int(opts['limit']) | |
|
1654 | except ValueError: | |
|
1655 | raise util.Abort(_('limit must be a positive integer')) | |
|
1656 | if limit <= 0: raise util.Abort(_('limit must be positive')) | |
|
1657 | else: | |
|
1658 | limit = sys.maxint | |
|
1659 | count = 0 | |
|
1660 | ||
|
1597 | 1661 | for st, rev, fns in changeiter: |
|
1598 | 1662 | if st == 'window': |
|
1599 | 1663 | du = dui(ui) |
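
The --limit handling added above treats an empty value as "no limit" and insists on a positive integer otherwise, aborting on anything else. A self-contained sketch of the same validation (raising ValueError where the command raises util.Abort):

    import sys

    def parse_limit(value):
        if not value:
            return sys.maxint          # no limit given
        limit = int(value)             # non-numeric input raises ValueError
        if limit <= 0:
            raise ValueError('limit must be positive')
        return limit

    print parse_limit(''), parse_limit('3')
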
@@ -1607,7 +1671,6 b' def log(ui, repo, *pats, **opts):' | |||
|
1607 | 1671 | if opts['only_merges'] and len(parents) != 2: |
|
1608 | 1672 | continue |
|
1609 | 1673 | |
|
1610 | br = None | |
|
1611 | 1674 | if opts['keyword']: |
|
1612 | 1675 | changes = getchange(rev) |
|
1613 | 1676 | miss = 0 |
@@ -1620,7 +1683,8 b' def log(ui, repo, *pats, **opts):' | |||
|
1620 | 1683 | if miss: |
|
1621 | 1684 | continue |
|
1622 | 1685 | |
|
1623 | if opts['branch']: | |
|
1686 | br = None | |
|
1687 | if opts['branches']: | |
|
1624 | 1688 | br = repo.branchlookup([repo.changelog.node(rev)]) |
|
1625 | 1689 | |
|
1626 | 1690 | show_changeset(du, repo, rev, brinfo=br) |
@@ -1629,8 +1693,11 b' def log(ui, repo, *pats, **opts):' | |||
|
1629 | 1693 | dodiff(du, du, repo, prev, changenode, match=matchfn) |
|
1630 | 1694 | du.write("\n\n") |
|
1631 | 1695 | elif st == 'iter': |
|
1632 | for args in du.hunk[rev]: | |
|
1633 | ui.write(*args) | |
|
1696 | if count == limit: break | |
|
1697 | if du.hunk[rev]: | |
|
1698 | count += 1 | |
|
1699 | for args in du.hunk[rev]: | |
|
1700 | ui.write(*args) | |
|
1634 | 1701 | |
|
1635 | 1702 | def manifest(ui, repo, rev=None): |
|
1636 | 1703 | """output the latest or given revision of the project manifest |
@@ -1664,6 +1731,8 b' def outgoing(ui, repo, dest="default-pus' | |||
|
1664 | 1731 | Show changesets not found in the specified destination repo or the |
|
1665 | 1732 | default push repo. These are the changesets that would be pushed |
|
1666 | 1733 | if a push was requested. |
|
1734 | ||
|
1735 | See pull for valid source format details. | |
|
1667 | 1736 | """ |
|
1668 | 1737 | dest = ui.expandpath(dest, repo.root) |
|
1669 | 1738 | other = hg.repository(ui, dest) |
@@ -1681,7 +1750,7 b' def outgoing(ui, repo, dest="default-pus' | |||
|
1681 | 1750 | dodiff(ui, ui, repo, prev, n) |
|
1682 | 1751 | ui.write("\n") |
|
1683 | 1752 | |
|
1684 | def parents(ui, repo, rev=None, branch=None): | |
|
1753 | def parents(ui, repo, rev=None, branches=None): | |
|
1685 | 1754 | """show the parents of the working dir or revision |
|
1686 | 1755 | |
|
1687 | 1756 | Print the working directory's parent revisions. |
@@ -1692,7 +1761,7 b' def parents(ui, repo, rev=None, branch=N' | |||
|
1692 | 1761 | p = repo.dirstate.parents() |
|
1693 | 1762 | |
|
1694 | 1763 | br = None |
|
1695 | if branch is not None: | |
|
1764 | if branches is not None: | |
|
1696 | 1765 | br = repo.branchlookup(p) |
|
1697 | 1766 | for n in p: |
|
1698 | 1767 | if n != nullid: |
@@ -1767,7 +1836,7 b' def pull(ui, repo, source="default", **o' | |||
|
1767 | 1836 | |
|
1768 | 1837 | return r |
|
1769 | 1838 | |
|
1770 | def push(ui, repo, dest="default-push",  |

1839 | def push(ui, repo, dest="default-push", **opts): | |
|
1771 | 1840 | """push changes to the specified destination |
|
1772 | 1841 | |
|
1773 | 1842 | Push changes from the local repository to the given destination. |
@@ -1792,18 +1861,22 b' def push(ui, repo, dest="default-push", ' | |||
|
1792 | 1861 | dest = ui.expandpath(dest, repo.root) |
|
1793 | 1862 | ui.status('pushing to %s\n' % (dest)) |
|
1794 | 1863 | |
|
1795 | if ssh: | |
|
1796 | ui.setconfig("ui", "ssh", ssh) | |
|
1797 | if remotecmd: | |
|
1798 | ui.setconfig("ui", "remotecmd", remotecmd) | |
|
1864 | if opts['ssh']: | |
|
1865 | ui.setconfig("ui", "ssh", opts['ssh']) | |
|
1866 | if opts['remotecmd']: | |
|
1867 | ui.setconfig("ui", "remotecmd", opts['remotecmd']) | |
|
1799 | 1868 | |
|
1800 | 1869 | other = hg.repository(ui, dest) |
|
1801 | r = repo.push(other, force) | |
|
1870 | revs = None | |
|
1871 | if opts['rev']: | |
|
1872 | revs = [repo.lookup(rev) for rev in opts['rev']] | |
|
1873 | r = repo.push(other, opts['force'], revs=revs) | |
|
1802 | 1874 | return r |
|
1803 | 1875 | |
|
1804 | 1876 | def rawcommit(ui, repo, *flist, **rc): |
|
1805 | 1877 | """raw commit interface (DEPRECATED) |
|
1806 | 1878 | |
|
1879 | (DEPRECATED) | |
|
1807 | 1880 | Lowlevel commit, for use in helper scripts. |
|
1808 | 1881 | |
|
1809 | 1882 | This command is not intended to be used by normal users, as it is |
@@ -1896,21 +1969,33 b' def rename(ui, repo, *pats, **opts):' | |||
|
1896 | 1969 | should properly record rename files, this information is not yet |
|
1897 | 1970 | fully used by merge, nor fully reported by log. |
|
1898 | 1971 | """ |
|
1899 | errs, copied = docopy(ui, repo, pats, opts) | |
|
1900 | names = [] | |
|
1901 | for abs, rel, exact in copied: | |
|
1902 | if ui.verbose or not exact: | |
|
1903 | ui.status(_('removing %s\n') % rel) | |
|
1904 | names.append(abs) | |
|
1905 | repo.remove(names, unlink=True) | |
|
1972 | try: | |
|
1973 | wlock = repo.wlock(0) | |
|
1974 | errs, copied = docopy(ui, repo, pats, opts, wlock) | |
|
1975 | names = [] | |
|
1976 | for abs, rel, exact in copied: | |
|
1977 | if ui.verbose or not exact: | |
|
1978 | ui.status(_('removing %s\n') % rel) | |
|
1979 | names.append(abs) | |
|
1980 | repo.remove(names, True, wlock) | |
|
1981 | except lock.LockHeld, inst: | |
|
1982 | ui.warn(_("repository lock held by %s\n") % inst.args[0]) | |
|
1983 | errs = 1 | |
|
1906 | 1984 | return errs |
|
1907 | 1985 | |
|
1908 | 1986 | def revert(ui, repo, *pats, **opts): |
|
1909 | 1987 | """revert modified files or dirs back to their unmodified states |
|
1910 | 1988 | |
|
1911 |  |

1912 |    directories.  This restores the contents of  |

1913 | an unmodified state. | |
|
1989 | In its default mode, it reverts any uncommitted modifications made | |
|
1990 | to the named files or directories. This restores the contents of | |
|
1991 | the affected files to an unmodified state. | |
|
1992 | ||
|
1993 | Using the -r option, it reverts the given files or directories to | |
|
1994 | their state as of an earlier revision. This can be helpful to "roll | |
|
1995 | back" some or all of a change that should not have been committed. | |
|
1996 | ||
|
1997 | Revert modifies the working directory. It does not commit any | |
|
1998 | changes, or change the parent of the current working directory. | |
|
1914 | 1999 | |
|
1915 | 2000 | If a file has been deleted, it is recreated. If the executable |
|
1916 | 2001 | mode of a file was changed, it is reset. |
@@ -1925,7 +2010,7 b' def revert(ui, repo, *pats, **opts):' | |||
|
1925 | 2010 | files, choose, anypats = matchpats(repo, pats, opts) |
|
1926 | 2011 | modified, added, removed, deleted, unknown = repo.changes(match=choose) |
|
1927 | 2012 | repo.forget(added) |
|
1928 |    repo.undelete(removed |

2013 | repo.undelete(removed) | |
|
1929 | 2014 | |
|
1930 | 2015 | return repo.update(node, False, True, choose, False) |
|
1931 | 2016 | |
@@ -2022,6 +2107,16 b' def serve(ui, repo, **opts):' | |||
|
2022 | 2107 | if opts[o]: |
|
2023 | 2108 | ui.setconfig("web", o, opts[o]) |
|
2024 | 2109 | |
|
2110 | if opts['daemon'] and not opts['daemon_pipefds']: | |
|
2111 | rfd, wfd = os.pipe() | |
|
2112 | args = sys.argv[:] | |
|
2113 | args.append('--daemon-pipefds=%d,%d' % (rfd, wfd)) | |
|
2114 | pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), | |
|
2115 | args[0], args) | |
|
2116 | os.close(wfd) | |
|
2117 | os.read(rfd, 1) | |
|
2118 | os._exit(0) | |
|
2119 | ||
|
2025 | 2120 | try: |
|
2026 | 2121 | httpd = hgweb.create_server(repo) |
|
2027 | 2122 | except socket.error, inst: |
@@ -2040,6 +2135,25 b' def serve(ui, repo, **opts):' | |||
|
2040 | 2135 | ui.status(_('listening at http://%s:%d/\n') % (addr, port)) |
|
2041 | 2136 | else: |
|
2042 | 2137 | ui.status(_('listening at http://%s/\n') % addr) |
|
2138 | ||
|
2139 | if opts['pid_file']: | |
|
2140 | fp = open(opts['pid_file'], 'w') | |
|
2141 | fp.write(str(os.getpid())) | |
|
2142 | fp.close() | |
|
2143 | ||
|
2144 | if opts['daemon_pipefds']: | |
|
2145 | rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')] | |
|
2146 | os.close(rfd) | |
|
2147 | os.write(wfd, 'y') | |
|
2148 | os.close(wfd) | |
|
2149 | sys.stdout.flush() | |
|
2150 | sys.stderr.flush() | |
|
2151 | fd = os.open(util.nulldev, os.O_RDWR) | |
|
2152 | if fd != 0: os.dup2(fd, 0) | |
|
2153 | if fd != 1: os.dup2(fd, 1) | |
|
2154 | if fd != 2: os.dup2(fd, 2) | |
|
2155 | if fd not in (0, 1, 2): os.close(fd) | |
|
2156 | ||
|
2043 | 2157 | httpd.serve_forever() |
|
2044 | 2158 | |
|
2045 | 2159 | def status(ui, repo, *pats, **opts): |
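
The --daemon support above spawns a detached copy of the command and uses a pipe as a readiness signal: the parent blocks on a one-byte read and only exits after the child has bound its listening socket and written 'y'. A simplified, fork-based model of that handshake (sketch only; the real code re-execs hg with --daemon-pipefds via spawnvp):

    import os

    def start_daemon(bind_server):
        rfd, wfd = os.pipe()
        if os.fork():                 # parent: wait for the child's ready byte
            os.close(wfd)
            os.read(rfd, 1)
            os._exit(0)
        os.close(rfd)                 # child: bind the socket, then signal
        httpd = bind_server()
        os.write(wfd, 'y')
        os.close(wfd)
        httpd.serve_forever()
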
@@ -2164,7 +2278,10 b' def tip(ui, repo, **opts):' | |||
|
2164 | 2278 | Show the tip revision. |
|
2165 | 2279 | """ |
|
2166 | 2280 | n = repo.changelog.tip() |
|
2167 | show_changeset(ui, repo, changenode=n) | |
|
2281 | br = None | |
|
2282 | if opts['branches']: | |
|
2283 | br = repo.branchlookup([n]) | |
|
2284 | show_changeset(ui, repo, changenode=n, brinfo=br) | |
|
2168 | 2285 | if opts['patch']: |
|
2169 | 2286 | dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n) |
|
2170 | 2287 | |
@@ -2283,47 +2400,51 b' table = {' | |||
|
2283 | 2400 | ('c', 'changeset', None, _('list the changeset')), |
|
2284 | 2401 | ('I', 'include', [], _('include names matching the given patterns')), |
|
2285 | 2402 | ('X', 'exclude', [], _('exclude names matching the given patterns'))], |
|
2286 |     _('hg annotate [ |

2403 | _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')), | |
|
2287 | 2404 | "bundle": |
|
2288 | 2405 | (bundle, |
|
2289 | 2406 | [], |
|
2290 | 2407 | _('hg bundle FILE DEST')), |
|
2291 | 2408 | "cat": |
|
2292 | 2409 | (cat, |
|
2293 | [('I', 'include', [], _('include names matching the given patterns')), | |
|
2294 |      (' |

2295 | ('o', 'output', '', _('print output to file with formatted name')), | |
|
2296 |      (' |

2410 | [('o', 'output', '', _('print output to file with formatted name')), | |
|
2411 | ('r', 'rev', '', _('print the given revision')), | |
|
2412 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2413 | ('X', 'exclude', [], _('exclude names matching the given patterns'))], | |
|
2297 | 2414 | _('hg cat [OPTION]... FILE...')), |
|
2298 | 2415 | "^clone": |
|
2299 | 2416 | (clone, |
|
2300 | 2417 | [('U', 'noupdate', None, _('do not update the new working directory')), |
|
2301 | ('e', 'ssh', '', _('specify ssh command to use')), | |
|
2302 | ('', 'pull', None, _('use pull protocol to copy metadata')), | |
|
2303 | 2418 | ('r', 'rev', [], |
|
2304 | 2419 | _('a changeset you would like to have after cloning')), |
|
2420 | ('', 'pull', None, _('use pull protocol to copy metadata')), | |
|
2421 | ('e', 'ssh', '', _('specify ssh command to use')), | |
|
2305 | 2422 | ('', 'remotecmd', '', |
|
2306 | 2423 | _('specify hg command to run on the remote side'))], |
|
2307 | 2424 | _('hg clone [OPTION]... SOURCE [DEST]')), |
|
2308 | 2425 | "^commit|ci": |
|
2309 | 2426 | (commit, |
|
2310 | 2427 | [('A', 'addremove', None, _('run addremove during commit')), |
|
2311 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2312 | ('X', 'exclude', [], _('exclude names matching the given patterns')), | |
|
2313 | 2428 | ('m', 'message', '', _('use <text> as commit message')), |
|
2314 | 2429 | ('l', 'logfile', '', _('read the commit message from <file>')), |
|
2315 | 2430 | ('d', 'date', '', _('record datecode as commit date')), |
|
2316 |      ('u', 'user', '', _('record user as commiter')) |

2431 | ('u', 'user', '', _('record user as commiter')), | |
|
2432 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2433 | ('X', 'exclude', [], _('exclude names matching the given patterns'))], | |
|
2317 | 2434 | _('hg commit [OPTION]... [FILE]...')), |
|
2318 | 2435 | "copy|cp": |
|
2319 | 2436 | (copy, |
|
2320 | [('I', 'include', [], _('include names matching the given patterns')), | |
|
2321 | ('X', 'exclude', [], _('exclude names matching the given patterns')), | |
|
2322 | ('A', 'after', None, _('record a copy that has already occurred')), | |
|
2437 | [('A', 'after', None, _('record a copy that has already occurred')), | |
|
2323 | 2438 | ('f', 'force', None, |
|
2324 |       _('forcibly copy over an existing managed file')) |

2439 | _('forcibly copy over an existing managed file')), | |
|
2440 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2441 | ('X', 'exclude', [], _('exclude names matching the given patterns'))], | |
|
2325 | 2442 | _('hg copy [OPTION]... [SOURCE]... DEST')), |
|
2326 | 2443 | "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')), |
|
2444 | "debugrebuildstate": | |
|
2445 | (debugrebuildstate, | |
|
2446 | [('r', 'rev', '', _('revision to rebuild to'))], | |
|
2447 | _('debugrebuildstate [-r REV] [REV]')), | |
|
2327 | 2448 | "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')), |
|
2328 | 2449 | "debugconfig": (debugconfig, [], _('debugconfig')), |
|
2329 | 2450 | "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')), |
@@ -2341,20 +2462,19 b' table = {' | |||
|
2341 | 2462 | (diff, |
|
2342 | 2463 | [('r', 'rev', [], _('revision')), |
|
2343 | 2464 | ('a', 'text', None, _('treat all files as text')), |
|
2344 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2345 | 2465 | ('p', 'show-function', None, |
|
2346 | 2466 | _('show which function each change is in')), |
|
2347 | 2467 | ('w', 'ignore-all-space', None, |
|
2348 | 2468 | _('ignore white space when comparing lines')), |
|
2349 | ('X', 'exclude', [], | |
|
2350 | _('exclude names matching the given patterns'))], | |
|
2469 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2470 | ('X', 'exclude', [], _('exclude names matching the given patterns'))], | |
|
2351 | 2471 | _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')), |
|
2352 | 2472 | "^export": |
|
2353 | 2473 | (export, |
|
2354 | 2474 | [('o', 'output', '', _('print output to file with formatted name')), |
|
2355 | 2475 | ('a', 'text', None, _('treat all files as text')), |
|
2356 | 2476 | ('', 'switch-parent', None, _('diff against the second parent'))], |
|
2357 | _('hg export [-a] [-o OUTFILE] REV...')), | |
|
2477 | _('hg export [-a] [-o OUTFILESPEC] REV...')), | |
|
2358 | 2478 | "forget": |
|
2359 | 2479 | (forget, |
|
2360 | 2480 | [('I', 'include', [], _('include names matching the given patterns')), |
@@ -2363,19 +2483,19 b' table = {' | |||
|
2363 | 2483 | "grep": |
|
2364 | 2484 | (grep, |
|
2365 | 2485 | [('0', 'print0', None, _('end fields with NUL')), |
|
2366 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2367 | ('X', 'exclude', [], _('exclude names matching the given patterns')), | |
|
2368 | 2486 | ('', 'all', None, _('print all revisions that match')), |
|
2369 | 2487 | ('i', 'ignore-case', None, _('ignore case when matching')), |
|
2370 | 2488 | ('l', 'files-with-matches', None, |
|
2371 | 2489 | _('print only filenames and revs that match')), |
|
2372 | 2490 | ('n', 'line-number', None, _('print matching line numbers')), |
|
2373 | 2491 | ('r', 'rev', [], _('search in given revision range')), |
|
2374 |      ('u', 'user', None, _('print user who committed change')) |

2492 | ('u', 'user', None, _('print user who committed change')), | |
|
2493 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2494 | ('X', 'exclude', [], _('exclude names matching the given patterns'))], | |
|
2375 | 2495 | _('hg grep [OPTION]... PATTERN [FILE]...')), |
|
2376 | 2496 | "heads": |
|
2377 | 2497 | (heads, |
|
2378 |     [('b', 'branches', None, _(' |

2498 | [('b', 'branches', None, _('show branches')), | |
|
2379 | 2499 | ('r', 'rev', '', _('show only heads which are descendants of rev'))], |
|
2380 | 2500 | _('hg heads [-b] [-r <rev>]')), |
|
2381 | 2501 | "help": (help_, [], _('hg help [COMMAND]')), |
@@ -2385,10 +2505,10 b' table = {' | |||
|
2385 | 2505 | [('p', 'strip', 1, |
|
2386 | 2506 | _('directory strip option for patch. This has the same\n') + |
|
2387 | 2507 | _('meaning as the corresponding patch option')), |
|
2508 | ('b', 'base', '', _('base path')), | |
|
2388 | 2509 | ('f', 'force', None, |
|
2389 | _('skip check for outstanding uncommitted changes')), | |
|
2390 | ('b', 'base', '', _('base path'))], | |
|
2391 | _('hg import [-f] [-p NUM] [-b BASE] PATCH...')), | |
|
2510 | _('skip check for outstanding uncommitted changes'))], | |
|
2511 | _('hg import [-p NUM] [-b BASE] [-f] PATCH...')), | |
|
2392 | 2512 | "incoming|in": (incoming, |
|
2393 | 2513 | [('M', 'no-merges', None, _('do not show merges')), |
|
2394 | 2514 | ('p', 'patch', None, _('show patch')), |
@@ -2407,24 +2527,25 b' table = {' | |||
|
2407 | 2527 | _('hg locate [OPTION]... [PATTERN]...')), |
|
2408 | 2528 | "^log|history": |
|
2409 | 2529 | (log, |
|
2410 | [('I', 'include', [], _('include names matching the given patterns')), | |
|
2411 | ('X', 'exclude', [], _('exclude names matching the given patterns')), | |
|
2412 | ('b', 'branch', None, _('show branches')), | |
|
2530 | [('b', 'branches', None, _('show branches')), | |
|
2413 | 2531 | ('k', 'keyword', [], _('search for a keyword')), |
|
2532 | ('l', 'limit', '', _('limit number of changes displayed')), | |
|
2414 | 2533 | ('r', 'rev', [], _('show the specified revision or range')), |
|
2415 | 2534 | ('M', 'no-merges', None, _('do not show merges')), |
|
2416 | 2535 | ('m', 'only-merges', None, _('show only merges')), |
|
2417 |      ('p', 'patch', None, _('show patch')) |

2418 | _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')), | |
|
2536 | ('p', 'patch', None, _('show patch')), | |
|
2537 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2538 | ('X', 'exclude', [], _('exclude names matching the given patterns'))], | |
|
2539 | _('hg log [OPTION]... [FILE]')), | |
|
2419 | 2540 | "manifest": (manifest, [], _('hg manifest [REV]')), |
|
2420 | 2541 | "outgoing|out": (outgoing, |
|
2421 | 2542 | [('M', 'no-merges', None, _('do not show merges')), |
|
2422 | 2543 | ('p', 'patch', None, _('show patch')), |
|
2423 | 2544 | ('n', 'newest-first', None, _('show newest record first'))], |
|
2424 |                 _('hg outgoing [-p] [-n |

2545 | _('hg outgoing [-M] [-p] [-n] [DEST]')), | |
|
2425 | 2546 | "^parents": |
|
2426 | 2547 | (parents, |
|
2427 | [('b', 'branch', None, _('show branches'))], | |
|
2548 | [('b', 'branches', None, _('show branches'))], | |
|
2428 | 2549 | _('hg parents [-b] [REV]')), |
|
2429 | 2550 | "paths": (paths, [], _('hg paths [NAME]')), |
|
2430 | 2551 | "^pull": |
@@ -2435,15 +2556,16 b' table = {' | |||
|
2435 | 2556 | ('r', 'rev', [], _('a specific revision you would like to pull')), |
|
2436 | 2557 | ('', 'remotecmd', '', |
|
2437 | 2558 | _('specify hg command to run on the remote side'))], |
|
2438 |     _('hg pull [-u] [-e FILE] [-r |

2559 | _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')), | |
|
2439 | 2560 | "^push": |
|
2440 | 2561 | (push, |
|
2441 | 2562 | [('f', 'force', None, _('force push')), |
|
2442 | 2563 | ('e', 'ssh', '', _('specify ssh command to use')), |
|
2564 | ('r', 'rev', [], _('a specific revision you would like to push')), | |
|
2443 | 2565 | ('', 'remotecmd', '', |
|
2444 | 2566 | _('specify hg command to run on the remote side'))], |
|
2445 | _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')), | |
|
2446 | "rawcommit": | |
|
2567 | _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')), | |
|
2568 | "debugrawcommit|rawcommit": | |
|
2447 | 2569 | (rawcommit, |
|
2448 | 2570 | [('p', 'parent', [], _('parent')), |
|
2449 | 2571 | ('d', 'date', '', _('date code')), |
@@ -2451,7 +2573,7 b' table = {' | |||
|
2451 | 2573 | ('F', 'files', '', _('file list')), |
|
2452 | 2574 | ('m', 'message', '', _('commit message')), |
|
2453 | 2575 | ('l', 'logfile', '', _('commit message file'))], |
|
2454 | _('hg rawcommit [OPTION]... [FILE]...')), | |
|
2576 | _('hg debugrawcommit [OPTION]... [FILE]...')), | |
|
2455 | 2577 | "recover": (recover, [], _('hg recover')), |
|
2456 | 2578 | "^remove|rm": |
|
2457 | 2579 | (remove, |
@@ -2460,27 +2582,30 b' table = {' | |||
|
2460 | 2582 | _('hg remove [OPTION]... FILE...')), |
|
2461 | 2583 | "rename|mv": |
|
2462 | 2584 | (rename, |
|
2463 | [('I', 'include', [], _('include names matching the given patterns')), | |
|
2464 | ('X', 'exclude', [], _('exclude names matching the given patterns')), | |
|
2465 | ('A', 'after', None, _('record a rename that has already occurred')), | |
|
2585 | [('A', 'after', None, _('record a rename that has already occurred')), | |
|
2466 | 2586 | ('f', 'force', None, |
|
2467 |       _('forcibly copy over an existing managed file')) |

2587 | _('forcibly copy over an existing managed file')), | |
|
2588 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2589 | ('X', 'exclude', [], _('exclude names matching the given patterns'))], | |
|
2468 | 2590 | _('hg rename [OPTION]... [SOURCE]... DEST')), |
|
2469 | 2591 | "^revert": |
|
2470 | 2592 | (revert, |
|
2471 | [('I', 'include', [], _('include names matching the given patterns')), | |
|
2472 |      (' |

2473 | ('r', 'rev', '', _('revision to revert to'))], | |
|
2474 |     _('hg revert [- |

2593 | [('r', 'rev', '', _('revision to revert to')), | |
|
2594 | ('I', 'include', [], _('include names matching the given patterns')), | |
|
2595 | ('X', 'exclude', [], _('exclude names matching the given patterns'))], | |
|
2596 | _('hg revert [-r REV] [NAME]...')), | |
|
2475 | 2597 | "root": (root, [], _('hg root')), |
|
2476 | 2598 | "^serve": |
|
2477 | 2599 | (serve, |
|
2478 | 2600 | [('A', 'accesslog', '', _('name of access log file to write to')), |
|
2601 | ('d', 'daemon', None, _('run server in background')), | |
|
2602 | ('', 'daemon-pipefds', '', _('used internally by daemon mode')), | |
|
2479 | 2603 | ('E', 'errorlog', '', _('name of error log file to write to')), |
|
2480 | 2604 | ('p', 'port', 0, _('port to use (default: 8000)')), |
|
2481 | 2605 | ('a', 'address', '', _('address to use')), |
|
2482 | 2606 | ('n', 'name', '', |
|
2483 | 2607 | _('name to show in web pages (default: working dir)')), |
|
2608 | ('', 'pid-file', '', _('name of file to write process ID to')), | |
|
2484 | 2609 | ('', 'stdio', None, _('for remote clients')), |
|
2485 | 2610 | ('t', 'templates', '', _('web templates to use')), |
|
2486 | 2611 | ('', 'style', '', _('template style to use')), |
@@ -2506,9 +2631,13 b' table = {' | |||
|
2506 | 2631 | ('d', 'date', '', _('record datecode as commit date')), |
|
2507 | 2632 | ('u', 'user', '', _('record user as commiter')), |
|
2508 | 2633 | ('r', 'rev', '', _('revision to tag'))], |
|
2509 |     _('hg tag [- |

2634 | _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')), | |
|
2510 | 2635 | "tags": (tags, [], _('hg tags')), |
|
2511 | "tip": (tip, [('p', 'patch', None, _('show patch'))], _('hg tip')), | |
|
2636 | "tip": | |
|
2637 | (tip, | |
|
2638 | [('b', 'branches', None, _('show branches')), | |
|
2639 | ('p', 'patch', None, _('show patch'))], | |
|
2640 | _('hg tip [-b] [-p]')), | |
|
2512 | 2641 | "unbundle": |
|
2513 | 2642 | (unbundle, |
|
2514 | 2643 | [('u', 'update', None, |
@@ -2734,13 +2863,22 b' def dispatch(args):' | |||
|
2734 | 2863 | if options['profile']: |
|
2735 | 2864 | import hotshot, hotshot.stats |
|
2736 | 2865 | prof = hotshot.Profile("hg.prof") |
|
2737 |  |

2738 |  |

2739 | stats = hotshot.stats.load("hg.prof") | |
|
2740 | stats.strip_dirs() | |
|
2741 | stats.sort_stats('time', 'calls') | |
|
2742 | stats.print_stats(40) | |
|
2743 | return r | |
|
2866 | try: | |
|
2867 | try: | |
|
2868 | return prof.runcall(d) | |
|
2869 | except: | |
|
2870 | try: | |
|
2871 | u.warn(_('exception raised - generating profile ' | |
|
2872 | 'anyway\n')) | |
|
2873 | except: | |
|
2874 | pass | |
|
2875 | raise | |
|
2876 | finally: | |
|
2877 | prof.close() | |
|
2878 | stats = hotshot.stats.load("hg.prof") | |
|
2879 | stats.strip_dirs() | |
|
2880 | stats.sort_stats('time', 'calls') | |
|
2881 | stats.print_stats(40) | |
|
2744 | 2882 | else: |
|
2745 | 2883 | return d() |
|
2746 | 2884 | except: |
@@ -1,15 +1,125 b'' | |||
|
1 | def demandload(scope, modules): | |
|
2 | class d: | |
|
3 | def __getattr__(self, name): | |
|
4 | mod = self.__dict__["mod"] | |
|
5 | scope = self.__dict__["scope"] | |
|
6 | scope[mod] = __import__(mod, scope, scope, []) | |
|
7 | return getattr(scope[mod], name) | |
|
1 | '''Demand load modules when used, not when imported.''' | |
|
2 | ||
|
3 | __author__ = '''Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>. | |
|
4 | This software may be used and distributed according to the terms | |
|
5 | of the GNU General Public License, incorporated herein by reference.''' | |
|
6 | ||
|
7 | # this is based on matt's original demandload module. it is a | |
|
8 | # complete rewrite. some time, we may need to support syntax of | |
|
9 | # "import foo as bar". | |
|
10 | ||
|
11 | class _importer(object): | |
|
12 | '''import a module. it is not imported until needed, and is | |
|
13 | imported at most once per scope.''' | |
|
14 | ||
|
15 | def __init__(self, scope, modname, fromlist): | |
|
16 | '''scope is context (globals() or locals()) in which import | |
|
17 | should be made. modname is name of module to import. | |
|
18 | fromlist is list of modules for "from foo import ..." | |
|
19 | emulation.''' | |
|
20 | ||
|
21 | self.scope = scope | |
|
22 | self.modname = modname | |
|
23 | self.fromlist = fromlist | |
|
24 | self.mod = None | |
|
25 | ||
|
26 | def module(self): | |
|
27 | '''import the module if needed, and return.''' | |
|
28 | if self.mod is None: | |
|
29 | self.mod = __import__(self.modname, self.scope, self.scope, | |
|
30 | self.fromlist) | |
|
31 | del self.modname, self.fromlist | |
|
32 | return self.mod | |
|
33 | ||
|
34 | class _replacer(object): | |
|
35 | '''placeholder for a demand loaded module. demandload puts this in | |
|
36 | a target scope. when an attribute of this object is looked up, | |
|
37 | this object is replaced in the target scope with the actual | |
|
38 | module. | |
|
39 | ||
|
40 | we use __getattribute__ to avoid namespace clashes between | |
|
41 | placeholder object and real module.''' | |
|
42 | ||
|
43 | def __init__(self, importer, target): | |
|
44 | self.importer = importer | |
|
45 | self.target = target | |
|
46 | # consider case where we do this: | |
|
47 | # demandload(globals(), 'foo.bar foo.quux') | |
|
48 | # foo will already exist in target scope when we get to | |
|
49 | # foo.quux. so we remember that we will need to demandload | |
|
50 | # quux into foo's scope when we really load it. | |
|
51 | self.later = [] | |
|
52 | ||
|
53 | def module(self): | |
|
54 | return object.__getattribute__(self, 'importer').module() | |
|
55 | ||
|
56 | def __getattribute__(self, key): | |
|
57 | '''look up an attribute in a module and return it. replace the | |
|
58 | name of the module in the caller\'s dict with the actual | |
|
59 | module.''' | |
|
8 | 60 | |
|
9 | for m in modules.split(): | |
|
10 | dl = d() | |
|
11 | dl.mod = m | |
|
12 | dl.scope = scope | |
|
13 | scope[m] = dl | |
|
61 | module = object.__getattribute__(self, 'module')() | |
|
62 | target = object.__getattribute__(self, 'target') | |
|
63 | importer = object.__getattribute__(self, 'importer') | |
|
64 | later = object.__getattribute__(self, 'later') | |
|
65 | ||
|
66 | if later: | |
|
67 | demandload(module.__dict__, ' '.join(later)) | |
|
68 | ||
|
69 | importer.scope[target] = module | |
|
70 | ||
|
71 | return getattr(module, key) | |
|
72 | ||
|
73 | class _replacer_from(_replacer): | |
|
74 | '''placeholder for a demand loaded module. used for "from foo | |
|
75 | import ..." emulation. semantics of this are different than | |
|
76 | regular import, so different implementation needed.''' | |
|
77 | ||
|
78 | def module(self): | |
|
79 | importer = object.__getattribute__(self, 'importer') | |
|
80 | target = object.__getattribute__(self, 'target') | |
|
81 | ||
|
82 | return getattr(importer.module(), target) | |
|
83 | ||
|
84 | def demandload(scope, modules): | |
|
85 | '''import modules into scope when each is first used. | |
|
86 | ||
|
87 | scope should be the value of globals() in the module calling this | |
|
88 | function, or locals() in the calling function. | |
|
89 | ||
|
90 | modules is a string listing module names, separated by white | |
|
91 | space. names are handled like this: | |
|
14 | 92 |
|
|
93 | foo import foo | |
|
94 | foo bar import foo, bar | |
|
95 | foo.bar import foo.bar | |
|
96 | foo:bar from foo import bar | |
|
97 | foo:bar,quux from foo import bar, quux | |
|
98 | foo.bar:quux from foo.bar import quux''' | |
|
15 | 99 | |
|
100 | for mod in modules.split(): | |
|
101 | col = mod.find(':') | |
|
102 | if col >= 0: | |
|
103 | fromlist = mod[col+1:].split(',') | |
|
104 | mod = mod[:col] | |
|
105 | else: | |
|
106 | fromlist = [] | |
|
107 | importer = _importer(scope, mod, fromlist) | |
|
108 | if fromlist: | |
|
109 | for name in fromlist: | |
|
110 | scope[name] = _replacer_from(importer, name) | |
|
111 | else: | |
|
112 | dot = mod.find('.') | |
|
113 | if dot >= 0: | |
|
114 | basemod = mod[:dot] | |
|
115 | val = scope.get(basemod) | |
|
116 | # if base module has already been demandload()ed, | |
|
117 | # remember to load this submodule into its namespace | |
|
118 | # when needed. | |
|
119 | if isinstance(val, _replacer): | |
|
120 | later = object.__getattribute__(val, 'later') | |
|
121 | later.append(mod[dot+1:]) | |
|
122 | continue | |
|
123 | else: | |
|
124 | basemod = mod | |
|
125 | scope[basemod] = _replacer(importer, basemod) |
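
The rewritten demandload keeps the old calling convention, so existing callers only gain the new "from foo import bar" forms documented in its docstring. A hypothetical usage sketch, assuming the mercurial package is importable (the real import only happens on first attribute access):

    from mercurial.demandload import demandload

    demandload(globals(), 'os re')   # binds lazy placeholders; nothing is imported yet
    print re.escape('a+b')           # first attribute access triggers the real import of re
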
@@ -197,9 +197,24 b' class dirstate(object):' | |||
|
197 | 197 | |
|
198 | 198 | def clear(self): |
|
199 | 199 | self.map = {} |
|
200 | self.copies = {} | |
|
201 | self.markdirty() | |
|
202 | ||
|
203 | def rebuild(self, parent, files): | |
|
204 | self.clear() | |
|
205 | umask = os.umask(0) | |
|
206 | os.umask(umask) | |
|
207 | for f, mode in files: | |
|
208 | if mode: | |
|
209 | self.map[f] = ('n', ~umask, -1, 0) | |
|
210 | else: | |
|
211 | self.map[f] = ('n', ~umask & 0666, -1, 0) | |
|
212 | self.pl = (parent, nullid) | |
|
200 | 213 | self.markdirty() |
|
201 | 214 | |
|
202 | 215 | def write(self): |
|
216 | if not self.dirty: | |
|
217 | return | |
|
203 | 218 | st = self.opener("dirstate", "w", atomic=True) |
|
204 | 219 | st.write("".join(self.pl)) |
|
205 | 220 | for f, e in self.map.items(): |
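
dirstate.rebuild() above synthesizes plausible file modes for every tracked file without touching the working directory, which is why it briefly sets and restores the process umask: that is the only portable way to read it. The same trick in isolation:

    import os

    umask = os.umask(0)       # set to 0 and capture the previous value...
    os.umask(umask)           # ...then immediately restore it
    print 'umask %03o -> default file mode %03o' % (umask, 0666 & ~umask)
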
@@ -270,11 +285,11 b' class dirstate(object):' | |||
|
270 | 285 | elif not dc: |
|
271 | 286 | dc = self.filterfiles(files) |
|
272 | 287 | |
|
273 | def statmatch(file, stat): | |
|
274 | file = util.pconvert(file) | |
|
275 | if file not in dc and self.ignore(file): | |
|
288 | def statmatch(file_, stat): | |
|
289 | file_ = util.pconvert(file_) | |
|
290 | if file_ not in dc and self.ignore(file_): | |
|
276 | 291 | return False |
|
277 | return match(file) | |
|
292 | return match(file_) | |
|
278 | 293 | |
|
279 | 294 | return self.walkhelper(files=files, statmatch=statmatch, dc=dc) |
|
280 | 295 | |
@@ -350,9 +365,9 b' class dirstate(object):' | |||
|
350 | 365 | continue |
|
351 | 366 | if stat.S_ISDIR(st.st_mode): |
|
352 | 367 | cmp1 = (lambda x, y: cmp(x[1], y[1])) |
|
353 | sorted = [ x for x in findfiles(f) ] | |
|
354 | sorted.sort(cmp1) | |
|
355 | for e in sorted: | |
|
368 | sorted_ = [ x for x in findfiles(f) ] | |
|
369 | sorted_.sort(cmp1) | |
|
370 | for e in sorted_: | |
|
356 | 371 | yield e |
|
357 | 372 | else: |
|
358 | 373 | ff = util.normpath(ff) |
@@ -380,7 +395,7 b' class dirstate(object):' | |||
|
380 | 395 | |
|
381 | 396 | for src, fn, st in self.statwalk(files, match): |
|
382 | 397 | try: |
|
383 | type, mode, size, time = self[fn] | |
|
398 | type_, mode, size, time = self[fn] | |
|
384 | 399 | except KeyError: |
|
385 | 400 | unknown.append(fn) |
|
386 | 401 | continue |
@@ -399,22 +414,23 b' class dirstate(object):' | |||
|
399 | 414 | nonexistent = False |
|
400 | 415 | # XXX: what to do with file no longer present in the fs |
|
401 | 416 | # who are not removed in the dirstate ? |
|
402 | if nonexistent and type in "nm": | |
|
417 | if nonexistent and type_ in "nm": | |
|
403 | 418 | deleted.append(fn) |
|
404 | 419 | continue |
|
405 | 420 | # check the common case first |
|
406 | if type == 'n': | |
|
421 | if type_ == 'n': | |
|
407 | 422 | if not st: |
|
408 | 423 | st = os.stat(fn) |
|
409 |                if size != st.st_size |

424 | if size >= 0 and (size != st.st_size | |
|
425 | or (mode ^ st.st_mode) & 0100): | |
|
410 | 426 | modified.append(fn) |
|
411 | 427 | elif time != st.st_mtime: |
|
412 | 428 | lookup.append(fn) |
|
413 | elif type == 'm': | |
|
429 | elif type_ == 'm': | |
|
414 | 430 | modified.append(fn) |
|
415 | elif type == 'a': | |
|
431 | elif type_ == 'a': | |
|
416 | 432 | added.append(fn) |
|
417 | elif type == 'r': | |
|
433 | elif type_ == 'r': | |
|
418 | 434 | removed.append(fn) |
|
419 | 435 | |
|
420 | 436 | return (lookup, modified, added, removed, deleted, unknown) |
@@ -7,6 +7,7 b'' | |||
|
7 | 7 | # of the GNU General Public License, incorporated herein by reference. |
|
8 | 8 | |
|
9 | 9 | import os, cgi, sys, urllib |
|
10 | import mimetypes | |
|
10 | 11 | from demandload import demandload |
|
11 | 12 | demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser") |
|
12 | 13 | demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer util") |
@@ -18,7 +19,11 b' def templatepath():' | |||
|
18 | 19 | for f in "templates", "../templates": |
|
19 | 20 | p = os.path.join(os.path.dirname(__file__), f) |
|
20 | 21 | if os.path.isdir(p): |
|
21 | return p | |
|
22 | return os.path.normpath(p) | |
|
23 | else: | |
|
24 | # executable version (py2exe) doesn't support __file__ | |
|
25 | if hasattr(sys, 'frozen'): | |
|
26 | return os.path.join(sys.prefix, "templates") | |
|
22 | 27 | |
|
23 | 28 | def age(x): |
|
24 | 29 | def plural(t, c): |
@@ -71,6 +76,30 b' def get_mtime(repo_path):' | |||
|
71 | 76 | else: |
|
72 | 77 | return os.stat(hg_path).st_mtime |
|
73 | 78 | |
|
79 | def staticfile(directory, fname): | |
|
80 | """return a file inside directory with guessed content-type header | |
|
81 | ||
|
82 | fname always uses '/' as directory separator and isn't allowed to | |
|
83 | contain unusual path components. | |
|
84 | Content-type is guessed using the mimetypes module. | |
|
85 | Return an empty string if fname is illegal or file not found. | |
|
86 | ||
|
87 | """ | |
|
88 | parts = fname.split('/') | |
|
89 | path = directory | |
|
90 | for part in parts: | |
|
91 | if (part in ('', os.curdir, os.pardir) or | |
|
92 | os.sep in part or os.altsep is not None and os.altsep in part): | |
|
93 | return "" | |
|
94 | path = os.path.join(path, part) | |
|
95 | try: | |
|
96 | os.stat(path) | |
|
97 | ct = mimetypes.guess_type(path)[0] or "text/plain" | |
|
98 | return "Content-type: %s\n\n%s" % (ct, file(path).read()) | |
|
99 | except (TypeError, OSError): | |
|
100 | # illegal fname or unreadable file | |
|
101 | return "" | |
|
102 | ||
|
74 | 103 | class hgrequest(object): |
|
75 | 104 | def __init__(self, inp=None, out=None, env=None): |
|
76 | 105 | self.inp = inp or sys.stdin |
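
staticfile() only serves paths whose components are all plain names, which keeps "../" style requests from escaping the template directory. A standalone sketch of just that check (the helper name here is invented for the example):

    import os

    def is_safe_component_path(fname):
        for part in fname.split('/'):
            if (part in ('', os.curdir, os.pardir) or
                os.sep in part or (os.altsep is not None and os.altsep in part)):
                return False
        return True

    print is_safe_component_path('style.css'), is_safe_component_path('../hgrc')
    # prints: True False
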
@@ -660,9 +689,10 b' class hgweb(object):' | |||
|
660 | 689 | i = self.repo.tagslist() |
|
661 | 690 | i.reverse() |
|
662 | 691 | |
|
663 | def entries(**map): | |
|
692 | def entries(notip=False, **map): | |
|
664 | 693 | parity = 0 |
|
665 | 694 | for k,n in i: |
|
695 | if notip and k == "tip": continue | |
|
666 | 696 | yield {"parity": parity, |
|
667 | 697 | "tag": k, |
|
668 | 698 | "tagmanifest": hex(cl.read(n)[0]), |
@@ -672,7 +702,8 b' class hgweb(object):' | |||
|
672 | 702 | |
|
673 | 703 | yield self.t("tags", |
|
674 | 704 | manifest=hex(mf), |
|
675 | entries=entries) | |
|
705 | entries=lambda **x: entries(False, **x), | |
|
706 | entriesnotip=lambda **x: entries(True, **x)) | |
|
676 | 707 | |
|
677 | 708 | def summary(self): |
|
678 | 709 | cl = self.repo.changelog |
@@ -843,6 +874,7 b' class hgweb(object):' | |||
|
843 | 874 | 'ca': [('cmd', ['archive']), ('node', None)], |
|
844 | 875 | 'tags': [('cmd', ['tags'])], |
|
845 | 876 | 'tip': [('cmd', ['changeset']), ('node', ['tip'])], |
|
877 | 'static': [('cmd', ['static']), ('file', None)] | |
|
846 | 878 | } |
|
847 | 879 | |
|
848 | 880 | for k in shortcuts.iterkeys(): |
@@ -858,6 +890,7 b' class hgweb(object):' | |||
|
858 | 890 | expand_form(req.form) |
|
859 | 891 | |
|
860 | 892 | t = self.repo.ui.config("web", "templates", templatepath()) |
|
893 | static = self.repo.ui.config("web", "static", os.path.join(t,"static")) | |
|
861 | 894 | m = os.path.join(t, "map") |
|
862 | 895 | style = self.repo.ui.config("web", "style", "") |
|
863 | 896 | if req.form.has_key('style'): |
@@ -981,6 +1014,11 b' class hgweb(object):' | |||
|
981 | 1014 | |
|
982 | 1015 | req.write(self.t("error")) |
|
983 | 1016 | |
|
1017 | elif req.form['cmd'][0] == 'static': | |
|
1018 | fname = req.form['file'][0] | |
|
1019 | req.write(staticfile(static, fname) | |
|
1020 | or self.t("error", error="%r not found" % fname)) | |
|
1021 | ||
|
984 | 1022 | else: |
|
985 | 1023 | req.write(self.t("error")) |
|
986 | 1024 | |
@@ -1075,17 +1113,27 b' def create_server(repo):' | |||
|
1075 | 1113 | class hgwebdir(object): |
|
1076 | 1114 | def __init__(self, config): |
|
1077 | 1115 | def cleannames(items): |
|
1078 |            return [(name.strip( |

1116 | return [(name.strip(os.sep), path) for name, path in items] | |
|
1079 | 1117 | |
|
1080 |        if  |

1118 | if isinstance(config, (list, tuple)): | |
|
1081 | 1119 | self.repos = cleannames(config) |
|
1082 |        elif  |

1120 | elif isinstance(config, dict): | |
|
1083 | 1121 | self.repos = cleannames(config.items()) |
|
1084 | 1122 | self.repos.sort() |
|
1085 | 1123 | else: |
|
1086 | 1124 | cp = ConfigParser.SafeConfigParser() |
|
1087 | 1125 | cp.read(config) |
|
1088 |            self.repos =  |

1126 | self.repos = [] | |
|
1127 | if cp.has_section('paths'): | |
|
1128 | self.repos.extend(cleannames(cp.items('paths'))) | |
|
1129 | if cp.has_section('collections'): | |
|
1130 | for prefix, root in cp.items('collections'): | |
|
1131 | for path in util.walkrepos(root): | |
|
1132 | repo = os.path.normpath(path) | |
|
1133 | name = repo | |
|
1134 | if name.startswith(prefix): | |
|
1135 | name = name[len(prefix):] | |
|
1136 | self.repos.append((name.lstrip(os.sep), repo)) | |
|
1089 | 1137 | self.repos.sort() |
|
1090 | 1138 | |
|
1091 | 1139 | def run(self, req=hgrequest()): |
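
With a [collections] section, hgwebdir now walks each configured root for repositories and derives the published name by stripping the configured prefix from the repository path. A simplified model of that name derivation (the paths are invented for the example):

    import os

    def collection_names(prefix, repo_paths):
        names = []
        for path in repo_paths:
            name = os.path.normpath(path)
            if name.startswith(prefix):
                name = name[len(prefix):]
            names.append(name.lstrip(os.sep))
        return names

    print collection_names('/srv/repos', ['/srv/repos/hg', '/srv/repos/team/proj'])
    # prints: ['hg', 'team/proj']
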
@@ -1142,4 +1190,10 b' class hgwebdir(object):' | |||
|
1142 | 1190 | else: |
|
1143 | 1191 | req.write(tmpl("notfound", repo=virtual)) |
|
1144 | 1192 | else: |
|
1145 | req.write(tmpl("index", entries=entries)) | |
|
1193 | if req.form.has_key('static'): | |
|
1194 | static = os.path.join(templatepath(), "static") | |
|
1195 | fname = req.form['static'][0] | |
|
1196 | req.write(staticfile(static, fname) | |
|
1197 | or tmpl("error", error="%r not found" % fname)) | |
|
1198 | else: | |
|
1199 | req.write(tmpl("index", entries=entries)) |
@@ -13,6 +13,8 b' from demandload import *' | |||
|
13 | 13 | demandload(globals(), "re lock transaction tempfile stat mdiff errno") |
|
14 | 14 | |
|
15 | 15 | class localrepository(object): |
|
16 | def __del__(self): | |
|
17 | self.transhandle = None | |
|
16 | 18 | def __init__(self, ui, path=None, create=0): |
|
17 | 19 | if not path: |
|
18 | 20 | p = os.getcwd() |
@@ -37,6 +39,7 b' class localrepository(object):' | |||
|
37 | 39 | self.nodetagscache = None |
|
38 | 40 | self.encodepats = None |
|
39 | 41 | self.decodepats = None |
|
42 | self.transhandle = None | |
|
40 | 43 | |
|
41 | 44 | if create: |
|
42 | 45 | os.mkdir(self.path) |
@@ -215,6 +218,10 b' class localrepository(object):' | |||
|
215 | 218 | return self.wopener(filename, 'w').write(data) |
|
216 | 219 | |
|
217 | 220 | def transaction(self): |
|
221 | tr = self.transhandle | |
|
222 | if tr != None and tr.running(): | |
|
223 | return tr.nest() | |
|
224 | ||
|
218 | 225 | # save dirstate for undo |
|
219 | 226 | try: |
|
220 | 227 | ds = self.opener("dirstate").read() |
@@ -222,21 +229,18 b' class localrepository(object):' | |||
|
222 | 229 | ds = "" |
|
223 | 230 | self.opener("journal.dirstate", "w").write(ds) |
|
224 | 231 | |
|
225 | def after(): | |
|
226 | util.rename(self.join("journal"), self.join("undo")) | |
|
227 | util.rename(self.join("journal.dirstate"), | |
|
228 | self.join("undo.dirstate")) | |
|
229 | ||
|
230 | return transaction.transaction(self.ui.warn, self.opener, | |
|
231 | self.join("journal"), after) | |
|
232 | tr = transaction.transaction(self.ui.warn, self.opener, | |
|
233 | self.join("journal"), | |
|
234 | aftertrans(self.path)) | |
|
235 | self.transhandle = tr | |
|
236 | return tr | |
|
232 | 237 | |
|
233 | 238 | def recover(self): |
|
234 |        l |

239 | l = self.lock() | |
|
235 | 240 | if os.path.exists(self.join("journal")): |
|
236 | 241 | self.ui.status(_("rolling back interrupted transaction\n")) |
|
237 | 242 | transaction.rollback(self.opener, self.join("journal")) |
|
238 | self.manifest = manifest.manifest(self.opener) | |
|
239 | self.changelog = changelog.changelog(self.opener) | |
|
243 | self.reload() | |
|
240 | 244 | return True |
|
241 | 245 | else: |
|
242 | 246 | self.ui.warn(_("no interrupted transaction available\n")) |
@@ -245,34 +249,51 b' class localrepository(object):' | |||
|
245 | 249 | def undo(self, wlock=None): |
|
246 | 250 | if not wlock: |
|
247 | 251 | wlock = self.wlock() |
|
248 |        l |

252 | l = self.lock() | |
|
249 | 253 | if os.path.exists(self.join("undo")): |
|
250 | 254 | self.ui.status(_("rolling back last transaction\n")) |
|
251 | 255 | transaction.rollback(self.opener, self.join("undo")) |
|
252 | 256 | util.rename(self.join("undo.dirstate"), self.join("dirstate")) |
|
253 |        self. |

257 | self.reload() | |
|
258 | self.wreload() | |
|
254 | 259 | else: |
|
255 | 260 | self.ui.warn(_("no undo information available\n")) |
|
256 | 261 | |
|
257 |    def  |

262 | def wreload(self): | |
|
263 | self.dirstate.read() | |
|
264 | ||
|
265 | def reload(self): | |
|
266 | self.changelog.load() | |
|
267 | self.manifest.load() | |
|
268 | self.tagscache = None | |
|
269 | self.nodetagscache = None | |
|
270 | ||
|
271 | def do_lock(self, lockname, wait, releasefn=None, acquirefn=None): | |
|
258 | 272 | try: |
|
259 |  |

260 | except lock.LockHeld, inst: | |
|
261 | if wait: | |
|
262 | self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0]) | |
|
263 | return lock.lock(self.join("lock"), wait) | |
|
264 | raise inst | |
|
265 | ||
|
266 | def wlock(self, wait=1): | |
|
267 | try: | |
|
268 | wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write) | |
|
273 | l = lock.lock(self.join(lockname), 0, releasefn) | |
|
269 | 274 | except lock.LockHeld, inst: |
|
270 | 275 | if not wait: |
|
271 | 276 | raise inst |
|
272 | 277 | self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0]) |
|
273 | wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write) | |
|
274 | self.dirstate.read() | |
|
275 | return wlock | |
|
278 | try: | |
|
279 | # default to 600 seconds timeout | |
|
280 | l = lock.lock(self.join(lockname), | |
|
281 | int(self.ui.config("ui", "timeout") or 600), | |
|
282 | releasefn) | |
|
283 | except lock.LockHeld, inst: | |
|
284 | raise util.Abort(_("timeout while waiting for " | |
|
285 | "lock held by %s") % inst.args[0]) | |
|
286 | if acquirefn: | |
|
287 | acquirefn() | |
|
288 | return l | |
|
289 | ||
|
290 | def lock(self, wait=1): | |
|
291 | return self.do_lock("lock", wait, acquirefn=self.reload) | |
|
292 | ||
|
293 | def wlock(self, wait=1): | |
|
294 | return self.do_lock("wlock", wait, | |
|
295 | self.dirstate.write, | |
|
296 | self.wreload) | |
|
276 | 297 | |
|
277 | 298 | def checkfilemerge(self, filename, text, filelog, manifest1, manifest2): |
|
278 | 299 | "determine whether a new filenode is needed" |
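
The locking rework funnels both lock() and wlock() through do_lock(): try a non-blocking acquire, and if the lock is held, retry with a bounded wait (600 seconds by default, from ui.timeout) instead of blocking forever, then run an acquirefn such as reload(). A generic, self-contained sketch of that strategy, not the localrepository code itself (the real lock.lock accepts the timeout directly):

    import time

    class LockHeld(Exception):
        pass

    def do_lock(acquire, wait, timeout=600, acquirefn=None):
        try:
            l = acquire()                     # non-blocking first attempt
        except LockHeld:
            if not wait:
                raise
            deadline = time.time() + timeout  # bounded wait instead of forever
            while True:
                try:
                    l = acquire()
                    break
                except LockHeld:
                    if time.time() >= deadline:
                        raise
                    time.sleep(1)
        if acquirefn:
            acquirefn()                       # e.g. reload changelog/manifest
        return l
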
@@ -311,7 +332,7 b' class localrepository(object):' | |||
|
311 | 332 | |
|
312 | 333 | if not wlock: |
|
313 | 334 | wlock = self.wlock() |
|
314 |        l |

335 | l = self.lock() | |
|
315 | 336 | tr = self.transaction() |
|
316 | 337 | mm = m1.copy() |
|
317 | 338 | mfm = mf1.copy() |
@@ -350,7 +371,7 b' class localrepository(object):' | |||
|
350 | 371 | self.dirstate.setparents(n, nullid) |
|
351 | 372 | |
|
352 | 373 | def commit(self, files=None, text="", user=None, date=None, |
|
353 | match=util.always, force=False, wlock=None): | |
|
374 | match=util.always, force=False, lock=None, wlock=None): | |
|
354 | 375 | commit = [] |
|
355 | 376 | remove = [] |
|
356 | 377 | changed = [] |
@@ -388,7 +409,8 b' class localrepository(object):' | |||
|
388 | 409 | |
|
389 | 410 | if not wlock: |
|
390 | 411 | wlock = self.wlock() |
|
391 | lock = self.lock() | |
|
412 | if not lock: | |
|
413 | lock = self.lock() | |
|
392 | 414 | tr = self.transaction() |
|
393 | 415 | |
|
394 | 416 | # check in files |
@@ -503,12 +525,18 b' class localrepository(object):' | |||
|
503 | 525 | del mf[fn] |
|
504 | 526 | return mf |
|
505 | 527 | |
|
528 | if node1: | |
|
529 | # read the manifest from node1 before the manifest from node2, | |
|
530 | # so that we'll hit the manifest cache if we're going through | |
|
531 | # all the revisions in parent->child order. | |
|
532 | mf1 = mfmatches(node1) | |
|
533 | ||
|
506 | 534 | # are we comparing the working directory? |
|
507 | 535 | if not node2: |
|
508 | 536 | if not wlock: |
|
509 | 537 | try: |
|
510 | 538 | wlock = self.wlock(wait=0) |
|
511 |                except lock.Lock |

539 | except lock.LockException: | |
|
512 | 540 | wlock = None |
|
513 | 541 | lookup, modified, added, removed, deleted, unknown = ( |
|
514 | 542 | self.dirstate.changes(files, match)) |
@@ -541,8 +569,6 b' class localrepository(object):' | |||
|
541 | 569 | # flush lists from dirstate before comparing manifests |
|
542 | 570 | modified, added = [], [] |
|
543 | 571 | |
|
544 | mf1 = mfmatches(node1) | |
|
545 | ||
|
546 | 572 | for fn in mf2: |
|
547 | 573 | if mf1.has_key(fn): |
|
548 | 574 | if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)): |
@@ -597,7 +623,6 b' class localrepository(object):' | |||
|
597 | 623 | if os.path.exists(p): |
|
598 | 624 | self.ui.warn(_("%s still exists!\n") % f) |
|
599 | 625 | elif self.dirstate.state(f) == 'a': |
|
600 | self.ui.warn(_("%s never committed!\n") % f) | |
|
601 | 626 | self.dirstate.forget([f]) |
|
602 | 627 | elif f not in self.dirstate: |
|
603 | 628 | self.ui.warn(_("%s not tracked!\n") % f) |
@@ -932,7 +957,7 b' class localrepository(object):' | |||
|
932 | 957 | return subset |
|
933 | 958 | |
|
934 | 959 | def pull(self, remote, heads=None): |
|
935 | lock = self.lock() | |
|
960 | l = self.lock() | |
|
936 | 961 | |
|
937 | 962 | # if we have an empty repo, fetch everything |
|
938 | 963 | if self.changelog.tip() == nullid: |
@@ -951,7 +976,7 b' class localrepository(object):' | |||
|
951 | 976 | cg = remote.changegroupsubset(fetch, heads, 'pull') |
|
952 | 977 | return self.addchangegroup(cg) |
|
953 | 978 | |
|
954 | def push(self, remote, force=False): | |
|
979 | def push(self, remote, force=False, revs=None): | |
|
955 | 980 | lock = remote.lock() |
|
956 | 981 | |
|
957 | 982 | base = {} |
@@ -963,17 +988,25 b' class localrepository(object):' | |||
|
963 | 988 | return 1 |
|
964 | 989 | |
|
965 | 990 | update = self.findoutgoing(remote, base) |
|
966 | if not update: | |
|
991 | if revs is not None: | |
|
992 | msng_cl, bases, heads = self.changelog.nodesbetween(update, revs) | |
|
993 | else: | |
|
994 | bases, heads = update, self.changelog.heads() | |
|
995 | ||
|
996 | if not bases: | |
|
967 | 997 | self.ui.status(_("no changes found\n")) |
|
968 | 998 | return 1 |
|
969 | 999 | elif not force: |
|
970 | if len(heads) < len(self.changelog.heads()): | |
|
1000 | if len(bases) < len(heads): | |
|
971 | 1001 | self.ui.warn(_("abort: push creates new remote branches!\n")) |
|
972 | 1002 | self.ui.status(_("(did you forget to merge?" |
|
973 | 1003 | " use push -f to force)\n")) |
|
974 | 1004 | return 1 |
|
975 | 1005 | |
|
976 | cg = self.changegroup(update, 'push') | |
|
1006 | if revs is None: | |
|
1007 | cg = self.changegroup(update, 'push') | |
|
1008 | else: | |
|
1009 | cg = self.changegroupsubset(update, revs, 'push') | |
|
977 | 1010 | return remote.addchangegroup(cg) |
|
978 | 1011 | |
|
979 | 1012 | def changegroupsubset(self, bases, heads, source): |
@@ -1646,6 +1679,7 b' class localrepository(object):' | |||
|
1646 | 1679 | remove.sort() |
|
1647 | 1680 | for f in remove: |
|
1648 | 1681 | self.ui.note(_("removing %s\n") % f) |
|
1682 | util.audit_path(f) | |
|
1649 | 1683 | try: |
|
1650 | 1684 | util.unlink(self.wjoin(f)) |
|
1651 | 1685 | except OSError, inst: |
@@ -1852,3 +1886,13 b' class localrepository(object):' | |||
|
1852 | 1886 | if errors[0]: |
|
1853 | 1887 | self.ui.warn(_("%d integrity errors encountered!\n") % errors[0]) |
|
1854 | 1888 | return 1 |
|
1889 | ||
|
1890 | # used to avoid circular references so destructors work | |
|
1891 | def aftertrans(base): | |
|
1892 | p = base | |
|
1893 | def a(): | |
|
1894 | util.rename(os.path.join(p, "journal"), os.path.join(p, "undo")) | |
|
1895 | util.rename(os.path.join(p, "journal.dirstate"), | |
|
1896 | os.path.join(p, "undo.dirstate")) | |
|
1897 | return a | |
|
1898 |
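
The new aftertrans helper above returns a closure that captures only the repository path, so the transaction's completion hook holds no reference back to the repo object and destructors can still run. A toy version of the same idiom, with made-up file names:

    import os

    def make_after(base):
        # capture only the path string, not the owning object
        def after():
            os.rename(os.path.join(base, "journal"), os.path.join(base, "undo"))
        return after
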
@@ -5,17 +5,21 b'' | |||
|
5 | 5 | # This software may be used and distributed according to the terms |
|
6 | 6 | # of the GNU General Public License, incorporated herein by reference. |
|
7 | 7 | |
|
8 | import os, time | |
|
9 | import util | |
|
8 | from demandload import * | |
|
9 | demandload(globals(), 'errno os time util') | |
|
10 | 10 | |
|
11 | class LockHeld(Exception): | |
|
11 | class LockException(Exception): | |
|
12 | pass | |
|
13 | class LockHeld(LockException): | |
|
14 | pass | |
|
15 | class LockUnavailable(LockException): | |
|
12 | 16 | pass |
|
13 | 17 | |
|
14 | 18 | class lock(object): |
|
15 | def __init__(self, file, wait=1, releasefn=None): | |
|
19 | def __init__(self, file, timeout=-1, releasefn=None): | |
|
16 | 20 | self.f = file |
|
17 | 21 | self.held = 0 |
|
18 | self.wait = wait | |
|
22 | self.timeout = timeout | |
|
19 | 23 | self.releasefn = releasefn |
|
20 | 24 | self.lock() |
|
21 | 25 | |
@@ -23,13 +27,16 b' class lock(object):' | |||
|
23 | 27 | self.release() |
|
24 | 28 | |
|
25 | 29 | def lock(self): |
|
30 | timeout = self.timeout | |
|
26 | 31 | while 1: |
|
27 | 32 | try: |
|
28 | 33 | self.trylock() |
|
29 | 34 | return 1 |
|
30 | 35 | except LockHeld, inst: |
|
31 | if self.wait: | |
|
36 | if timeout != 0: | |
|
32 | 37 | time.sleep(1) |
|
38 | if timeout > 0: | |
|
39 | timeout -= 1 | |
|
33 | 40 | continue |
|
34 | 41 | raise inst |
|
35 | 42 | |
@@ -38,8 +45,11 b' class lock(object):' | |||
|
38 | 45 | try: |
|
39 | 46 | util.makelock(str(pid), self.f) |
|
40 | 47 | self.held = 1 |
|
41 | except (OSError, IOError): | |
|
42 | raise LockHeld(util.readlock(self.f)) | |
|
48 | except (OSError, IOError), why: | |
|
49 | if why.errno == errno.EEXIST: | |
|
50 | raise LockHeld(util.readlock(self.f)) | |
|
51 | else: | |
|
52 | raise LockUnavailable(why) | |
|
43 | 53 | |
|
44 | 54 | def release(self): |
|
45 | 55 | if self.held: |
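
The trylock changes above distinguish "lock already held" (EEXIST) from other failures, which become LockUnavailable. A rough standalone illustration of that errno check, assuming simple file-based locking rather than the actual lock.py implementation:

    import errno, os

    class LockHeld(Exception):
        pass

    class LockUnavailable(Exception):
        pass

    def trylock(path, pid):
        # a lock file that already exists means someone else holds the lock;
        # any other OS error (read-only fs, missing directory, ...) is fatal
        try:
            fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
            os.write(fd, str(pid).encode())
            os.close(fd)
        except OSError as why:
            if why.errno == errno.EEXIST:
                raise LockHeld(path)
            raise LockUnavailable(why)
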
@@ -66,7 +66,7 b' static struct flist *lalloc(int size)' | |||
|
66 | 66 | a = NULL; |
|
67 | 67 | } else |
|
68 | 68 | a->head = a->tail = a->base; |
|
69 | return a; | |
|
69 | return a; | |
|
70 | 70 | } |
|
71 | 71 | if (!PyErr_Occurred()) |
|
72 | 72 | PyErr_NoMemory(); |
@@ -13,7 +13,7 b' of the GNU General Public License, incor' | |||
|
13 | 13 | from node import * |
|
14 | 14 | from i18n import gettext as _ |
|
15 | 15 | from demandload import demandload |
|
16 | demandload(globals(), "binascii errno heapq mdiff sha struct zlib") | |
|
16 | demandload(globals(), "binascii errno heapq mdiff os sha struct zlib") | |
|
17 | 17 | |
|
18 | 18 | def hash(text, p1, p2): |
|
19 | 19 | """generate a hash from the given text and its parent hashes |
@@ -187,15 +187,33 b' class revlog(object):' | |||
|
187 | 187 | self.indexfile = indexfile |
|
188 | 188 | self.datafile = datafile |
|
189 | 189 | self.opener = opener |
|
190 | ||
|
191 | self.indexstat = None | |
|
190 | 192 | self.cache = None |
|
191 | 193 | self.chunkcache = None |
|
194 | self.load() | |
|
192 | 195 | |
|
196 | def load(self): | |
|
193 | 197 | try: |
|
194 | i = self.opener(self.indexfile).read() | |
|
198 | f = self.opener(self.indexfile) | |
|
195 | 199 | except IOError, inst: |
|
196 | 200 | if inst.errno != errno.ENOENT: |
|
197 | 201 | raise |
|
198 | 202 | i = "" |
|
203 | else: | |
|
204 | try: | |
|
205 | st = os.fstat(f.fileno()) | |
|
206 | except AttributeError, inst: | |
|
207 | st = None | |
|
208 | else: | |
|
209 | oldst = self.indexstat | |
|
210 | if (oldst and st.st_dev == oldst.st_dev | |
|
211 | and st.st_ino == oldst.st_ino | |
|
212 | and st.st_mtime == oldst.st_mtime | |
|
213 | and st.st_ctime == oldst.st_ctime): | |
|
214 | return | |
|
215 | self.indexstat = st | |
|
216 | i = f.read() | |
|
199 | 217 | |
|
200 | 218 | if i and i[:4] != "\0\0\0\0": |
|
201 | 219 | raise RevlogError(_("incompatible revlog signature on %s") % |
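
The new revlog.load() above rereads the index only when the file's identity or timestamps change. A hedged sketch of that stat-comparison idea as a generic cached reader (not revlog itself; class and file names are invented):

    import os

    class CachedFile(object):
        def __init__(self, path):
            self.path = path
            self.stat = None
            self.data = None

        def load(self):
            st = os.stat(self.path)
            old = self.stat
            if (old and st.st_dev == old.st_dev and st.st_ino == old.st_ino
                    and st.st_mtime == old.st_mtime
                    and st.st_ctime == old.st_ctime):
                return self.data          # unchanged on disk: keep cached copy
            self.stat = st
            self.data = open(self.path, "rb").read()
            return self.data
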
@@ -624,12 +642,10 b' class revlog(object):' | |||
|
624 | 642 | # we store negative distances because heap returns smallest member |
|
625 | 643 | h = [(-dist[node], node)] |
|
626 | 644 | seen = {} |
|
627 | earliest = self.count() | |
|
628 | 645 | while h: |
|
629 | 646 | d, n = heapq.heappop(h) |
|
630 | 647 | if n not in seen: |
|
631 | 648 | seen[n] = 1 |
|
632 | r = self.rev(n) | |
|
633 | 649 | yield (-d, n) |
|
634 | 650 | for p in self.parents(n): |
|
635 | 651 | heapq.heappush(h, (-dist[p], p)) |
@@ -690,11 +706,6 b' class revlog(object):' | |||
|
690 | 706 | p = self.parents(self.node(revs[0]))[0] |
|
691 | 707 | revs.insert(0, self.rev(p)) |
|
692 | 708 | |
|
693 | # helper to reconstruct intermediate versions | |
|
694 | def construct(text, base, rev): | |
|
695 | bins = [self.chunk(r) for r in xrange(base + 1, rev + 1)] | |
|
696 | return mdiff.patches(text, bins) | |
|
697 | ||
|
698 | 709 | # build deltas |
|
699 | 710 | for d in xrange(0, len(revs) - 1): |
|
700 | 711 | a, b = revs[d], revs[d + 1] |
@@ -738,10 +749,10 b' class revlog(object):' | |||
|
738 | 749 | base = prev = -1 |
|
739 | 750 | start = end = measure = 0 |
|
740 | 751 | if r: |
|
741 | start = self.start(self.base(t)) | |
|
752 | base = self.base(t) | |
|
753 | start = self.start(base) | |
|
742 | 754 | end = self.end(t) |
|
743 | measure = self.length(self.base(t)) | |
|
744 | base = self.base(t) | |
|
755 | measure = self.length(base) | |
|
745 | 756 | prev = self.tip() |
|
746 | 757 | |
|
747 | 758 | transaction.add(self.datafile, end) |
@@ -793,14 +804,15 b' class revlog(object):' | |||
|
793 | 804 | raise RevlogError(_("consistency error adding group")) |
|
794 | 805 | measure = len(text) |
|
795 | 806 | else: |
|
796 | e = (end, len(cdelta), self.base(t), link, p1, p2, node) | |
|
807 | e = (end, len(cdelta), base, link, p1, p2, node) | |
|
797 | 808 | self.index.append(e) |
|
798 | 809 | self.nodemap[node] = r |
|
799 | 810 | dfh.write(cdelta) |
|
800 | 811 | ifh.write(struct.pack(indexformat, *e)) |
|
801 | 812 | |
|
802 | 813 | t, r, chain, prev = r, r + 1, node, node |
|
803 | start = self.start(self.base(t)) | |
|
814 | base = self.base(t) | |
|
815 | start = self.start(base) | |
|
804 | 816 | end = self.end(t) |
|
805 | 817 | |
|
806 | 818 | dfh.close() |
@@ -15,8 +15,10 b' class rangereader(httprangereader.httpra' | |||
|
15 | 15 | def read(self, size=None): |
|
16 | 16 | try: |
|
17 | 17 | return httprangereader.httprangereader.read(self, size) |
|
18 | except urllib2.HTTPError, inst: | |
|
19 | raise IOError(None, inst) | |
|
18 | 20 | except urllib2.URLError, inst: |
|
19 | raise IOError(None, … | |
|
21 | raise IOError(None, inst.reason[1]) | |
|
20 | 22 | |
|
21 | 23 | def opener(base): |
|
22 | 24 | """return a function that opens files over http""" |
@@ -22,6 +22,7 b' class transaction(object):' | |||
|
22 | 22 | if os.path.exists(journal): |
|
23 | 23 | raise AssertionError(_("journal already exists - run hg recover")) |
|
24 | 24 | |
|
25 | self.count = 1 | |
|
25 | 26 | self.report = report |
|
26 | 27 | self.opener = opener |
|
27 | 28 | self.after = after |
@@ -46,7 +47,17 b' class transaction(object):' | |||
|
46 | 47 | self.file.write("%s\0%d\n" % (file, offset)) |
|
47 | 48 | self.file.flush() |
|
48 | 49 | |
|
50 | def nest(self): | |
|
51 | self.count += 1 | |
|
52 | return self | |
|
53 | ||
|
54 | def running(self): | |
|
55 | return self.count > 0 | |
|
56 | ||
|
49 | 57 | def close(self): |
|
58 | self.count -= 1 | |
|
59 | if self.count != 0: | |
|
60 | return | |
|
50 | 61 | self.file.close() |
|
51 | 62 | self.entries = [] |
|
52 | 63 | if self.after: |
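
nest()/close() above make the transaction reference-counted: inner callers reuse the outer transaction and only the outermost close() actually finalizes it. Schematically (an illustrative class, not the real transaction object):

    class Nested(object):
        def __init__(self):
            self.count = 1            # the creator holds the first reference

        def nest(self):
            self.count += 1           # an inner operation joins the transaction
            return self

        def running(self):
            return self.count > 0

        def close(self):
            self.count -= 1
            if self.count != 0:
                return                # an outer scope is still active
            # only here would journal entries be flushed and cleaned up
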
@@ -179,7 +179,7 b' def canonpath(root, cwd, myname):' | |||
|
179 | 179 | if root == os.sep: |
|
180 | 180 | rootsep = os.sep |
|
181 | 181 | else: |
|
182 | rootsep = root + os.sep | |
|
182 | rootsep = root + os.sep | |
|
183 | 183 | name = myname |
|
184 | 184 | if not name.startswith(os.sep): |
|
185 | 185 | name = os.path.join(root, cwd, name) |
@@ -363,7 +363,14 b' def copyfiles(src, dst, hardlink=None):' | |||
|
363 | 363 | else: |
|
364 | 364 | shutil.copy(src, dst) |
|
365 | 365 | |
|
366 | def opener(base): | |
|
366 | def audit_path(path): | |
|
367 | """Abort if path contains dangerous components""" | |
|
368 | parts = os.path.normcase(path).split(os.sep) | |
|
369 | if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '') | |
|
370 | or os.pardir in parts): | |
|
371 | raise Abort(_("path contains illegal component: %s\n") % path) | |
|
372 | ||
|
373 | def opener(base, audit=True): | |
|
367 | 374 | """ |
|
368 | 375 | return a function that opens files relative to base |
|
369 | 376 | |
@@ -371,6 +378,7 b' def opener(base):' | |||
|
371 | 378 | remote file access from higher level code. |
|
372 | 379 | """ |
|
373 | 380 | p = base |
|
381 | audit_p = audit | |
|
374 | 382 | |
|
375 | 383 | def mktempcopy(name): |
|
376 | 384 | d, fn = os.path.split(name) |
@@ -401,6 +409,8 b' def opener(base):' | |||
|
401 | 409 | self.close() |
|
402 | 410 | |
|
403 | 411 | def o(path, mode="r", text=False, atomic=False): |
|
412 | if audit_p: | |
|
413 | audit_path(path) | |
|
404 | 414 | f = os.path.join(p, path) |
|
405 | 415 | |
|
406 | 416 | if not text: |
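
audit_path and the new audit flag on opener() reject filenames that could escape the repository or touch .hg directly. A rough equivalent of that check as a standalone function, with a hypothetical usage example:

    import os

    class Abort(Exception):
        pass

    def audit_path(path):
        # refuse drive letters, absolute or .hg-rooted names, and '..' components
        parts = os.path.normcase(path).split(os.sep)
        if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
                or os.pardir in parts):
            raise Abort("path contains illegal component: %s" % path)

    audit_path("data/foo.i")        # accepted
    # audit_path("../outside")      # would raise Abort
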
@@ -690,3 +700,16 b" def datestr(date=None, format='%c'):" | |||
|
690 | 700 | (time.strftime(format, time.gmtime(float(t) - tz)), |
|
691 | 701 | -tz / 3600, |
|
692 | 702 | ((-tz % 3600) / 60))) |
|
703 | ||
|
704 | def walkrepos(path): | |
|
705 | '''yield every hg repository under path, recursively.''' | |
|
706 | def errhandler(err): | |
|
707 | if err.filename == path: | |
|
708 | raise err | |
|
709 | ||
|
710 | for root, dirs, files in os.walk(path, onerror=errhandler): | |
|
711 | for d in dirs: | |
|
712 | if d == '.hg': | |
|
713 | yield root | |
|
714 | dirs[:] = [] | |
|
715 | break |
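
walkrepos() above prunes each subtree as soon as it sees a .hg directory, so the scan stops at repository roots instead of descending into them. A small usage sketch of the same idea (the scanned path is hypothetical):

    import os

    def walkrepos(path):
        for root, dirs, files in os.walk(path):
            if '.hg' in dirs:
                yield root
                dirs[:] = []   # do not descend into a repository we just found

    for repo in walkrepos("/srv/repos"):   # assumed directory layout
        print(repo)
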
@@ -89,7 +89,9 b' try:' | |||
|
89 | 89 | data_files=[('mercurial/templates', |
|
90 | 90 | ['templates/map'] + |
|
91 | 91 | glob.glob('templates/map-*') + |
|
92 | glob.glob('templates/*.tmpl'))], | |
|
92 | glob.glob('templates/*.tmpl')), | |
|
93 | ('mercurial/templates/static', | |
|
94 | glob.glob('templates/static/*'))], | |
|
93 | 95 | cmdclass=cmdclass, |
|
94 | 96 | scripts=['hg', 'hgmerge'], |
|
95 | 97 | options=dict(bdist_mpkg=dict(zipdist=True, |
@@ -1,11 +1,21 b'' | |||
|
1 | 1 | #header# |
|
2 | <title>#repo|escape#: Error</title> | |
|
3 | <link rel="alternate" type="application/rss+xml" | |
|
4 | href="?cmd=changelog;style=rss" title="RSS feed for #repo|escape#"> | |
|
5 | </head> | |
|
6 | <body> | |
|
7 | ||
|
8 | <div class="page_header"> | |
|
9 | <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="?cmd=summary;style=gitweb">#repo|escape#</a> / error | |
|
10 | </div> | |
|
11 | ||
|
2 | 12 | <div class="page_nav"> |
|
3 | <a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">log</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a><br/> | |
|
13 | <a href="?cmd=summary;style=gitweb">summary</a> | <a href="?cmd=changelog;style=gitweb">changelog</a> | <a href="?cmd=tags;style=gitweb">tags</a> | <a href="?cmd=manifest;manifest=#manifest#;path=/;style=gitweb">manifest</a><br/> | |
|
4 | 14 | </div> |
|
5 | 15 | |
|
6 | 16 | <div> |
|
7 | 17 | <br/> |
|
8 | <i>… | |
|
18 | <i>An error occured while processing your request</i><br/> | |
|
9 | 19 | <br/> |
|
10 | 20 | </div> |
|
11 | 21 |
@@ -4,56 +4,8 b' Content-type: text/html' | |||
|
4 | 4 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> |
|
5 | 5 | <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US"> |
|
6 | 6 | <head> |
|
7 | <link rel="icon" href="?static=hgicon.png" type="image/png"> | |
|
7 | 8 | <meta http-equiv="content-type" content="text/html; charset=utf-8"/> |
|
8 | 9 | <meta name="robots" content="index, nofollow"/> |
|
9 | <style type="text/css"> | |
|
10 | body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; } | |
|
11 | a { color:#0000cc; } | |
|
12 | a:hover, a:visited, a:active { color:#880000; } | |
|
13 | div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; } | |
|
14 | div.page_header a:visited { color:#0000cc; } | |
|
15 | div.page_header a:hover { color:#880000; } | |
|
16 | div.page_nav { padding:8px; } | |
|
17 | div.page_nav a:visited { color:#0000cc; } | |
|
18 | div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px} | |
|
19 | div.page_footer { height:17px; padding:4px 8px; background-color: #d9d8d1; } | |
|
20 | div.page_footer_text { float:left; color:#555555; font-style:italic; } | |
|
21 | div.page_body { padding:8px; } | |
|
22 | div.title, a.title { | |
|
23 | display:block; padding:6px 8px; | |
|
24 | font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000; | |
|
25 | } | |
|
26 | a.title:hover { background-color: #d9d8d1; } | |
|
27 | div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; } | |
|
28 | div.log_body { padding:8px 8px 8px 150px; } | |
|
29 | span.age { position:relative; float:left; width:142px; font-style:italic; } | |
|
30 | div.log_link { | |
|
31 | padding:0px 8px; | |
|
32 | font-size:10px; font-family:sans-serif; font-style:normal; | |
|
33 | position:relative; float:left; width:136px; | |
|
34 | } | |
|
35 | div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; } | |
|
36 | a.list { text-decoration:none; color:#000000; } | |
|
37 | a.list:hover { text-decoration:underline; color:#880000; } | |
|
38 | table { padding:8px 4px; } | |
|
39 | th { padding:2px 5px; font-size:12px; text-align:left; } | |
|
40 | tr.light:hover, .parity0:hover { background-color:#edece6; } | |
|
41 | tr.dark, .parity1 { background-color:#f6f6f0; } | |
|
42 | tr.dark:hover, .parity1:hover { background-color:#edece6; } | |
|
43 | td { padding:2px 5px; font-size:12px; vertical-align:top; } | |
|
44 | td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; } | |
|
45 | div.pre { font-family:monospace; font-size:12px; white-space:pre; } | |
|
46 | div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; } | |
|
47 | div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; } | |
|
48 | div.search { margin:4px 8px; position:absolute; top:56px; right:12px } | |
|
49 | .linenr { color:#999999; text-decoration:none } | |
|
50 | a.rss_logo { | |
|
51 | float:right; padding:3px 0px; width:35px; line-height:10px; | |
|
52 | border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e; | |
|
53 | color:#ffffff; background-color:#ff6600; | |
|
54 | font-weight:bold; font-family:sans-serif; font-size:10px; | |
|
55 | text-align:center; text-decoration:none; | |
|
56 | } | |
|
57 | a.rss_logo:hover { background-color:#ee5500; } | |
|
58 | </style> | |
|
10 | <style type="text/css">/*<![CDATA[*/ @import "?static=style-gitweb.css"; /*]]>*/</style> | |
|
59 | 11 |
@@ -3,78 +3,6 b' Content-type: text/html' | |||
|
3 | 3 | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"> |
|
4 | 4 | <html> |
|
5 | 5 | <head> |
|
6 | <link rel="icon" href="?static=hgicon.png" type="image/png"> | |
|
6 | 7 | <meta name="robots" content="index, nofollow" /> |
|
7 | <style type="text/css"> | |
|
8 | <!-- | |
|
9 | a { text-decoration:none; } | |
|
10 | .parity0 { background-color: #dddddd; } | |
|
11 | .parity1 { background-color: #eeeeee; } | |
|
12 | .lineno { width: 60px; color: #aaaaaa; font-size: smaller; | |
|
13 | text-align: right; padding-right:1em; } | |
|
14 | .plusline { color: green; } | |
|
15 | .minusline { color: red; } | |
|
16 | .atline { color: purple; } | |
|
17 | .annotate { font-size: smaller; text-align: right; padding-right: 1em; } | |
|
18 | .buttons a { | |
|
19 | background-color: #666666; | |
|
20 | padding: 2pt; | |
|
21 | color: white; | |
|
22 | font-family: sans; | |
|
23 | font-weight: bold; | |
|
24 | } | |
|
25 | .navigate a { | |
|
26 | background-color: #ccc; | |
|
27 | padding: 2pt; | |
|
28 | font-family: sans; | |
|
29 | color: black; | |
|
30 | } | |
|
31 | ||
|
32 | .metatag { | |
|
33 | background-color: #888888; | |
|
34 | color: white; | |
|
35 | text-align: right; | |
|
36 | } | |
|
37 | ||
|
38 | /* Common */ | |
|
39 | pre { margin: 0; } | |
|
40 | ||
|
41 | .logo { | |
|
42 | background-color: #333; | |
|
43 | padding: 4pt; | |
|
44 | margin: 8pt 0 8pt 8pt; | |
|
45 | font-family: sans; | |
|
46 | font-size: 60%; | |
|
47 | color: white; | |
|
48 | float: right; | |
|
49 | clear: right; | |
|
50 | text-align: left; | |
|
51 | } | |
|
52 | ||
|
53 | .logo a { | |
|
54 | font-weight: bold; | |
|
55 | font-size: 150%; | |
|
56 | color: #999; | |
|
57 | } | |
|
58 | ||
|
59 | /* Changelog entries */ | |
|
60 | .changelogEntry { width: 100%; } | |
|
61 | .changelogEntry th { font-weight: normal; text-align: right; vertical-align: top; } | |
|
62 | .changelogEntry th.age, .changelogEntry th.firstline { font-weight: bold; } | |
|
63 | .changelogEntry th.firstline { text-align: left; width: inherit; } | |
|
64 | ||
|
65 | /* Tag entries */ | |
|
66 | #tagEntries { list-style: none; margin: 0; padding: 0; } | |
|
67 | #tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; } | |
|
68 | #tagEntries .tagEntry span.node { font-family: monospace; } | |
|
69 | ||
|
70 | /* Changeset entry */ | |
|
71 | #changesetEntry { } | |
|
72 | #changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } | |
|
73 | #changesetEntry th.files, #changesetEntry th.description { vertical-align: top; } | |
|
74 | ||
|
75 | /* File diff view */ | |
|
76 | #filediffEntry { } | |
|
77 | #filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } | |
|
78 | ||
|
79 | --> | |
|
80 | </style> | |
|
8 | <style type="text/css">/*<![CDATA[*/ @import "?static=style.css"; /*]]>*/</style> |
@@ -4,3 +4,5 b' changelog = changelog-rss.tmpl' | |||
|
4 | 4 | changelogentry = changelogentry-rss.tmpl |
|
5 | 5 | filelog = filelog-rss.tmpl |
|
6 | 6 | filelogentry = filelogentry-rss.tmpl |
|
7 | tags = tags-rss.tmpl | |
|
8 | tagentry = tagentry-rss.tmpl |
@@ -1,11 +1,14 b'' | |||
|
1 | 1 | #header# |
|
2 | 2 | <title>#repo|escape#: tags</title> |
|
3 | <link rel="alternate" type="application/rss+xml" | |
|
4 | href="?cmd=tags;style=rss" title="RSS feed for #repo|escape#: tags"> | |
|
3 | 5 | </head> |
|
4 | 6 | <body> |
|
5 | 7 | |
|
6 | 8 | <div class="buttons"> |
|
7 | 9 | <a href="?cl=tip">changelog</a> |
|
8 | 10 | <a href="?mf=#manifest|short#;path=/">manifest</a> |
|
11 | <a type="application/rss+xml" href="?cmd=tags;style=rss">rss</a> | |
|
9 | 12 | </div> |
|
10 | 13 | |
|
11 | 14 | <h2>tags:</h2> |
@@ -18,8 +18,7 b' echo "name = test-archive" >> .hg/hgrc' | |||
|
18 | 18 | echo "allowzip = true" >> .hg/hgrc |
|
19 | 19 | echo "allowgz = true" >> .hg/hgrc |
|
20 | 20 | echo "allowbz2 = true" >> .hg/hgrc |
|
21 | hg serve -p 20059 … | |
|
22 | sleep 1 # wait for server to be started | |
|
21 | hg serve -p 20059 -d --pid-file=hg.pid | |
|
23 | 22 | |
|
24 | 23 | TIP=`hg id -v | cut -f1 -d' '` |
|
25 | 24 | QTIP=`hg id -q` |
@@ -35,5 +34,5 b' http_proxy= python getarchive.py "$TIP" ' | |||
|
35 | 34 | http_proxy= python getarchive.py "$TIP" zip > archive.zip |
|
36 | 35 | unzip -t archive.zip | sed "s/$QTIP/TIP/" |
|
37 | 36 | |
|
38 | kill $! | |
|
37 | kill `cat hg.pid` | |
|
39 | 38 | sleep 1 # wait for server to scream and die |
@@ -12,4 +12,3 b' Archive: archive.zip' | |||
|
12 | 12 | testing: test-archive-TIP/baz/bletch OK |
|
13 | 13 | testing: test-archive-TIP/foo OK |
|
14 | 14 | No errors detected in compressed data of archive.zip. |
|
15 | killed! |
@@ -1,5 +1,3 b'' | |||
|
1 | transaction abort! | |
|
2 | rollback completed | |
|
3 | 1 | abort: impossible time zone offset: 4444444 |
|
4 | 2 | transaction abort! |
|
5 | 3 | rollback completed |
@@ -13,4 +11,6 b" abort: invalid date: ' 1 4444'" | |||
|
13 | 11 | transaction abort! |
|
14 | 12 | rollback completed |
|
15 | 13 | abort: date exceeds 32 bits: 111111111111 |
|
14 | transaction abort! | |
|
15 | rollback completed | |
|
16 | 16 | abort: No such file or directory: .../test/bar |
@@ -64,7 +64,6 b' list of commands (use "hg help -v" to sh' | |||
|
64 | 64 | paths show definition of symbolic path names |
|
65 | 65 | pull pull changes from the specified source |
|
66 | 66 | push push changes to the specified destination |
|
67 | rawcommit raw commit interface (DEPRECATED) | |
|
68 | 67 | recover roll back an interrupted transaction |
|
69 | 68 | remove remove the specified files on the next commit |
|
70 | 69 | rename rename files; equivalent of copy + remove |
@@ -106,7 +105,6 b' list of commands (use "hg help -v" to sh' | |||
|
106 | 105 | paths show definition of symbolic path names |
|
107 | 106 | pull pull changes from the specified source |
|
108 | 107 | push push changes to the specified destination |
|
109 | rawcommit raw commit interface (DEPRECATED) | |
|
110 | 108 | recover roll back an interrupted transaction |
|
111 | 109 | remove remove the specified files on the next commit |
|
112 | 110 | rename rename files; equivalent of copy + remove |
@@ -173,9 +171,9 b' options:' | |||
|
173 | 171 | |
|
174 | 172 | -r --rev revision |
|
175 | 173 | -a --text treat all files as text |
|
176 | -I --include include names matching the given patterns | |
|
177 | 174 | -p --show-function show which function each change is in |
|
178 | 175 | -w --ignore-all-space ignore white space when comparing lines |
|
176 | -I --include include names matching the given patterns | |
|
179 | 177 | -X --exclude exclude names matching the given patterns |
|
180 | 178 | hg status [OPTION]... [FILE]... |
|
181 | 179 |
@@ -7,8 +7,7 b' hg init' | |||
|
7 | 7 | hg addremove |
|
8 | 8 | hg commit -m 1 |
|
9 | 9 | hg verify |
|
10 | hg serve -p 20059 … | |
|
11 | sleep 1 # wait for server to be started | |
|
10 | hg serve -p 20059 -d --pid-file=hg.pid | |
|
12 | 11 | cd .. |
|
13 | 12 | |
|
14 | 13 | http_proxy= hg clone http://localhost:20059/ copy |
@@ -19,4 +18,4 b' cat foo' | |||
|
19 | 18 | hg manifest |
|
20 | 19 | hg pull |
|
21 | 20 | |
|
22 | kill $! | |
|
21 | kill `cat ../test/hg.pid` |
@@ -12,9 +12,8 b' chmod -w .hg' | |||
|
12 | 12 | cd .. |
|
13 | 13 | |
|
14 | 14 | hg clone a b |
|
15 | ||
|
16 | chmod +w a/.hg # let test clean up | |
|
17 | ||
|
15 | 18 | cd b |
|
16 | 19 | hg verify |
|
17 | ||
|
18 | cd .. | |
|
19 | ||
|
20 | chmod +w a/.hg # let test clean up |
@@ -19,4 +19,3 b' 2ed2a3912a0b24502043eae84ee4b279c18b90dd' | |||
|
19 | 19 | pulling from http://localhost:20059/ |
|
20 | 20 | searching for changes |
|
21 | 21 | no changes found |
|
22 | killed! |
@@ -158,3 +158,24 b' hg remove d1/b' | |||
|
158 | 158 | hg rename d1 d3 |
|
159 | 159 | hg status |
|
160 | 160 | hg update -C |
|
161 | ||
|
162 | echo "# transitive rename" | |
|
163 | hg rename d1/b d1/bb | |
|
164 | hg rename d1/bb d1/bc | |
|
165 | hg status | |
|
166 | hg update -C | |
|
167 | ||
|
168 | echo "# transitive rename --after" | |
|
169 | hg rename d1/b d1/bb | |
|
170 | mv d1/bb d1/bc | |
|
171 | hg rename --after d1/bb d1/bc | |
|
172 | hg status | |
|
173 | hg update -C | |
|
174 | ||
|
175 | echo "# idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)" | |
|
176 | hg rename d1/b d1/bb | |
|
177 | echo "some stuff added to d1/bb" >> d1/bb | |
|
178 | hg rename d1/bb d1/b | |
|
179 | hg status | |
|
180 | hg debugstate | grep copy | |
|
181 | hg update -C |