@@ -1,19 +1,19 @@
 #!/bin/sh
 #
 # Build a Mercurial RPM in place.
-# Known to work on:
-# - Fedora 9
-# - Fedora 10
 #
 # Bryan O'Sullivan <bos@serpentine.com>
+#
+# Tested on
+# - Fedora 10
+# - Fedora 11
+# - Centos 5.3 (with Fedora EPEL repo for asciidoc)
 
-if hg --version > /dev/null 2>&1; then :
-else
-    echo 'hg command not available!' 1>&2
-    exit 1
-fi
+HG="`dirname $0`/../hg"
+PYTHONPATH="`dirname $0`/../mercurial/pure"
+export PYTHONPATH
 
-root="`hg root 2>/dev/null`"
+root="`$HG root 2>/dev/null`"
 specfile=contrib/mercurial.spec
 
 if [ -z "$root" ]; then
@@ -26,7 +26,7 @@ rpmdir=/tmp/"`basename $root | sed 's/ /
 cd "$root"
 rm -rf $rpmdir
 mkdir -p $rpmdir/RPMS
-hg clone "$root" $rpmdir/BUILD
+$HG clone "$root" $rpmdir/BUILD
 
 if [ ! -f $specfile ]; then
     echo "Cannot find $specfile!" 1>&2
@@ -35,11 +35,11 @@ sed -e 's,^Source:.*,Source: /dev/null,'
 
 tmpspec=/tmp/`basename "$specfile"`.$$ # FIXME: Insecure /tmp handling
 # Use the most recent tag as the version.
-version=`hg tags | perl -e 'while(<STDIN>){if(/^(\d\S+)/){print$1;exit}}'`
+version=`$HG tags | python -c 'import sys; print [l for l in sys.stdin.readlines() if l[0].isdigit()][0].split()[0]'`
 # Compute the release number as the difference in revision numbers
 # between the tip and the most recent tag.
-release=`hg tags | perl -e 'while(<STDIN>){($tag,$id)=/^(\S+)\s+(\d+)/;if($tag eq "tip"){$tip = $id}elsif($tag=~/^\d/){print $tip-$id+1;exit}}'`
-tip=`hg -q tip`
+release=`$HG tags | python -c 'import sys; l = sys.stdin.readlines(); print int(l[0].split()[1].split(":")[0]) - int([x for x in l if x[0].isdigit()][0].split()[1].split(":")[0])'`
+tip=`$HG -q tip`
 
 # Beat up the spec file
 sed -e 's,^Source:.*,Source: /dev/null,' \
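(A stand-alone Python sketch, not part of the patch itself, of what the two "python -c" one-liners above compute from `hg tags` output; the sample tag listing and revision numbers below are invented for illustration.)

# hypothetical sample of `hg tags` output: "tip" comes first, then version tags
sample = ("tip                              10523:4aee1b9cbbdc\n"
          "1.3.1                            10000:9d446d44b0da\n"
          "1.3                               9868:f80b5625cba2\n")

def version_and_release(tags_output):
    lines = tags_output.splitlines()
    # the first line whose first character is a digit is the newest version tag
    latest = [l for l in lines if l[0].isdigit()][0]
    version = latest.split()[0]
    # release: distance in revision numbers between tip and that tag
    tiprev = int(lines[0].split()[1].split(":")[0])
    tagrev = int(latest.split()[1].split(":")[0])
    return version, tiprev - tagrev

print(version_and_release(sample))   # -> ('1.3.1', 523)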
@@ -51,11 +51,11 @@ sed -e 's,^Source:.*,Source: /dev/null,'
 
 cat <<EOF >> $tmpspec
 %changelog
-* `date +'%a %b %d %Y'` `hg showconfig ui.username` $version-$release
+* `date +'%a %b %d %Y'` `$HG showconfig ui.username` $version-$release
 - Automatically built via $0
 
 EOF
-hg log \
+$HG log \
  --template '* {date|rfc822date} {author}\n- {desc|firstline}\n\n' \
  .hgtags \
  | sed -e 's/^\(\* [MTWFS][a-z][a-z]\), \([0-3][0-9]\) \([A-Z][a-z][a-z]\) /\1 \3 \2 /' \
@@ -71,6 +71,8 @@ rm -rf $RPM_BUILD_ROOT
 %{_bindir}/hg-viz
 %{_bindir}/git-rev-tree
 %{_bindir}/mercurial-convert-repo
-%{_libdir}/python%{pythonver}/site-packages/%{name}-*-py2.5.egg-info
+%if "%{?pythonver}" != "2.4"
+%{_libdir}/python%{pythonver}/site-packages/%{name}-*-py%{pythonver}.egg-info
+%endif
 %{pythonlib}
 %{hgext}
@@ -1,4 +1,5 @@
 # perf.py - performance test routines
+'''helper extension to measure performance'''
 
 from mercurial import cmdutil, match, commands
 import time, os, sys
@@ -142,6 +142,11 @@ Example:
 foo.password = bar
 foo.schemes = http https
 
+bar.prefix = secure.example.org
+bar.key = path/to/file.key
+bar.cert = path/to/file.cert
+bar.schemes = https
+
 Supported arguments:
 
 prefix;;
@@ -152,10 +157,17 @@ Supported arguments:
 against the URI with its scheme stripped as well, and the schemes
 argument, q.v., is then subsequently consulted.
 username;;
-Username to authenticate with.
+Optional. Username to authenticate with. If not given, and the
+remote site requires basic or digest authentication, the user
+will be prompted for it.
 password;;
-Optional. Password to authenticate with. If not given the
+Optional. Password to authenticate with. If not given, and the
+remote site requires basic or digest authentication, the user
 will be prompted for it.
+key;;
+Optional. PEM encoded client certificate key file.
+cert;;
+Optional. PEM encoded client certificate chain file.
 schemes;;
 Optional. Space separated list of URI schemes to use this
 authentication entry with. Only used if the prefix doesn't include
@@ -5,49 +5,49 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 #
-# this hook allows to allow or deny access to parts of a repo when
-# taking incoming changesets.
-#
-# authorization is against local user name on system where hook is
-# run, not committer of original changeset (since that is easy to
-# spoof).
-#
-# acl hook is best to use if you use hgsh to set up restricted shells
-# for authenticated users to only push to / pull from. not safe if
-# user has interactive shell access, because they can disable hook.
-# also not safe if remote users share one local account, because then
-# no way to tell remote users apart.
-#
-# to use, configure acl extension in hgrc like this:
-#
-# [extensions]
-# hgext.acl =
-#
-# [hooks]
-# pretxnchangegroup.acl = python:hgext.acl.hook
-#
-# [acl]
-# sources = serve # check if source of incoming changes in this list
-# # ("serve" == ssh or http, "push", "pull", "bundle")
-#
-# allow and deny lists have subtree pattern (default syntax is glob)
-# on left, user names on right. deny list checked before allow list.
-#
-# [acl.allow]
-# # if acl.allow not present, all users allowed by default
-# # empty acl.allow = no users allowed
-# docs/** = doc_writer
-# .hgtags = release_engineer
-#
-# [acl.deny]
-# # if acl.deny not present, no users denied by default
-# # empty acl.deny = all users allowed
-# glob pattern = user4, user5
-# ** = user6
+
+'''provide simple hooks for access control
+
+Authorization is against local user name on system where hook is run, not
+committer of original changeset (since that is easy to spoof).
+
+The acl hook is best to use if you use hgsh to set up restricted shells for
+authenticated users to only push to / pull from. It's not safe if user has
+interactive shell access, because they can disable the hook. It's also not
+safe if remote users share one local account, because then there's no way to
+tell remote users apart.
+
+To use, configure the acl extension in hgrc like this:
+
+[extensions]
+hgext.acl =
+
+[hooks]
+pretxnchangegroup.acl = python:hgext.acl.hook
+
+[acl]
+sources = serve # check if source of incoming changes in this list
+# ("serve" == ssh or http, "push", "pull", "bundle")
+
+Allow and deny lists have a subtree pattern (default syntax is glob) on the
+left and user names on right. The deny list is checked before the allow list.
+
+[acl.allow]
+# if acl.allow not present, all users allowed by default
+# empty acl.allow = no users allowed
+docs/** = doc_writer
+.hgtags = release_engineer
+
+[acl.deny]
+# if acl.deny not present, no users denied by default
+# empty acl.deny = all users allowed
+glob pattern = user4, user5
+** = user6
+'''
 
 from mercurial.i18n import _
 from mercurial import util, match
-import getpass
+import getpass, urllib
 
 def buildmatch(ui, repo, user, key):
     '''return tuple of (match function, list enabled).'''
@@ -72,7 +72,15 @@ def hook(ui, repo, hooktype, node=None,
         ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
         return
 
-    user = getpass.getuser()
+    user = None
+    if source == 'serve' and 'url' in kwargs:
+        url = kwargs['url'].split(':')
+        if url[0] == 'remote' and url[1].startswith('http'):
+            user = urllib.unquote(url[2])
+
+    if user is None:
+        user = getpass.getuser()
+
     cfg = ui.config('acl', 'config')
     if cfg:
         ui.readconfig(cfg, sections = ['acl.allow', 'acl.deny'])
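(A stand-alone Python sketch of the user lookup the new hook code performs. The exact format of kwargs['url'] is supplied by Mercurial's HTTP server; the sample value below is invented and only mirrors the fields the code above reads: a 'remote' marker, an http/https scheme, then a URL-quoted user name.)

try:
    from urllib import unquote        # Python 2, as used by the patched code
except ImportError:
    from urllib.parse import unquote  # Python 3 equivalent

def user_from_url(url):
    # take the third colon-separated field as the URL-quoted user name
    # when the changes arrived over HTTP(S), else report no user
    parts = url.split(':')
    if parts[0] == 'remote' and parts[1].startswith('http'):
        return unquote(parts[2])
    return None

print(user_from_url('remote:https:alice%40example.com'))   # -> alice@example.com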
@@ -64,10 +64,14 @@ def write(repo, refs):
     util.copyfile(repo.join('bookmarks'), repo.join('undo.bookmarks'))
     if current(repo) not in refs:
         setcurrent(repo, None)
-    file = repo.opener('bookmarks', 'w+')
-    for refspec, node in refs.iteritems():
-        file.write("%s %s\n" % (hex(node), refspec))
-    file.close()
+    wlock = repo.wlock()
+    try:
+        file = repo.opener('bookmarks', 'w', atomictemp=True)
+        for refspec, node in refs.iteritems():
+            file.write("%s %s\n" % (hex(node), refspec))
+        file.rename()
+    finally:
+        wlock.release()
 
 def current(repo):
     '''Get the current bookmark
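(The pattern both bookmarks changes adopt, take the write lock and then write the complete new content to a temporary file before renaming it into place, can be sketched outside Mercurial as follows; the lock object, helper name and file name are invented stand-ins for repo.wlock() and the atomictemp opener used above.)

import os, tempfile, threading

_wlock = threading.Lock()            # stand-in for repo.wlock()

def write_refs(path, refs):
    _wlock.acquire()
    try:
        # write the complete new content to a temporary file first ...
        fd, tmp = tempfile.mkstemp(prefix='.bookmarks-', dir=os.path.dirname(path) or '.')
        with os.fdopen(fd, 'w') as f:
            for name, node in sorted(refs.items()):
                f.write("%s %s\n" % (node, name))
        # ... then rename it over the old file (atomic on POSIX), so readers
        # never observe a partially written bookmarks file
        os.rename(tmp, path)
    finally:
        _wlock.release()

write_refs('bookmarks.txt', {'feature-x': 'deadbeef' * 5})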
@@ -106,9 +110,13 @@ def setcurrent(repo, mark):
         return
     if mark not in refs:
         mark = ''
-    file = repo.opener('bookmarks.current', 'w+')
-    file.write(mark)
-    file.close()
+    wlock = repo.wlock()
+    try:
+        file = repo.opener('bookmarks.current', 'w', atomictemp=True)
+        file.write(mark)
+        file.rename()
+    finally:
+        wlock.release()
     repo._bookmarkcurrent = mark
 
 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
@@ -242,26 +250,30 @@ def reposetup(ui, repo):
         def commit(self, *k, **kw):
             """Add a revision to the repository and
             move the bookmark"""
-            node = super(bookmark_repo, self).commit(*k, **kw)
-            if node is None:
-                return None
-            parents = repo.changelog.parents(node)
-            if parents[1] == nullid:
-                parents = (parents[0],)
-            marks = parse(repo)
-            update = False
-            for mark, n in marks.items():
-                if ui.configbool('bookmarks', 'track.current'):
-                    if mark == current(repo) and n in parents:
-                        marks[mark] = node
-                        update = True
-                else:
-                    if n in parents:
-                        marks[mark] = node
-                        update = True
-            if update:
-                write(repo, marks)
-            return node
+            wlock = self.wlock() # do both commit and bookmark with lock held
+            try:
+                node = super(bookmark_repo, self).commit(*k, **kw)
+                if node is None:
+                    return None
+                parents = repo.changelog.parents(node)
+                if parents[1] == nullid:
+                    parents = (parents[0],)
+                marks = parse(repo)
+                update = False
+                for mark, n in marks.items():
+                    if ui.configbool('bookmarks', 'track.current'):
+                        if mark == current(repo) and n in parents:
+                            marks[mark] = node
+                            update = True
+                    else:
+                        if n in parents:
+                            marks[mark] = node
+                            update = True
+                if update:
+                    write(repo, marks)
+                return node
+            finally:
+                wlock.release()
 
         def addchangegroup(self, source, srctype, url, emptyok=False):
             parents = repo.dirstate.parents()
@@ -8,6 +8,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
+'''provides children command to show children changesets'''
+
 from mercurial import cmdutil
 from mercurial.commands import templateopts
 from mercurial.i18n import _
@@ -120,7 +120,7 @@ def churn(ui, repo, *pats, **opts):
 
     It is possible to map alternate email addresses to a main address
     by providing a file using the following format:
-
+
     <alias email> <actual email>
 
     Such a file may be specified with the --aliases option, otherwise a
@@ -29,10 +29,6 @@ also available. Effects are rendered wit
 function (aka ANSI escape codes). This module also provides the
 render_text function, which can be used to add effects to any text.
 
-To enable this extension, add this to your .hgrc file:
-[extensions]
-color =
-
 Default effects may be overridden from the .hgrc file:
 
 [color]
@@ -80,7 +80,7 @@ class converter(object):
         self.authorfile = None
 
         # Record converted revisions persistently: maps source revision
-        # ID to target revision ID (both strings). (This is how
+        # ID to target revision ID (both strings). (This is how
         # incremental conversions work.)
         self.map = mapfile(ui, revmapfile)
 
@@ -297,7 +297,7 @@ class converter(object):
                 parents = [self.map.get(p, p) for p in parents]
             except KeyError:
                 parents = [b[0] for b in pbranches]
-            newnode = self.dest.putcommit(files, copies, parents, commit,
+            newnode = self.dest.putcommit(files, copies, parents, commit,
                                           self.source, self.map)
             self.source.converted(rev, newnode)
             self.map[rev] = newnode
@@ -159,7 +159,7 @@ class p4_source(converter_source):
 
             if code == "error":
                 raise IOError(d["generic"], data)
-
+
             elif code == "stat":
                 p4type = self.re_type.match(d["type"])
                 if p4type:
@@ -173,7 +173,7 @@ class p4_source(converter_source):
                     keywords = self.re_keywords_old
                 elif "k" in flags:
                     keywords = self.re_keywords
-
+
             elif code == "text" or code == "binary":
                 contents += data
 
@@ -472,7 +472,7 @@ class svn_source(converter_source):
                 # Here/tags/tag.1 discarded as well as its children.
                 # It happens with tools like cvs2svn. Such tags cannot
                 # be represented in mercurial.
-                addeds = dict((p, e.copyfrom_path) for p,e
+                addeds = dict((p, e.copyfrom_path) for p, e
                               in origpaths.iteritems() if e.action == 'A')
                 badroots = set()
                 for destroot in addeds:
@@ -484,7 +484,7 @@ class svn_source(converter_source):
                         break
 
                 for badroot in badroots:
-                    pendings = [p for p in pendings if p[2] != badroot
+                    pendings = [p for p in pendings if p[2] != badroot
                                 and not p[2].startswith(badroot + '/')]
 
                 # Tell tag renamings from tag creations
@@ -497,7 +497,7 @@ class svn_source(converter_source):
                     if tagname in tags:
                         # Keep the latest tag value
                         continue
-                    # From revision may be fake, get one with changes
+                    # From revision may be fake, get one with changes
                     try:
                         tagid = self.latest(source, sourcerev)
                         if tagid and tagname not in tags:
@@ -5,18 +5,14 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
-'''
+'''allow external programs to compare revisions
+
 The `extdiff' Mercurial extension allows you to use external programs
 to compare revisions, or revision with working directory. The external diff
 programs are called with a configurable set of options and two
 non-option arguments: paths to directories containing snapshots of
 files to compare.
 
-To enable this extension:
-
-[extensions]
-hgext.extdiff =
-
 The `extdiff' extension also allows to configure new diff commands, so
 you do not need to type "hg extdiff -p kdiff3" always.
 
@@ -1,10 +1,10 @@
-# GnuPG signing extension for Mercurial
-#
 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
+'''GnuPG signing extension for Mercurial'''
+
 import os, tempfile, binascii
 from mercurial import util, commands, match
 from mercurial import node as hgnode
@@ -12,59 +12,32 @@ commands. When this options is given, an
 revision graph is also shown.
 '''
 
-import os
+import os, sys
 from mercurial.cmdutil import revrange, show_changeset
 from mercurial.commands import templateopts
 from mercurial.i18n import _
 from mercurial.node import nullrev
 from mercurial import bundlerepo, changegroup, cmdutil, commands, extensions
-from mercurial import hg, url, util
-
-def revisions(repo, start, stop):
-    """cset DAG generator yielding (rev, node, [parents]) tuples
+from mercurial import hg, url, util, graphmod
 
-    This generator function walks through the revision history from revision
-    start to revision stop (which must be less than or equal to start).
-    """
-    assert start >= stop
-    cur = start
-    while cur >= stop:
-        ctx = repo[cur]
-        parents = [p.rev() for p in ctx.parents() if p.rev() != nullrev]
-        parents.sort()
-        yield (ctx, parents)
-        cur -= 1
-
-def filerevs(repo, path, start, stop):
-    """file cset DAG generator yielding (rev, node, [parents]) tuples
+ASCIIDATA = 'ASC'
 
-    This generator function walks through the revision history of a single
-    file from revision start to revision stop (which must be less than or
-    equal to start).
-    """
-    assert start >= stop
-    filerev = len(repo.file(path)) - 1
-    while filerev >= 0:
-        fctx = repo.filectx(path, fileid=filerev)
-        parents = [f.linkrev() for f in fctx.parents() if f.path() == path]
-        parents.sort()
-        if fctx.rev() <= start:
-            yield (fctx, parents)
-        if fctx.rev() <= stop:
-            break
-        filerev -= 1
+def asciiformat(ui, repo, revdag, opts):
+    """formats a changelog DAG walk for ASCII output"""
+    showparents = [ctx.node() for ctx in repo[None].parents()]
+    displayer = show_changeset(ui, repo, opts, buffered=True)
+    for (id, type, ctx, parentids) in revdag:
+        if type != graphmod.CHANGESET:
+            continue
+        displayer.show(ctx)
+        lines = displayer.hunk.pop(ctx.rev()).split('\n')[:-1]
+        char = ctx.node() in showparents and '@' or 'o'
+        yield (id, ASCIIDATA, (char, lines), parentids)
 
-def grapher(nodes):
-    """grapher for asciigraph on a list of nodes and their parents
-
-    nodes must generate tuples (node, parents, char, lines) where
-    - parents must generate the parents of node, in sorted order,
-      and max length 2,
-    - char is the char to print as the node symbol, and
-    - lines are the lines to display next to the node.
-    """
+def asciiedges(nodes):
+    """adds edge info to changelog DAG walk suitable for ascii()"""
     seen = []
-    for node, parents, char, lines in nodes:
+    for node, type, data, parents in nodes:
         if node not in seen:
             seen.append(node)
         nodeidx = seen.index(node)
@@ -88,7 +61,7 @@ def grapher(nodes):
         edges.append((nodeidx, nodeidx + 1))
         nmorecols = len(nextseen) - ncols
         seen = nextseen
-        yield (char, lines, nodeidx, edges, ncols, nmorecols)
+        yield (nodeidx, type, data, edges, ncols, nmorecols)
 
 def fix_long_right_edges(edges):
     for (i, (start, end)) in enumerate(edges):
@@ -142,14 +115,16 @@ def get_padding_line(ni, n_columns, edge
     line.extend(["|", " "] * (n_columns - ni - 1))
     return line
 
-def ascii(ui, grapher):
-    """prints an ASCII graph of the DAG
+def ascii(ui, dag):
+    """prints an ASCII graph of the DAG
+
+    dag is a generator that emits tuples with the following elements:
 
-    grapher is a generator that emits tuples with the following elements:
-
-    - Character to use as node's symbol.
-    - List of lines to display as the node's text.
     - Column of the current node in the set of ongoing edges.
+    - Type indicator of node data == ASCIIDATA.
+    - Payload: (char, lines):
+        - Character to use as node's symbol.
+        - List of lines to display as the node's text.
     - Edges; a list of (col, next_col) indicating the edges between
       the current node and its parents.
     - Number of columns (ongoing edges) in the current revision.
160 | """ |
|
135 | """ | |
161 | prev_n_columns_diff = 0 |
|
136 | prev_n_columns_diff = 0 | |
162 | prev_node_index = 0 |
|
137 | prev_node_index = 0 | |
163 |
for (node_ch, node_lines |
|
138 | for (node_index, type, (node_ch, node_lines), edges, n_columns, n_columns_diff) in dag: | |
164 |
|
139 | |||
165 | assert -2 < n_columns_diff < 2 |
|
140 | assert -2 < n_columns_diff < 2 | |
166 | if n_columns_diff == -1: |
|
141 | if n_columns_diff == -1: | |
@@ -278,34 +253,19 @@ def graphlog(ui, repo, path=None, **opts
     if path:
         path = util.canonpath(repo.root, os.getcwd(), path)
     if path: # could be reset in canonpath
-        revdag = filerevs(repo, path, start, stop)
+        revdag = graphmod.filerevs(repo, path, start, stop)
     else:
-        revdag = revisions(repo, start, stop)
+        revdag = graphmod.revisions(repo, start, stop)
 
-    graphdag = graphabledag(ui, repo, revdag, opts)
-    ascii(ui, grapher(graphdag))
+    fmtdag = asciiformat(ui, repo, revdag, opts)
+    ascii(ui, asciiedges(fmtdag))
 
 def graphrevs(repo, nodes, opts):
-    include = set(nodes)
     limit = cmdutil.loglimit(opts)
-    count = 0
-    for node in reversed(nodes):
-        if count >= limit:
-            break
-        ctx = repo[node]
-        parents = [p.rev() for p in ctx.parents() if p.node() in include]
-        parents.sort()
-        yield (ctx, parents)
-        count += 1
-
-def graphabledag(ui, repo, revdag, opts):
-    showparents = [ctx.node() for ctx in repo[None].parents()]
-    displayer = show_changeset(ui, repo, opts, buffered=True)
-    for (ctx, parents) in revdag:
-        displayer.show(ctx)
-        lines = displayer.hunk.pop(ctx.rev()).split('\n')[:-1]
-        char = ctx.node() in showparents and '@' or 'o'
-        yield (ctx.rev(), parents, char, lines)
+    nodes.reverse()
+    if limit < sys.maxint:
+        nodes = nodes[:limit]
+    return graphmod.nodes(repo, nodes)
 
 def goutgoing(ui, repo, dest=None, **opts):
     """show the outgoing changesets alongside an ASCII revision graph
@@ -332,8 +292,8 @@ def goutgoing(ui, repo, dest=None, **opt
 
     o = repo.changelog.nodesbetween(o, revs)[0]
     revdag = graphrevs(repo, o, opts)
-    graphdag = graphabledag(ui, repo, revdag, opts)
-    ascii(ui, grapher(graphdag))
+    fmtdag = asciiformat(ui, repo, revdag, opts)
+    ascii(ui, asciiedges(fmtdag))
 
 def gincoming(ui, repo, source="default", **opts):
     """show the incoming changesets alongside an ASCII revision graph
@@ -381,8 +341,8 @@ def gincoming(ui, repo, source="default"
 
         chlist = other.changelog.nodesbetween(incoming, revs)[0]
         revdag = graphrevs(other, chlist, opts)
-        graphdag = graphabledag(ui, repo, revdag, opts)
-        ascii(ui, grapher(graphdag))
+        fmtdag = asciiformat(ui, repo, revdag, opts)
+        ascii(ui, asciiedges(fmtdag))
 
     finally:
         if hasattr(other, 'close'):
@@ -14,20 +14,8 @@ distributed with Mercurial.)
 hgk consists of two parts: a Tcl script that does the displaying and
 querying of information, and an extension to Mercurial named hgk.py,
 which provides hooks for hgk to get information. hgk can be found in
-the contrib directory, and hgk.py can be found in the hgext directory.
-
-To load the hgext.py extension, add it to your .hgrc file (you have to
-use your global $HOME/.hgrc file, not one in a repository). You can
-specify an absolute path:
-
-[extensions]
-hgk=/usr/local/lib/hgk.py
-
-Mercurial can also scan the default python library path for a file
-named 'hgk.py' if you set hgk empty:
-
-[extensions]
-hgk=
+the contrib directory, and the extension is shipped in the hgext
+repository, and needs to be enabled.
 
 The hg view command will launch the hgk Tcl script. For this command
 to work, hgk must be in your search path. Alternately, you can specify
@@ -13,11 +13,6 @@
 It depends on the Pygments syntax highlighting library:
 http://pygments.org/
 
-To enable the extension add this to hgrc:
-
-[extensions]
-hgext.highlight =
-
 There is a single configuration option:
 
 [web]
@@ -30,10 +25,10 @@ The default is 'colorful'.
 
 import highlight
 from mercurial.hgweb import webcommands, webutil, common
-from mercurial import extensions
+from mercurial import extensions, encoding
 
 def filerevision_highlight(orig, web, tmpl, fctx):
-    mt = ''.join(tmpl('mimetype', encoding=util._encoding))
+    mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
     # only pygmentize for mimetype containing 'html' so we both match
     # 'text/html' and possibly 'application/xhtml+xml' in the future
     # so that we don't have to touch the extension when the mimetype
@@ -47,7 +42,7 @@ def filerevision_highlight(orig, web, tm
     return orig(web, tmpl, fctx)
 
 def annotate_highlight(orig, web, req, tmpl):
-    mt = ''.join(tmpl('mimetype', encoding=util._encoding))
+    mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
     if 'html' in mt:
         fctx = webutil.filectx(web.repo, req)
         style = web.config('web', 'pygments_style', 'colorful')
@@ -14,12 +14,8 @@ This extension allows the use of a speci
 which will be automatically expanded into links or any other
 arbitrary expression, much like InterWiki does.
 
-To enable this extension, add the following lines to your hgrc:
-
-[extensions]
-interhg =
-
-A few example patterns (link to bug tracking, etc.):
+A few example patterns (link to bug tracking, etc.) that may
+be used in your hgrc:
 
 [interhg]
 issues = s!issue(\\d+)!<a href="http://bts/issue\\1">issue\\1</a>!
@@ -21,12 +21,6 @@
 #
 # Binary files are not touched.
 #
-# Setup in hgrc:
-#
-# [extensions]
-# # enable extension
-# hgext.keyword =
-#
 # Files to act upon/ignore are specified in the [keyword] section.
 # Customized keyword template mappings in the [keywordmaps] section.
 #
@@ -543,6 +543,8 @@ class queue(object):
 
     def _apply(self, repo, series, list=False, update_status=True,
                strict=False, patchdir=None, merge=None, all_files={}):
+        '''returns (error, hash)
+        error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
         # TODO unify with commands.py
         if not patchdir:
             patchdir = self.path
@@ -559,7 +561,7 @@ class queue(object):
             try:
                 ph = patchheader(self.join(patchname))
             except:
-                self.ui.warn(_("Unable to read %s\n") % patchname)
+                self.ui.warn(_("unable to read %s\n") % patchname)
                 err = 1
                 break
 
@@ -607,46 +609,60 @@ class queue(object):
 
         if patcherr:
             self.ui.warn(_("patch failed, rejects left in working dir\n"))
-            err = 1
+            err = 2
             break
 
         if fuzz and strict:
             self.ui.warn(_("fuzz found when applying patch, stopping\n"))
-            err = 1
+            err = 3
             break
         return (err, n)
 
-    def _clean_series(self, patches):
+    def _cleanup(self, patches, numrevs, keep=False):
+        if not keep:
+            r = self.qrepo()
+            if r:
+                r.remove(patches, True)
+            else:
+                for p in patches:
+                    os.unlink(self.join(p))
+
+        if numrevs:
+            del self.applied[:numrevs]
+            self.applied_dirty = 1
+
         for i in sorted([self.find_series(p) for p in patches], reverse=True):
             del self.full_series[i]
         self.parse_series()
         self.series_dirty = 1
 
-    def finish(self, repo, revs):
+    def _revpatches(self, repo, revs):
         firstrev = repo[self.applied[0].rev].rev()
-        appliedbase = 0
         patches = []
-        for rev in sorted(revs):
+        for i, rev in enumerate(revs):
+
             if rev < firstrev:
                 raise util.Abort(_('revision %d is not managed') % rev)
-            base = bin(self.applied[appliedbase].rev)
-            node = repo.changelog.node(rev)
-            if node != base:
-                raise util.Abort(_('cannot delete revision %d above '
-                                   'applied patches') % rev)
-            patches.append(self.applied[appliedbase].name)
-            appliedbase += 1
+
+            ctx = repo[rev]
+            base = bin(self.applied[i].rev)
+            if ctx.node() != base:
+                msg = _('cannot delete revision %d above applied patches')
+                raise util.Abort(msg % rev)
 
-        r = self.qrepo()
-        if r:
-            r.remove(patches, True)
-        else:
-            for p in patches:
-                os.unlink(self.join(p))
+            patch = self.applied[i].name
+            for fmt in ('[mq]: %s', 'imported patch %s'):
+                if ctx.description() == fmt % patch:
+                    msg = _('patch %s finalized without changeset message\n')
+                    repo.ui.status(msg % patch)
+                    break
 
-        del self.applied[:appliedbase]
-        self.applied_dirty = 1
-        self._clean_series(patches)
+            patches.append(patch)
+        return patches
+
+    def finish(self, repo, revs):
+        patches = self._revpatches(repo, sorted(revs))
+        self._cleanup(patches, len(patches))
 
     def delete(self, repo, patches, opts):
         if not patches and not opts.get('rev'):
@@ -663,37 +679,18 @@ class queue(object):
                 raise util.Abort(_("patch %s not in series file") % patch)
             realpatches.append(patch)
 
-        appliedbase = 0
+        numrevs = 0
         if opts.get('rev'):
             if not self.applied:
                 raise util.Abort(_('no patches applied'))
             revs = cmdutil.revrange(repo, opts['rev'])
             if len(revs) > 1 and revs[0] > revs[1]:
                 revs.reverse()
-            for rev in revs:
-                if appliedbase >= len(self.applied):
-                    raise util.Abort(_("revision %d is not managed") % rev)
-
-                base = bin(self.applied[appliedbase].rev)
-                node = repo.changelog.node(rev)
-                if node != base:
-                    raise util.Abort(_("cannot delete revision %d above "
-                                       "applied patches") % rev)
-                realpatches.append(self.applied[appliedbase].name)
-                appliedbase += 1
+            revpatches = self._revpatches(repo, revs)
+            realpatches += revpatches
+            numrevs = len(revpatches)
 
-        if not opts.get('keep'):
-            r = self.qrepo()
-            if r:
-                r.remove(realpatches, True)
-            else:
-                for p in realpatches:
-                    os.unlink(self.join(p))
-
-        if appliedbase:
-            del self.applied[:appliedbase]
-            self.applied_dirty = 1
-        self._clean_series(realpatches)
+        self._cleanup(realpatches, numrevs, opts.get('keep'))
 
     def check_toppatch(self, repo):
         if len(self.applied) > 0:
@@ -958,6 +955,7 @@ class queue(object):
                 end = start + 1
             else:
                 end = self.series.index(patch, start) + 1
+
         s = self.series[start:end]
         all_files = {}
         try:
@@ -977,13 +975,15 @@ class queue(object):
                     util.unlink(repo.wjoin(f))
                 self.ui.warn(_('done\n'))
                 raise
+
             top = self.applied[-1].name
-            if ret[0]:
-                self.ui.write(_("Errors during apply, please fix and "
-                                "refresh %s\n") % top)
+            if ret[0] and ret[0] > 1:
+                msg = _("errors during apply, please fix and refresh %s\n")
+                self.ui.write(msg % top)
             else:
                 self.ui.write(_("now at: %s\n") % top)
             return ret[0]
+
         finally:
             wlock.release()
 
@@ -5,8 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
-'''\
-use suffixes to refer to ancestor revisions
+'''use suffixes to refer to ancestor revisions
 
 This extension allows you to use git-style suffixes to refer to the
 ancestors of a specific revision.
@@ -28,11 +28,6 @@ With the -d/--diffstat option, you will
 with a diffstat summary and the changeset summary, so you can be sure
 you are sending the right changes.
 
-To enable this extension:
-
-[extensions]
-hgext.patchbomb =
-
 To configure other defaults, add a section like this to your hgrc
 file:
 
@@ -6,10 +6,6 @@
 # This program was inspired by the "cvspurge" script contained in CVS utilities
 # (http://www.red-bean.com/cvsutils/).
 #
-# To enable the "purge" extension put these lines in your ~/.hgrc:
-# [extensions]
-# hgext.purge =
-#
 # For help on the usage of "hg purge" use:
 # hg help purge
 #
@@ -27,6 +23,8 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 
+'''enable removing untracked files only'''
+
 from mercurial import util, commands, cmdutil
 from mercurial.i18n import _
 import os, stat
@@ -1,10 +1,10 @@
-# Mercurial extension to provide the 'hg share' command
-#
 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
+'''provides the hg share command'''
+
 import os
 from mercurial.i18n import _
 from mercurial import hg, commands
@@ -33,11 +33,6 @@ Note that there are some limitations on
 * You should set same encoding for the repository by locale or
   HGENCODING.
 
-To use this extension, enable the extension in .hg/hgrc or ~/.hgrc:
-
-[extensions]
-hgext.win32mbcs =
-
 Path encoding conversion are done between Unicode and
 encoding.encoding which is decided by Mercurial from current locale
 setting or HGENCODING.
@@ -4,31 +4,34 @@
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
-#
-# To perform automatic newline conversion, use:
-#
-# [extensions]
-# hgext.win32text =
-# [encode]
-# ** = cleverencode:
-# # or ** = macencode:
-# [decode]
-# ** = cleverdecode:
-# # or ** = macdecode:
-#
-# If not doing conversion, to make sure you do not commit CRLF/CR by
-# accident:
-#
-# [hooks]
-# pretxncommit.crlf = python:hgext.win32text.forbidcrlf
-# # or pretxncommit.cr = python:hgext.win32text.forbidcr
-#
-# To do the same check on a server to prevent CRLF/CR from being
-# pushed or pulled:
-#
-# [hooks]
-# pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
-# # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
+
+'''LF <-> CRLF/CR translation utilities
+
+To perform automatic newline conversion, use:
+
+[extensions]
+hgext.win32text =
+[encode]
+** = cleverencode:
+# or ** = macencode:
+
+[decode]
+** = cleverdecode:
+# or ** = macdecode:
+
+If not doing conversion, to make sure you do not commit CRLF/CR by accident:
+
+[hooks]
+pretxncommit.crlf = python:hgext.win32text.forbidcrlf
+# or pretxncommit.cr = python:hgext.win32text.forbidcr
+
+To do the same check on a server to prevent CRLF/CR from being
+pushed or pulled:
+
+[hooks]
+pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
+# or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
+'''
 
 from mercurial.i18n import _
 from mercurial.node import short
@@ -11,12 +11,6 @@ Zeroconf enabled repositories will be an
 the need to configure a server or a service. They can be discovered
 without knowing their actual IP address.
 
-To use the zeroconf extension add the following entry to your hgrc
-file:
-
-[extensions]
-hgext.zeroconf =
-
 To allow other people to discover your repository using run "hg serve"
 in your repository.
 
@@ -18,6 +18,10 @@
 # define inline
 #endif
 
+#ifdef __linux
+# define inline __inline
+#endif
+
 #ifdef _WIN32
 #ifdef _MSC_VER
 #define inline __inline
@@ -1230,16 +1230,14 @@ def grep(ui, repo, pattern, *pats, **opt
         for i in xrange(blo, bhi):
             yield ('+', b[i])
 
-    prev = {}
-    def display(fn, rev, states, prevstates):
+    def display(fn, r, pstates, states):
         datefunc = ui.quiet and util.shortdate or util.datestr
         found = False
         filerevmatches = {}
-        r = prev.get(fn, -1)
         if opts.get('all'):
-            iter = difflinestates(states, prevstates)
+            iter = difflinestates(pstates, states)
         else:
-            iter = [('', l) for l in prevstates]
+            iter = [('', l) for l in states]
         for change, l in iter:
             cols = [fn, str(r)]
             if opts.get('line_number'):
@@ -1261,8 +1259,8 @@ def grep(ui, repo, pattern, *pats, **opt
             found = True
         return found
 
-    fstate = {}
     skip = {}
+    revfiles = {}
     get = util.cachefunc(lambda r: repo[r].changeset())
     changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
     found = False
@@ -1270,46 +1268,58 @@ def grep(ui, repo, pattern, *pats, **opt
     for st, rev, fns in changeiter:
         if st == 'window':
             matches.clear()
+            revfiles.clear()
         elif st == 'add':
             ctx = repo[rev]
-            matches[rev] = {}
+            pctx = ctx.parents()[0]
+            parent = pctx.rev()
+            matches.setdefault(rev, {})
+            matches.setdefault(parent, {})
+            files = revfiles.setdefault(rev, [])
             for fn in fns:
-                if fn in skip:
-                    continue
+                flog = getfile(fn)
                 try:
-                    grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
-                    fstate.setdefault(fn, [])
-                    if follow:
-                        copied = getfile(fn).renamed(ctx.filenode(fn))
-                        if copied:
-                            copies.setdefault(rev, {})[fn] = copied[0]
+                    fnode = ctx.filenode(fn)
                 except error.LookupError:
-                    pass
+                    continue
+
+                copied = flog.renamed(fnode)
+                copy = follow and copied and copied[0]
+                if copy:
+                    copies.setdefault(rev, {})[fn] = copy
+                if fn in skip:
+                    if copy:
+                        skip[copy] = True
+                    continue
+                files.append(fn)
+
+                if not matches[rev].has_key(fn):
+                    grepbody(fn, rev, flog.read(fnode))
+
+                pfn = copy or fn
+                if not matches[parent].has_key(pfn):
+                    try:
+                        fnode = pctx.filenode(pfn)
+                        grepbody(pfn, parent, flog.read(fnode))
+                    except error.LookupError:
+                        pass
         elif st == 'iter':
-            for fn, m in sorted(matches[rev].items()):
+            parent = repo[rev].parents()[0].rev()
+            for fn in sorted(revfiles.get(rev, [])):
+                states = matches[rev][fn]
                 copy = copies.get(rev, {}).get(fn)
                 if fn in skip:
                     if copy:
                         skip[copy] = True
                    continue
-                if fn in prev or fstate[fn]:
-                    r = display(fn, rev, m, fstate[fn])
+                pstates = matches.get(parent, {}).get(copy or fn, [])
+                if pstates or states:
+                    r = display(fn, rev, pstates, states)
                     found = found or r
                     if r and not opts.get('all'):
                         skip[fn] = True
                         if copy:
                             skip[copy] = True
-                fstate[fn] = m
-                if copy:
-                    fstate[copy] = m
-                prev[fn] = rev
-
-    for fn, state in sorted(fstate.items()):
-        if fn in skip:
-            continue
-        if fn not in copies.get(prev[fn], {}):
-            found = display(fn, rev, {}, state) or found
-    return (not found and 1) or 0
 
 def heads(ui, repo, *branchrevs, **opts):
     """show current repository heads or show branch heads
@@ -1473,18 +1483,9 @@ def help_(ui, name=None, with_version=Fa
             else:
                 ui.write(' %-*s %s\n' % (m, f, h[f]))
 
-        exts = list(extensions.extensions())
-        if exts and name != 'shortlist':
-            ui.write(_('
-            maxlength = 0
-            exthelps = []
-            for ename, ext in exts:
-                doc = (gettext(ext.__doc__) or _('(no help text available)'))
-                ename = ename.split('.')[-1]
-                maxlength = max(len(ename), maxlength)
-                exthelps.append((ename, doc.splitlines(0)[0].strip()))
-            for ename, text in exthelps:
-                ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
+        if name != 'shortlist':
+            exts, maxlength = extensions.enabled()
+            ui.write(help.listexts(_('enabled extensions:'), exts, maxlength))
 
         if not ui.quiet:
             addglobalopts(True)
@@ -2108,7 +2109,7 @@ def merge(ui, repo, node=None, **opts):
                              'use "hg update" or merge with an explicit rev'))
         node = parent == bheads[0] and bheads[-1] or bheads[0]
 
-    if opts.get('
+    if opts.get('preview'):
         p1 = repo['.']
         p2 = repo[node]
         common = p1.ancestor(p2)
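
For context, the preview flag handled by this code path lists the changesets a merge would bring in without performing the merge or touching the working directory; typical invocations, as exercised by the tests further down in this series, look like:

    $ hg merge -P            # preview a merge with the other head
    $ hg merge -P 1          # preview a merge with revision 1
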
@@ -2670,7 +2671,8 @@ def rollback(ui, repo):
     This command should be used with care. There is only one level of
     rollback, and there is no way to undo a rollback. It will also
     restore the dirstate at the time of the last transaction, losing
-    any dirstate changes since that time.
+    any dirstate changes since that time. This command does not alter
+    the working directory.
 
     Transactions are used to encapsulate the effects of all commands
     that create new changesets or propagate existing changesets into a
@@ -2718,9 +2720,9 @@ def serve(ui, repo, **opts):
 
     baseui = repo and repo.baseui or ui
     optlist = ("name templates style address port prefix ipv6"
-               " accesslog errorlog webdir_conf certificate")
+               " accesslog errorlog webdir_conf certificate encoding")
     for o in optlist.split():
-        if opts
+        if opts.get(o, None):
             baseui.setconfig("web", o, str(opts[o]))
         if (repo is not None) and (repo.ui != baseui):
             repo.ui.setconfig("web", o, str(opts[o]))
@@ -2965,7 +2967,7 @@ def unbundle(ui, repo, fname1, *fnames, 
 
     return postincoming(ui, repo, modheads, opts.get('update'), None)
 
-def update(ui, repo, node=None, rev=None, clean=False, date=None):
+def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
     """update working directory
 
     Update the repository's working directory to the specified
@@ -2981,7 +2983,8 @@ def update(ui, repo, node=None, rev=None
 
     When there are uncommitted changes, use option -C/--clean to
     discard them, forcibly replacing the state of the working
-    directory with the requested revision.
+    directory with the requested revision. Alternately, use -c/--check
+    to abort.
 
     When there are uncommitted changes and option -C/--clean is not
     used, and the parent revision and requested revision are on the
@@ -3001,6 +3004,12 @@ def update(ui, repo, node=None, rev=None
     if not rev:
         rev = node
 
+    if not clean and check:
+        # we could use dirty() but we can ignore merge and branch trivia
+        c = repo[None]
+        if c.modified() or c.added() or c.removed():
+            raise util.Abort(_("uncommitted local changes"))
+
     if date:
         if rev:
             raise util.Abort(_("you can't specify a revision and a date"))
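
A minimal illustration of the new -c/--check behaviour, assuming a hypothetical repository with an uncommitted modification:

    $ echo change >> somefile
    $ hg update --check 2
    abort: uncommitted local changes
    $ hg update --clean 2    # discards the local change instead
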
@@ -3358,7 +3367,7 @@ table = {
         (merge,
          [('f', 'force', None, _('force a merge with outstanding changes')),
           ('r', 'rev', '', _('revision to merge')),
-          ('
+          ('P', 'preview', None,
            _('review revisions to merge (no merge is performed)'))],
          _('[-f] [[-r] REV]')),
     "outgoing|out":
@@ -3492,6 +3501,7 @@ table = {
     "^update|up|checkout|co":
         (update,
          [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
+          ('c', 'check', None, _('check for uncommitted changes')),
           ('d', 'date', '', _('tipmost revision matching date')),
           ('r', 'rev', '', _('revision'))],
          _('[-C] [-d DATE] [[-r] REV]')),
@@ -224,7 +224,6 @@ def addaliases(ui, cmdtable):
     # but only if they have been defined prior to the current definition.
     for alias, definition in ui.configitems('alias'):
         aliasdef = cmdalias(alias, definition, cmdtable)
-
         cmdtable[alias] = (aliasdef, aliasdef.opts, aliasdef.help)
         if aliasdef.norepo:
             commands.norepo += ' %s' % alias
@@ -5,9 +5,9 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
-import imp, os
-import util, cmdutil
-from i18n import _
+import imp, os, sys
+import util, cmdutil, help
+from i18n import _, gettext
 
 _extensions = {}
 _order = []
@@ -117,3 +117,61 @@ def wrapfunction(container, funcname, wr
     origfn = getattr(container, funcname)
     setattr(container, funcname, wrap)
     return origfn
+
+def disabled():
+    '''find disabled extensions from hgext
+    returns a dict of {name: desc}, and the max name length'''
+
+    import hgext
+    extpath = os.path.dirname(os.path.abspath(hgext.__file__))
+
+    exts = {}
+    maxlength = 0
+    for e in os.listdir(extpath):
+
+        if e.endswith('.py'):
+            name = e.rsplit('.', 1)[0]
+            path = os.path.join(extpath, e)
+        else:
+            name = e
+            path = os.path.join(extpath, e, '__init__.py')
+            if not os.path.exists(path):
+                continue
+
+        if name in exts or name in _order or name == '__init__':
+            continue
+
+        try:
+            file = open(path)
+        except IOError:
+            continue
+        else:
+            doc = help.moduledoc(file)
+            file.close()
+
+        if doc: # extracting localized synopsis
+            exts[name] = gettext(doc).splitlines()[0]
+        else:
+            exts[name] = _('(no help text available)')
+
+        if len(name) > maxlength:
+            maxlength = len(name)
+
+    return exts, maxlength
+
+def enabled():
+    '''return a dict of {name: desc} of extensions, and the max name length'''
+
+    if not enabled:
+        return {}, 0
+
+    exts = {}
+    maxlength = 0
+    exthelps = []
+    for ename, ext in extensions():
+        doc = (gettext(ext.__doc__) or _('(no help text available)'))
+        ename = ename.split('.')[-1]
+        maxlength = max(len(ename), maxlength)
+        exts[ename] = doc.splitlines(0)[0].strip()
+
+    return exts, maxlength
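
A rough sketch of how these two helpers are meant to be consumed, mirroring the help.listexts() call added to the help command in this change (the write callable is an assumption for illustration):

    from mercurial import extensions, help

    def printextlists(write):
        # both helpers return ({name: synopsis}, length of the longest name)
        exts, maxlength = extensions.enabled()
        write(help.listexts('enabled extensions:', exts, maxlength))
        exts, maxlength = extensions.disabled()
        write(help.listexts('disabled extensions:', exts, maxlength))
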
@@ -195,8 +195,8 @@ def filemerge(repo, mynode, orig, fcd, f
     elif tool == 'internal:dump':
         a = repo.wjoin(fd)
         util.copyfile(a, a + ".local")
-        repo.wwrite(
-        repo.wwrite(
+        repo.wwrite(fd + ".other", fco.data(), fco.flags())
+        repo.wwrite(fd + ".base", fca.data(), fca.flags())
         return 1 # unresolved
     else:
         args = _toolstr(ui, tool, "args", '$local $base $other')
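
As the hunk above shows, internal:dump does not merge at all; it leaves <file>.local, <file>.other and <file>.base copies next to the conflicted file and reports the file as unresolved. If desired, it can be selected as the merge tool in an hgrc along these lines (a sketch, not part of this change):

    [ui]
    merge = internal:dump
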
@@ -6,70 +6,114 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
-from node import nullrev
+"""supports walking the history as DAGs suitable for graphical output
 
-def graph(repo, start_rev, stop_rev):
-    """incremental revision grapher
+The most basic format we use is that of::
+
+    (id, type, data, [parentids])
 
-    This generator function walks through the revision history from
-    revision start_rev to revision stop_rev (which must be less than
-    or equal to start_rev) and for each revision emits tuples with the
-    following elements:
+The node and parent ids are arbitrary integers which identify a node in the
+context of the graph returned. Type is a constant specifying the node type.
+Data depends on type.
+"""
+
+from mercurial.node import nullrev
+
+CHANGESET = 'C'
 
-      - Current node
-      - Column and color for the current node
-      - Edges; a list of (col, next_col, color) indicating the edges between
-        the current node and its parents.
-      - First line of the changeset description
-      - The changeset author
-      - The changeset date/time
+def revisions(repo, start, stop):
+    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples
+
+    This generator function walks through the revision history from revision
+    start to revision stop (which must be less than or equal to start). It
+    returns a tuple for each node. The node and parent ids are arbitrary
+    integers which identify a node in the context of the graph returned.
     """
+    cur = start
+    while cur >= stop:
+        ctx = repo[cur]
+        parents = [p.rev() for p in ctx.parents() if p.rev() != nullrev]
+        yield (cur, CHANGESET, ctx, sorted(parents))
+        cur -= 1
 
-    if start_rev == nullrev and not stop_rev:
-        return
+def filerevs(repo, path, start, stop):
+    """file cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples
 
-    assert start_rev >= stop_rev
-    assert stop_rev >= 0
-    curr_rev = start_rev
-    revs = []
-    cl = repo.changelog
-    colors = {}
-    new_color = 1
+    This generator function walks through the revision history of a single
+    file from revision start down to revision stop.
+    """
+    filerev = len(repo.file(path)) - 1
+    while filerev >= 0:
+        fctx = repo.filectx(path, fileid=filerev)
+        parents = [f.linkrev() for f in fctx.parents() if f.path() == path]
+        rev = fctx.rev()
+        if rev <= start:
+            yield (rev, CHANGESET, fctx, sorted(parents))
+        if rev <= stop:
+            break
+        filerev -= 1
+
+def nodes(repo, nodes):
+    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples
+
+    This generator function walks the given nodes. It only returns parents
+    that are in nodes, too.
+    """
+    include = set(nodes)
+    for node in nodes:
+        ctx = repo[node]
+        parents = [p.rev() for p in ctx.parents() if p.node() in include]
+        yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
+
+def colored(dag):
+    """annotates a DAG with colored edge information
 
-    while curr_rev >= stop_rev:
-        # Compute revs and next_revs
-        if curr_rev not in revs:
-            revs.append(curr_rev) # new head
-            colors[curr_rev] = new_color
-            new_color += 1
+    For each DAG node this function emits tuples::
+
+        (id, type, data, (col, color), [(col, nextcol, color)])
+
+    with the following new elements:
 
-        idx = revs.index(curr_rev)
-        color = colors.pop(curr_rev)
-        next = revs[:]
+      - Tuple (col, color) with column and color index for the current node
+      - A list of tuples indicating the edges between the current node and its
+        parents.
+    """
+    seen = []
+    colors = {}
+    newcolor = 1
+    for (cur, type, data, parents) in dag:
 
-        # Add parents to next_revs
-        parents = [x for x in cl.parentrevs(curr_rev) if x != nullrev]
+        # Compute seen and next
+        if cur not in seen:
+            seen.append(cur) # new head
+            colors[cur] = newcolor
+            newcolor += 1
+
+        col = seen.index(cur)
+        color = colors.pop(cur)
+        next = seen[:]
+
+        # Add parents to next
         addparents = [p for p in parents if p not in next]
-        next[idx:idx + 1] = addparents
+        next[col:col + 1] = addparents
 
         # Set colors for the parents
         for i, p in enumerate(addparents):
             if not i:
                 colors[p] = color
             else:
-                colors[p] = new_color
-                new_color += 1
+                colors[p] = newcolor
+                newcolor += 1
 
         # Add edges to the graph
         edges = []
-        for col, r in enumerate(revs):
-            if r in next:
-                edges.append((col, next.index(r), colors[r]))
-            elif r == curr_rev:
+        for ecol, eid in enumerate(seen):
+            if eid in next:
+                edges.append((ecol, next.index(eid), colors[eid]))
+            elif eid == cur:
                 for p in parents:
-                    edges.append((col, next.index(p), colors[p]))
+                    edges.append((ecol, next.index(p), colors[p]))
 
         # Yield and move on
-        yield (
-        revs = next
-        curr_rev -= 1
+        yield (cur, type, data, (col, color), edges)
+        seen = next
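
A sketch of how the new generators compose, mirroring the webcommands.graph usage further down; repo is assumed to be an existing localrepository object and the revision bounds are illustrative:

    from mercurial import graphmod

    def dumpgraph(repo, start, stop):
        # walk changesets from start down to stop, then annotate each node
        # with its (column, color) position and outgoing edges
        dag = graphmod.revisions(repo, start, stop)
        for id, type, ctx, (col, color), edges in graphmod.colored(dag):
            if type != graphmod.CHANGESET:
                continue
            print id, col, color, edges
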
@@ -6,6 +6,90 @@
 # GNU General Public License version 2, incorporated herein by reference.
 
 from i18n import _
+import extensions
+
+
+def moduledoc(file):
+    '''return the top-level python documentation for the given file
+
+    Loosely inspired by pydoc.source_synopsis(), but rewritten to handle \'''
+    as well as """ and to return the whole text instead of just the synopsis'''
+    result = []
+
+    line = file.readline()
+    while line[:1] == '#' or not line.strip():
+        line = file.readline()
+        if not line: break
+
+    start = line[:3]
+    if start == '"""' or start == "'''":
+        line = line[3:]
+        while line:
+            if line.rstrip().endswith(start):
+                line = line.split(start)[0]
+                if line:
+                    result.append(line)
+                break
+            elif not line:
+                return None # unmatched delimiter
+            result.append(line)
+            line = file.readline()
+    else:
+        return None
+
+    return ''.join(result)
+
+def listexts(header, exts, maxlength):
+    '''return a text listing of the given extensions'''
+    if not exts:
+        return ''
+    result = '\n%s\n\n' % header
+    for name, desc in sorted(exts.iteritems()):
+        result += ' %s %s\n' % (name.ljust(maxlength), desc)
+    return result
+
+def extshelp():
+    doc = _(r'''
+    Mercurial has a mechanism for adding new features through the
+    use of extensions. Extensions may bring new commands, or new
+    hooks, or change Mercurial's behavior.
+
+    Extensions are not loaded by default for a variety of reasons,
+    they may be meant for advanced users or provide potentially
+    dangerous commands (e.g. mq and rebase allow history to be
+    rewritten), they might not be ready for prime-time yet, or
+    they may alter Mercurial's behavior. It is thus up to the user
+    to activate extensions as desired.
+
+    To enable the "foo" extension, either shipped with Mercurial
+    or in the Python search path, create an entry for it in your
+    hgrc, like this:
+
+      [extensions]
+      foo =
+
+    You may also specify the full path to an extension:
+
+      [extensions]
+      myfeature = ~/.hgext/myfeature.py
+
+    To explicitly disable an extension enabled in an hgrc of broader
+    scope, prepend its path with !:
+
+      [extensions]
+      # disabling extension bar residing in /ext/path
+      hgext.bar = !/path/to/extension/bar.py
+      # ditto, but no path was supplied for extension baz
+      hgext.baz = !
+    ''')
+
+    exts, maxlength = extensions.enabled()
+    doc += listexts(_('enabled extensions:'), exts, maxlength)
+
+    exts, maxlength = extensions.disabled()
+    doc += listexts(_('disabled extensions:'), exts, maxlength)
+
+    return doc
 
 helptable = (
     (["dates"], _("Date Formats"),
@@ -418,4 +502,5 @@ PYTHONPATH::
     The push command will look for a path named 'default-push', and
     prefer it over 'default' if both are defined.
     ''')),
+    (["extensions"], _("Using additional features"), extshelp),
 )
@@ -64,7 +64,8 @@ class hgweb(object):
         self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
         self.maxfiles = int(self.config("web", "maxfiles", 10))
         self.allowpull = self.configbool("web", "allowpull", True)
-
+        encoding.encoding = self.config("web", "encoding",
+                                        encoding.encoding)
 
     def run(self):
         if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
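
With this change hgweb (and, below, hgwebdir) overrides the global output encoding from the web section of the configuration, so a per-installation setting such as the following takes effect for template output:

    [web]
    encoding = UTF-8
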
@@ -81,28 +82,6 @@ class hgweb(object):
 
         self.refresh()
 
-        # process this if it's a protocol request
-        # protocol bits don't need to create any URLs
-        # and the clients always use the old URL structure
-
-        cmd = req.form.get('cmd', [''])[0]
-        if cmd and cmd in protocol.__all__:
-            try:
-                if cmd in perms:
-                    try:
-                        self.check_perm(req, perms[cmd])
-                    except ErrorResponse, inst:
-                        if cmd == 'unbundle':
-                            req.drain()
-                        raise
-                method = getattr(protocol, cmd)
-                return method(self.repo, req)
-            except ErrorResponse, inst:
-                req.respond(inst, protocol.HGTYPE)
-                if not inst.message:
-                    return []
-                return '0\n%s\n' % inst.message,
-
         # work with CGI variables to create coherent structure
         # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
 
@@ -122,6 +101,30 @@ class hgweb(object):
         query = req.env['QUERY_STRING'].split('&', 1)[0]
         query = query.split(';', 1)[0]
 
+        # process this if it's a protocol request
+        # protocol bits don't need to create any URLs
+        # and the clients always use the old URL structure
+
+        cmd = req.form.get('cmd', [''])[0]
+        if cmd and cmd in protocol.__all__:
+            if query:
+                raise ErrorResponse(HTTP_NOT_FOUND)
+            try:
+                if cmd in perms:
+                    try:
+                        self.check_perm(req, perms[cmd])
+                    except ErrorResponse, inst:
+                        if cmd == 'unbundle':
+                            req.drain()
+                        raise
+                method = getattr(protocol, cmd)
+                return method(self.repo, req)
+            except ErrorResponse, inst:
+                req.respond(inst, protocol.HGTYPE)
+                if not inst.message:
+                    return []
+                return '0\n%s\n' % inst.message,
+
         # translate user-visible url structure to internal structure
 
         args = query.split('/', 2)
@@ -160,7 +163,7 @@ class hgweb(object):
 
         try:
             tmpl = self.templater(req)
-            ctype = tmpl('mimetype', encoding=self.encoding)
+            ctype = tmpl('mimetype', encoding=encoding.encoding)
             ctype = templater.stringify(ctype)
 
             # check read permissions non-static content
@@ -219,7 +222,7 @@ class hgweb(object):
         # some functions for the templater
 
         def header(**map):
-            yield tmpl('header', encoding=self.encoding, **map)
+            yield tmpl('header', encoding=encoding.encoding, **map)
 
         def footer(**map):
             yield tmpl("footer", **map)
@@ -70,6 +70,8 @@ class hgwebdir(object):
         elif isinstance(self.conf, dict):
             paths = self.conf.items()
 
+        encoding.encoding = self.ui.config('web', 'encoding',
+                                           encoding.encoding)
         self.motd = self.ui.config('web', 'motd')
         self.style = self.ui.config('web', 'style', 'paper')
         self.stripecount = self.ui.config('web', 'stripes', 1)
@@ -162,8 +162,10 @@ def unbundle(repo, req):
             sys.stderr = sys.stdout = cStringIO.StringIO()
 
             try:
-                url = 'remote:%s:%s' % (proto,
-                                        req.env.get('REMOTE_HOST', ''))
+                url = 'remote:%s:%s:%s' % (
+                      proto,
+                      urllib.quote(req.env.get('REMOTE_HOST', '')),
+                      urllib.quote(req.env.get('REMOTE_USER', '')))
                 try:
                     ret = repo.addchangegroup(gen, 'serve', url)
                 except util.Abort, inst:
@@ -668,10 +668,13 @@ def graph(web, req, tmpl):
     count = len(web.repo)
     changenav = webutil.revnavgen(rev, revcount, count, web.repo.changectx)
 
-    tree = list(graphmod.graph(web.repo, rev, downrev))
+    dag = graphmod.revisions(web.repo, rev, downrev)
+    tree = list(graphmod.colored(dag))
     canvasheight = (len(tree) + 1) * bg_height - 27;
     data = []
-    for (ctx, vtx, edges) in tree:
+    for (id, type, ctx, vtx, edges) in tree:
+        if type != graphmod.CHANGESET:
+            continue
         node = short(ctx.node())
         age = templatefilters.age(ctx.date())
         desc = templatefilters.firstline(ctx.description())
@@ -262,7 +262,7 @@ class localrepository(repo.repository):
                     warn(_("node '%s' is not well formed") % node)
                     continue
                 if bin_n not in self.changelog.nodemap:
-                    warn(_("tag '%s' refers to unknown node") % key)
+                    # silently ignore as pull -r might cause this
                     continue
 
                 h = []
@@ -290,11 +290,24 @@ class localrepository(repo.repository):
                 globaltags[k] = an, ah
                 tagtypes[k] = tagtype
 
-        # read the tags file from each head, ending with the tip
+        seen = set()
         f = None
-        for rev, node, fnode in self._hgtagsnodes():
-            f = (f and f.filectx(fnode) or
-                 self.filectx('.hgtags', fileid=fnode))
+        ctxs = []
+        for node in self.heads():
+            try:
+                fnode = self[node].filenode('.hgtags')
+            except error.LookupError:
+                continue
+            if fnode not in seen:
+                seen.add(fnode)
+                if not f:
+                    f = self.filectx('.hgtags', fileid=fnode)
+                else:
+                    f = f.filectx(fnode)
+                ctxs.append(f)
+
+        # read the tags file from each head, ending with the tip
+        for f in reversed(ctxs):
             readtags(f.data().splitlines(), f, "global")
 
         try:
328 |
|
341 | |||
329 | return self._tagstypecache.get(tagname) |
|
342 | return self._tagstypecache.get(tagname) | |
330 |
|
343 | |||
331 | def _hgtagsnodes(self): |
|
|||
332 | last = {} |
|
|||
333 | ret = [] |
|
|||
334 | for node in reversed(self.heads()): |
|
|||
335 | c = self[node] |
|
|||
336 | rev = c.rev() |
|
|||
337 | try: |
|
|||
338 | fnode = c.filenode('.hgtags') |
|
|||
339 | except error.LookupError: |
|
|||
340 | continue |
|
|||
341 | ret.append((rev, node, fnode)) |
|
|||
342 | if fnode in last: |
|
|||
343 | ret[last[fnode]] = None |
|
|||
344 | last[fnode] = len(ret) - 1 |
|
|||
345 | return [item for item in ret if item] |
|
|||
346 |
|
||||
347 | def tagslist(self): |
|
344 | def tagslist(self): | |
348 | '''return a list of tags ordered by revision''' |
|
345 | '''return a list of tags ordered by revision''' | |
349 | l = [] |
|
346 | l = [] |
@@ -972,7 +972,7 @@ def iterhunks(ui, fp, sourcefile=None, t
 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
               eol=None):
     """
    Reads a patch from fp and tries to apply it.
 
    The dict 'changed' is filled in with all of the filenames changed
    by the patch. Returns 0 for a clean patch, -1 if any rejects were
@@ -1137,7 +1137,7 @@ def internalpatch(patchobj, ui, strip, c
        eol = {'strict': None, 'crlf': '\r\n', 'lf': '\n'}[eolmode.lower()]
    except KeyError:
        raise util.Abort(_('Unsupported line endings type: %s') % eolmode)
 
    try:
        fp = file(patchobj, 'rb')
    except TypeError:
@@ -109,7 +109,9 @@ class passwordmgr(urllib2.HTTPPasswordMg
             return (user, passwd)
 
         if not user:
-
+            auth = self.readauthtoken(authuri)
+            if auth:
+                user, passwd = auth.get('username'), auth.get('password')
         if not user or not passwd:
             if not self.ui.interactive():
                 raise util.Abort(_('http authorization required'))
@@ -132,7 +134,7 @@ class passwordmgr(urllib2.HTTPPasswordMg
         msg = _('http auth: user %s, password %s\n')
         self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set'))
 
-    def 
+    def readauthtoken(self, uri):
         # Read configuration
         config = dict()
         for key, val in self.ui.configitems('auth'):
@@ -143,7 +145,7 @@ class passwordmgr(urllib2.HTTPPasswordMg
         # Find the best match
         scheme, hostpath = uri.split('://', 1)
         bestlen = 0
        bestauth = None
        for auth in config.itervalues():
            prefix = auth.get('prefix')
            if not prefix: continue
@@ -155,7 +157,7 @@ class passwordmgr(urllib2.HTTPPasswordMg
            if (prefix == '*' or hostpath.startswith(prefix)) and \
                len(prefix) > bestlen and scheme in schemes:
                bestlen = len(prefix)
                bestauth = auth
        return bestauth
 
 class proxyhandler(urllib2.ProxyHandler):
411 | send = _gen_sendfile(httplib.HTTPSConnection) |
|
413 | send = _gen_sendfile(httplib.HTTPSConnection) | |
412 |
|
414 | |||
413 | class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler): |
|
415 | class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler): | |
|
416 | def __init__(self, ui): | |||
|
417 | keepalive.KeepAliveHandler.__init__(self) | |||
|
418 | urllib2.HTTPSHandler.__init__(self) | |||
|
419 | self.ui = ui | |||
|
420 | self.pwmgr = passwordmgr(self.ui) | |||
|
421 | ||||
414 | def https_open(self, req): |
|
422 | def https_open(self, req): | |
415 | return self.do_open(httpsconnection, req) |
|
423 | self.auth = self.pwmgr.readauthtoken(req.get_full_url()) | |
|
424 | return self.do_open(self._makeconnection, req) | |||
|
425 | ||||
|
426 | def _makeconnection(self, host, port=443, *args, **kwargs): | |||
|
427 | keyfile = None | |||
|
428 | certfile = None | |||
|
429 | ||||
|
430 | if args: # key_file | |||
|
431 | keyfile = args.pop(0) | |||
|
432 | if args: # cert_file | |||
|
433 | certfile = args.pop(0) | |||
|
434 | ||||
|
435 | # if the user has specified different key/cert files in | |||
|
436 | # hgrc, we prefer these | |||
|
437 | if self.auth and 'key' in self.auth and 'cert' in self.auth: | |||
|
438 | keyfile = self.auth['key'] | |||
|
439 | certfile = self.auth['cert'] | |||
|
440 | ||||
|
441 | # let host port take precedence | |||
|
442 | if ':' in host and '[' not in host or ']:' in host: | |||
|
443 | host, port = host.rsplit(':', 1) | |||
|
444 | if '[' in host: | |||
|
445 | host = host[1:-1] | |||
|
446 | ||||
|
447 | return httpsconnection(host, port, keyfile, certfile, *args, **kwargs) | |||
416 |
|
448 | |||
417 | # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if |
|
449 | # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if | |
418 | # it doesn't know about the auth type requested. This can happen if |
|
450 | # it doesn't know about the auth type requested. This can happen if | |
@@ -460,7 +492,7 b' def opener(ui, authinfo=None):' | |||||
460 | ''' |
|
492 | ''' | |
461 | handlers = [httphandler()] |
|
493 | handlers = [httphandler()] | |
462 | if has_https: |
|
494 | if has_https: | |
463 | handlers.append(httpshandler()) |
|
495 | handlers.append(httpshandler(ui)) | |
464 |
|
496 | |||
465 | handlers.append(proxyhandler(ui)) |
|
497 | handlers.append(proxyhandler(ui)) | |
466 |
|
498 |
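
The readauthtoken() lookup used here matches [auth] groups against the request URL by prefix; together with the key/cert handling added to the https handler above, it expects configuration entries roughly like the following (all values illustrative):

    [auth]
    example.prefix = hg.example.com
    example.username = alice
    example.password = secret
    example.schemes = https
    example.key = /path/to/client.key
    example.cert = /path/to/client.crt
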
@@ -16,7 +16,7 @@
 # If you change this script, it is recommended that you ensure you
 # haven't broken it by running it in various modes with a representative
 # sample of test scripts. For example:
 #
 # 1) serial, no coverage, temp install:
 #      ./run-tests.py test-s*
 # 2) serial, no coverage, local hg:
@@ -271,5 +271,5 @@ emptydir does not look like a darcs repo
 emptydir does not look like a monotone repo
 emptydir does not look like a GNU Arch repo
 emptydir does not look like a Bazaar repo
-emptydir does not look like a P4 repo
+cannot find required "p4" tool
 abort: emptydir: missing or unsupported repository
@@ -169,14 +169,14 @@ diff: rev, change, text, git, nodates, s
 export: output, switch-parent, text, git, nodates
 init: ssh, remotecmd
 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, prune, patch, git, limit, no-merges, style, template, include, exclude
-merge: force, rev,
+merge: force, rev, preview
 parents: rev, style, template
 pull: update, force, rev, ssh, remotecmd
 push: force, rev, ssh, remotecmd
 remove: after, force, include, exclude
 serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, webdir-conf, pid-file, stdio, templates, style, ipv6, certificate
 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, include, exclude
-update: clean, date, rev
+update: clean, check, date, rev
 addremove: similarity, include, exclude, dry-run
 archive: no-decode, prefix, rev, type, include, exclude
 backout: merge, parent, rev, include, exclude, message, logfile, date, user
@@ -20,7 +20,7 @@ hg cat foo >> foo
 hg ci -m 'change foo' -d "1000000 0"
 
 # we get conflicts that shouldn't be there
-hg merge -
+hg merge -P
 hg merge --debug
 
 echo "-- foo --"
@@ -208,6 +208,7 @@ additional help topics:
  diffs Diff Formats
  templating Template Usage
  urls URL Paths
+ extensions Using additional features
 
 use "hg -v help" to show aliases and global options
 Mercurial Distributed SCM
@@ -273,6 +274,7 @@ additional help topics:
  diffs Diff Formats
  templating Template Usage
  urls URL Paths
+ extensions Using additional features
 
 use "hg -v help" to show aliases and global options
 %% not tested: --debugger
@@ -73,3 +73,25 @@ hg grep octarine
 # Used to crash here
 hg grep -r 1 octarine
 
+# Issue337: grep did not compared changesets by their revision numbers
+# instead of following parent-child relationships.
+cd ..
+echo % issue 337
+hg init issue337
+cd issue337
+
+echo white > color
+hg commit -A -m "0 white"
+
+echo red > color
+hg commit -A -m "1 red"
+
+hg update 0
+echo black > color
+hg commit -A -m "2 black"
+
+hg update --clean 1
+echo blue > color
+hg commit -A -m "3 blue"
+
+hg grep --all red
@@ -38,6 +38,13 @@ adding noeol
 noeol:4:no infinite loo
 % issue 685
 adding color
+colour:1:octarine
 color:0:octarine
 colour:1:octarine
-colour:1:octarine
+% issue 337
+adding color
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+created new head
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+color:3:-:red
+color:1:+:red
@@ -15,6 +15,8 @@ bar
 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
 %
 foo
+patch foo finalized without changeset message
+patch bar finalized without changeset message
 %%%
 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
@@ -99,6 +99,7 @@ additional help topics:
  diffs Diff Formats
  templating Template Usage
  urls URL Paths
+ extensions Using additional features
 
 use "hg -v help" to show aliases and global options
 add add the specified files on the next commit
@@ -160,6 +161,7 @@ additional help topics:
  diffs Diff Formats
  templating Template Usage
  urls URL Paths
+ extensions Using additional features
 hg add [OPTION]... [FILE]...
 
 add the specified files on the next commit
@@ -31,7 +31,7 @@ hg merge 2
 hg commit -mm1
 
 echo % should succeed - 2 heads
-hg merge -
+hg merge -P
 hg merge
 hg commit -mm2
 
@@ -30,7 +30,7 @@ hg add c
 hg commit -m "commit #2" -d "1000000 0"
 echo This is file b1 > b
 echo %% no merges expected
-hg merge -
+hg merge -P 1
 hg merge 1
 hg diff --nodates
 hg status
@@ -13,9 +13,12 @@ a
 b
 series
 status
+patch a finalized without changeset message
 1 [mq]: a
 0 base
 abort: cannot delete revision 3 above applied patches
+patch d finalized without changeset message
+patch e finalized without changeset message
 f
 4 [mq]: f
 3 [mq]: e
@@ -32,11 +35,14 @@ abort: unknown revision 'c'!
 applying c
 patch c is empty
 now at: c
+patch a finalized without changeset message
+patch b finalized without changeset message
 c
 3 imported patch c
 2 [mq]: b
 1 [mq]: a
 0 base
+patch c finalized without changeset message
 3 imported patch c
 2 [mq]: b
 1 [mq]: a
@@ -27,3 +27,5 @@ 1 files updated, 0 files merged, 0 files
 adding another.diff to series file
 applying another.diff
 now at: another.diff
+patch b.diff finalized without changeset message
+patch another.diff finalized without changeset message
@@ -45,3 +45,12 @@ hg parents
 
 echo '% bar should be gone; other unknown/ignored files should still be around'
 hg status -A
+
+echo '% preparing qpush of a missing patch'
+hg qpop -a
+hg qpush
+rm .hg/patches/patch2
+echo '% now we expect the push to fail, but it should NOT complain about patch1'
+hg qpush
+
+true # happy ending
@@ -19,3 +19,11 @@ summary: add foo
 ? untracked-file
 I .hgignore
 C foo
+% preparing qpush of a missing patch
+no patches applied
+applying patch1
+now at: patch1
+% now we expect the push to fail, but it should NOT complain about patch1
+applying patch2
+unable to read patch2
+now at: patch1
@@ -26,14 +26,12 @@ created new head
 .hgtags@c071f74ab5eb, line 2: cannot parse entry
 .hgtags@c071f74ab5eb, line 4: node 'foo' is not well formed
 .hgtags@4ca6f1b1a68c, line 2: node 'x' is not well formed
-localtags, line 1: tag 'invalid' refers to unknown node
 tip 8:4ca6f1b1a68c
 first 0:0acdaf898367
 changeset: 8:4ca6f1b1a68c
 .hgtags@c071f74ab5eb, line 2: cannot parse entry
 .hgtags@c071f74ab5eb, line 4: node 'foo' is not well formed
 .hgtags@4ca6f1b1a68c, line 2: node 'x' is not well formed
-localtags, line 1: tag 'invalid' refers to unknown node
 tag: tip
 parent: 3:b2ef3841386b
 user: test