use repo[changeid] to get a changectx
Matt Mackall
r6747:f6c00b17 default
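
This changeset switches callers from the older repo.changectx(changeid) method to the repository's subscript interface, which returns the same changectx object. A minimal sketch of the two spellings, assuming a localrepository object from this era of Mercurial (repo, node and the file path are hypothetical placeholders):

    # old spelling: explicit method call
    ctx = repo.changectx(node)

    # new spelling: index the repository; None selects the working directory
    # context, while revision numbers, hashes, tags and branch names are
    # looked up as before
    ctx = repo[node]

    # either way the result is a changectx, which the hunks below use e.g. as:
    files = ctx.files()          # files touched by the changeset
    fctx = ctx['hgext/acl.py']   # ctx[name] gives a filectx, used in place of ctx.filectx(name)

The hunks below apply this substitution in the acl, bugzilla, children, convert and extdiff extensions.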

Note: the requested changes are too big, so the diff shown below is truncated.

@@ -1,124 +1,124 @@
1 # acl.py - changeset access control for mercurial
1 # acl.py - changeset access control for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # this hook allows to allow or deny access to parts of a repo when
8 # this hook allows to allow or deny access to parts of a repo when
9 # taking incoming changesets.
9 # taking incoming changesets.
10 #
10 #
11 # authorization is against local user name on system where hook is
11 # authorization is against local user name on system where hook is
12 # run, not committer of original changeset (since that is easy to
12 # run, not committer of original changeset (since that is easy to
13 # spoof).
13 # spoof).
14 #
14 #
15 # acl hook is best to use if you use hgsh to set up restricted shells
15 # acl hook is best to use if you use hgsh to set up restricted shells
16 # for authenticated users to only push to / pull from. not safe if
16 # for authenticated users to only push to / pull from. not safe if
17 # user has interactive shell access, because they can disable hook.
17 # user has interactive shell access, because they can disable hook.
18 # also not safe if remote users share one local account, because then
18 # also not safe if remote users share one local account, because then
19 # no way to tell remote users apart.
19 # no way to tell remote users apart.
20 #
20 #
21 # to use, configure acl extension in hgrc like this:
21 # to use, configure acl extension in hgrc like this:
22 #
22 #
23 # [extensions]
23 # [extensions]
24 # hgext.acl =
24 # hgext.acl =
25 #
25 #
26 # [hooks]
26 # [hooks]
27 # pretxnchangegroup.acl = python:hgext.acl.hook
27 # pretxnchangegroup.acl = python:hgext.acl.hook
28 #
28 #
29 # [acl]
29 # [acl]
30 # sources = serve # check if source of incoming changes in this list
30 # sources = serve # check if source of incoming changes in this list
31 # # ("serve" == ssh or http, "push", "pull", "bundle")
31 # # ("serve" == ssh or http, "push", "pull", "bundle")
32 #
32 #
33 # allow and deny lists have subtree pattern (default syntax is glob)
33 # allow and deny lists have subtree pattern (default syntax is glob)
34 # on left, user names on right. deny list checked before allow list.
34 # on left, user names on right. deny list checked before allow list.
35 #
35 #
36 # [acl.allow]
36 # [acl.allow]
37 # # if acl.allow not present, all users allowed by default
37 # # if acl.allow not present, all users allowed by default
38 # # empty acl.allow = no users allowed
38 # # empty acl.allow = no users allowed
39 # docs/** = doc_writer
39 # docs/** = doc_writer
40 # .hgtags = release_engineer
40 # .hgtags = release_engineer
41 #
41 #
42 # [acl.deny]
42 # [acl.deny]
43 # # if acl.deny not present, no users denied by default
43 # # if acl.deny not present, no users denied by default
44 # # empty acl.deny = all users allowed
44 # # empty acl.deny = all users allowed
45 # glob pattern = user4, user5
45 # glob pattern = user4, user5
46 # ** = user6
46 # ** = user6
47
47
48 from mercurial.i18n import _
48 from mercurial.i18n import _
49 from mercurial.node import bin, short
49 from mercurial.node import bin, short
50 from mercurial import util
50 from mercurial import util
51 import getpass
51 import getpass
52
52
53 class checker(object):
53 class checker(object):
54 '''acl checker.'''
54 '''acl checker.'''
55
55
56 def buildmatch(self, key):
56 def buildmatch(self, key):
57 '''return tuple of (match function, list enabled).'''
57 '''return tuple of (match function, list enabled).'''
58 if not self.ui.has_section(key):
58 if not self.ui.has_section(key):
59 self.ui.debug(_('acl: %s not enabled\n') % key)
59 self.ui.debug(_('acl: %s not enabled\n') % key)
60 return None, False
60 return None, False
61
61
62 thisuser = self.getuser()
62 thisuser = self.getuser()
63 pats = [pat for pat, users in self.ui.configitems(key)
63 pats = [pat for pat, users in self.ui.configitems(key)
64 if thisuser in users.replace(',', ' ').split()]
64 if thisuser in users.replace(',', ' ').split()]
65 self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
65 self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
66 (key, len(pats), thisuser))
66 (key, len(pats), thisuser))
67 if pats:
67 if pats:
68 match = util.matcher(self.repo.root, names=pats)[1]
68 match = util.matcher(self.repo.root, names=pats)[1]
69 else:
69 else:
70 match = util.never
70 match = util.never
71 return match, True
71 return match, True
72
72
73 def getuser(self):
73 def getuser(self):
74 '''return name of authenticated user.'''
74 '''return name of authenticated user.'''
75 return self.user
75 return self.user
76
76
77 def __init__(self, ui, repo):
77 def __init__(self, ui, repo):
78 self.ui = ui
78 self.ui = ui
79 self.repo = repo
79 self.repo = repo
80 self.user = getpass.getuser()
80 self.user = getpass.getuser()
81 cfg = self.ui.config('acl', 'config')
81 cfg = self.ui.config('acl', 'config')
82 if cfg:
82 if cfg:
83 self.ui.readsections(cfg, 'acl.allow', 'acl.deny')
83 self.ui.readsections(cfg, 'acl.allow', 'acl.deny')
84 self.allow, self.allowable = self.buildmatch('acl.allow')
84 self.allow, self.allowable = self.buildmatch('acl.allow')
85 self.deny, self.deniable = self.buildmatch('acl.deny')
85 self.deny, self.deniable = self.buildmatch('acl.deny')
86
86
87 def skipsource(self, source):
87 def skipsource(self, source):
88 '''true if incoming changes from this source should be skipped.'''
88 '''true if incoming changes from this source should be skipped.'''
89 ok_sources = self.ui.config('acl', 'sources', 'serve').split()
89 ok_sources = self.ui.config('acl', 'sources', 'serve').split()
90 return source not in ok_sources
90 return source not in ok_sources
91
91
92 def check(self, node):
92 def check(self, node):
93 '''return if access allowed, raise exception if not.'''
93 '''return if access allowed, raise exception if not.'''
94 files = self.repo.changectx(node).files()
94 files = self.repo[node].files()
95 if self.deniable:
95 if self.deniable:
96 for f in files:
96 for f in files:
97 if self.deny(f):
97 if self.deny(f):
98 self.ui.debug(_('acl: user %s denied on %s\n') %
98 self.ui.debug(_('acl: user %s denied on %s\n') %
99 (self.getuser(), f))
99 (self.getuser(), f))
100 raise util.Abort(_('acl: access denied for changeset %s') %
100 raise util.Abort(_('acl: access denied for changeset %s') %
101 short(node))
101 short(node))
102 if self.allowable:
102 if self.allowable:
103 for f in files:
103 for f in files:
104 if not self.allow(f):
104 if not self.allow(f):
105 self.ui.debug(_('acl: user %s not allowed on %s\n') %
105 self.ui.debug(_('acl: user %s not allowed on %s\n') %
106 (self.getuser(), f))
106 (self.getuser(), f))
107 raise util.Abort(_('acl: access denied for changeset %s') %
107 raise util.Abort(_('acl: access denied for changeset %s') %
108 short(node))
108 short(node))
109 self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
109 self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
110
110
111 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
111 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
112 if hooktype != 'pretxnchangegroup':
112 if hooktype != 'pretxnchangegroup':
113 raise util.Abort(_('config error - hook type "%s" cannot stop '
113 raise util.Abort(_('config error - hook type "%s" cannot stop '
114 'incoming changesets') % hooktype)
114 'incoming changesets') % hooktype)
115
115
116 c = checker(ui, repo)
116 c = checker(ui, repo)
117 if c.skipsource(source):
117 if c.skipsource(source):
118 ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
118 ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
119 return
119 return
120
120
121 start = repo.changelog.rev(bin(node))
121 start = repo.changelog.rev(bin(node))
122 end = repo.changelog.count()
122 end = repo.changelog.count()
123 for rev in xrange(start, end):
123 for rev in xrange(start, end):
124 c.check(repo.changelog.node(rev))
124 c.check(repo.changelog.node(rev))
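For context on the hook above: a pretxnchangegroup hook receives the first node of the incoming changegroup and can veto the whole group by raising an exception, so it walks every revision from that node to the current tip. A minimal standalone sketch of that walk using the new subscript API (check_files is a hypothetical policy callback, not part of the extension):

    from mercurial.node import bin, short
    from mercurial import util

    def pretxnhook(ui, repo, hooktype, node=None, **kwargs):
        start = repo.changelog.rev(bin(node))   # first incoming revision
        end = repo.changelog.count()            # one past the current tip
        for rev in xrange(start, end):
            ctx = repo[repo.changelog.node(rev)]
            if not check_files(ctx.files()):    # hypothetical check
                raise util.Abort('rejected changeset %s' % short(ctx.node()))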
@@ -1,311 +1,311 @@
1 # bugzilla.py - bugzilla integration for mercurial
1 # bugzilla.py - bugzilla integration for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to update comments of bugzilla bugs when changesets
8 # hook extension to update comments of bugzilla bugs when changesets
9 # that refer to bugs by id are seen. this hook does not change bug
9 # that refer to bugs by id are seen. this hook does not change bug
10 # status, only comments.
10 # status, only comments.
11 #
11 #
12 # to configure, add items to '[bugzilla]' section of hgrc.
12 # to configure, add items to '[bugzilla]' section of hgrc.
13 #
13 #
14 # to use, configure bugzilla extension and enable like this:
14 # to use, configure bugzilla extension and enable like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.bugzilla =
17 # hgext.bugzilla =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # run bugzilla hook on every change pulled or pushed in here
20 # # run bugzilla hook on every change pulled or pushed in here
21 # incoming.bugzilla = python:hgext.bugzilla.hook
21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 #
22 #
23 # config items:
23 # config items:
24 #
24 #
25 # section name is 'bugzilla'.
25 # section name is 'bugzilla'.
26 # [bugzilla]
26 # [bugzilla]
27 #
27 #
28 # REQUIRED:
28 # REQUIRED:
29 # host = bugzilla # mysql server where bugzilla database lives
29 # host = bugzilla # mysql server where bugzilla database lives
30 # password = ** # user's password
30 # password = ** # user's password
31 # version = 2.16 # version of bugzilla installed
31 # version = 2.16 # version of bugzilla installed
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # bzuser = ... # fallback bugzilla user name to record comments with
34 # bzuser = ... # fallback bugzilla user name to record comments with
35 # db = bugs # database to connect to
35 # db = bugs # database to connect to
36 # notify = ... # command to run to get bugzilla to send mail
36 # notify = ... # command to run to get bugzilla to send mail
37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
38 # strip = 0 # number of slashes to strip for url paths
38 # strip = 0 # number of slashes to strip for url paths
39 # style = ... # style file to use when formatting comments
39 # style = ... # style file to use when formatting comments
40 # template = ... # template to use when formatting comments
40 # template = ... # template to use when formatting comments
41 # timeout = 5 # database connection timeout (seconds)
41 # timeout = 5 # database connection timeout (seconds)
42 # user = bugs # user to connect to database as
42 # user = bugs # user to connect to database as
43 # [web]
43 # [web]
44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
45 #
45 #
46 # if hg committer names are not same as bugzilla user names, use
46 # if hg committer names are not same as bugzilla user names, use
47 # "usermap" feature to map from committer email to bugzilla user name.
47 # "usermap" feature to map from committer email to bugzilla user name.
48 # usermap can be in hgrc or separate config file.
48 # usermap can be in hgrc or separate config file.
49 #
49 #
50 # [bugzilla]
50 # [bugzilla]
51 # usermap = filename # cfg file with "committer"="bugzilla user" info
51 # usermap = filename # cfg file with "committer"="bugzilla user" info
52 # [usermap]
52 # [usermap]
53 # committer_email = bugzilla_user_name
53 # committer_email = bugzilla_user_name
54
54
55 from mercurial.i18n import _
55 from mercurial.i18n import _
56 from mercurial.node import short
56 from mercurial.node import short
57 from mercurial import cmdutil, templater, util
57 from mercurial import cmdutil, templater, util
58 import re, time
58 import re, time
59
59
60 MySQLdb = None
60 MySQLdb = None
61
61
62 def buglist(ids):
62 def buglist(ids):
63 return '(' + ','.join(map(str, ids)) + ')'
63 return '(' + ','.join(map(str, ids)) + ')'
64
64
65 class bugzilla_2_16(object):
65 class bugzilla_2_16(object):
66 '''support for bugzilla version 2.16.'''
66 '''support for bugzilla version 2.16.'''
67
67
68 def __init__(self, ui):
68 def __init__(self, ui):
69 self.ui = ui
69 self.ui = ui
70 host = self.ui.config('bugzilla', 'host', 'localhost')
70 host = self.ui.config('bugzilla', 'host', 'localhost')
71 user = self.ui.config('bugzilla', 'user', 'bugs')
71 user = self.ui.config('bugzilla', 'user', 'bugs')
72 passwd = self.ui.config('bugzilla', 'password')
72 passwd = self.ui.config('bugzilla', 'password')
73 db = self.ui.config('bugzilla', 'db', 'bugs')
73 db = self.ui.config('bugzilla', 'db', 'bugs')
74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
75 usermap = self.ui.config('bugzilla', 'usermap')
75 usermap = self.ui.config('bugzilla', 'usermap')
76 if usermap:
76 if usermap:
77 self.ui.readsections(usermap, 'usermap')
77 self.ui.readsections(usermap, 'usermap')
78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
79 (host, db, user, '*' * len(passwd)))
79 (host, db, user, '*' * len(passwd)))
80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
81 db=db, connect_timeout=timeout)
81 db=db, connect_timeout=timeout)
82 self.cursor = self.conn.cursor()
82 self.cursor = self.conn.cursor()
83 self.run('select fieldid from fielddefs where name = "longdesc"')
83 self.run('select fieldid from fielddefs where name = "longdesc"')
84 ids = self.cursor.fetchall()
84 ids = self.cursor.fetchall()
85 if len(ids) != 1:
85 if len(ids) != 1:
86 raise util.Abort(_('unknown database schema'))
86 raise util.Abort(_('unknown database schema'))
87 self.longdesc_id = ids[0][0]
87 self.longdesc_id = ids[0][0]
88 self.user_ids = {}
88 self.user_ids = {}
89
89
90 def run(self, *args, **kwargs):
90 def run(self, *args, **kwargs):
91 '''run a query.'''
91 '''run a query.'''
92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
93 try:
93 try:
94 self.cursor.execute(*args, **kwargs)
94 self.cursor.execute(*args, **kwargs)
95 except MySQLdb.MySQLError, err:
95 except MySQLdb.MySQLError, err:
96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
97 raise
97 raise
98
98
99 def filter_real_bug_ids(self, ids):
99 def filter_real_bug_ids(self, ids):
100 '''filter not-existing bug ids from list.'''
100 '''filter not-existing bug ids from list.'''
101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
102 ids = [c[0] for c in self.cursor.fetchall()]
102 ids = [c[0] for c in self.cursor.fetchall()]
103 ids.sort()
103 ids.sort()
104 return ids
104 return ids
105
105
106 def filter_unknown_bug_ids(self, node, ids):
106 def filter_unknown_bug_ids(self, node, ids):
107 '''filter bug ids from list that already refer to this changeset.'''
107 '''filter bug ids from list that already refer to this changeset.'''
108
108
109 self.run('''select bug_id from longdescs where
109 self.run('''select bug_id from longdescs where
110 bug_id in %s and thetext like "%%%s%%"''' %
110 bug_id in %s and thetext like "%%%s%%"''' %
111 (buglist(ids), short(node)))
111 (buglist(ids), short(node)))
112 unknown = dict.fromkeys(ids)
112 unknown = dict.fromkeys(ids)
113 for (id,) in self.cursor.fetchall():
113 for (id,) in self.cursor.fetchall():
114 self.ui.status(_('bug %d already knows about changeset %s\n') %
114 self.ui.status(_('bug %d already knows about changeset %s\n') %
115 (id, short(node)))
115 (id, short(node)))
116 unknown.pop(id, None)
116 unknown.pop(id, None)
117 ids = unknown.keys()
117 ids = unknown.keys()
118 ids.sort()
118 ids.sort()
119 return ids
119 return ids
120
120
121 def notify(self, ids):
121 def notify(self, ids):
122 '''tell bugzilla to send mail.'''
122 '''tell bugzilla to send mail.'''
123
123
124 self.ui.status(_('telling bugzilla to send mail:\n'))
124 self.ui.status(_('telling bugzilla to send mail:\n'))
125 for id in ids:
125 for id in ids:
126 self.ui.status(_(' bug %s\n') % id)
126 self.ui.status(_(' bug %s\n') % id)
127 cmd = self.ui.config('bugzilla', 'notify',
127 cmd = self.ui.config('bugzilla', 'notify',
128 'cd /var/www/html/bugzilla && '
128 'cd /var/www/html/bugzilla && '
129 './processmail %s nobody@nowhere.com') % id
129 './processmail %s nobody@nowhere.com') % id
130 fp = util.popen('(%s) 2>&1' % cmd)
130 fp = util.popen('(%s) 2>&1' % cmd)
131 out = fp.read()
131 out = fp.read()
132 ret = fp.close()
132 ret = fp.close()
133 if ret:
133 if ret:
134 self.ui.warn(out)
134 self.ui.warn(out)
135 raise util.Abort(_('bugzilla notify command %s') %
135 raise util.Abort(_('bugzilla notify command %s') %
136 util.explain_exit(ret)[0])
136 util.explain_exit(ret)[0])
137 self.ui.status(_('done\n'))
137 self.ui.status(_('done\n'))
138
138
139 def get_user_id(self, user):
139 def get_user_id(self, user):
140 '''look up numeric bugzilla user id.'''
140 '''look up numeric bugzilla user id.'''
141 try:
141 try:
142 return self.user_ids[user]
142 return self.user_ids[user]
143 except KeyError:
143 except KeyError:
144 try:
144 try:
145 userid = int(user)
145 userid = int(user)
146 except ValueError:
146 except ValueError:
147 self.ui.note(_('looking up user %s\n') % user)
147 self.ui.note(_('looking up user %s\n') % user)
148 self.run('''select userid from profiles
148 self.run('''select userid from profiles
149 where login_name like %s''', user)
149 where login_name like %s''', user)
150 all = self.cursor.fetchall()
150 all = self.cursor.fetchall()
151 if len(all) != 1:
151 if len(all) != 1:
152 raise KeyError(user)
152 raise KeyError(user)
153 userid = int(all[0][0])
153 userid = int(all[0][0])
154 self.user_ids[user] = userid
154 self.user_ids[user] = userid
155 return userid
155 return userid
156
156
157 def map_committer(self, user):
157 def map_committer(self, user):
158 '''map name of committer to bugzilla user name.'''
158 '''map name of committer to bugzilla user name.'''
159 for committer, bzuser in self.ui.configitems('usermap'):
159 for committer, bzuser in self.ui.configitems('usermap'):
160 if committer.lower() == user.lower():
160 if committer.lower() == user.lower():
161 return bzuser
161 return bzuser
162 return user
162 return user
163
163
164 def add_comment(self, bugid, text, committer):
164 def add_comment(self, bugid, text, committer):
165 '''add comment to bug. try adding comment as committer of
165 '''add comment to bug. try adding comment as committer of
166 changeset, otherwise as default bugzilla user.'''
166 changeset, otherwise as default bugzilla user.'''
167 user = self.map_committer(committer)
167 user = self.map_committer(committer)
168 try:
168 try:
169 userid = self.get_user_id(user)
169 userid = self.get_user_id(user)
170 except KeyError:
170 except KeyError:
171 try:
171 try:
172 defaultuser = self.ui.config('bugzilla', 'bzuser')
172 defaultuser = self.ui.config('bugzilla', 'bzuser')
173 if not defaultuser:
173 if not defaultuser:
174 raise util.Abort(_('cannot find bugzilla user id for %s') %
174 raise util.Abort(_('cannot find bugzilla user id for %s') %
175 user)
175 user)
176 userid = self.get_user_id(defaultuser)
176 userid = self.get_user_id(defaultuser)
177 except KeyError:
177 except KeyError:
178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
179 (user, defaultuser))
179 (user, defaultuser))
180 now = time.strftime('%Y-%m-%d %H:%M:%S')
180 now = time.strftime('%Y-%m-%d %H:%M:%S')
181 self.run('''insert into longdescs
181 self.run('''insert into longdescs
182 (bug_id, who, bug_when, thetext)
182 (bug_id, who, bug_when, thetext)
183 values (%s, %s, %s, %s)''',
183 values (%s, %s, %s, %s)''',
184 (bugid, userid, now, text))
184 (bugid, userid, now, text))
185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
186 values (%s, %s, %s, %s)''',
186 values (%s, %s, %s, %s)''',
187 (bugid, userid, now, self.longdesc_id))
187 (bugid, userid, now, self.longdesc_id))
188
188
189 class bugzilla(object):
189 class bugzilla(object):
190 # supported versions of bugzilla. different versions have
190 # supported versions of bugzilla. different versions have
191 # different schemas.
191 # different schemas.
192 _versions = {
192 _versions = {
193 '2.16': bugzilla_2_16,
193 '2.16': bugzilla_2_16,
194 }
194 }
195
195
196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
198
198
199 _bz = None
199 _bz = None
200
200
201 def __init__(self, ui, repo):
201 def __init__(self, ui, repo):
202 self.ui = ui
202 self.ui = ui
203 self.repo = repo
203 self.repo = repo
204
204
205 def bz(self):
205 def bz(self):
206 '''return object that knows how to talk to bugzilla version in
206 '''return object that knows how to talk to bugzilla version in
207 use.'''
207 use.'''
208
208
209 if bugzilla._bz is None:
209 if bugzilla._bz is None:
210 bzversion = self.ui.config('bugzilla', 'version')
210 bzversion = self.ui.config('bugzilla', 'version')
211 try:
211 try:
212 bzclass = bugzilla._versions[bzversion]
212 bzclass = bugzilla._versions[bzversion]
213 except KeyError:
213 except KeyError:
214 raise util.Abort(_('bugzilla version %s not supported') %
214 raise util.Abort(_('bugzilla version %s not supported') %
215 bzversion)
215 bzversion)
216 bugzilla._bz = bzclass(self.ui)
216 bugzilla._bz = bzclass(self.ui)
217 return bugzilla._bz
217 return bugzilla._bz
218
218
219 def __getattr__(self, key):
219 def __getattr__(self, key):
220 return getattr(self.bz(), key)
220 return getattr(self.bz(), key)
221
221
222 _bug_re = None
222 _bug_re = None
223 _split_re = None
223 _split_re = None
224
224
225 def find_bug_ids(self, ctx):
225 def find_bug_ids(self, ctx):
226 '''find valid bug ids that are referred to in changeset
226 '''find valid bug ids that are referred to in changeset
227 comments and that do not already have references to this
227 comments and that do not already have references to this
228 changeset.'''
228 changeset.'''
229
229
230 if bugzilla._bug_re is None:
230 if bugzilla._bug_re is None:
231 bugzilla._bug_re = re.compile(
231 bugzilla._bug_re = re.compile(
232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
233 re.IGNORECASE)
233 re.IGNORECASE)
234 bugzilla._split_re = re.compile(r'\D+')
234 bugzilla._split_re = re.compile(r'\D+')
235 start = 0
235 start = 0
236 ids = {}
236 ids = {}
237 while True:
237 while True:
238 m = bugzilla._bug_re.search(ctx.description(), start)
238 m = bugzilla._bug_re.search(ctx.description(), start)
239 if not m:
239 if not m:
240 break
240 break
241 start = m.end()
241 start = m.end()
242 for id in bugzilla._split_re.split(m.group(1)):
242 for id in bugzilla._split_re.split(m.group(1)):
243 if not id: continue
243 if not id: continue
244 ids[int(id)] = 1
244 ids[int(id)] = 1
245 ids = ids.keys()
245 ids = ids.keys()
246 if ids:
246 if ids:
247 ids = self.filter_real_bug_ids(ids)
247 ids = self.filter_real_bug_ids(ids)
248 if ids:
248 if ids:
249 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
249 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
250 return ids
250 return ids
251
251
252 def update(self, bugid, ctx):
252 def update(self, bugid, ctx):
253 '''update bugzilla bug with reference to changeset.'''
253 '''update bugzilla bug with reference to changeset.'''
254
254
255 def webroot(root):
255 def webroot(root):
256 '''strip leading prefix of repo root and turn into
256 '''strip leading prefix of repo root and turn into
257 url-safe path.'''
257 url-safe path.'''
258 count = int(self.ui.config('bugzilla', 'strip', 0))
258 count = int(self.ui.config('bugzilla', 'strip', 0))
259 root = util.pconvert(root)
259 root = util.pconvert(root)
260 while count > 0:
260 while count > 0:
261 c = root.find('/')
261 c = root.find('/')
262 if c == -1:
262 if c == -1:
263 break
263 break
264 root = root[c+1:]
264 root = root[c+1:]
265 count -= 1
265 count -= 1
266 return root
266 return root
267
267
268 mapfile = self.ui.config('bugzilla', 'style')
268 mapfile = self.ui.config('bugzilla', 'style')
269 tmpl = self.ui.config('bugzilla', 'template')
269 tmpl = self.ui.config('bugzilla', 'template')
270 t = cmdutil.changeset_templater(self.ui, self.repo,
270 t = cmdutil.changeset_templater(self.ui, self.repo,
271 False, mapfile, False)
271 False, mapfile, False)
272 if not mapfile and not tmpl:
272 if not mapfile and not tmpl:
273 tmpl = _('changeset {node|short} in repo {root} refers '
273 tmpl = _('changeset {node|short} in repo {root} refers '
274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
275 if tmpl:
275 if tmpl:
276 tmpl = templater.parsestring(tmpl, quoted=False)
276 tmpl = templater.parsestring(tmpl, quoted=False)
277 t.use_template(tmpl)
277 t.use_template(tmpl)
278 self.ui.pushbuffer()
278 self.ui.pushbuffer()
279 t.show(changenode=ctx.node(), changes=ctx.changeset(),
279 t.show(changenode=ctx.node(), changes=ctx.changeset(),
280 bug=str(bugid),
280 bug=str(bugid),
281 hgweb=self.ui.config('web', 'baseurl'),
281 hgweb=self.ui.config('web', 'baseurl'),
282 root=self.repo.root,
282 root=self.repo.root,
283 webroot=webroot(self.repo.root))
283 webroot=webroot(self.repo.root))
284 data = self.ui.popbuffer()
284 data = self.ui.popbuffer()
285 self.add_comment(bugid, data, util.email(ctx.user()))
285 self.add_comment(bugid, data, util.email(ctx.user()))
286
286
287 def hook(ui, repo, hooktype, node=None, **kwargs):
287 def hook(ui, repo, hooktype, node=None, **kwargs):
288 '''add comment to bugzilla for each changeset that refers to a
288 '''add comment to bugzilla for each changeset that refers to a
289 bugzilla bug id. only add a comment once per bug, so same change
289 bugzilla bug id. only add a comment once per bug, so same change
290 seen multiple times does not fill bug with duplicate data.'''
290 seen multiple times does not fill bug with duplicate data.'''
291 try:
291 try:
292 import MySQLdb as mysql
292 import MySQLdb as mysql
293 global MySQLdb
293 global MySQLdb
294 MySQLdb = mysql
294 MySQLdb = mysql
295 except ImportError, err:
295 except ImportError, err:
296 raise util.Abort(_('python mysql support not available: %s') % err)
296 raise util.Abort(_('python mysql support not available: %s') % err)
297
297
298 if node is None:
298 if node is None:
299 raise util.Abort(_('hook type %s does not pass a changeset id') %
299 raise util.Abort(_('hook type %s does not pass a changeset id') %
300 hooktype)
300 hooktype)
301 try:
301 try:
302 bz = bugzilla(ui, repo)
302 bz = bugzilla(ui, repo)
303 ctx = repo.changectx(node)
303 ctx = repo[node]
304 ids = bz.find_bug_ids(ctx)
304 ids = bz.find_bug_ids(ctx)
305 if ids:
305 if ids:
306 for id in ids:
306 for id in ids:
307 bz.update(id, ctx)
307 bz.update(id, ctx)
308 bz.notify(ids)
308 bz.notify(ids)
309 except MySQLdb.MySQLError, err:
309 except MySQLdb.MySQLError, err:
310 raise util.Abort(_('database error: %s') % err[1])
310 raise util.Abort(_('database error: %s') % err[1])
311
311
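For illustration, the default bug-id regexp in the extension above pulls bug numbers out of commit messages before they are filtered against the Bugzilla database; a small sketch of what it matches (the sample message is invented):

    import re
    bug_re = re.compile(r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
                        r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)', re.IGNORECASE)
    m = bug_re.search('fix crash in parser (bug 123 and 456)')
    print m.group(1)   # '123 and 456'; the hook splits this on \D+ to get ids 123 and 456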
@@ -1,41 +1,41 @@
1 # Mercurial extension to provide the 'hg children' command
1 # Mercurial extension to provide the 'hg children' command
2 #
2 #
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
3 # Copyright 2007 by Intevation GmbH <intevation@intevation.de>
4 # Author(s):
4 # Author(s):
5 # Thomas Arendsen Hein <thomas@intevation.de>
5 # Thomas Arendsen Hein <thomas@intevation.de>
6 #
6 #
7 # This software may be used and distributed according to the terms
7 # This software may be used and distributed according to the terms
8 # of the GNU General Public License, incorporated herein by reference.
8 # of the GNU General Public License, incorporated herein by reference.
9
9
10 from mercurial import cmdutil
10 from mercurial import cmdutil
11 from mercurial.commands import templateopts
11 from mercurial.commands import templateopts
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13
13
14
14
15 def children(ui, repo, file_=None, **opts):
15 def children(ui, repo, file_=None, **opts):
16 """show the children of the given or working dir revision
16 """show the children of the given or working dir revision
17
17
18 Print the children of the working directory's revisions.
18 Print the children of the working directory's revisions.
19 If a revision is given via --rev, the children of that revision
19 If a revision is given via --rev, the children of that revision
20 will be printed. If a file argument is given, revision in
20 will be printed. If a file argument is given, revision in
21 which the file was last changed (after the working directory
21 which the file was last changed (after the working directory
22 revision or the argument to --rev if given) is printed.
22 revision or the argument to --rev if given) is printed.
23 """
23 """
24 rev = opts.get('rev')
24 rev = opts.get('rev')
25 if file_:
25 if file_:
26 ctx = repo.filectx(file_, changeid=rev)
26 ctx = repo.filectx(file_, changeid=rev)
27 else:
27 else:
28 ctx = repo.changectx(rev)
28 ctx = repo[rev]
29
29
30 displayer = cmdutil.show_changeset(ui, repo, opts)
30 displayer = cmdutil.show_changeset(ui, repo, opts)
31 for node in [cp.node() for cp in ctx.children()]:
31 for node in [cp.node() for cp in ctx.children()]:
32 displayer.show(changenode=node)
32 displayer.show(changenode=node)
33
33
34
34
35 cmdtable = {
35 cmdtable = {
36 "children":
36 "children":
37 (children,
37 (children,
38 [('r', 'rev', '', _('show children of the specified rev')),
38 [('r', 'rev', '', _('show children of the specified rev')),
39 ] + templateopts,
39 ] + templateopts,
40 _('hg children [-r REV] [FILE]')),
40 _('hg children [-r REV] [FILE]')),
41 }
41 }
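The loop above collects child changesets through the context API; a minimal standalone sketch of the same idea outside the command (repo and rev are hypothetical):

    ctx = repo[rev]    # rev may be a number, hash or tag; the command passes None when --rev is omitted
    child_nodes = [child.node() for child in ctx.children()]
    # each node is then handed to the changeset displayer, as in the extension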
@@ -1,290 +1,290 @@
1 # hg backend for convert extension
1 # hg backend for convert extension
2
2
3 # Notes for hg->hg conversion:
3 # Notes for hg->hg conversion:
4 #
4 #
5 # * Old versions of Mercurial didn't trim the whitespace from the ends
5 # * Old versions of Mercurial didn't trim the whitespace from the ends
6 # of commit messages, but new versions do. Changesets created by
6 # of commit messages, but new versions do. Changesets created by
7 # those older versions, then converted, may thus have different
7 # those older versions, then converted, may thus have different
8 # hashes for changesets that are otherwise identical.
8 # hashes for changesets that are otherwise identical.
9 #
9 #
10 # * By default, the source revision is stored in the converted
10 # * By default, the source revision is stored in the converted
11 # revision. This will cause the converted revision to have a
11 # revision. This will cause the converted revision to have a
12 # different identity than the source. To avoid this, use the
12 # different identity than the source. To avoid this, use the
13 # following option: "--config convert.hg.saverev=false"
13 # following option: "--config convert.hg.saverev=false"
14
14
15
15
16 import os, time
16 import os, time
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.repo import RepoError
18 from mercurial.repo import RepoError
19 from mercurial.node import bin, hex, nullid
19 from mercurial.node import bin, hex, nullid
20 from mercurial import hg, revlog, util, context
20 from mercurial import hg, revlog, util, context
21
21
22 from common import NoRepo, commit, converter_source, converter_sink
22 from common import NoRepo, commit, converter_source, converter_sink
23
23
24 class mercurial_sink(converter_sink):
24 class mercurial_sink(converter_sink):
25 def __init__(self, ui, path):
25 def __init__(self, ui, path):
26 converter_sink.__init__(self, ui, path)
26 converter_sink.__init__(self, ui, path)
27 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
27 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
28 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
28 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
29 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
29 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
30 self.lastbranch = None
30 self.lastbranch = None
31 if os.path.isdir(path) and len(os.listdir(path)) > 0:
31 if os.path.isdir(path) and len(os.listdir(path)) > 0:
32 try:
32 try:
33 self.repo = hg.repository(self.ui, path)
33 self.repo = hg.repository(self.ui, path)
34 if not self.repo.local():
34 if not self.repo.local():
35 raise NoRepo(_('%s is not a local Mercurial repo') % path)
35 raise NoRepo(_('%s is not a local Mercurial repo') % path)
36 except RepoError, err:
36 except RepoError, err:
37 ui.print_exc()
37 ui.print_exc()
38 raise NoRepo(err.args[0])
38 raise NoRepo(err.args[0])
39 else:
39 else:
40 try:
40 try:
41 ui.status(_('initializing destination %s repository\n') % path)
41 ui.status(_('initializing destination %s repository\n') % path)
42 self.repo = hg.repository(self.ui, path, create=True)
42 self.repo = hg.repository(self.ui, path, create=True)
43 if not self.repo.local():
43 if not self.repo.local():
44 raise NoRepo(_('%s is not a local Mercurial repo') % path)
44 raise NoRepo(_('%s is not a local Mercurial repo') % path)
45 self.created.append(path)
45 self.created.append(path)
46 except RepoError, err:
46 except RepoError, err:
47 ui.print_exc()
47 ui.print_exc()
48 raise NoRepo("could not create hg repo %s as sink" % path)
48 raise NoRepo("could not create hg repo %s as sink" % path)
49 self.lock = None
49 self.lock = None
50 self.wlock = None
50 self.wlock = None
51 self.filemapmode = False
51 self.filemapmode = False
52
52
53 def before(self):
53 def before(self):
54 self.ui.debug(_('run hg sink pre-conversion action\n'))
54 self.ui.debug(_('run hg sink pre-conversion action\n'))
55 self.wlock = self.repo.wlock()
55 self.wlock = self.repo.wlock()
56 self.lock = self.repo.lock()
56 self.lock = self.repo.lock()
57
57
58 def after(self):
58 def after(self):
59 self.ui.debug(_('run hg sink post-conversion action\n'))
59 self.ui.debug(_('run hg sink post-conversion action\n'))
60 self.lock = None
60 self.lock = None
61 self.wlock = None
61 self.wlock = None
62
62
63 def revmapfile(self):
63 def revmapfile(self):
64 return os.path.join(self.path, ".hg", "shamap")
64 return os.path.join(self.path, ".hg", "shamap")
65
65
66 def authorfile(self):
66 def authorfile(self):
67 return os.path.join(self.path, ".hg", "authormap")
67 return os.path.join(self.path, ".hg", "authormap")
68
68
69 def getheads(self):
69 def getheads(self):
70 h = self.repo.changelog.heads()
70 h = self.repo.changelog.heads()
71 return [ hex(x) for x in h ]
71 return [ hex(x) for x in h ]
72
72
73 def setbranch(self, branch, pbranches):
73 def setbranch(self, branch, pbranches):
74 if not self.clonebranches:
74 if not self.clonebranches:
75 return
75 return
76
76
77 setbranch = (branch != self.lastbranch)
77 setbranch = (branch != self.lastbranch)
78 self.lastbranch = branch
78 self.lastbranch = branch
79 if not branch:
79 if not branch:
80 branch = 'default'
80 branch = 'default'
81 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
81 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
82 pbranch = pbranches and pbranches[0][1] or 'default'
82 pbranch = pbranches and pbranches[0][1] or 'default'
83
83
84 branchpath = os.path.join(self.path, branch)
84 branchpath = os.path.join(self.path, branch)
85 if setbranch:
85 if setbranch:
86 self.after()
86 self.after()
87 try:
87 try:
88 self.repo = hg.repository(self.ui, branchpath)
88 self.repo = hg.repository(self.ui, branchpath)
89 except:
89 except:
90 self.repo = hg.repository(self.ui, branchpath, create=True)
90 self.repo = hg.repository(self.ui, branchpath, create=True)
91 self.before()
91 self.before()
92
92
93 # pbranches may bring revisions from other branches (merge parents)
93 # pbranches may bring revisions from other branches (merge parents)
94 # Make sure we have them, or pull them.
94 # Make sure we have them, or pull them.
95 missings = {}
95 missings = {}
96 for b in pbranches:
96 for b in pbranches:
97 try:
97 try:
98 self.repo.lookup(b[0])
98 self.repo.lookup(b[0])
99 except:
99 except:
100 missings.setdefault(b[1], []).append(b[0])
100 missings.setdefault(b[1], []).append(b[0])
101
101
102 if missings:
102 if missings:
103 self.after()
103 self.after()
104 for pbranch, heads in missings.iteritems():
104 for pbranch, heads in missings.iteritems():
105 pbranchpath = os.path.join(self.path, pbranch)
105 pbranchpath = os.path.join(self.path, pbranch)
106 prepo = hg.repository(self.ui, pbranchpath)
106 prepo = hg.repository(self.ui, pbranchpath)
107 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
107 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
108 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
108 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
109 self.before()
109 self.before()
110
110
111 def putcommit(self, files, copies, parents, commit, source):
111 def putcommit(self, files, copies, parents, commit, source):
112
112
113 files = dict(files)
113 files = dict(files)
114 def getfilectx(repo, memctx, f):
114 def getfilectx(repo, memctx, f):
115 v = files[f]
115 v = files[f]
116 data = source.getfile(f, v)
116 data = source.getfile(f, v)
117 e = source.getmode(f, v)
117 e = source.getmode(f, v)
118 return context.memfilectx(f, data, 'l' in e, 'x' in e, copies.get(f))
118 return context.memfilectx(f, data, 'l' in e, 'x' in e, copies.get(f))
119
119
120 pl = []
120 pl = []
121 for p in parents:
121 for p in parents:
122 if p not in pl:
122 if p not in pl:
123 pl.append(p)
123 pl.append(p)
124 parents = pl
124 parents = pl
125 nparents = len(parents)
125 nparents = len(parents)
126 if self.filemapmode and nparents == 1:
126 if self.filemapmode and nparents == 1:
127 m1node = self.repo.changelog.read(bin(parents[0]))[0]
127 m1node = self.repo.changelog.read(bin(parents[0]))[0]
128 parent = parents[0]
128 parent = parents[0]
129
129
130 if len(parents) < 2: parents.append("0" * 40)
130 if len(parents) < 2: parents.append("0" * 40)
131 if len(parents) < 2: parents.append("0" * 40)
131 if len(parents) < 2: parents.append("0" * 40)
132 p2 = parents.pop(0)
132 p2 = parents.pop(0)
133
133
134 text = commit.desc
134 text = commit.desc
135 extra = commit.extra.copy()
135 extra = commit.extra.copy()
136 if self.branchnames and commit.branch:
136 if self.branchnames and commit.branch:
137 extra['branch'] = commit.branch
137 extra['branch'] = commit.branch
138 if commit.rev:
138 if commit.rev:
139 extra['convert_revision'] = commit.rev
139 extra['convert_revision'] = commit.rev
140
140
141 while parents:
141 while parents:
142 p1 = p2
142 p1 = p2
143 p2 = parents.pop(0)
143 p2 = parents.pop(0)
144 ctx = context.memctx(self.repo, (p1, p2), text, files.keys(), getfilectx,
144 ctx = context.memctx(self.repo, (p1, p2), text, files.keys(), getfilectx,
145 commit.author, commit.date, extra)
145 commit.author, commit.date, extra)
146 a = self.repo.commitctx(ctx)
146 a = self.repo.commitctx(ctx)
147 text = "(octopus merge fixup)\n"
147 text = "(octopus merge fixup)\n"
148 p2 = hex(self.repo.changelog.tip())
148 p2 = hex(self.repo.changelog.tip())
149
149
150 if self.filemapmode and nparents == 1:
150 if self.filemapmode and nparents == 1:
151 man = self.repo.manifest
151 man = self.repo.manifest
152 mnode = self.repo.changelog.read(bin(p2))[0]
152 mnode = self.repo.changelog.read(bin(p2))[0]
153 if not man.cmp(m1node, man.revision(mnode)):
153 if not man.cmp(m1node, man.revision(mnode)):
154 self.repo.rollback()
154 self.repo.rollback()
155 return parent
155 return parent
156 return p2
156 return p2
157
157
158 def puttags(self, tags):
158 def puttags(self, tags):
159 try:
159 try:
160 parentctx = self.repo.changectx(self.tagsbranch)
160 parentctx = self.repo[self.tagsbranch]
161 tagparent = parentctx.node()
161 tagparent = parentctx.node()
162 except RepoError, inst:
162 except RepoError, inst:
163 parentctx = None
163 parentctx = None
164 tagparent = nullid
164 tagparent = nullid
165
165
166 try:
166 try:
167 old = parentctx.filectx(".hgtags").data()
167 old = parentctx.filectx(".hgtags").data()
168 oldlines = old.splitlines(1)
168 oldlines = old.splitlines(1)
169 oldlines.sort()
169 oldlines.sort()
170 except:
170 except:
171 oldlines = []
171 oldlines = []
172
172
173 newlines = [("%s %s\n" % (tags[tag], tag)) for tag in tags.keys()]
173 newlines = [("%s %s\n" % (tags[tag], tag)) for tag in tags.keys()]
174 newlines.sort()
174 newlines.sort()
175
175
176 if newlines == oldlines:
176 if newlines == oldlines:
177 return None
177 return None
178 data = "".join(newlines)
178 data = "".join(newlines)
179
179
180 def getfilectx(repo, memctx, f):
180 def getfilectx(repo, memctx, f):
181 return context.memfilectx(f, data, False, False, None)
181 return context.memfilectx(f, data, False, False, None)
182
182
183 self.ui.status("updating tags\n")
183 self.ui.status("updating tags\n")
184 date = "%s 0" % int(time.mktime(time.gmtime()))
184 date = "%s 0" % int(time.mktime(time.gmtime()))
185 extra = {'branch': self.tagsbranch}
185 extra = {'branch': self.tagsbranch}
186 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
186 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
187 [".hgtags"], getfilectx, "convert-repo", date,
187 [".hgtags"], getfilectx, "convert-repo", date,
188 extra)
188 extra)
189 self.repo.commitctx(ctx)
189 self.repo.commitctx(ctx)
190 return hex(self.repo.changelog.tip())
190 return hex(self.repo.changelog.tip())
191
191
192 def setfilemapmode(self, active):
192 def setfilemapmode(self, active):
193 self.filemapmode = active
193 self.filemapmode = active
194
194
195 class mercurial_source(converter_source):
195 class mercurial_source(converter_source):
196 def __init__(self, ui, path, rev=None):
196 def __init__(self, ui, path, rev=None):
197 converter_source.__init__(self, ui, path, rev)
197 converter_source.__init__(self, ui, path, rev)
198 self.saverev = ui.configbool('convert', 'hg.saverev', True)
198 self.saverev = ui.configbool('convert', 'hg.saverev', True)
199 try:
199 try:
200 self.repo = hg.repository(self.ui, path)
200 self.repo = hg.repository(self.ui, path)
201 # try to provoke an exception if this isn't really a hg
201 # try to provoke an exception if this isn't really a hg
202 # repo, but some other bogus compatible-looking url
202 # repo, but some other bogus compatible-looking url
203 if not self.repo.local():
203 if not self.repo.local():
204 raise RepoError()
204 raise RepoError()
205 except RepoError:
205 except RepoError:
206 ui.print_exc()
206 ui.print_exc()
207 raise NoRepo("%s is not a local Mercurial repo" % path)
207 raise NoRepo("%s is not a local Mercurial repo" % path)
208 self.lastrev = None
208 self.lastrev = None
209 self.lastctx = None
209 self.lastctx = None
210 self._changescache = None
210 self._changescache = None
211 self.convertfp = None
211 self.convertfp = None
212
212
213 def changectx(self, rev):
213 def changectx(self, rev):
214 if self.lastrev != rev:
214 if self.lastrev != rev:
215 self.lastctx = self.repo.changectx(rev)
215 self.lastctx = self.repo[rev]
216 self.lastrev = rev
216 self.lastrev = rev
217 return self.lastctx
217 return self.lastctx
218
218
219 def getheads(self):
219 def getheads(self):
220 if self.rev:
220 if self.rev:
221 return [hex(self.repo.changectx(self.rev).node())]
221 return [hex(self.repo[self.rev].node())]
222 else:
222 else:
223 return [hex(node) for node in self.repo.heads()]
223 return [hex(node) for node in self.repo.heads()]
224
224
225 def getfile(self, name, rev):
225 def getfile(self, name, rev):
226 try:
226 try:
227 return self.changectx(rev).filectx(name).data()
227 return self.changectx(rev)[name].data()
228 except revlog.LookupError, err:
228 except revlog.LookupError, err:
229 raise IOError(err)
229 raise IOError(err)
230
230
231 def getmode(self, name, rev):
231 def getmode(self, name, rev):
232 m = self.changectx(rev).manifest()
232 m = self.changectx(rev).manifest()
233 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
233 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
234
234
235 def getchanges(self, rev):
235 def getchanges(self, rev):
236 ctx = self.changectx(rev)
236 ctx = self.changectx(rev)
237 if self._changescache and self._changescache[0] == rev:
237 if self._changescache and self._changescache[0] == rev:
238 m, a, r = self._changescache[1]
238 m, a, r = self._changescache[1]
239 else:
239 else:
240 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
240 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
241 changes = [(name, rev) for name in m + a + r]
241 changes = [(name, rev) for name in m + a + r]
242 changes.sort()
242 changes.sort()
243 return (changes, self.getcopies(ctx, m + a))
243 return (changes, self.getcopies(ctx, m + a))
244
244
245 def getcopies(self, ctx, files):
245 def getcopies(self, ctx, files):
246 copies = {}
246 copies = {}
247 for name in files:
247 for name in files:
248 try:
248 try:
249 copies[name] = ctx.filectx(name).renamed()[0]
249 copies[name] = ctx.filectx(name).renamed()[0]
250 except TypeError:
250 except TypeError:
251 pass
251 pass
252 return copies
252 return copies
253
253
254 def getcommit(self, rev):
254 def getcommit(self, rev):
255 ctx = self.changectx(rev)
255 ctx = self.changectx(rev)
256 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
256 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
257 if self.saverev:
257 if self.saverev:
258 crev = rev
258 crev = rev
259 else:
259 else:
260 crev = None
260 crev = None
261 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
261 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
262 desc=ctx.description(), rev=crev, parents=parents,
262 desc=ctx.description(), rev=crev, parents=parents,
263 branch=ctx.branch(), extra=ctx.extra())
263 branch=ctx.branch(), extra=ctx.extra())
264
264
265 def gettags(self):
265 def gettags(self):
266 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
266 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
267 return dict([(name, hex(node)) for name, node in tags])
267 return dict([(name, hex(node)) for name, node in tags])
268
268
269 def getchangedfiles(self, rev, i):
269 def getchangedfiles(self, rev, i):
270 ctx = self.changectx(rev)
270 ctx = self.changectx(rev)
271 i = i or 0
271 i = i or 0
272 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
272 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
273
273
274 if i == 0:
274 if i == 0:
275 self._changescache = (rev, changes)
275 self._changescache = (rev, changes)
276
276
277 return changes[0] + changes[1] + changes[2]
277 return changes[0] + changes[1] + changes[2]
278
278
279 def converted(self, rev, destrev):
279 def converted(self, rev, destrev):
280 if self.convertfp is None:
280 if self.convertfp is None:
281 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
281 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
282 'a')
282 'a')
283 self.convertfp.write('%s %s\n' % (destrev, rev))
283 self.convertfp.write('%s %s\n' % (destrev, rev))
284 self.convertfp.flush()
284 self.convertfp.flush()
285
285
286 def before(self):
286 def before(self):
287 self.ui.debug(_('run hg source pre-conversion action\n'))
287 self.ui.debug(_('run hg source pre-conversion action\n'))
288
288
289 def after(self):
289 def after(self):
290 self.ui.debug(_('run hg source post-conversion action\n'))
290 self.ui.debug(_('run hg source post-conversion action\n'))
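In the converter above, getfile now reads file data through the changectx itself instead of building an explicit filectx; a minimal sketch of that access pattern (repo, rev and the path are hypothetical placeholders):

    ctx = repo[rev]                       # changectx of the revision being converted
    if 'hgext/convert/hg.py' in ctx:      # membership test, also used by the extdiff hunk below
        data = ctx['hgext/convert/hg.py'].data()   # ctx[name] returns a filectx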
@@ -1,251 +1,251 @@
1 # extdiff.py - external diff program support for mercurial
1 # extdiff.py - external diff program support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''
8 '''
9 The `extdiff' Mercurial extension allows you to use external programs
9 The `extdiff' Mercurial extension allows you to use external programs
10 to compare revisions, or revision with working dir. The external diff
10 to compare revisions, or revision with working dir. The external diff
11 programs are called with a configurable set of options and two
11 programs are called with a configurable set of options and two
12 non-option arguments: paths to directories containing snapshots of
12 non-option arguments: paths to directories containing snapshots of
13 files to compare.
13 files to compare.
14
14
15 To enable this extension:
15 To enable this extension:
16
16
17 [extensions]
17 [extensions]
18 hgext.extdiff =
18 hgext.extdiff =
19
19
20 The `extdiff' extension also allows to configure new diff commands, so
20 The `extdiff' extension also allows to configure new diff commands, so
21 you do not need to type "hg extdiff -p kdiff3" always.
21 you do not need to type "hg extdiff -p kdiff3" always.
22
22
23 [extdiff]
23 [extdiff]
24 # add new command that runs GNU diff(1) in 'context diff' mode
24 # add new command that runs GNU diff(1) in 'context diff' mode
25 cdiff = gdiff -Nprc5
25 cdiff = gdiff -Nprc5
26 ## or the old way:
26 ## or the old way:
27 #cmd.cdiff = gdiff
27 #cmd.cdiff = gdiff
28 #opts.cdiff = -Nprc5
28 #opts.cdiff = -Nprc5
29
29
30 # add new command called vdiff, runs kdiff3
30 # add new command called vdiff, runs kdiff3
31 vdiff = kdiff3
31 vdiff = kdiff3
32
32
33 # add new command called meld, runs meld (no need to name twice)
33 # add new command called meld, runs meld (no need to name twice)
34 meld =
34 meld =
35
35
36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
37 #(see http://www.vim.org/scripts/script.php?script_id=102)
37 #(see http://www.vim.org/scripts/script.php?script_id=102)
38 # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
38 # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
39 # your .vimrc
39 # your .vimrc
40 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
40 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
41
41
42 You can use -I/-X and list of file or directory names like normal
42 You can use -I/-X and list of file or directory names like normal
43 "hg diff" command. The `extdiff' extension makes snapshots of only
43 "hg diff" command. The `extdiff' extension makes snapshots of only
44 needed files, so running the external diff program will actually be
44 needed files, so running the external diff program will actually be
45 pretty fast (at least faster than having to compare the entire tree).
45 pretty fast (at least faster than having to compare the entire tree).
46 '''
46 '''
47
47
48 from mercurial.i18n import _
48 from mercurial.i18n import _
49 from mercurial.node import short
49 from mercurial.node import short
50 from mercurial import cmdutil, util, commands
50 from mercurial import cmdutil, util, commands
51 import os, shlex, shutil, tempfile
51 import os, shlex, shutil, tempfile
52
52
53 def snapshot_node(ui, repo, files, node, tmproot):
53 def snapshot_node(ui, repo, files, node, tmproot):
54 '''snapshot files as of some revision'''
54 '''snapshot files as of some revision'''
55 mf = repo.changectx(node).manifest()
56 dirname = os.path.basename(repo.root)
55 dirname = os.path.basename(repo.root)
57 if dirname == "":
56 if dirname == "":
58 dirname = "root"
57 dirname = "root"
59 dirname = '%s.%s' % (dirname, short(node))
58 dirname = '%s.%s' % (dirname, short(node))
60 base = os.path.join(tmproot, dirname)
59 base = os.path.join(tmproot, dirname)
61 os.mkdir(base)
60 os.mkdir(base)
62 ui.note(_('making snapshot of %d files from rev %s\n') %
61 ui.note(_('making snapshot of %d files from rev %s\n') %
63 (len(files), short(node)))
62 (len(files), short(node)))
63 ctx = repo[node]
64 for fn in files:
64 for fn in files:
65 if not fn in mf:
65 wfn = util.pconvert(fn)
66 if not wfn in ctx:
66 # skipping new file after a merge ?
67 # skipping new file after a merge ?
67 continue
68 continue
68 wfn = util.pconvert(fn)
69 ui.note(' %s\n' % wfn)
69 ui.note(' %s\n' % wfn)
70 dest = os.path.join(base, wfn)
70 dest = os.path.join(base, wfn)
71 destdir = os.path.dirname(dest)
71 destdir = os.path.dirname(dest)
72 if not os.path.isdir(destdir):
72 if not os.path.isdir(destdir):
73 os.makedirs(destdir)
73 os.makedirs(destdir)
74 data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
74 data = repo.wwritedata(wfn, ctx[wfn].data())
75 open(dest, 'wb').write(data)
75 open(dest, 'wb').write(data)
76 return dirname
76 return dirname
77
77
78
78
79 def snapshot_wdir(ui, repo, files, tmproot):
79 def snapshot_wdir(ui, repo, files, tmproot):
80 '''snapshot files from working directory.
80 '''snapshot files from working directory.
81 if not using snapshot, -I/-X does not work and recursive diff
81 if not using snapshot, -I/-X does not work and recursive diff
82 in tools like kdiff3 and meld displays too many files.'''
82 in tools like kdiff3 and meld displays too many files.'''
83 repo_root = repo.root
83 repo_root = repo.root
84
84
85 dirname = os.path.basename(repo_root)
85 dirname = os.path.basename(repo_root)
86 if dirname == "":
86 if dirname == "":
87 dirname = "root"
87 dirname = "root"
88 base = os.path.join(tmproot, dirname)
88 base = os.path.join(tmproot, dirname)
89 os.mkdir(base)
89 os.mkdir(base)
90 ui.note(_('making snapshot of %d files from working dir\n') %
90 ui.note(_('making snapshot of %d files from working dir\n') %
91 (len(files)))
91 (len(files)))
92
92
93 fns_and_mtime = []
93 fns_and_mtime = []
94
94
95 for fn in files:
95 for fn in files:
96 wfn = util.pconvert(fn)
96 wfn = util.pconvert(fn)
97 ui.note(' %s\n' % wfn)
97 ui.note(' %s\n' % wfn)
98 dest = os.path.join(base, wfn)
98 dest = os.path.join(base, wfn)
99 destdir = os.path.dirname(dest)
99 destdir = os.path.dirname(dest)
100 if not os.path.isdir(destdir):
100 if not os.path.isdir(destdir):
101 os.makedirs(destdir)
101 os.makedirs(destdir)
102
102
103 fp = open(dest, 'wb')
103 fp = open(dest, 'wb')
104 for chunk in util.filechunkiter(repo.wopener(wfn)):
104 for chunk in util.filechunkiter(repo.wopener(wfn)):
105 fp.write(chunk)
105 fp.write(chunk)
106 fp.close()
106 fp.close()
107
107
108 fns_and_mtime.append((dest, os.path.join(repo_root, fn),
108 fns_and_mtime.append((dest, os.path.join(repo_root, fn),
109 os.path.getmtime(dest)))
109 os.path.getmtime(dest)))
110
110
111
111
112 return dirname, fns_and_mtime
112 return dirname, fns_and_mtime
113
113
114
114
115 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
115 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
116 '''Do the actuall diff:
116 '''Do the actuall diff:
117
117
118 - copy to a temp structure if diffing 2 internal revisions
118 - copy to a temp structure if diffing 2 internal revisions
119 - copy to a temp structure if diffing working revision with
119 - copy to a temp structure if diffing working revision with
120 another one and more than 1 file is changed
120 another one and more than 1 file is changed
121 - just invoke the diff for a single file in the working dir
121 - just invoke the diff for a single file in the working dir
122 '''
122 '''
123 node1, node2 = cmdutil.revpair(repo, opts['rev'])
123 node1, node2 = cmdutil.revpair(repo, opts['rev'])
124 matcher = cmdutil.match(repo, pats, opts)
124 matcher = cmdutil.match(repo, pats, opts)
125 modified, added, removed, deleted, unknown = repo.status(
125 modified, added, removed, deleted, unknown = repo.status(
126 node1, node2, matcher)[:5]
126 node1, node2, matcher)[:5]
127 if not (modified or added or removed):
127 if not (modified or added or removed):
128 return 0
128 return 0
129
129
130 tmproot = tempfile.mkdtemp(prefix='extdiff.')
130 tmproot = tempfile.mkdtemp(prefix='extdiff.')
131 dir2root = ''
131 dir2root = ''
132 try:
132 try:
133 # Always make a copy of node1
133 # Always make a copy of node1
134 dir1 = snapshot_node(ui, repo, modified + removed, node1, tmproot)
134 dir1 = snapshot_node(ui, repo, modified + removed, node1, tmproot)
135 changes = len(modified) + len(removed) + len(added)
135 changes = len(modified) + len(removed) + len(added)
136
136
137 fns_and_mtime = []
137 fns_and_mtime = []
138
138
139 # If node2 is not the wc or there is >1 change, copy it
139 # If node2 is not the wc or there is >1 change, copy it
140 if node2:
140 if node2:
141 dir2 = snapshot_node(ui, repo, modified + added, node2, tmproot)
141 dir2 = snapshot_node(ui, repo, modified + added, node2, tmproot)
142 elif changes > 1:
142 elif changes > 1:
143 # we only actually need to get the files to copy back to the working
143 # we only actually need to get the files to copy back to the working
144 # dir in this case (because the other cases are: diffing 2 revisions
144 # dir in this case (because the other cases are: diffing 2 revisions
145 # or single file -- in which case the file is already directly passed
145 # or single file -- in which case the file is already directly passed
146 # to the diff tool).
146 # to the diff tool).
147 dir2, fns_and_mtime = snapshot_wdir(ui, repo, modified + added, tmproot)
147 dir2, fns_and_mtime = snapshot_wdir(ui, repo, modified + added, tmproot)
148 else:
148 else:
149 # This lets the diff tool open the changed file directly
149 # This lets the diff tool open the changed file directly
150 dir2 = ''
150 dir2 = ''
151 dir2root = repo.root
151 dir2root = repo.root
152
152
153 # If only one change, diff the files instead of the directories
153 # If only one change, diff the files instead of the directories
154 if changes == 1 :
154 if changes == 1 :
155 if len(modified):
155 if len(modified):
156 dir1 = os.path.join(dir1, util.localpath(modified[0]))
156 dir1 = os.path.join(dir1, util.localpath(modified[0]))
157 dir2 = os.path.join(dir2root, dir2, util.localpath(modified[0]))
157 dir2 = os.path.join(dir2root, dir2, util.localpath(modified[0]))
158 elif len(removed) :
158 elif len(removed) :
159 dir1 = os.path.join(dir1, util.localpath(removed[0]))
159 dir1 = os.path.join(dir1, util.localpath(removed[0]))
160 dir2 = os.devnull
160 dir2 = os.devnull
161 else:
161 else:
162 dir1 = os.devnull
162 dir1 = os.devnull
163 dir2 = os.path.join(dir2root, dir2, util.localpath(added[0]))
163 dir2 = os.path.join(dir2root, dir2, util.localpath(added[0]))
164
164
165 cmdline = ('%s %s %s %s' %
165 cmdline = ('%s %s %s %s' %
166 (util.shellquote(diffcmd), ' '.join(diffopts),
166 (util.shellquote(diffcmd), ' '.join(diffopts),
167 util.shellquote(dir1), util.shellquote(dir2)))
167 util.shellquote(dir1), util.shellquote(dir2)))
168 ui.debug('running %r in %s\n' % (cmdline, tmproot))
168 ui.debug('running %r in %s\n' % (cmdline, tmproot))
169 util.system(cmdline, cwd=tmproot)
169 util.system(cmdline, cwd=tmproot)
170
170
171 for copy_fn, working_fn, mtime in fns_and_mtime:
171 for copy_fn, working_fn, mtime in fns_and_mtime:
172 if os.path.getmtime(copy_fn) != mtime:
172 if os.path.getmtime(copy_fn) != mtime:
173 ui.debug('File changed while diffing. '
173 ui.debug('File changed while diffing. '
174 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
174 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
175 util.copyfile(copy_fn, working_fn)
175 util.copyfile(copy_fn, working_fn)
176
176
177 return 1
177 return 1
178 finally:
178 finally:
179 ui.note(_('cleaning up temp directory\n'))
179 ui.note(_('cleaning up temp directory\n'))
180 shutil.rmtree(tmproot)
180 shutil.rmtree(tmproot)
181
181
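The fns_and_mtime bookkeeping above is what lets edits made inside the external diff tool flow back into the working copy: snapshot_wdir records the modification time of every copied file, and after util.system() returns, dodiff copies back any snapshot whose mtime changed. A stripped-down sketch of that copy-back step, using only the standard library where the real code uses util.copyfile:

import os, shutil

def copy_back_edits(fns_and_mtime):
    '''fns_and_mtime holds (snapshot_path, working_path, mtime_at_snapshot)
    tuples; any snapshot the external tool modified is copied back.'''
    for copy_fn, working_fn, mtime in fns_and_mtime:
        if os.path.getmtime(copy_fn) != mtime:
            shutil.copyfile(copy_fn, working_fn)

# usage: build fns_and_mtime while snapshotting, run the diff tool, then
# call copy_back_edits(fns_and_mtime) to propagate in-tool edits.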
182 def extdiff(ui, repo, *pats, **opts):
182 def extdiff(ui, repo, *pats, **opts):
183 '''use external program to diff repository (or selected files)
183 '''use external program to diff repository (or selected files)
184
184
185 Show differences between revisions for the specified files, using
185 Show differences between revisions for the specified files, using
186 an external program. The default program used is diff, with
186 an external program. The default program used is diff, with
187 default options "-Npru".
187 default options "-Npru".
188
188
189 To select a different program, use the -p option. The program
189 To select a different program, use the -p option. The program
190 will be passed the names of two directories to compare. To pass
190 will be passed the names of two directories to compare. To pass
191 additional options to the program, use the -o option. These will
191 additional options to the program, use the -o option. These will
192 be passed before the names of the directories to compare.
192 be passed before the names of the directories to compare.
193
193
194 When two revision arguments are given, then changes are
194 When two revision arguments are given, then changes are
195 shown between those revisions. If only one revision is
195 shown between those revisions. If only one revision is
196 specified then that revision is compared to the working
196 specified then that revision is compared to the working
197 directory, and, when no revisions are specified, the
197 directory, and, when no revisions are specified, the
198 working directory files are compared to its parent.'''
198 working directory files are compared to its parent.'''
199 program = opts['program'] or 'diff'
199 program = opts['program'] or 'diff'
200 if opts['program']:
200 if opts['program']:
201 option = opts['option']
201 option = opts['option']
202 else:
202 else:
203 option = opts['option'] or ['-Npru']
203 option = opts['option'] or ['-Npru']
204 return dodiff(ui, repo, program, option, pats, opts)
204 return dodiff(ui, repo, program, option, pats, opts)
205
205
206 cmdtable = {
206 cmdtable = {
207 "extdiff":
207 "extdiff":
208 (extdiff,
208 (extdiff,
209 [('p', 'program', '', _('comparison program to run')),
209 [('p', 'program', '', _('comparison program to run')),
210 ('o', 'option', [], _('pass option to comparison program')),
210 ('o', 'option', [], _('pass option to comparison program')),
211 ('r', 'rev', [], _('revision')),
211 ('r', 'rev', [], _('revision')),
212 ] + commands.walkopts,
212 ] + commands.walkopts,
213 _('hg extdiff [OPT]... [FILE]...')),
213 _('hg extdiff [OPT]... [FILE]...')),
214 }
214 }
215
215
216 def uisetup(ui):
216 def uisetup(ui):
217 for cmd, path in ui.configitems('extdiff'):
217 for cmd, path in ui.configitems('extdiff'):
218 if cmd.startswith('cmd.'):
218 if cmd.startswith('cmd.'):
219 cmd = cmd[4:]
219 cmd = cmd[4:]
220 if not path: path = cmd
220 if not path: path = cmd
221 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
221 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
222 diffopts = diffopts and [diffopts] or []
222 diffopts = diffopts and [diffopts] or []
223 elif cmd.startswith('opts.'):
223 elif cmd.startswith('opts.'):
224 continue
224 continue
225 else:
225 else:
226 # command = path opts
226 # command = path opts
227 if path:
227 if path:
228 diffopts = shlex.split(path)
228 diffopts = shlex.split(path)
229 path = diffopts.pop(0)
229 path = diffopts.pop(0)
230 else:
230 else:
231 path, diffopts = cmd, []
231 path, diffopts = cmd, []
232 def save(cmd, path, diffopts):
232 def save(cmd, path, diffopts):
233 '''use closure to save diff command to use'''
233 '''use closure to save diff command to use'''
234 def mydiff(ui, repo, *pats, **opts):
234 def mydiff(ui, repo, *pats, **opts):
235 return dodiff(ui, repo, path, diffopts, pats, opts)
235 return dodiff(ui, repo, path, diffopts, pats, opts)
236 mydiff.__doc__ = '''use %(path)s to diff repository (or selected files)
236 mydiff.__doc__ = '''use %(path)s to diff repository (or selected files)
237
237
238 Show differences between revisions for the specified
238 Show differences between revisions for the specified
239 files, using the %(path)s program.
239 files, using the %(path)s program.
240
240
241 When two revision arguments are given, then changes are
241 When two revision arguments are given, then changes are
242 shown between those revisions. If only one revision is
242 shown between those revisions. If only one revision is
243 specified then that revision is compared to the working
243 specified then that revision is compared to the working
244 directory, and, when no revisions are specified, the
244 directory, and, when no revisions are specified, the
245 working directory files are compared to its parent.''' % {
245 working directory files are compared to its parent.''' % {
246 'path': util.uirepr(path),
246 'path': util.uirepr(path),
247 }
247 }
248 return mydiff
248 return mydiff
249 cmdtable[cmd] = (save(cmd, path, diffopts),
249 cmdtable[cmd] = (save(cmd, path, diffopts),
250 cmdtable['extdiff'][1][1:],
250 cmdtable['extdiff'][1][1:],
251 _('hg %s [OPTION]... [FILE]...') % cmd)
251 _('hg %s [OPTION]... [FILE]...') % cmd)
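uisetup() above accepts three spellings in the [extdiff] section: "cmd.NAME = path" (optionally paired with "opts.NAME = options"), bare "opts.NAME" keys that are consumed with their cmd entry, and the shorthand "NAME = path options". A small standalone sketch of that parsing, run against a hypothetical config dictionary rather than a real ui object:

# Sketch only: mirrors the uisetup() parsing on made-up [extdiff] entries.
import shlex

conf = {                          # hypothetical [extdiff] contents
    'cmd.kdiff3': '',             # empty value: program name defaults to the key
    'cmd.vimdiff': '/usr/bin/vimdiff',
    'opts.vimdiff': '-f',         # consumed together with cmd.vimdiff
    'meld': 'meld --newtab',      # shorthand: "command = path opts"
}

parsed = {}
for cmd, path in conf.items():
    if cmd.startswith('cmd.'):
        cmd = cmd[4:]
        if not path:
            path = cmd
        diffopts = conf.get('opts.' + cmd, '')
        diffopts = diffopts and [diffopts] or []
    elif cmd.startswith('opts.'):
        continue                  # handled together with its cmd. entry
    else:
        if path:
            diffopts = shlex.split(path)
            path = diffopts.pop(0)
        else:
            path, diffopts = cmd, []
    parsed[cmd] = (path, diffopts)

# parsed == {'kdiff3': ('kdiff3', []),
#            'vimdiff': ('/usr/bin/vimdiff', ['-f']),
#            'meld': ('meld', ['--newtab'])}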
@@ -1,357 +1,357 b''
1 # Minimal support for git commands on an hg repository
1 # Minimal support for git commands on an hg repository
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 '''browsing the repository in a graphical way
7 '''browsing the repository in a graphical way
8
8
9 The hgk extension allows browsing the history of a repository in a
9 The hgk extension allows browsing the history of a repository in a
10 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is
10 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is
11 not distributed with Mercurial.)
11 not distributed with Mercurial.)
12
12
13 hgk consists of two parts: a Tcl script that does the displaying and
13 hgk consists of two parts: a Tcl script that does the displaying and
14 querying of information, and an extension to mercurial named hgk.py,
14 querying of information, and an extension to mercurial named hgk.py,
15 which provides hooks for hgk to get information. hgk can be found in
15 which provides hooks for hgk to get information. hgk can be found in
16 the contrib directory, and hgk.py can be found in the hgext directory.
16 the contrib directory, and hgk.py can be found in the hgext directory.
17
17
18 To load the hgk.py extension, add it to your .hgrc file (you have
18 To load the hgk.py extension, add it to your .hgrc file (you have
19 to use your global $HOME/.hgrc file, not one in a repository). You
19 to use your global $HOME/.hgrc file, not one in a repository). You
20 can specify an absolute path:
20 can specify an absolute path:
21
21
22 [extensions]
22 [extensions]
23 hgk=/usr/local/lib/hgk.py
23 hgk=/usr/local/lib/hgk.py
24
24
25 Mercurial can also scan the default python library path for a file
25 Mercurial can also scan the default python library path for a file
26 named 'hgk.py' if you set hgk empty:
26 named 'hgk.py' if you set hgk empty:
27
27
28 [extensions]
28 [extensions]
29 hgk=
29 hgk=
30
30
31 The hg view command will launch the hgk Tcl script. For this command
31 The hg view command will launch the hgk Tcl script. For this command
32 to work, hgk must be in your search path. Alternately, you can
32 to work, hgk must be in your search path. Alternately, you can
33 specify the path to hgk in your .hgrc file:
33 specify the path to hgk in your .hgrc file:
34
34
35 [hgk]
35 [hgk]
36 path=/location/of/hgk
36 path=/location/of/hgk
37
37
38 hgk can make use of the extdiff extension to visualize revisions.
38 hgk can make use of the extdiff extension to visualize revisions.
39 Assuming you have already configured an extdiff vdiff command, just add:
39 Assuming you have already configured an extdiff vdiff command, just add:
40
40
41 [hgk]
41 [hgk]
42 vdiff=vdiff
42 vdiff=vdiff
43
43
44 The revisions context menu will now display additional entries to fire
44 The revisions context menu will now display additional entries to fire
45 vdiff on hovered and selected revisions.'''
45 vdiff on hovered and selected revisions.'''
46
46
47 import os
47 import os
48 from mercurial import commands, util, patch, revlog, cmdutil
48 from mercurial import commands, util, patch, revlog, cmdutil
49 from mercurial.node import nullid, nullrev, short
49 from mercurial.node import nullid, nullrev, short
50
50
51 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
51 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
52 """diff trees from two commits"""
52 """diff trees from two commits"""
53 def __difftree(repo, node1, node2, files=[]):
53 def __difftree(repo, node1, node2, files=[]):
54 assert node2 is not None
54 assert node2 is not None
55 mmap = repo.changectx(node1).manifest()
55 mmap = repo[node1].manifest()
56 mmap2 = repo.changectx(node2).manifest()
56 mmap2 = repo[node2].manifest()
57 m = cmdutil.match(repo, files)
57 m = cmdutil.match(repo, files)
58 status = repo.status(node1, node2, match=m)[:5]
58 status = repo.status(node1, node2, match=m)[:5]
59 modified, added, removed, deleted, unknown = status
59 modified, added, removed, deleted, unknown = status
60
60
61 empty = short(nullid)
61 empty = short(nullid)
62
62
63 for f in modified:
63 for f in modified:
64 # TODO get file permissions
64 # TODO get file permissions
65 ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
65 ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
66 (short(mmap[f]), short(mmap2[f]), f, f))
66 (short(mmap[f]), short(mmap2[f]), f, f))
67 for f in added:
67 for f in added:
68 ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
68 ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
69 (empty, short(mmap2[f]), f, f))
69 (empty, short(mmap2[f]), f, f))
70 for f in removed:
70 for f in removed:
71 ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
71 ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
72 (short(mmap[f]), empty, f, f))
72 (short(mmap[f]), empty, f, f))
73 ##
73 ##
74
74
75 while True:
75 while True:
76 if opts['stdin']:
76 if opts['stdin']:
77 try:
77 try:
78 line = raw_input().split(' ')
78 line = raw_input().split(' ')
79 node1 = line[0]
79 node1 = line[0]
80 if len(line) > 1:
80 if len(line) > 1:
81 node2 = line[1]
81 node2 = line[1]
82 else:
82 else:
83 node2 = None
83 node2 = None
84 except EOFError:
84 except EOFError:
85 break
85 break
86 node1 = repo.lookup(node1)
86 node1 = repo.lookup(node1)
87 if node2:
87 if node2:
88 node2 = repo.lookup(node2)
88 node2 = repo.lookup(node2)
89 else:
89 else:
90 node2 = node1
90 node2 = node1
91 node1 = repo.changelog.parents(node1)[0]
91 node1 = repo.changelog.parents(node1)[0]
92 if opts['patch']:
92 if opts['patch']:
93 if opts['pretty']:
93 if opts['pretty']:
94 catcommit(ui, repo, node2, "")
94 catcommit(ui, repo, node2, "")
95 m = cmdutil.match(repo, files)
95 m = cmdutil.match(repo, files)
96 patch.diff(repo, node1, node2, match=m,
96 patch.diff(repo, node1, node2, match=m,
97 opts=patch.diffopts(ui, {'git': True}))
97 opts=patch.diffopts(ui, {'git': True}))
98 else:
98 else:
99 __difftree(repo, node1, node2, files=files)
99 __difftree(repo, node1, node2, files=files)
100 if not opts['stdin']:
100 if not opts['stdin']:
101 break
101 break
102
102
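The ":100664 100664 ..." lines printed by __difftree above mimic git diff-tree raw output: old mode, new mode, old blob hash, new blob hash, a status letter (M/N/D), and the file name twice. A quick illustration with hypothetical twelve-character short hashes (the all-zero hash stands in for a missing side, as short(nullid) does in the code):

# Hypothetical hashes and file names, for illustration only.
empty = '0' * 12
old, new = 'a3f5c0ffee12', 'b4d9deadbeef'

print ":100664 100664 %s %s M\t%s\t%s" % (old, new, 'hgext/hgk.py', 'hgext/hgk.py')
print ":000000 100664 %s %s N\t%s\t%s" % (empty, new, 'newfile.py', 'newfile.py')
print ":100664 000000 %s %s D\t%s\t%s" % (old, empty, 'gone.py', 'gone.py')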
103 def catcommit(ui, repo, n, prefix, ctx=None):
103 def catcommit(ui, repo, n, prefix, ctx=None):
104 nlprefix = '\n' + prefix;
104 nlprefix = '\n' + prefix;
105 if ctx is None:
105 if ctx is None:
106 ctx = repo.changectx(n)
106 ctx = repo[n]
107 (p1, p2) = ctx.parents()
107 (p1, p2) = ctx.parents()
108 ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
108 ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
109 if p1: ui.write("parent %s\n" % short(p1.node()))
109 if p1: ui.write("parent %s\n" % short(p1.node()))
110 if p2: ui.write("parent %s\n" % short(p2.node()))
110 if p2: ui.write("parent %s\n" % short(p2.node()))
111 date = ctx.date()
111 date = ctx.date()
112 description = ctx.description().replace("\0", "")
112 description = ctx.description().replace("\0", "")
113 lines = description.splitlines()
113 lines = description.splitlines()
114 if lines and lines[-1].startswith('committer:'):
114 if lines and lines[-1].startswith('committer:'):
115 committer = lines[-1].split(': ')[1].rstrip()
115 committer = lines[-1].split(': ')[1].rstrip()
116 else:
116 else:
117 committer = ctx.user()
117 committer = ctx.user()
118
118
119 ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
119 ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
120 ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
120 ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
121 ui.write("revision %d\n" % ctx.rev())
121 ui.write("revision %d\n" % ctx.rev())
122 ui.write("branch %s\n\n" % ctx.branch())
122 ui.write("branch %s\n\n" % ctx.branch())
123
123
124 if prefix != "":
124 if prefix != "":
125 ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
125 ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
126 else:
126 else:
127 ui.write(description + "\n")
127 ui.write(description + "\n")
128 if prefix:
128 if prefix:
129 ui.write('\0')
129 ui.write('\0')
130
130
131 def base(ui, repo, node1, node2):
131 def base(ui, repo, node1, node2):
132 """Output common ancestor information"""
132 """Output common ancestor information"""
133 node1 = repo.lookup(node1)
133 node1 = repo.lookup(node1)
134 node2 = repo.lookup(node2)
134 node2 = repo.lookup(node2)
135 n = repo.changelog.ancestor(node1, node2)
135 n = repo.changelog.ancestor(node1, node2)
136 ui.write(short(n) + "\n")
136 ui.write(short(n) + "\n")
137
137
138 def catfile(ui, repo, type=None, r=None, **opts):
138 def catfile(ui, repo, type=None, r=None, **opts):
139 """cat a specific revision"""
139 """cat a specific revision"""
140 # in stdin mode, every line except the commit is prefixed with two
140 # in stdin mode, every line except the commit is prefixed with two
141 # spaces. This way our caller can find the commit without magic
141 # spaces. This way our caller can find the commit without magic
142 # strings
142 # strings
143 #
143 #
144 prefix = ""
144 prefix = ""
145 if opts['stdin']:
145 if opts['stdin']:
146 try:
146 try:
147 (type, r) = raw_input().split(' ');
147 (type, r) = raw_input().split(' ');
148 prefix = " "
148 prefix = " "
149 except EOFError:
149 except EOFError:
150 return
150 return
151
151
152 else:
152 else:
153 if not type or not r:
153 if not type or not r:
154 ui.warn("cat-file: type or revision not supplied\n")
154 ui.warn("cat-file: type or revision not supplied\n")
155 commands.help_(ui, 'cat-file')
155 commands.help_(ui, 'cat-file')
156
156
157 while r:
157 while r:
158 if type != "commit":
158 if type != "commit":
159 ui.warn("aborting hg cat-file only understands commits\n")
159 ui.warn("aborting hg cat-file only understands commits\n")
160 return 1;
160 return 1;
161 n = repo.lookup(r)
161 n = repo.lookup(r)
162 catcommit(ui, repo, n, prefix)
162 catcommit(ui, repo, n, prefix)
163 if opts['stdin']:
163 if opts['stdin']:
164 try:
164 try:
165 (type, r) = raw_input().split(' ');
165 (type, r) = raw_input().split(' ');
166 except EOFError:
166 except EOFError:
167 break
167 break
168 else:
168 else:
169 break
169 break
170
170
171 # git rev-tree is a confusing thing. You can supply a number of
171 # git rev-tree is a confusing thing. You can supply a number of
172 # commit sha1s on the command line, and it walks the commit history
172 # commit sha1s on the command line, and it walks the commit history
173 # telling you which commits are reachable from the supplied ones via
173 # telling you which commits are reachable from the supplied ones via
174 # a bitmask based on arg position.
174 # a bitmask based on arg position.
175 # you can specify a commit to stop at by starting the sha1 with ^
175 # you can specify a commit to stop at by starting the sha1 with ^
176 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
176 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
177 def chlogwalk():
177 def chlogwalk():
178 count = repo.changelog.count()
178 count = repo.changelog.count()
179 i = count
179 i = count
180 l = [0] * 100
180 l = [0] * 100
181 chunk = 100
181 chunk = 100
182 while True:
182 while True:
183 if chunk > i:
183 if chunk > i:
184 chunk = i
184 chunk = i
185 i = 0
185 i = 0
186 else:
186 else:
187 i -= chunk
187 i -= chunk
188
188
189 for x in xrange(0, chunk):
189 for x in xrange(0, chunk):
190 if i + x >= count:
190 if i + x >= count:
191 l[chunk - x:] = [0] * (chunk - x)
191 l[chunk - x:] = [0] * (chunk - x)
192 break
192 break
193 if full != None:
193 if full != None:
194 l[x] = repo.changectx(i + x)
194 l[x] = repo[i + x]
195 l[x].changeset() # force reading
195 l[x].changeset() # force reading
196 else:
196 else:
197 l[x] = 1
197 l[x] = 1
198 for x in xrange(chunk-1, -1, -1):
198 for x in xrange(chunk-1, -1, -1):
199 if l[x] != 0:
199 if l[x] != 0:
200 yield (i + x, full != None and l[x] or None)
200 yield (i + x, full != None and l[x] or None)
201 if i == 0:
201 if i == 0:
202 break
202 break
203
203
204 # calculate and return the reachability bitmask for sha
204 # calculate and return the reachability bitmask for sha
205 def is_reachable(ar, reachable, sha):
205 def is_reachable(ar, reachable, sha):
206 if len(ar) == 0:
206 if len(ar) == 0:
207 return 1
207 return 1
208 mask = 0
208 mask = 0
209 for i in xrange(len(ar)):
209 for i in xrange(len(ar)):
210 if sha in reachable[i]:
210 if sha in reachable[i]:
211 mask |= 1 << i
211 mask |= 1 << i
212
212
213 return mask
213 return mask
214
214
215 reachable = []
215 reachable = []
216 stop_sha1 = []
216 stop_sha1 = []
217 want_sha1 = []
217 want_sha1 = []
218 count = 0
218 count = 0
219
219
220 # figure out which commits they are asking for and which ones they
220 # figure out which commits they are asking for and which ones they
221 # want us to stop on
221 # want us to stop on
222 for i in xrange(len(args)):
222 for i in xrange(len(args)):
223 if args[i].startswith('^'):
223 if args[i].startswith('^'):
224 s = repo.lookup(args[i][1:])
224 s = repo.lookup(args[i][1:])
225 stop_sha1.append(s)
225 stop_sha1.append(s)
226 want_sha1.append(s)
226 want_sha1.append(s)
227 elif args[i] != 'HEAD':
227 elif args[i] != 'HEAD':
228 want_sha1.append(repo.lookup(args[i]))
228 want_sha1.append(repo.lookup(args[i]))
229
229
230 # calculate the graph for the supplied commits
230 # calculate the graph for the supplied commits
231 for i in xrange(len(want_sha1)):
231 for i in xrange(len(want_sha1)):
232 reachable.append({});
232 reachable.append({});
233 n = want_sha1[i];
233 n = want_sha1[i];
234 visit = [n];
234 visit = [n];
235 reachable[i][n] = 1
235 reachable[i][n] = 1
236 while visit:
236 while visit:
237 n = visit.pop(0)
237 n = visit.pop(0)
238 if n in stop_sha1:
238 if n in stop_sha1:
239 continue
239 continue
240 for p in repo.changelog.parents(n):
240 for p in repo.changelog.parents(n):
241 if p not in reachable[i]:
241 if p not in reachable[i]:
242 reachable[i][p] = 1
242 reachable[i][p] = 1
243 visit.append(p)
243 visit.append(p)
244 if p in stop_sha1:
244 if p in stop_sha1:
245 continue
245 continue
246
246
247 # walk the repository looking for commits that are in our
247 # walk the repository looking for commits that are in our
248 # reachability graph
248 # reachability graph
249 for i, ctx in chlogwalk():
249 for i, ctx in chlogwalk():
250 n = repo.changelog.node(i)
250 n = repo.changelog.node(i)
251 mask = is_reachable(want_sha1, reachable, n)
251 mask = is_reachable(want_sha1, reachable, n)
252 if mask:
252 if mask:
253 parentstr = ""
253 parentstr = ""
254 if parents:
254 if parents:
255 pp = repo.changelog.parents(n)
255 pp = repo.changelog.parents(n)
256 if pp[0] != nullid:
256 if pp[0] != nullid:
257 parentstr += " " + short(pp[0])
257 parentstr += " " + short(pp[0])
258 if pp[1] != nullid:
258 if pp[1] != nullid:
259 parentstr += " " + short(pp[1])
259 parentstr += " " + short(pp[1])
260 if not full:
260 if not full:
261 ui.write("%s%s\n" % (short(n), parentstr))
261 ui.write("%s%s\n" % (short(n), parentstr))
262 elif full == "commit":
262 elif full == "commit":
263 ui.write("%s%s\n" % (short(n), parentstr))
263 ui.write("%s%s\n" % (short(n), parentstr))
264 catcommit(ui, repo, n, ' ', ctx)
264 catcommit(ui, repo, n, ' ', ctx)
265 else:
265 else:
266 (p1, p2) = repo.changelog.parents(n)
266 (p1, p2) = repo.changelog.parents(n)
267 (h, h1, h2) = map(short, (n, p1, p2))
267 (h, h1, h2) = map(short, (n, p1, p2))
268 (i1, i2) = map(repo.changelog.rev, (p1, p2))
268 (i1, i2) = map(repo.changelog.rev, (p1, p2))
269
269
270 date = ctx.date()[0]
270 date = ctx.date()[0]
271 ui.write("%s %s:%s" % (date, h, mask))
271 ui.write("%s %s:%s" % (date, h, mask))
272 mask = is_reachable(want_sha1, reachable, p1)
272 mask = is_reachable(want_sha1, reachable, p1)
273 if i1 != nullrev and mask > 0:
273 if i1 != nullrev and mask > 0:
274 ui.write("%s:%s " % (h1, mask)),
274 ui.write("%s:%s " % (h1, mask)),
275 mask = is_reachable(want_sha1, reachable, p2)
275 mask = is_reachable(want_sha1, reachable, p2)
276 if i2 != nullrev and mask > 0:
276 if i2 != nullrev and mask > 0:
277 ui.write("%s:%s " % (h2, mask))
277 ui.write("%s:%s " % (h2, mask))
278 ui.write("\n")
278 ui.write("\n")
279 if maxnr and count >= maxnr:
279 if maxnr and count >= maxnr:
280 break
280 break
281 count += 1
281 count += 1
282
282
283 def revparse(ui, repo, *revs, **opts):
283 def revparse(ui, repo, *revs, **opts):
284 """Parse given revisions"""
284 """Parse given revisions"""
285 def revstr(rev):
285 def revstr(rev):
286 if rev == 'HEAD':
286 if rev == 'HEAD':
287 rev = 'tip'
287 rev = 'tip'
288 return revlog.hex(repo.lookup(rev))
288 return revlog.hex(repo.lookup(rev))
289
289
290 for r in revs:
290 for r in revs:
291 revrange = r.split(':', 1)
291 revrange = r.split(':', 1)
292 ui.write('%s\n' % revstr(revrange[0]))
292 ui.write('%s\n' % revstr(revrange[0]))
293 if len(revrange) == 2:
293 if len(revrange) == 2:
294 ui.write('^%s\n' % revstr(revrange[1]))
294 ui.write('^%s\n' % revstr(revrange[1]))
295
295
296 # git rev-list tries to order things by date, and has the ability to stop
296 # git rev-list tries to order things by date, and has the ability to stop
297 # at a given commit without walking the whole repo. TODO add the stop
297 # at a given commit without walking the whole repo. TODO add the stop
298 # parameter
298 # parameter
299 def revlist(ui, repo, *revs, **opts):
299 def revlist(ui, repo, *revs, **opts):
300 """print revisions"""
300 """print revisions"""
301 if opts['header']:
301 if opts['header']:
302 full = "commit"
302 full = "commit"
303 else:
303 else:
304 full = None
304 full = None
305 copy = [x for x in revs]
305 copy = [x for x in revs]
306 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
306 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
307
307
308 def config(ui, repo, **opts):
308 def config(ui, repo, **opts):
309 """print extension options"""
309 """print extension options"""
310 def writeopt(name, value):
310 def writeopt(name, value):
311 ui.write('k=%s\nv=%s\n' % (name, value))
311 ui.write('k=%s\nv=%s\n' % (name, value))
312
312
313 writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
313 writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
314
314
315
315
316 def view(ui, repo, *etc, **opts):
316 def view(ui, repo, *etc, **opts):
317 "start interactive history viewer"
317 "start interactive history viewer"
318 os.chdir(repo.root)
318 os.chdir(repo.root)
319 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
319 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
320 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
320 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
321 ui.debug("running %s\n" % cmd)
321 ui.debug("running %s\n" % cmd)
322 util.system(cmd)
322 util.system(cmd)
323
323
324 cmdtable = {
324 cmdtable = {
325 "^view":
325 "^view":
326 (view,
326 (view,
327 [('l', 'limit', '', 'limit number of changes displayed')],
327 [('l', 'limit', '', 'limit number of changes displayed')],
328 'hg view [-l LIMIT] [REVRANGE]'),
328 'hg view [-l LIMIT] [REVRANGE]'),
329 "debug-diff-tree":
329 "debug-diff-tree":
330 (difftree,
330 (difftree,
331 [('p', 'patch', None, 'generate patch'),
331 [('p', 'patch', None, 'generate patch'),
332 ('r', 'recursive', None, 'recursive'),
332 ('r', 'recursive', None, 'recursive'),
333 ('P', 'pretty', None, 'pretty'),
333 ('P', 'pretty', None, 'pretty'),
334 ('s', 'stdin', None, 'stdin'),
334 ('s', 'stdin', None, 'stdin'),
335 ('C', 'copy', None, 'detect copies'),
335 ('C', 'copy', None, 'detect copies'),
336 ('S', 'search', "", 'search')],
336 ('S', 'search', "", 'search')],
337 'hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...'),
337 'hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...'),
338 "debug-cat-file":
338 "debug-cat-file":
339 (catfile,
339 (catfile,
340 [('s', 'stdin', None, 'stdin')],
340 [('s', 'stdin', None, 'stdin')],
341 'hg debug-cat-file [OPTION]... TYPE FILE'),
341 'hg debug-cat-file [OPTION]... TYPE FILE'),
342 "debug-config":
342 "debug-config":
343 (config, [], 'hg debug-config'),
343 (config, [], 'hg debug-config'),
344 "debug-merge-base":
344 "debug-merge-base":
345 (base, [], 'hg debug-merge-base node node'),
345 (base, [], 'hg debug-merge-base node node'),
346 "debug-rev-parse":
346 "debug-rev-parse":
347 (revparse,
347 (revparse,
348 [('', 'default', '', 'ignored')],
348 [('', 'default', '', 'ignored')],
349 'hg debug-rev-parse REV'),
349 'hg debug-rev-parse REV'),
350 "debug-rev-list":
350 "debug-rev-list":
351 (revlist,
351 (revlist,
352 [('H', 'header', None, 'header'),
352 [('H', 'header', None, 'header'),
353 ('t', 'topo-order', None, 'topo-order'),
353 ('t', 'topo-order', None, 'topo-order'),
354 ('p', 'parents', None, 'parents'),
354 ('p', 'parents', None, 'parents'),
355 ('n', 'max-count', 0, 'max-count')],
355 ('n', 'max-count', 0, 'max-count')],
356 'hg debug-rev-list [options] revs'),
356 'hg debug-rev-list [options] revs'),
357 }
357 }
@@ -1,567 +1,567 b''
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a DSCM
10 # Keyword expansion hack against the grain of a DSCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an audience
14 # files (like LaTeX packages), that are mostly addressed to an audience
15 # not running a version control system.
15 # not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Setup in hgrc:
24 # Setup in hgrc:
25 #
25 #
26 # [extensions]
26 # [extensions]
27 # # enable extension
27 # # enable extension
28 # hgext.keyword =
28 # hgext.keyword =
29 #
29 #
30 # Files to act upon/ignore are specified in the [keyword] section.
30 # Files to act upon/ignore are specified in the [keyword] section.
31 # Customized keyword template mappings in the [keywordmaps] section.
31 # Customized keyword template mappings in the [keywordmaps] section.
32 #
32 #
33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
34
34
35 '''keyword expansion in local repositories
35 '''keyword expansion in local repositories
36
36
37 This extension expands RCS/CVS-like or self-customized $Keywords$
37 This extension expands RCS/CVS-like or self-customized $Keywords$
38 in tracked text files selected by your configuration.
38 in tracked text files selected by your configuration.
39
39
40 Keywords are only expanded in local repositories and not stored in
40 Keywords are only expanded in local repositories and not stored in
41 the change history. The mechanism can be regarded as a convenience
41 the change history. The mechanism can be regarded as a convenience
42 for the current user or for archive distribution.
42 for the current user or for archive distribution.
43
43
44 Configuration is done in the [keyword] and [keywordmaps] sections
44 Configuration is done in the [keyword] and [keywordmaps] sections
45 of hgrc files.
45 of hgrc files.
46
46
47 Example:
47 Example:
48
48
49 [keyword]
49 [keyword]
50 # expand keywords in every python file except those matching "x*"
50 # expand keywords in every python file except those matching "x*"
51 **.py =
51 **.py =
52 x* = ignore
52 x* = ignore
53
53
54 Note: the more specific you are in your filename patterns
54 Note: the more specific you are in your filename patterns
55 the less speed you lose in huge repos.
55 the less speed you lose in huge repos.
56
56
57 For [keywordmaps] template mapping and expansion demonstration and
57 For [keywordmaps] template mapping and expansion demonstration and
58 control run "hg kwdemo".
58 control run "hg kwdemo".
59
59
60 An additional date template filter {date|utcdate} is provided.
60 An additional date template filter {date|utcdate} is provided.
61
61
62 The default template mappings (view with "hg kwdemo -d") can be replaced
62 The default template mappings (view with "hg kwdemo -d") can be replaced
63 with customized keywords and templates.
63 with customized keywords and templates.
64 Again, run "hg kwdemo" to control the results of your config changes.
64 Again, run "hg kwdemo" to control the results of your config changes.
65
65
66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
67 the risk of inadvertently storing expanded keywords in the change history.
67 the risk of inadvertently storing expanded keywords in the change history.
68
68
69 To force expansion after enabling it, or a configuration change, run
69 To force expansion after enabling it, or a configuration change, run
70 "hg kwexpand".
70 "hg kwexpand".
71
71
72 Also, when committing with the record extension or using mq's qrecord, be aware
72 Also, when committing with the record extension or using mq's qrecord, be aware
73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
74 question to update keyword expansions after all changes have been checked in.
74 question to update keyword expansions after all changes have been checked in.
75
75
76 Expansions spanning more than one line and incremental expansions,
76 Expansions spanning more than one line and incremental expansions,
77 like CVS' $Log$, are not supported. A keyword template map
77 like CVS' $Log$, are not supported. A keyword template map
78 "Log = {desc}" expands to the first line of the changeset description.
78 "Log = {desc}" expands to the first line of the changeset description.
79 '''
79 '''
80
80
81 from mercurial import commands, cmdutil, dispatch, filelog, revlog
81 from mercurial import commands, cmdutil, dispatch, filelog, revlog
82 from mercurial import patch, localrepo, templater, templatefilters, util
82 from mercurial import patch, localrepo, templater, templatefilters, util
83 from mercurial.hgweb import webcommands
83 from mercurial.hgweb import webcommands
84 from mercurial.node import nullid, hex
84 from mercurial.node import nullid, hex
85 from mercurial.i18n import _
85 from mercurial.i18n import _
86 import re, shutil, tempfile, time
86 import re, shutil, tempfile, time
87
87
88 commands.optionalrepo += ' kwdemo'
88 commands.optionalrepo += ' kwdemo'
89
89
90 # hg commands that do not act on keywords
90 # hg commands that do not act on keywords
91 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
91 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
92 ' log outgoing push rename rollback tip'
92 ' log outgoing push rename rollback tip'
93 ' convert email glog')
93 ' convert email glog')
94
94
95 # hg commands that trigger expansion only when writing to working dir,
95 # hg commands that trigger expansion only when writing to working dir,
96 # not when reading filelog, and unexpand when reading from working dir
96 # not when reading filelog, and unexpand when reading from working dir
97 restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
97 restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
98
98
99 def utcdate(date):
99 def utcdate(date):
100 '''Returns hgdate in cvs-like UTC format.'''
100 '''Returns hgdate in cvs-like UTC format.'''
101 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
101 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
102
102
103 # make keyword tools accessible
103 # make keyword tools accessible
104 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
104 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
105
105
106
106
107 class kwtemplater(object):
107 class kwtemplater(object):
108 '''
108 '''
109 Sets up keyword templates, corresponding keyword regex, and
109 Sets up keyword templates, corresponding keyword regex, and
110 provides keyword substitution functions.
110 provides keyword substitution functions.
111 '''
111 '''
112 templates = {
112 templates = {
113 'Revision': '{node|short}',
113 'Revision': '{node|short}',
114 'Author': '{author|user}',
114 'Author': '{author|user}',
115 'Date': '{date|utcdate}',
115 'Date': '{date|utcdate}',
116 'RCSFile': '{file|basename},v',
116 'RCSFile': '{file|basename},v',
117 'Source': '{root}/{file},v',
117 'Source': '{root}/{file},v',
118 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
118 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
119 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
119 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
120 }
120 }
121
121
122 def __init__(self, ui, repo):
122 def __init__(self, ui, repo):
123 self.ui = ui
123 self.ui = ui
124 self.repo = repo
124 self.repo = repo
125 self.matcher = util.matcher(repo.root,
125 self.matcher = util.matcher(repo.root,
126 inc=kwtools['inc'], exc=kwtools['exc'])[1]
126 inc=kwtools['inc'], exc=kwtools['exc'])[1]
127 self.restrict = kwtools['hgcmd'] in restricted.split()
127 self.restrict = kwtools['hgcmd'] in restricted.split()
128
128
129 kwmaps = self.ui.configitems('keywordmaps')
129 kwmaps = self.ui.configitems('keywordmaps')
130 if kwmaps: # override default templates
130 if kwmaps: # override default templates
131 kwmaps = [(k, templater.parsestring(v, False))
131 kwmaps = [(k, templater.parsestring(v, False))
132 for (k, v) in kwmaps]
132 for (k, v) in kwmaps]
133 self.templates = dict(kwmaps)
133 self.templates = dict(kwmaps)
134 escaped = map(re.escape, self.templates.keys())
134 escaped = map(re.escape, self.templates.keys())
135 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
135 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
136 self.re_kw = re.compile(kwpat)
136 self.re_kw = re.compile(kwpat)
137
137
138 templatefilters.filters['utcdate'] = utcdate
138 templatefilters.filters['utcdate'] = utcdate
139 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
139 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
140 False, '', False)
140 False, '', False)
141
141
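The kwpat regex assembled in __init__ matches both the bare "$Keyword$" form and the expanded "$Keyword: ... $" form, which is what lets expand() and shrinktext() round-trip cleanly. A self-contained sketch with a hypothetical two-keyword map, outside of any repository:

# Sketch only: the keyword pattern from kwtemplater.__init__, exercised on
# a made-up template map and input string.
import re

templates = {'Id': '{file|basename},v {node|short}', 'Revision': '{node|short}'}
escaped = map(re.escape, templates.keys())
kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
re_kw = re.compile(kwpat)

text = 'keep $Id$ and $Revision: 0123456789ab $ here'

# expansion direction (the real code renders a changeset template per keyword)
expanded = re_kw.sub(lambda m: '$%s: expanded $' % m.group(1), text)
# -> 'keep $Id: expanded $ and $Revision: expanded $ here'

# shrink direction, exactly as shrinktext() does it
shrunk = re_kw.sub(r'$\1$', expanded)
# -> 'keep $Id$ and $Revision$ here'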
142 def getnode(self, path, fnode):
142 def getnode(self, path, fnode):
143 '''Derives changenode from file path and filenode.'''
143 '''Derives changenode from file path and filenode.'''
144 # used by kwfilelog.read and kwexpand
144 # used by kwfilelog.read and kwexpand
145 c = self.repo.filectx(path, fileid=fnode)
145 c = self.repo.filectx(path, fileid=fnode)
146 return c.node()
146 return c.node()
147
147
148 def substitute(self, data, path, node, subfunc):
148 def substitute(self, data, path, node, subfunc):
149 '''Replaces keywords in data with expanded template.'''
149 '''Replaces keywords in data with expanded template.'''
150 def kwsub(mobj):
150 def kwsub(mobj):
151 kw = mobj.group(1)
151 kw = mobj.group(1)
152 self.ct.use_template(self.templates[kw])
152 self.ct.use_template(self.templates[kw])
153 self.ui.pushbuffer()
153 self.ui.pushbuffer()
154 self.ct.show(changenode=node, root=self.repo.root, file=path)
154 self.ct.show(changenode=node, root=self.repo.root, file=path)
155 ekw = templatefilters.firstline(self.ui.popbuffer())
155 ekw = templatefilters.firstline(self.ui.popbuffer())
156 return '$%s: %s $' % (kw, ekw)
156 return '$%s: %s $' % (kw, ekw)
157 return subfunc(kwsub, data)
157 return subfunc(kwsub, data)
158
158
159 def expand(self, path, node, data):
159 def expand(self, path, node, data):
160 '''Returns data with keywords expanded.'''
160 '''Returns data with keywords expanded.'''
161 if not self.restrict and self.matcher(path) and not util.binary(data):
161 if not self.restrict and self.matcher(path) and not util.binary(data):
162 changenode = self.getnode(path, node)
162 changenode = self.getnode(path, node)
163 return self.substitute(data, path, changenode, self.re_kw.sub)
163 return self.substitute(data, path, changenode, self.re_kw.sub)
164 return data
164 return data
165
165
166 def iskwfile(self, path, islink):
166 def iskwfile(self, path, islink):
167 '''Returns true if path matches [keyword] pattern
167 '''Returns true if path matches [keyword] pattern
168 and is not a symbolic link.
168 and is not a symbolic link.
169 Caveat: localrepository._link fails on Windows.'''
169 Caveat: localrepository._link fails on Windows.'''
170 return self.matcher(path) and not islink(path)
170 return self.matcher(path) and not islink(path)
171
171
172 def overwrite(self, node, expand, files):
172 def overwrite(self, node, expand, files):
173 '''Overwrites selected files expanding/shrinking keywords.'''
173 '''Overwrites selected files expanding/shrinking keywords.'''
174 if node is not None: # commit
174 if node is not None: # commit
175 ctx = self.repo.changectx(node)
175 ctx = self.repo[node]
176 mf = ctx.manifest()
176 mf = ctx.manifest()
177 files = [f for f in ctx.files() if f in mf]
177 files = [f for f in ctx.files() if f in mf]
178 notify = self.ui.debug
178 notify = self.ui.debug
179 else: # kwexpand/kwshrink
179 else: # kwexpand/kwshrink
180 ctx = self.repo.changectx('.')
180 ctx = self.repo['.']
181 mf = ctx.manifest()
181 mf = ctx.manifest()
182 notify = self.ui.note
182 notify = self.ui.note
183 candidates = [f for f in files if self.iskwfile(f, mf.linkf)]
183 candidates = [f for f in files if self.iskwfile(f, mf.linkf)]
184 if candidates:
184 if candidates:
185 self.restrict = True # do not expand when reading
185 self.restrict = True # do not expand when reading
186 candidates.sort()
186 candidates.sort()
187 action = expand and 'expanding' or 'shrinking'
187 action = expand and 'expanding' or 'shrinking'
188 for f in candidates:
188 for f in candidates:
189 fp = self.repo.file(f)
189 fp = self.repo.file(f)
190 data = fp.read(mf[f])
190 data = fp.read(mf[f])
191 if util.binary(data):
191 if util.binary(data):
192 continue
192 continue
193 if expand:
193 if expand:
194 changenode = node or self.getnode(f, mf[f])
194 changenode = node or self.getnode(f, mf[f])
195 data, found = self.substitute(data, f, changenode,
195 data, found = self.substitute(data, f, changenode,
196 self.re_kw.subn)
196 self.re_kw.subn)
197 else:
197 else:
198 found = self.re_kw.search(data)
198 found = self.re_kw.search(data)
199 if found:
199 if found:
200 notify(_('overwriting %s %s keywords\n') % (f, action))
200 notify(_('overwriting %s %s keywords\n') % (f, action))
201 self.repo.wwrite(f, data, mf.flags(f))
201 self.repo.wwrite(f, data, mf.flags(f))
202 self.repo.dirstate.normal(f)
202 self.repo.dirstate.normal(f)
203 self.restrict = False
203 self.restrict = False
204
204
205 def shrinktext(self, text):
205 def shrinktext(self, text):
206 '''Unconditionally removes all keyword substitutions from text.'''
206 '''Unconditionally removes all keyword substitutions from text.'''
207 return self.re_kw.sub(r'$\1$', text)
207 return self.re_kw.sub(r'$\1$', text)
208
208
209 def shrink(self, fname, text):
209 def shrink(self, fname, text):
210 '''Returns text with all keyword substitutions removed.'''
210 '''Returns text with all keyword substitutions removed.'''
211 if self.matcher(fname) and not util.binary(text):
211 if self.matcher(fname) and not util.binary(text):
212 return self.shrinktext(text)
212 return self.shrinktext(text)
213 return text
213 return text
214
214
215 def shrinklines(self, fname, lines):
215 def shrinklines(self, fname, lines):
216 '''Returns lines with keyword substitutions removed.'''
216 '''Returns lines with keyword substitutions removed.'''
217 if self.matcher(fname):
217 if self.matcher(fname):
218 text = ''.join(lines)
218 text = ''.join(lines)
219 if not util.binary(text):
219 if not util.binary(text):
220 return self.shrinktext(text).splitlines(True)
220 return self.shrinktext(text).splitlines(True)
221 return lines
221 return lines
222
222
223 def wread(self, fname, data):
223 def wread(self, fname, data):
224 '''If in restricted mode returns data read from wdir with
224 '''If in restricted mode returns data read from wdir with
225 keyword substitutions removed.'''
225 keyword substitutions removed.'''
226 return self.restrict and self.shrink(fname, data) or data
226 return self.restrict and self.shrink(fname, data) or data
227
227
228 class kwfilelog(filelog.filelog):
228 class kwfilelog(filelog.filelog):
229 '''
229 '''
230 Subclass of filelog to hook into its read, add, cmp methods.
230 Subclass of filelog to hook into its read, add, cmp methods.
231 Keywords are "stored" unexpanded, and processed on reading.
231 Keywords are "stored" unexpanded, and processed on reading.
232 '''
232 '''
233 def __init__(self, opener, kwt, path):
233 def __init__(self, opener, kwt, path):
234 super(kwfilelog, self).__init__(opener, path)
234 super(kwfilelog, self).__init__(opener, path)
235 self.kwt = kwt
235 self.kwt = kwt
236 self.path = path
236 self.path = path
237
237
238 def read(self, node):
238 def read(self, node):
239 '''Expands keywords when reading filelog.'''
239 '''Expands keywords when reading filelog.'''
240 data = super(kwfilelog, self).read(node)
240 data = super(kwfilelog, self).read(node)
241 return self.kwt.expand(self.path, node, data)
241 return self.kwt.expand(self.path, node, data)
242
242
243 def add(self, text, meta, tr, link, p1=None, p2=None):
243 def add(self, text, meta, tr, link, p1=None, p2=None):
244 '''Removes keyword substitutions when adding to filelog.'''
244 '''Removes keyword substitutions when adding to filelog.'''
245 text = self.kwt.shrink(self.path, text)
245 text = self.kwt.shrink(self.path, text)
246 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
246 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
247
247
248 def cmp(self, node, text):
248 def cmp(self, node, text):
249 '''Removes keyword substitutions for comparison.'''
249 '''Removes keyword substitutions for comparison.'''
250 text = self.kwt.shrink(self.path, text)
250 text = self.kwt.shrink(self.path, text)
251 if self.renamed(node):
251 if self.renamed(node):
252 t2 = super(kwfilelog, self).read(node)
252 t2 = super(kwfilelog, self).read(node)
253 return t2 != text
253 return t2 != text
254 return revlog.revlog.cmp(self, node, text)
254 return revlog.revlog.cmp(self, node, text)
255
255
256 def _status(ui, repo, kwt, *pats, **opts):
256 def _status(ui, repo, kwt, *pats, **opts):
257 '''Bails out if [keyword] configuration is not active.
257 '''Bails out if [keyword] configuration is not active.
258 Returns status of working directory.'''
258 Returns status of working directory.'''
259 if kwt:
259 if kwt:
260 matcher = cmdutil.match(repo, pats, opts)
260 matcher = cmdutil.match(repo, pats, opts)
261 return repo.status(match=matcher, list_clean=True)
261 return repo.status(match=matcher, list_clean=True)
262 if ui.configitems('keyword'):
262 if ui.configitems('keyword'):
263 raise util.Abort(_('[keyword] patterns cannot match'))
263 raise util.Abort(_('[keyword] patterns cannot match'))
264 raise util.Abort(_('no [keyword] patterns configured'))
264 raise util.Abort(_('no [keyword] patterns configured'))
265
265
266 def _kwfwrite(ui, repo, expand, *pats, **opts):
266 def _kwfwrite(ui, repo, expand, *pats, **opts):
267 '''Selects files and passes them to kwtemplater.overwrite.'''
267 '''Selects files and passes them to kwtemplater.overwrite.'''
268 if repo.dirstate.parents()[1] != nullid:
268 if repo.dirstate.parents()[1] != nullid:
269 raise util.Abort(_('outstanding uncommitted merge'))
269 raise util.Abort(_('outstanding uncommitted merge'))
270 kwt = kwtools['templater']
270 kwt = kwtools['templater']
271 status = _status(ui, repo, kwt, *pats, **opts)
271 status = _status(ui, repo, kwt, *pats, **opts)
272 modified, added, removed, deleted, unknown, ignored, clean = status
272 modified, added, removed, deleted, unknown, ignored, clean = status
273 if modified or added or removed or deleted:
273 if modified or added or removed or deleted:
274 raise util.Abort(_('outstanding uncommitted changes'))
274 raise util.Abort(_('outstanding uncommitted changes'))
275 wlock = lock = None
275 wlock = lock = None
276 try:
276 try:
277 wlock = repo.wlock()
277 wlock = repo.wlock()
278 lock = repo.lock()
278 lock = repo.lock()
279 kwt.overwrite(None, expand, clean)
279 kwt.overwrite(None, expand, clean)
280 finally:
280 finally:
281 del wlock, lock
281 del wlock, lock
282
282
283
283
284 def demo(ui, repo, *args, **opts):
284 def demo(ui, repo, *args, **opts):
285 '''print [keywordmaps] configuration and an expansion example
285 '''print [keywordmaps] configuration and an expansion example
286
286
287 Show current, custom, or default keyword template maps
287 Show current, custom, or default keyword template maps
288 and their expansion.
288 and their expansion.
289
289
290 Extend current configuration by specifying maps as arguments
290 Extend current configuration by specifying maps as arguments
291 and optionally by reading from an additional hgrc file.
291 and optionally by reading from an additional hgrc file.
292
292
293 Override current keyword template maps with the "default" option.
293 Override current keyword template maps with the "default" option.
294 '''
294 '''
295 def demostatus(stat):
295 def demostatus(stat):
296 ui.status(_('\n\t%s\n') % stat)
296 ui.status(_('\n\t%s\n') % stat)
297
297
298 def demoitems(section, items):
298 def demoitems(section, items):
299 ui.write('[%s]\n' % section)
299 ui.write('[%s]\n' % section)
300 for k, v in items:
300 for k, v in items:
301 ui.write('%s = %s\n' % (k, v))
301 ui.write('%s = %s\n' % (k, v))
302
302
303 msg = 'hg keyword config and expansion example'
303 msg = 'hg keyword config and expansion example'
304 kwstatus = 'current'
304 kwstatus = 'current'
305 fn = 'demo.txt'
305 fn = 'demo.txt'
306 branchname = 'demobranch'
306 branchname = 'demobranch'
307 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
307 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
308 ui.note(_('creating temporary repo at %s\n') % tmpdir)
308 ui.note(_('creating temporary repo at %s\n') % tmpdir)
309 repo = localrepo.localrepository(ui, tmpdir, True)
309 repo = localrepo.localrepository(ui, tmpdir, True)
310 ui.setconfig('keyword', fn, '')
310 ui.setconfig('keyword', fn, '')
311 if args or opts.get('rcfile'):
311 if args or opts.get('rcfile'):
312 kwstatus = 'custom'
312 kwstatus = 'custom'
313 if opts.get('rcfile'):
313 if opts.get('rcfile'):
314 ui.readconfig(opts.get('rcfile'))
314 ui.readconfig(opts.get('rcfile'))
315 if opts.get('default'):
315 if opts.get('default'):
316 kwstatus = 'default'
316 kwstatus = 'default'
317 kwmaps = kwtemplater.templates
317 kwmaps = kwtemplater.templates
318 if ui.configitems('keywordmaps'):
318 if ui.configitems('keywordmaps'):
319 # override maps from optional rcfile
319 # override maps from optional rcfile
320 for k, v in kwmaps.iteritems():
320 for k, v in kwmaps.iteritems():
321 ui.setconfig('keywordmaps', k, v)
321 ui.setconfig('keywordmaps', k, v)
322 elif args:
322 elif args:
323 # simulate hgrc parsing
323 # simulate hgrc parsing
324 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
324 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
325 fp = repo.opener('hgrc', 'w')
325 fp = repo.opener('hgrc', 'w')
326 fp.writelines(rcmaps)
326 fp.writelines(rcmaps)
327 fp.close()
327 fp.close()
328 ui.readconfig(repo.join('hgrc'))
328 ui.readconfig(repo.join('hgrc'))
329 if not opts.get('default'):
329 if not opts.get('default'):
330 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
330 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
331 uisetup(ui)
331 uisetup(ui)
332 reposetup(ui, repo)
332 reposetup(ui, repo)
333 for k, v in ui.configitems('extensions'):
333 for k, v in ui.configitems('extensions'):
334 if k.endswith('keyword'):
334 if k.endswith('keyword'):
335 extension = '%s = %s' % (k, v)
335 extension = '%s = %s' % (k, v)
336 break
336 break
337 demostatus('config using %s keyword template maps' % kwstatus)
337 demostatus('config using %s keyword template maps' % kwstatus)
338 ui.write('[extensions]\n%s\n' % extension)
338 ui.write('[extensions]\n%s\n' % extension)
339 demoitems('keyword', ui.configitems('keyword'))
339 demoitems('keyword', ui.configitems('keyword'))
340 demoitems('keywordmaps', kwmaps.iteritems())
340 demoitems('keywordmaps', kwmaps.iteritems())
341 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
341 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
342 repo.wopener(fn, 'w').write(keywords)
342 repo.wopener(fn, 'w').write(keywords)
343 repo.add([fn])
343 repo.add([fn])
344 path = repo.wjoin(fn)
344 path = repo.wjoin(fn)
345 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
345 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
346 ui.note(keywords)
346 ui.note(keywords)
347 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
347 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
348 # silence branch command if not verbose
348 # silence branch command if not verbose
349 quiet = ui.quiet
349 quiet = ui.quiet
350 ui.quiet = not ui.verbose
350 ui.quiet = not ui.verbose
351 commands.branch(ui, repo, branchname)
351 commands.branch(ui, repo, branchname)
352 ui.quiet = quiet
352 ui.quiet = quiet
353 for name, cmd in ui.configitems('hooks'):
353 for name, cmd in ui.configitems('hooks'):
354 if name.split('.', 1)[0].find('commit') > -1:
354 if name.split('.', 1)[0].find('commit') > -1:
355 repo.ui.setconfig('hooks', name, '')
355 repo.ui.setconfig('hooks', name, '')
356 ui.note(_('unhooked all commit hooks\n'))
356 ui.note(_('unhooked all commit hooks\n'))
357 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
357 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
358 repo.commit(text=msg)
358 repo.commit(text=msg)
359 format = ui.verbose and ' in %s' % path or ''
359 format = ui.verbose and ' in %s' % path or ''
360 demostatus('%s keywords expanded%s' % (kwstatus, format))
360 demostatus('%s keywords expanded%s' % (kwstatus, format))
361 ui.write(repo.wread(fn))
361 ui.write(repo.wread(fn))
362 ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
362 ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
363 shutil.rmtree(tmpdir, ignore_errors=True)
363 shutil.rmtree(tmpdir, ignore_errors=True)
364
364
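# Standalone sketch of the expand/shrink round trip the demo above illustrates.
# The regex and template values are simplified stand-ins, not kwtemplater's own:
import re
_kw_re = re.compile(r'\$(Date|Id)(?::[^$\n]*)?\$')
def sketch_expand(text, values):
    # replace bare '$Keyword$' markers with '$Keyword: value $'
    return _kw_re.sub(lambda m: '$%s: %s $' % (m.group(1), values[m.group(1)]), text)
def sketch_shrink(text):
    # collapse expanded keywords back to their bare form
    return _kw_re.sub(lambda m: '$%s$' % m.group(1), text)
# sketch_expand('$Date$\n', {'Date': '2008/06/27'}) -> '$Date: 2008/06/27 $\n'
# sketch_shrink('$Date: 2008/06/27 $\n')            -> '$Date$\n'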
365 def expand(ui, repo, *pats, **opts):
365 def expand(ui, repo, *pats, **opts):
366 '''expand keywords in working directory
366 '''expand keywords in working directory
367
367
368 Run after (re)enabling keyword expansion.
368 Run after (re)enabling keyword expansion.
369
369
370 kwexpand refuses to run if given files contain local changes.
370 kwexpand refuses to run if given files contain local changes.
371 '''
371 '''
372 # 3rd argument sets expansion to True
372 # 3rd argument sets expansion to True
373 _kwfwrite(ui, repo, True, *pats, **opts)
373 _kwfwrite(ui, repo, True, *pats, **opts)
374
374
375 def files(ui, repo, *pats, **opts):
375 def files(ui, repo, *pats, **opts):
376 '''print files currently configured for keyword expansion
376 '''print files currently configured for keyword expansion
377
377
378 Crosscheck which files in working directory are potential targets for
378 Crosscheck which files in working directory are potential targets for
379 keyword expansion.
379 keyword expansion.
380 That is, files matched by [keyword] config patterns but not symlinks.
380 That is, files matched by [keyword] config patterns but not symlinks.
381 '''
381 '''
382 kwt = kwtools['templater']
382 kwt = kwtools['templater']
383 status = _status(ui, repo, kwt, *pats, **opts)
383 status = _status(ui, repo, kwt, *pats, **opts)
384 modified, added, removed, deleted, unknown, ignored, clean = status
384 modified, added, removed, deleted, unknown, ignored, clean = status
385 files = modified + added + clean
385 files = modified + added + clean
386 if opts.get('untracked'):
386 if opts.get('untracked'):
387 files += unknown
387 files += unknown
388 files.sort()
388 files.sort()
389 wctx = repo.changectx(None)
389 wctx = repo[None]
390 islink = lambda p: 'l' in wctx.flags(p)
390 islink = lambda p: 'l' in wctx.flags(p)
391 kwfiles = [f for f in files if kwt.iskwfile(f, islink)]
391 kwfiles = [f for f in files if kwt.iskwfile(f, islink)]
392 cwd = pats and repo.getcwd() or ''
392 cwd = pats and repo.getcwd() or ''
393 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
393 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
394 if opts.get('all') or opts.get('ignore'):
394 if opts.get('all') or opts.get('ignore'):
395 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
395 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
396 for char, filenames in kwfstats:
396 for char, filenames in kwfstats:
397 format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
397 format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
398 for f in filenames:
398 for f in filenames:
399 ui.write(format % repo.pathto(f, cwd))
399 ui.write(format % repo.pathto(f, cwd))
400
400
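# Hypothetical output of the formatting loop above (paths are made up): with
# -a or -v each path is prefixed by its status flag, 'K' for keyword-enabled
# files and 'I' for files excluded from expansion; otherwise only paths print.
#   K docs/readme.txt
#   I contrib/notes.txt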
401 def shrink(ui, repo, *pats, **opts):
401 def shrink(ui, repo, *pats, **opts):
402 '''revert expanded keywords in working directory
402 '''revert expanded keywords in working directory
403
403
404 Run before changing/disabling active keywords
404 Run before changing/disabling active keywords
405 or if you experience problems with "hg import" or "hg merge".
405 or if you experience problems with "hg import" or "hg merge".
406
406
407 kwshrink refuses to run if given files contain local changes.
407 kwshrink refuses to run if given files contain local changes.
408 '''
408 '''
409 # 3rd argument sets expansion to False
409 # 3rd argument sets expansion to False
410 _kwfwrite(ui, repo, False, *pats, **opts)
410 _kwfwrite(ui, repo, False, *pats, **opts)
411
411
412
412
413 def uisetup(ui):
413 def uisetup(ui):
414 '''Collects [keyword] config in kwtools.
414 '''Collects [keyword] config in kwtools.
415 Monkeypatches dispatch._parse if needed.'''
415 Monkeypatches dispatch._parse if needed.'''
416
416
417 for pat, opt in ui.configitems('keyword'):
417 for pat, opt in ui.configitems('keyword'):
418 if opt != 'ignore':
418 if opt != 'ignore':
419 kwtools['inc'].append(pat)
419 kwtools['inc'].append(pat)
420 else:
420 else:
421 kwtools['exc'].append(pat)
421 kwtools['exc'].append(pat)
422
422
423 if kwtools['inc']:
423 if kwtools['inc']:
424 def kwdispatch_parse(ui, args):
424 def kwdispatch_parse(ui, args):
425 '''Monkeypatch dispatch._parse to obtain running hg command.'''
425 '''Monkeypatch dispatch._parse to obtain running hg command.'''
426 cmd, func, args, options, cmdoptions = dispatch_parse(ui, args)
426 cmd, func, args, options, cmdoptions = dispatch_parse(ui, args)
427 kwtools['hgcmd'] = cmd
427 kwtools['hgcmd'] = cmd
428 return cmd, func, args, options, cmdoptions
428 return cmd, func, args, options, cmdoptions
429
429
430 dispatch_parse = dispatch._parse
430 dispatch_parse = dispatch._parse
431 dispatch._parse = kwdispatch_parse
431 dispatch._parse = kwdispatch_parse
432
432
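# Worked example for the loop in uisetup above (section contents are
# hypothetical): an hgrc containing
#
#   [keyword]
#   **.py =
#   x*    = ignore
#
# yields kwtools['inc'] == ['**.py'] and kwtools['exc'] == ['x*']; any
# pattern not mapped to 'ignore' becomes an inclusion pattern.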
433 def reposetup(ui, repo):
433 def reposetup(ui, repo):
434 '''Sets up repo as kwrepo for keyword substitution.
434 '''Sets up repo as kwrepo for keyword substitution.
435 Overrides file method to return kwfilelog instead of filelog
435 Overrides file method to return kwfilelog instead of filelog
436 if file matches user configuration.
436 if file matches user configuration.
437 Wraps commit to overwrite configured files with updated
437 Wraps commit to overwrite configured files with updated
438 keyword substitutions.
438 keyword substitutions.
439 Monkeypatches patch and webcommands.'''
439 Monkeypatches patch and webcommands.'''
440
440
441 try:
441 try:
442 if (not repo.local() or not kwtools['inc']
442 if (not repo.local() or not kwtools['inc']
443 or kwtools['hgcmd'] in nokwcommands.split()
443 or kwtools['hgcmd'] in nokwcommands.split()
444 or '.hg' in util.splitpath(repo.root)
444 or '.hg' in util.splitpath(repo.root)
445 or repo._url.startswith('bundle:')):
445 or repo._url.startswith('bundle:')):
446 return
446 return
447 except AttributeError:
447 except AttributeError:
448 pass
448 pass
449
449
450 kwtools['templater'] = kwt = kwtemplater(ui, repo)
450 kwtools['templater'] = kwt = kwtemplater(ui, repo)
451
451
452 class kwrepo(repo.__class__):
452 class kwrepo(repo.__class__):
453 def file(self, f):
453 def file(self, f):
454 if f[0] == '/':
454 if f[0] == '/':
455 f = f[1:]
455 f = f[1:]
456 return kwfilelog(self.sopener, kwt, f)
456 return kwfilelog(self.sopener, kwt, f)
457
457
458 def wread(self, filename):
458 def wread(self, filename):
459 data = super(kwrepo, self).wread(filename)
459 data = super(kwrepo, self).wread(filename)
460 return kwt.wread(filename, data)
460 return kwt.wread(filename, data)
461
461
462 def commit(self, files=None, text='', user=None, date=None,
462 def commit(self, files=None, text='', user=None, date=None,
463 match=None, force=False, force_editor=False,
463 match=None, force=False, force_editor=False,
464 p1=None, p2=None, extra={}, empty_ok=False):
464 p1=None, p2=None, extra={}, empty_ok=False):
465 wlock = lock = None
465 wlock = lock = None
466 _p1 = _p2 = None
466 _p1 = _p2 = None
467 try:
467 try:
468 wlock = self.wlock()
468 wlock = self.wlock()
469 lock = self.lock()
469 lock = self.lock()
470 # store and postpone commit hooks
470 # store and postpone commit hooks
471 commithooks = {}
471 commithooks = {}
472 for name, cmd in ui.configitems('hooks'):
472 for name, cmd in ui.configitems('hooks'):
473 if name.split('.', 1)[0] == 'commit':
473 if name.split('.', 1)[0] == 'commit':
474 commithooks[name] = cmd
474 commithooks[name] = cmd
475 ui.setconfig('hooks', name, None)
475 ui.setconfig('hooks', name, None)
476 if commithooks:
476 if commithooks:
477 # store parents for commit hook environment
477 # store parents for commit hook environment
478 if p1 is None:
478 if p1 is None:
479 _p1, _p2 = repo.dirstate.parents()
479 _p1, _p2 = repo.dirstate.parents()
480 else:
480 else:
481 _p1, _p2 = p1, p2 or nullid
481 _p1, _p2 = p1, p2 or nullid
482 _p1 = hex(_p1)
482 _p1 = hex(_p1)
483 if _p2 == nullid:
483 if _p2 == nullid:
484 _p2 = ''
484 _p2 = ''
485 else:
485 else:
486 _p2 = hex(_p2)
486 _p2 = hex(_p2)
487
487
488 n = super(kwrepo, self).commit(files, text, user, date, match,
488 n = super(kwrepo, self).commit(files, text, user, date, match,
489 force, force_editor, p1, p2,
489 force, force_editor, p1, p2,
490 extra, empty_ok)
490 extra, empty_ok)
491
491
492 # restore commit hooks
492 # restore commit hooks
493 for name, cmd in commithooks.iteritems():
493 for name, cmd in commithooks.iteritems():
494 ui.setconfig('hooks', name, cmd)
494 ui.setconfig('hooks', name, cmd)
495 if n is not None:
495 if n is not None:
496 kwt.overwrite(n, True, None)
496 kwt.overwrite(n, True, None)
497 repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
497 repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
498 return n
498 return n
499 finally:
499 finally:
500 del wlock, lock
500 del wlock, lock
501
501
502 # monkeypatches
502 # monkeypatches
503 def kwpatchfile_init(self, ui, fname, missing=False):
503 def kwpatchfile_init(self, ui, fname, missing=False):
504 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
504 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
505 rejects or conflicts due to expanded keywords in working dir.'''
505 rejects or conflicts due to expanded keywords in working dir.'''
506 patchfile_init(self, ui, fname, missing)
506 patchfile_init(self, ui, fname, missing)
507 # shrink keywords read from working dir
507 # shrink keywords read from working dir
508 self.lines = kwt.shrinklines(self.fname, self.lines)
508 self.lines = kwt.shrinklines(self.fname, self.lines)
509
509
510 def kw_diff(repo, node1=None, node2=None, match=None,
510 def kw_diff(repo, node1=None, node2=None, match=None,
511 fp=None, changes=None, opts=None):
511 fp=None, changes=None, opts=None):
512 '''Monkeypatch patch.diff to avoid expansion except when
512 '''Monkeypatch patch.diff to avoid expansion except when
513 comparing against working dir.'''
513 comparing against working dir.'''
514 if node2 is not None:
514 if node2 is not None:
515 kwt.matcher = util.never
515 kwt.matcher = util.never
516 elif node1 is not None and node1 != repo.changectx('.').node():
516 elif node1 is not None and node1 != repo['.'].node():
517 kwt.restrict = True
517 kwt.restrict = True
518 patch_diff(repo, node1, node2, match, fp, changes, opts)
518 patch_diff(repo, node1, node2, match, fp, changes, opts)
519
519
520 def kwweb_annotate(web, req, tmpl):
520 def kwweb_annotate(web, req, tmpl):
521 '''Wraps webcommands.annotate turning off keyword expansion.'''
521 '''Wraps webcommands.annotate turning off keyword expansion.'''
522 kwt.matcher = util.never
522 kwt.matcher = util.never
523 return webcommands_annotate(web, req, tmpl)
523 return webcommands_annotate(web, req, tmpl)
524
524
525 def kwweb_changeset(web, req, tmpl):
525 def kwweb_changeset(web, req, tmpl):
526 '''Wraps webcommands.changeset turning off keyword expansion.'''
526 '''Wraps webcommands.changeset turning off keyword expansion.'''
527 kwt.matcher = util.never
527 kwt.matcher = util.never
528 return webcommands_changeset(web, req, tmpl)
528 return webcommands_changeset(web, req, tmpl)
529
529
530 def kwweb_filediff(web, req, tmpl):
530 def kwweb_filediff(web, req, tmpl):
531 '''Wraps webcommands.filediff turning off keyword expansion.'''
531 '''Wraps webcommands.filediff turning off keyword expansion.'''
532 kwt.matcher = util.never
532 kwt.matcher = util.never
533 return webcommands_filediff(web, req, tmpl)
533 return webcommands_filediff(web, req, tmpl)
534
534
535 repo.__class__ = kwrepo
535 repo.__class__ = kwrepo
536
536
537 patchfile_init = patch.patchfile.__init__
537 patchfile_init = patch.patchfile.__init__
538 patch_diff = patch.diff
538 patch_diff = patch.diff
539 webcommands_annotate = webcommands.annotate
539 webcommands_annotate = webcommands.annotate
540 webcommands_changeset = webcommands.changeset
540 webcommands_changeset = webcommands.changeset
541 webcommands_filediff = webcommands.filediff
541 webcommands_filediff = webcommands.filediff
542
542
543 patch.patchfile.__init__ = kwpatchfile_init
543 patch.patchfile.__init__ = kwpatchfile_init
544 patch.diff = kw_diff
544 patch.diff = kw_diff
545 webcommands.annotate = kwweb_annotate
545 webcommands.annotate = kwweb_annotate
546 webcommands.changeset = webcommands.rev = kwweb_changeset
546 webcommands.changeset = webcommands.rev = kwweb_changeset
547 webcommands.filediff = webcommands.diff = kwweb_filediff
547 webcommands.filediff = webcommands.diff = kwweb_filediff
548
548
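# Generic sketch of the save/wrap/replace pattern used for the monkeypatches
# above (the helper and its arguments are illustrative, not part of keyword.py):
# keep a reference to the original, install a wrapper that first adjusts state,
# then delegates.
def _wrap(module, name, before):
    orig = getattr(module, name)
    def wrapper(*args, **kwargs):
        before()                      # e.g. turn off expansion for this call
        return orig(*args, **kwargs)
    setattr(module, name, wrapper)
# e.g. _wrap(webcommands, 'annotate', lambda: setattr(kwt, 'matcher', util.never))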
549
549
550 cmdtable = {
550 cmdtable = {
551 'kwdemo':
551 'kwdemo':
552 (demo,
552 (demo,
553 [('d', 'default', None, _('show default keyword template maps')),
553 [('d', 'default', None, _('show default keyword template maps')),
554 ('f', 'rcfile', [], _('read maps from rcfile'))],
554 ('f', 'rcfile', [], _('read maps from rcfile'))],
555 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
555 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
556 'kwexpand': (expand, commands.walkopts,
556 'kwexpand': (expand, commands.walkopts,
557 _('hg kwexpand [OPTION]... [FILE]...')),
557 _('hg kwexpand [OPTION]... [FILE]...')),
558 'kwfiles':
558 'kwfiles':
559 (files,
559 (files,
560 [('a', 'all', None, _('show keyword status flags of all files')),
560 [('a', 'all', None, _('show keyword status flags of all files')),
561 ('i', 'ignore', None, _('show files excluded from expansion')),
561 ('i', 'ignore', None, _('show files excluded from expansion')),
562 ('u', 'untracked', None, _('additionally show untracked files')),
562 ('u', 'untracked', None, _('additionally show untracked files')),
563 ] + commands.walkopts,
563 ] + commands.walkopts,
564 _('hg kwfiles [OPTION]... [FILE]...')),
564 _('hg kwfiles [OPTION]... [FILE]...')),
565 'kwshrink': (shrink, commands.walkopts,
565 'kwshrink': (shrink, commands.walkopts,
566 _('hg kwshrink [OPTION]... [FILE]...')),
566 _('hg kwshrink [OPTION]... [FILE]...')),
567 }
567 }
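# The entries above follow Mercurial's cmdtable convention:
# name -> (callable, option list, synopsis), where each option is a
# (short flag, long flag, default, help text) tuple. A minimal hypothetical
# entry would look like:
#
#   'kwnull': (nullcmd, [('n', 'dry-run', None, 'do nothing')],
#              'hg kwnull [OPTION]...')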
@@ -1,2458 +1,2458 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial.node import bin, hex, short
33 from mercurial.node import bin, hex, short
34 from mercurial.repo import RepoError
34 from mercurial.repo import RepoError
35 from mercurial import commands, cmdutil, hg, patch, revlog, util
35 from mercurial import commands, cmdutil, hg, patch, revlog, util
36 from mercurial import repair
36 from mercurial import repair
37 import os, sys, re, errno
37 import os, sys, re, errno
38
38
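# Sketch of the on-disk state described in the module docstring above and
# managed by the queue class below (paths and contents are illustrative):
# .hg/patches/series lists every known patch, one name per line with optional
# '# ...' comments and guards, while .hg/patches/status records applied
# patches as '<hex node>:<patch name>' lines. A rough standalone reader,
# relying on the os import above:
def read_queue_state(patchdir):
    def readlines(name):
        p = os.path.join(patchdir, name)
        if not os.path.exists(p):
            return []
        return open(p).read().splitlines()
    series = [l.split('#', 1)[0].strip() for l in readlines('series')]
    series = [l for l in series if l]
    applied = [tuple(l.split(':', 1)) for l in readlines('status')]
    return series, applied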
39 commands.norepo += " qclone"
39 commands.norepo += " qclone"
40
40
41 # Patch names look like unix file names.
41 # Patch names look like unix file names.
42 # They must be joinable with the queue directory and result in the patch path.
42 # They must be joinable with the queue directory and result in the patch path.
43 normname = util.normpath
43 normname = util.normpath
44
44
45 class statusentry:
45 class statusentry:
46 def __init__(self, rev, name=None):
46 def __init__(self, rev, name=None):
47 if not name:
47 if not name:
48 fields = rev.split(':', 1)
48 fields = rev.split(':', 1)
49 if len(fields) == 2:
49 if len(fields) == 2:
50 self.rev, self.name = fields
50 self.rev, self.name = fields
51 else:
51 else:
52 self.rev, self.name = None, None
52 self.rev, self.name = None, None
53 else:
53 else:
54 self.rev, self.name = rev, name
54 self.rev, self.name = rev, name
55
55
56 def __str__(self):
56 def __str__(self):
57 return self.rev + ':' + self.name
57 return self.rev + ':' + self.name
58
58
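# Round trip for the status file format handled by statusentry (the node hash
# and patch name are made up):
#   e = statusentry('0a1b2c3d4e5f6a7b8c9d0a1b2c3d4e5f6a7b8c9d:fix-typo.patch')
#   e.rev  == '0a1b2c3d4e5f6a7b8c9d0a1b2c3d4e5f6a7b8c9d'
#   e.name == 'fix-typo.patch'
#   str(e) reproduces the original line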
59 class queue:
59 class queue:
60 def __init__(self, ui, path, patchdir=None):
60 def __init__(self, ui, path, patchdir=None):
61 self.basepath = path
61 self.basepath = path
62 self.path = patchdir or os.path.join(path, "patches")
62 self.path = patchdir or os.path.join(path, "patches")
63 self.opener = util.opener(self.path)
63 self.opener = util.opener(self.path)
64 self.ui = ui
64 self.ui = ui
65 self.applied = []
65 self.applied = []
66 self.full_series = []
66 self.full_series = []
67 self.applied_dirty = 0
67 self.applied_dirty = 0
68 self.series_dirty = 0
68 self.series_dirty = 0
69 self.series_path = "series"
69 self.series_path = "series"
70 self.status_path = "status"
70 self.status_path = "status"
71 self.guards_path = "guards"
71 self.guards_path = "guards"
72 self.active_guards = None
72 self.active_guards = None
73 self.guards_dirty = False
73 self.guards_dirty = False
74 self._diffopts = None
74 self._diffopts = None
75
75
76 if os.path.exists(self.join(self.series_path)):
76 if os.path.exists(self.join(self.series_path)):
77 self.full_series = self.opener(self.series_path).read().splitlines()
77 self.full_series = self.opener(self.series_path).read().splitlines()
78 self.parse_series()
78 self.parse_series()
79
79
80 if os.path.exists(self.join(self.status_path)):
80 if os.path.exists(self.join(self.status_path)):
81 lines = self.opener(self.status_path).read().splitlines()
81 lines = self.opener(self.status_path).read().splitlines()
82 self.applied = [statusentry(l) for l in lines]
82 self.applied = [statusentry(l) for l in lines]
83
83
84 def diffopts(self):
84 def diffopts(self):
85 if self._diffopts is None:
85 if self._diffopts is None:
86 self._diffopts = patch.diffopts(self.ui)
86 self._diffopts = patch.diffopts(self.ui)
87 return self._diffopts
87 return self._diffopts
88
88
89 def join(self, *p):
89 def join(self, *p):
90 return os.path.join(self.path, *p)
90 return os.path.join(self.path, *p)
91
91
92 def find_series(self, patch):
92 def find_series(self, patch):
93 pre = re.compile("(\s*)([^#]+)")
93 pre = re.compile("(\s*)([^#]+)")
94 index = 0
94 index = 0
95 for l in self.full_series:
95 for l in self.full_series:
96 m = pre.match(l)
96 m = pre.match(l)
97 if m:
97 if m:
98 s = m.group(2)
98 s = m.group(2)
99 s = s.rstrip()
99 s = s.rstrip()
100 if s == patch:
100 if s == patch:
101 return index
101 return index
102 index += 1
102 index += 1
103 return None
103 return None
104
104
105 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
105 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
106
106
107 def parse_series(self):
107 def parse_series(self):
108 self.series = []
108 self.series = []
109 self.series_guards = []
109 self.series_guards = []
110 for l in self.full_series:
110 for l in self.full_series:
111 h = l.find('#')
111 h = l.find('#')
112 if h == -1:
112 if h == -1:
113 patch = l
113 patch = l
114 comment = ''
114 comment = ''
115 elif h == 0:
115 elif h == 0:
116 continue
116 continue
117 else:
117 else:
118 patch = l[:h]
118 patch = l[:h]
119 comment = l[h:]
119 comment = l[h:]
120 patch = patch.strip()
120 patch = patch.strip()
121 if patch:
121 if patch:
122 if patch in self.series:
122 if patch in self.series:
123 raise util.Abort(_('%s appears more than once in %s') %
123 raise util.Abort(_('%s appears more than once in %s') %
124 (patch, self.join(self.series_path)))
124 (patch, self.join(self.series_path)))
125 self.series.append(patch)
125 self.series.append(patch)
126 self.series_guards.append(self.guard_re.findall(comment))
126 self.series_guards.append(self.guard_re.findall(comment))
127
127
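# Worked example for parse_series (file contents are hypothetical): with
#   full_series = ['# a comment', 'stable-fix.patch',
#                  'experiment.patch #+exp #-release']
# the pure comment line is skipped, self.series becomes
#   ['stable-fix.patch', 'experiment.patch']
# and self.series_guards becomes
#   [[], ['+exp', '-release']]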
128 def check_guard(self, guard):
128 def check_guard(self, guard):
129 if not guard:
129 if not guard:
130 return _('guard cannot be an empty string')
130 return _('guard cannot be an empty string')
131 bad_chars = '# \t\r\n\f'
131 bad_chars = '# \t\r\n\f'
132 first = guard[0]
132 first = guard[0]
133 for c in '-+':
133 for c in '-+':
134 if first == c:
134 if first == c:
135 return (_('guard %r starts with invalid character: %r') %
135 return (_('guard %r starts with invalid character: %r') %
136 (guard, c))
136 (guard, c))
137 for c in bad_chars:
137 for c in bad_chars:
138 if c in guard:
138 if c in guard:
139 return _('invalid character in guard %r: %r') % (guard, c)
139 return _('invalid character in guard %r: %r') % (guard, c)
140
140
141 def set_active(self, guards):
141 def set_active(self, guards):
142 for guard in guards:
142 for guard in guards:
143 bad = self.check_guard(guard)
143 bad = self.check_guard(guard)
144 if bad:
144 if bad:
145 raise util.Abort(bad)
145 raise util.Abort(bad)
146 guards = dict.fromkeys(guards).keys()
146 guards = dict.fromkeys(guards).keys()
147 guards.sort()
147 guards.sort()
148 self.ui.debug('active guards: %s\n' % ' '.join(guards))
148 self.ui.debug('active guards: %s\n' % ' '.join(guards))
149 self.active_guards = guards
149 self.active_guards = guards
150 self.guards_dirty = True
150 self.guards_dirty = True
151
151
152 def active(self):
152 def active(self):
153 if self.active_guards is None:
153 if self.active_guards is None:
154 self.active_guards = []
154 self.active_guards = []
155 try:
155 try:
156 guards = self.opener(self.guards_path).read().split()
156 guards = self.opener(self.guards_path).read().split()
157 except IOError, err:
157 except IOError, err:
158 if err.errno != errno.ENOENT: raise
158 if err.errno != errno.ENOENT: raise
159 guards = []
159 guards = []
160 for i, guard in enumerate(guards):
160 for i, guard in enumerate(guards):
161 bad = self.check_guard(guard)
161 bad = self.check_guard(guard)
162 if bad:
162 if bad:
163 self.ui.warn('%s:%d: %s\n' %
163 self.ui.warn('%s:%d: %s\n' %
164 (self.join(self.guards_path), i + 1, bad))
164 (self.join(self.guards_path), i + 1, bad))
165 else:
165 else:
166 self.active_guards.append(guard)
166 self.active_guards.append(guard)
167 return self.active_guards
167 return self.active_guards
168
168
169 def set_guards(self, idx, guards):
169 def set_guards(self, idx, guards):
170 for g in guards:
170 for g in guards:
171 if len(g) < 2:
171 if len(g) < 2:
172 raise util.Abort(_('guard %r too short') % g)
172 raise util.Abort(_('guard %r too short') % g)
173 if g[0] not in '-+':
173 if g[0] not in '-+':
174 raise util.Abort(_('guard %r starts with invalid char') % g)
174 raise util.Abort(_('guard %r starts with invalid char') % g)
175 bad = self.check_guard(g[1:])
175 bad = self.check_guard(g[1:])
176 if bad:
176 if bad:
177 raise util.Abort(bad)
177 raise util.Abort(bad)
178 drop = self.guard_re.sub('', self.full_series[idx])
178 drop = self.guard_re.sub('', self.full_series[idx])
179 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
179 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
180 self.parse_series()
180 self.parse_series()
181 self.series_dirty = True
181 self.series_dirty = True
182
182
183 def pushable(self, idx):
183 def pushable(self, idx):
184 if isinstance(idx, str):
184 if isinstance(idx, str):
185 idx = self.series.index(idx)
185 idx = self.series.index(idx)
186 patchguards = self.series_guards[idx]
186 patchguards = self.series_guards[idx]
187 if not patchguards:
187 if not patchguards:
188 return True, None
188 return True, None
189 default = False
189 default = False
190 guards = self.active()
190 guards = self.active()
191 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
191 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
192 if exactneg:
192 if exactneg:
193 return False, exactneg[0]
193 return False, exactneg[0]
194 pos = [g for g in patchguards if g[0] == '+']
194 pos = [g for g in patchguards if g[0] == '+']
195 exactpos = [g for g in pos if g[1:] in guards]
195 exactpos = [g for g in pos if g[1:] in guards]
196 if pos:
196 if pos:
197 if exactpos:
197 if exactpos:
198 return True, exactpos[0]
198 return True, exactpos[0]
199 return False, pos
199 return False, pos
200 return True, ''
200 return True, ''
201
201
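# Guard semantics of pushable above, continuing the hypothetical guards
# '+exp' and '-release':
#   - if 'release' is an active guard, '-release' matches exactly and the
#     patch is skipped: returns (False, '-release');
#   - if a patch carries only positive guards, at least one must be active
#     for it to be pushable, otherwise (False, [its positive guards]);
#   - a patch with no guards at all is always pushable: (True, None).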
202 def explain_pushable(self, idx, all_patches=False):
202 def explain_pushable(self, idx, all_patches=False):
203 write = all_patches and self.ui.write or self.ui.warn
203 write = all_patches and self.ui.write or self.ui.warn
204 if all_patches or self.ui.verbose:
204 if all_patches or self.ui.verbose:
205 if isinstance(idx, str):
205 if isinstance(idx, str):
206 idx = self.series.index(idx)
206 idx = self.series.index(idx)
207 pushable, why = self.pushable(idx)
207 pushable, why = self.pushable(idx)
208 if all_patches and pushable:
208 if all_patches and pushable:
209 if why is None:
209 if why is None:
210 write(_('allowing %s - no guards in effect\n') %
210 write(_('allowing %s - no guards in effect\n') %
211 self.series[idx])
211 self.series[idx])
212 else:
212 else:
213 if not why:
213 if not why:
214 write(_('allowing %s - no matching negative guards\n') %
214 write(_('allowing %s - no matching negative guards\n') %
215 self.series[idx])
215 self.series[idx])
216 else:
216 else:
217 write(_('allowing %s - guarded by %r\n') %
217 write(_('allowing %s - guarded by %r\n') %
218 (self.series[idx], why))
218 (self.series[idx], why))
219 if not pushable:
219 if not pushable:
220 if why:
220 if why:
221 write(_('skipping %s - guarded by %r\n') %
221 write(_('skipping %s - guarded by %r\n') %
222 (self.series[idx], why))
222 (self.series[idx], why))
223 else:
223 else:
224 write(_('skipping %s - no matching guards\n') %
224 write(_('skipping %s - no matching guards\n') %
225 self.series[idx])
225 self.series[idx])
226
226
227 def save_dirty(self):
227 def save_dirty(self):
228 def write_list(items, path):
228 def write_list(items, path):
229 fp = self.opener(path, 'w')
229 fp = self.opener(path, 'w')
230 for i in items:
230 for i in items:
231 fp.write("%s\n" % i)
231 fp.write("%s\n" % i)
232 fp.close()
232 fp.close()
233 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
233 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
234 if self.series_dirty: write_list(self.full_series, self.series_path)
234 if self.series_dirty: write_list(self.full_series, self.series_path)
235 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
235 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
236
236
237 def readheaders(self, patch):
237 def readheaders(self, patch):
238 def eatdiff(lines):
238 def eatdiff(lines):
239 while lines:
239 while lines:
240 l = lines[-1]
240 l = lines[-1]
241 if (l.startswith("diff -") or
241 if (l.startswith("diff -") or
242 l.startswith("Index:") or
242 l.startswith("Index:") or
243 l.startswith("===========")):
243 l.startswith("===========")):
244 del lines[-1]
244 del lines[-1]
245 else:
245 else:
246 break
246 break
247 def eatempty(lines):
247 def eatempty(lines):
248 while lines:
248 while lines:
249 l = lines[-1]
249 l = lines[-1]
250 if re.match('\s*$', l):
250 if re.match('\s*$', l):
251 del lines[-1]
251 del lines[-1]
252 else:
252 else:
253 break
253 break
254
254
255 pf = self.join(patch)
255 pf = self.join(patch)
256 message = []
256 message = []
257 comments = []
257 comments = []
258 user = None
258 user = None
259 date = None
259 date = None
260 format = None
260 format = None
261 subject = None
261 subject = None
262 diffstart = 0
262 diffstart = 0
263
263
264 for line in file(pf):
264 for line in file(pf):
265 line = line.rstrip()
265 line = line.rstrip()
266 if line.startswith('diff --git'):
266 if line.startswith('diff --git'):
267 diffstart = 2
267 diffstart = 2
268 break
268 break
269 if diffstart:
269 if diffstart:
270 if line.startswith('+++ '):
270 if line.startswith('+++ '):
271 diffstart = 2
271 diffstart = 2
272 break
272 break
273 if line.startswith("--- "):
273 if line.startswith("--- "):
274 diffstart = 1
274 diffstart = 1
275 continue
275 continue
276 elif format == "hgpatch":
276 elif format == "hgpatch":
277 # parse values when importing the result of an hg export
277 # parse values when importing the result of an hg export
278 if line.startswith("# User "):
278 if line.startswith("# User "):
279 user = line[7:]
279 user = line[7:]
280 elif line.startswith("# Date "):
280 elif line.startswith("# Date "):
281 date = line[7:]
281 date = line[7:]
282 elif not line.startswith("# ") and line:
282 elif not line.startswith("# ") and line:
283 message.append(line)
283 message.append(line)
284 format = None
284 format = None
285 elif line == '# HG changeset patch':
285 elif line == '# HG changeset patch':
286 format = "hgpatch"
286 format = "hgpatch"
287 elif (format != "tagdone" and (line.startswith("Subject: ") or
287 elif (format != "tagdone" and (line.startswith("Subject: ") or
288 line.startswith("subject: "))):
288 line.startswith("subject: "))):
289 subject = line[9:]
289 subject = line[9:]
290 format = "tag"
290 format = "tag"
291 elif (format != "tagdone" and (line.startswith("From: ") or
291 elif (format != "tagdone" and (line.startswith("From: ") or
292 line.startswith("from: "))):
292 line.startswith("from: "))):
293 user = line[6:]
293 user = line[6:]
294 format = "tag"
294 format = "tag"
295 elif format == "tag" and line == "":
295 elif format == "tag" and line == "":
296 # when looking for tags (subject: from: etc) they
296 # when looking for tags (subject: from: etc) they
297 # end once you find a blank line in the source
297 # end once you find a blank line in the source
298 format = "tagdone"
298 format = "tagdone"
299 elif message or line:
299 elif message or line:
300 message.append(line)
300 message.append(line)
301 comments.append(line)
301 comments.append(line)
302
302
303 eatdiff(message)
303 eatdiff(message)
304 eatdiff(comments)
304 eatdiff(comments)
305 eatempty(message)
305 eatempty(message)
306 eatempty(comments)
306 eatempty(comments)
307
307
308 # make sure message isn't empty
308 # make sure message isn't empty
309 if format and format.startswith("tag") and subject:
309 if format and format.startswith("tag") and subject:
310 message.insert(0, "")
310 message.insert(0, "")
311 message.insert(0, subject)
311 message.insert(0, subject)
312 return (message, comments, user, date, diffstart > 1)
312 return (message, comments, user, date, diffstart > 1)
313
313
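# Example of the header block readheaders recognises in an exported patch
# (values are made up); everything before the first diff line feeds the
# message, user and date:
#   # HG changeset patch
#   # User Jane Doe <jane@example.com>
#   # Date 1214589312 -7200
#   fix keyword expansion in merge
#
#   diff -r 123456789abc -r abcdef123456 hgext/keyword.py
# Mail-style "From:"/"Subject:" headers are accepted as an alternative.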
314 def removeundo(self, repo):
314 def removeundo(self, repo):
315 undo = repo.sjoin('undo')
315 undo = repo.sjoin('undo')
316 if not os.path.exists(undo):
316 if not os.path.exists(undo):
317 return
317 return
318 try:
318 try:
319 os.unlink(undo)
319 os.unlink(undo)
320 except OSError, inst:
320 except OSError, inst:
321 self.ui.warn('error removing undo: %s\n' % str(inst))
321 self.ui.warn('error removing undo: %s\n' % str(inst))
322
322
323 def printdiff(self, repo, node1, node2=None, files=None,
323 def printdiff(self, repo, node1, node2=None, files=None,
324 fp=None, changes=None, opts={}):
324 fp=None, changes=None, opts={}):
325 m = cmdutil.match(repo, files, opts)
325 m = cmdutil.match(repo, files, opts)
326 patch.diff(repo, node1, node2, m, fp, changes, self.diffopts())
326 patch.diff(repo, node1, node2, m, fp, changes, self.diffopts())
327
327
328 def mergeone(self, repo, mergeq, head, patch, rev):
328 def mergeone(self, repo, mergeq, head, patch, rev):
329 # first try just applying the patch
329 # first try just applying the patch
330 (err, n) = self.apply(repo, [ patch ], update_status=False,
330 (err, n) = self.apply(repo, [ patch ], update_status=False,
331 strict=True, merge=rev)
331 strict=True, merge=rev)
332
332
333 if err == 0:
333 if err == 0:
334 return (err, n)
334 return (err, n)
335
335
336 if n is None:
336 if n is None:
337 raise util.Abort(_("apply failed for patch %s") % patch)
337 raise util.Abort(_("apply failed for patch %s") % patch)
338
338
339 self.ui.warn("patch didn't work out, merging %s\n" % patch)
339 self.ui.warn("patch didn't work out, merging %s\n" % patch)
340
340
341 # apply failed, strip away that rev and merge.
341 # apply failed, strip away that rev and merge.
342 hg.clean(repo, head)
342 hg.clean(repo, head)
343 self.strip(repo, n, update=False, backup='strip')
343 self.strip(repo, n, update=False, backup='strip')
344
344
345 ctx = repo.changectx(rev)
345 ctx = repo[rev]
346 ret = hg.merge(repo, rev)
346 ret = hg.merge(repo, rev)
347 if ret:
347 if ret:
348 raise util.Abort(_("update returned %d") % ret)
348 raise util.Abort(_("update returned %d") % ret)
349 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
349 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
350 if n == None:
350 if n == None:
351 raise util.Abort(_("repo commit failed"))
351 raise util.Abort(_("repo commit failed"))
352 try:
352 try:
353 message, comments, user, date, patchfound = mergeq.readheaders(patch)
353 message, comments, user, date, patchfound = mergeq.readheaders(patch)
354 except:
354 except:
355 raise util.Abort(_("unable to read %s") % patch)
355 raise util.Abort(_("unable to read %s") % patch)
356
356
357 patchf = self.opener(patch, "w")
357 patchf = self.opener(patch, "w")
358 if comments:
358 if comments:
359 comments = "\n".join(comments) + '\n\n'
359 comments = "\n".join(comments) + '\n\n'
360 patchf.write(comments)
360 patchf.write(comments)
361 self.printdiff(repo, head, n, fp=patchf)
361 self.printdiff(repo, head, n, fp=patchf)
362 patchf.close()
362 patchf.close()
363 self.removeundo(repo)
363 self.removeundo(repo)
364 return (0, n)
364 return (0, n)
365
365
366 def qparents(self, repo, rev=None):
366 def qparents(self, repo, rev=None):
367 if rev is None:
367 if rev is None:
368 (p1, p2) = repo.dirstate.parents()
368 (p1, p2) = repo.dirstate.parents()
369 if p2 == revlog.nullid:
369 if p2 == revlog.nullid:
370 return p1
370 return p1
371 if len(self.applied) == 0:
371 if len(self.applied) == 0:
372 return None
372 return None
373 return revlog.bin(self.applied[-1].rev)
373 return revlog.bin(self.applied[-1].rev)
374 pp = repo.changelog.parents(rev)
374 pp = repo.changelog.parents(rev)
375 if pp[1] != revlog.nullid:
375 if pp[1] != revlog.nullid:
376 arevs = [ x.rev for x in self.applied ]
376 arevs = [ x.rev for x in self.applied ]
377 p0 = revlog.hex(pp[0])
377 p0 = revlog.hex(pp[0])
378 p1 = revlog.hex(pp[1])
378 p1 = revlog.hex(pp[1])
379 if p0 in arevs:
379 if p0 in arevs:
380 return pp[0]
380 return pp[0]
381 if p1 in arevs:
381 if p1 in arevs:
382 return pp[1]
382 return pp[1]
383 return pp[0]
383 return pp[0]
384
384
385 def mergepatch(self, repo, mergeq, series):
385 def mergepatch(self, repo, mergeq, series):
386 if len(self.applied) == 0:
386 if len(self.applied) == 0:
387 # each of the patches merged in will have two parents. This
387 # each of the patches merged in will have two parents. This
388 # can confuse the qrefresh, qdiff, and strip code because it
388 # can confuse the qrefresh, qdiff, and strip code because it
389 # needs to know which parent is actually in the patch queue.
389 # needs to know which parent is actually in the patch queue.
390 # So, we insert a merge marker with only one parent. This way
390 # So, we insert a merge marker with only one parent. This way
391 # the first patch in the queue is never a merge patch.
391 # the first patch in the queue is never a merge patch.
392 #
392 #
393 pname = ".hg.patches.merge.marker"
393 pname = ".hg.patches.merge.marker"
394 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
394 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
395 self.removeundo(repo)
395 self.removeundo(repo)
396 self.applied.append(statusentry(revlog.hex(n), pname))
396 self.applied.append(statusentry(revlog.hex(n), pname))
397 self.applied_dirty = 1
397 self.applied_dirty = 1
398
398
399 head = self.qparents(repo)
399 head = self.qparents(repo)
400
400
401 for patch in series:
401 for patch in series:
402 patch = mergeq.lookup(patch, strict=True)
402 patch = mergeq.lookup(patch, strict=True)
403 if not patch:
403 if not patch:
404 self.ui.warn("patch %s does not exist\n" % patch)
404 self.ui.warn("patch %s does not exist\n" % patch)
405 return (1, None)
405 return (1, None)
406 pushable, reason = self.pushable(patch)
406 pushable, reason = self.pushable(patch)
407 if not pushable:
407 if not pushable:
408 self.explain_pushable(patch, all_patches=True)
408 self.explain_pushable(patch, all_patches=True)
409 continue
409 continue
410 info = mergeq.isapplied(patch)
410 info = mergeq.isapplied(patch)
411 if not info:
411 if not info:
412 self.ui.warn("patch %s is not applied\n" % patch)
412 self.ui.warn("patch %s is not applied\n" % patch)
413 return (1, None)
413 return (1, None)
414 rev = revlog.bin(info[1])
414 rev = revlog.bin(info[1])
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
416 if head:
416 if head:
417 self.applied.append(statusentry(revlog.hex(head), patch))
417 self.applied.append(statusentry(revlog.hex(head), patch))
418 self.applied_dirty = 1
418 self.applied_dirty = 1
419 if err:
419 if err:
420 return (err, head)
420 return (err, head)
421 self.save_dirty()
421 self.save_dirty()
422 return (0, head)
422 return (0, head)
423
423
424 def patch(self, repo, patchfile):
424 def patch(self, repo, patchfile):
425 '''Apply patchfile to the working directory.
425 '''Apply patchfile to the working directory.
426 patchfile: file name of patch'''
426 patchfile: file name of patch'''
427 files = {}
427 files = {}
428 try:
428 try:
429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 files=files)
430 files=files)
431 except Exception, inst:
431 except Exception, inst:
432 self.ui.note(str(inst) + '\n')
432 self.ui.note(str(inst) + '\n')
433 if not self.ui.verbose:
433 if not self.ui.verbose:
434 self.ui.warn("patch failed, unable to continue (try -v)\n")
434 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 return (False, files, False)
435 return (False, files, False)
436
436
437 return (True, files, fuzz)
437 return (True, files, fuzz)
438
438
439 def apply(self, repo, series, list=False, update_status=True,
439 def apply(self, repo, series, list=False, update_status=True,
440 strict=False, patchdir=None, merge=None, all_files={}):
440 strict=False, patchdir=None, merge=None, all_files={}):
441 wlock = lock = tr = None
441 wlock = lock = tr = None
442 try:
442 try:
443 wlock = repo.wlock()
443 wlock = repo.wlock()
444 lock = repo.lock()
444 lock = repo.lock()
445 tr = repo.transaction()
445 tr = repo.transaction()
446 try:
446 try:
447 ret = self._apply(repo, series, list, update_status,
447 ret = self._apply(repo, series, list, update_status,
448 strict, patchdir, merge, all_files=all_files)
448 strict, patchdir, merge, all_files=all_files)
449 tr.close()
449 tr.close()
450 self.save_dirty()
450 self.save_dirty()
451 return ret
451 return ret
452 except:
452 except:
453 try:
453 try:
454 tr.abort()
454 tr.abort()
455 finally:
455 finally:
456 repo.invalidate()
456 repo.invalidate()
457 repo.dirstate.invalidate()
457 repo.dirstate.invalidate()
458 raise
458 raise
459 finally:
459 finally:
460 del tr, lock, wlock
460 del tr, lock, wlock
461 self.removeundo(repo)
461 self.removeundo(repo)
462
462
463 def _apply(self, repo, series, list=False, update_status=True,
463 def _apply(self, repo, series, list=False, update_status=True,
464 strict=False, patchdir=None, merge=None, all_files={}):
464 strict=False, patchdir=None, merge=None, all_files={}):
465 # TODO unify with commands.py
465 # TODO unify with commands.py
466 if not patchdir:
466 if not patchdir:
467 patchdir = self.path
467 patchdir = self.path
468 err = 0
468 err = 0
469 n = None
469 n = None
470 for patchname in series:
470 for patchname in series:
471 pushable, reason = self.pushable(patchname)
471 pushable, reason = self.pushable(patchname)
472 if not pushable:
472 if not pushable:
473 self.explain_pushable(patchname, all_patches=True)
473 self.explain_pushable(patchname, all_patches=True)
474 continue
474 continue
475 self.ui.warn("applying %s\n" % patchname)
475 self.ui.warn("applying %s\n" % patchname)
476 pf = os.path.join(patchdir, patchname)
476 pf = os.path.join(patchdir, patchname)
477
477
478 try:
478 try:
479 message, comments, user, date, patchfound = self.readheaders(patchname)
479 message, comments, user, date, patchfound = self.readheaders(patchname)
480 except:
480 except:
481 self.ui.warn("Unable to read %s\n" % patchname)
481 self.ui.warn("Unable to read %s\n" % patchname)
482 err = 1
482 err = 1
483 break
483 break
484
484
485 if not message:
485 if not message:
486 message = "imported patch %s\n" % patchname
486 message = "imported patch %s\n" % patchname
487 else:
487 else:
488 if list:
488 if list:
489 message.append("\nimported patch %s" % patchname)
489 message.append("\nimported patch %s" % patchname)
490 message = '\n'.join(message)
490 message = '\n'.join(message)
491
491
492 (patcherr, files, fuzz) = self.patch(repo, pf)
492 (patcherr, files, fuzz) = self.patch(repo, pf)
493 all_files.update(files)
493 all_files.update(files)
494 patcherr = not patcherr
494 patcherr = not patcherr
495
495
496 if merge and files:
496 if merge and files:
497 # Mark as removed/merged and update dirstate parent info
497 # Mark as removed/merged and update dirstate parent info
498 removed = []
498 removed = []
499 merged = []
499 merged = []
500 for f in files:
500 for f in files:
501 if os.path.exists(repo.wjoin(f)):
501 if os.path.exists(repo.wjoin(f)):
502 merged.append(f)
502 merged.append(f)
503 else:
503 else:
504 removed.append(f)
504 removed.append(f)
505 for f in removed:
505 for f in removed:
506 repo.dirstate.remove(f)
506 repo.dirstate.remove(f)
507 for f in merged:
507 for f in merged:
508 repo.dirstate.merge(f)
508 repo.dirstate.merge(f)
509 p1, p2 = repo.dirstate.parents()
509 p1, p2 = repo.dirstate.parents()
510 repo.dirstate.setparents(p1, merge)
510 repo.dirstate.setparents(p1, merge)
511
511
512 files = patch.updatedir(self.ui, repo, files)
512 files = patch.updatedir(self.ui, repo, files)
513 match = cmdutil.matchfiles(repo, files or [])
513 match = cmdutil.matchfiles(repo, files or [])
514 n = repo.commit(files, message, user, date, match=match,
514 n = repo.commit(files, message, user, date, match=match,
515 force=True)
515 force=True)
516
516
517 if n == None:
517 if n == None:
518 raise util.Abort(_("repo commit failed"))
518 raise util.Abort(_("repo commit failed"))
519
519
520 if update_status:
520 if update_status:
521 self.applied.append(statusentry(revlog.hex(n), patchname))
521 self.applied.append(statusentry(revlog.hex(n), patchname))
522
522
523 if patcherr:
523 if patcherr:
524 if not patchfound:
524 if not patchfound:
525 self.ui.warn("patch %s is empty\n" % patchname)
525 self.ui.warn("patch %s is empty\n" % patchname)
526 err = 0
526 err = 0
527 else:
527 else:
528 self.ui.warn("patch failed, rejects left in working dir\n")
528 self.ui.warn("patch failed, rejects left in working dir\n")
529 err = 1
529 err = 1
530 break
530 break
531
531
532 if fuzz and strict:
532 if fuzz and strict:
533 self.ui.warn("fuzz found when applying patch, stopping\n")
533 self.ui.warn("fuzz found when applying patch, stopping\n")
534 err = 1
534 err = 1
535 break
535 break
536 return (err, n)
536 return (err, n)
537
537
538 def _clean_series(self, patches):
538 def _clean_series(self, patches):
539 indices = [self.find_series(p) for p in patches]
539 indices = [self.find_series(p) for p in patches]
540 indices.sort()
540 indices.sort()
541 for i in indices[-1::-1]:
541 for i in indices[-1::-1]:
542 del self.full_series[i]
542 del self.full_series[i]
543 self.parse_series()
543 self.parse_series()
544 self.series_dirty = 1
544 self.series_dirty = 1
545
545
546 def finish(self, repo, revs):
546 def finish(self, repo, revs):
547 revs.sort()
547 revs.sort()
548 firstrev = repo.changelog.rev(revlog.bin(self.applied[0].rev))
548 firstrev = repo.changelog.rev(revlog.bin(self.applied[0].rev))
549 appliedbase = 0
549 appliedbase = 0
550 patches = []
550 patches = []
551 for rev in revs:
551 for rev in revs:
552 if rev < firstrev:
552 if rev < firstrev:
553 raise util.Abort(_('revision %d is not managed') % rev)
553 raise util.Abort(_('revision %d is not managed') % rev)
554 base = revlog.bin(self.applied[appliedbase].rev)
554 base = revlog.bin(self.applied[appliedbase].rev)
555 node = repo.changelog.node(rev)
555 node = repo.changelog.node(rev)
556 if node != base:
556 if node != base:
557 raise util.Abort(_('cannot delete revision %d above '
557 raise util.Abort(_('cannot delete revision %d above '
558 'applied patches') % rev)
558 'applied patches') % rev)
559 patches.append(self.applied[appliedbase].name)
559 patches.append(self.applied[appliedbase].name)
560 appliedbase += 1
560 appliedbase += 1
561
561
562 r = self.qrepo()
562 r = self.qrepo()
563 if r:
563 if r:
564 r.remove(patches, True)
564 r.remove(patches, True)
565 else:
565 else:
566 for p in patches:
566 for p in patches:
567 os.unlink(self.join(p))
567 os.unlink(self.join(p))
568
568
569 del self.applied[:appliedbase]
569 del self.applied[:appliedbase]
570 self.applied_dirty = 1
570 self.applied_dirty = 1
571 self._clean_series(patches)
571 self._clean_series(patches)
572
572
573 def delete(self, repo, patches, opts):
573 def delete(self, repo, patches, opts):
574 if not patches and not opts.get('rev'):
574 if not patches and not opts.get('rev'):
575 raise util.Abort(_('qdelete requires at least one revision or '
575 raise util.Abort(_('qdelete requires at least one revision or '
576 'patch name'))
576 'patch name'))
577
577
578 realpatches = []
578 realpatches = []
579 for patch in patches:
579 for patch in patches:
580 patch = self.lookup(patch, strict=True)
580 patch = self.lookup(patch, strict=True)
581 info = self.isapplied(patch)
581 info = self.isapplied(patch)
582 if info:
582 if info:
583 raise util.Abort(_("cannot delete applied patch %s") % patch)
583 raise util.Abort(_("cannot delete applied patch %s") % patch)
584 if patch not in self.series:
584 if patch not in self.series:
585 raise util.Abort(_("patch %s not in series file") % patch)
585 raise util.Abort(_("patch %s not in series file") % patch)
586 realpatches.append(patch)
586 realpatches.append(patch)
587
587
588 appliedbase = 0
588 appliedbase = 0
589 if opts.get('rev'):
589 if opts.get('rev'):
590 if not self.applied:
590 if not self.applied:
591 raise util.Abort(_('no patches applied'))
591 raise util.Abort(_('no patches applied'))
592 revs = cmdutil.revrange(repo, opts['rev'])
592 revs = cmdutil.revrange(repo, opts['rev'])
593 if len(revs) > 1 and revs[0] > revs[1]:
593 if len(revs) > 1 and revs[0] > revs[1]:
594 revs.reverse()
594 revs.reverse()
595 for rev in revs:
595 for rev in revs:
596 if appliedbase >= len(self.applied):
596 if appliedbase >= len(self.applied):
597 raise util.Abort(_("revision %d is not managed") % rev)
597 raise util.Abort(_("revision %d is not managed") % rev)
598
598
599 base = revlog.bin(self.applied[appliedbase].rev)
599 base = revlog.bin(self.applied[appliedbase].rev)
600 node = repo.changelog.node(rev)
600 node = repo.changelog.node(rev)
601 if node != base:
601 if node != base:
602 raise util.Abort(_("cannot delete revision %d above "
602 raise util.Abort(_("cannot delete revision %d above "
603 "applied patches") % rev)
603 "applied patches") % rev)
604 realpatches.append(self.applied[appliedbase].name)
604 realpatches.append(self.applied[appliedbase].name)
605 appliedbase += 1
605 appliedbase += 1
606
606
607 if not opts.get('keep'):
607 if not opts.get('keep'):
608 r = self.qrepo()
608 r = self.qrepo()
609 if r:
609 if r:
610 r.remove(realpatches, True)
610 r.remove(realpatches, True)
611 else:
611 else:
612 for p in realpatches:
612 for p in realpatches:
613 os.unlink(self.join(p))
613 os.unlink(self.join(p))
614
614
615 if appliedbase:
615 if appliedbase:
616 del self.applied[:appliedbase]
616 del self.applied[:appliedbase]
617 self.applied_dirty = 1
617 self.applied_dirty = 1
618 self._clean_series(realpatches)
618 self._clean_series(realpatches)
619
619
620 def check_toppatch(self, repo):
620 def check_toppatch(self, repo):
621 if len(self.applied) > 0:
621 if len(self.applied) > 0:
622 top = revlog.bin(self.applied[-1].rev)
622 top = revlog.bin(self.applied[-1].rev)
623 pp = repo.dirstate.parents()
623 pp = repo.dirstate.parents()
624 if top not in pp:
624 if top not in pp:
625 raise util.Abort(_("working directory revision is not qtip"))
625 raise util.Abort(_("working directory revision is not qtip"))
626 return top
626 return top
627 return None
627 return None
628 def check_localchanges(self, repo, force=False, refresh=True):
628 def check_localchanges(self, repo, force=False, refresh=True):
629 m, a, r, d = repo.status()[:4]
629 m, a, r, d = repo.status()[:4]
630 if m or a or r or d:
630 if m or a or r or d:
631 if not force:
631 if not force:
632 if refresh:
632 if refresh:
633 raise util.Abort(_("local changes found, refresh first"))
633 raise util.Abort(_("local changes found, refresh first"))
634 else:
634 else:
635 raise util.Abort(_("local changes found"))
635 raise util.Abort(_("local changes found"))
636 return m, a, r, d
636 return m, a, r, d
637
637
638 _reserved = ('series', 'status', 'guards')
638 _reserved = ('series', 'status', 'guards')
639 def check_reserved_name(self, name):
639 def check_reserved_name(self, name):
640 if (name in self._reserved or name.startswith('.hg')
640 if (name in self._reserved or name.startswith('.hg')
641 or name.startswith('.mq')):
641 or name.startswith('.mq')):
642 raise util.Abort(_('"%s" cannot be used as the name of a patch')
642 raise util.Abort(_('"%s" cannot be used as the name of a patch')
643 % name)
643 % name)
644
644
645 def new(self, repo, patch, *pats, **opts):
645 def new(self, repo, patch, *pats, **opts):
646 msg = opts.get('msg')
646 msg = opts.get('msg')
647 force = opts.get('force')
647 force = opts.get('force')
648 user = opts.get('user')
648 user = opts.get('user')
649 date = opts.get('date')
649 date = opts.get('date')
650 if date:
650 if date:
651 date = util.parsedate(date)
651 date = util.parsedate(date)
652 self.check_reserved_name(patch)
652 self.check_reserved_name(patch)
653 if os.path.exists(self.join(patch)):
653 if os.path.exists(self.join(patch)):
654 raise util.Abort(_('patch "%s" already exists') % patch)
654 raise util.Abort(_('patch "%s" already exists') % patch)
655 if opts.get('include') or opts.get('exclude') or pats:
655 if opts.get('include') or opts.get('exclude') or pats:
656 match = cmdutil.match(repo, pats, opts)
656 match = cmdutil.match(repo, pats, opts)
657 m, a, r, d = repo.status(match=match)[:4]
657 m, a, r, d = repo.status(match=match)[:4]
658 else:
658 else:
659 m, a, r, d = self.check_localchanges(repo, force)
659 m, a, r, d = self.check_localchanges(repo, force)
660 match = cmdutil.match(repo, m + a + r)
660 match = cmdutil.match(repo, m + a + r)
661 commitfiles = m + a + r
661 commitfiles = m + a + r
662 self.check_toppatch(repo)
662 self.check_toppatch(repo)
663 wlock = repo.wlock()
663 wlock = repo.wlock()
664 try:
664 try:
665 insert = self.full_series_end()
665 insert = self.full_series_end()
666 commitmsg = msg and msg or ("[mq]: %s" % patch)
666 commitmsg = msg and msg or ("[mq]: %s" % patch)
667 n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
667 n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
668 if n == None:
668 if n == None:
669 raise util.Abort(_("repo commit failed"))
669 raise util.Abort(_("repo commit failed"))
670 self.full_series[insert:insert] = [patch]
670 self.full_series[insert:insert] = [patch]
671 self.applied.append(statusentry(revlog.hex(n), patch))
671 self.applied.append(statusentry(revlog.hex(n), patch))
672 self.parse_series()
672 self.parse_series()
673 self.series_dirty = 1
673 self.series_dirty = 1
674 self.applied_dirty = 1
674 self.applied_dirty = 1
675 p = self.opener(patch, "w")
675 p = self.opener(patch, "w")
676 if date:
676 if date:
677 p.write("# HG changeset patch\n")
677 p.write("# HG changeset patch\n")
678 if user:
678 if user:
679 p.write("# User " + user + "\n")
679 p.write("# User " + user + "\n")
680 p.write("# Date %d %d\n" % date)
680 p.write("# Date %d %d\n" % date)
681 p.write("\n")
681 p.write("\n")
682 elif user:
682 elif user:
683 p.write("From: " + user + "\n")
683 p.write("From: " + user + "\n")
684 p.write("\n")
684 p.write("\n")
685 if msg:
685 if msg:
686 msg = msg + "\n"
686 msg = msg + "\n"
687 p.write(msg)
687 p.write(msg)
688 p.close()
688 p.close()
689 wlock = None
689 wlock = None
690 r = self.qrepo()
690 r = self.qrepo()
691 if r: r.add([patch])
691 if r: r.add([patch])
692 if commitfiles:
692 if commitfiles:
693 self.refresh(repo, short=True, git=opts.get('git'))
693 self.refresh(repo, short=True, git=opts.get('git'))
694 self.removeundo(repo)
694 self.removeundo(repo)
695 finally:
695 finally:
696 del wlock
696 del wlock
697
697
698 def strip(self, repo, rev, update=True, backup="all", force=None):
698 def strip(self, repo, rev, update=True, backup="all", force=None):
699 wlock = lock = None
699 wlock = lock = None
700 try:
700 try:
701 wlock = repo.wlock()
701 wlock = repo.wlock()
702 lock = repo.lock()
702 lock = repo.lock()
703
703
704 if update:
704 if update:
705 self.check_localchanges(repo, force=force, refresh=False)
705 self.check_localchanges(repo, force=force, refresh=False)
706 urev = self.qparents(repo, rev)
706 urev = self.qparents(repo, rev)
707 hg.clean(repo, urev)
707 hg.clean(repo, urev)
708 repo.dirstate.write()
708 repo.dirstate.write()
709
709
710 self.removeundo(repo)
710 self.removeundo(repo)
711 repair.strip(self.ui, repo, rev, backup)
711 repair.strip(self.ui, repo, rev, backup)
712 # strip may have unbundled a set of backed up revisions after
712 # strip may have unbundled a set of backed up revisions after
713 # the actual strip
713 # the actual strip
714 self.removeundo(repo)
714 self.removeundo(repo)
715 finally:
715 finally:
716 del lock, wlock
716 del lock, wlock
717
717
718 def isapplied(self, patch):
718 def isapplied(self, patch):
719 """returns (index, rev, patch)"""
719 """returns (index, rev, patch)"""
720 for i in xrange(len(self.applied)):
720 for i in xrange(len(self.applied)):
721 a = self.applied[i]
721 a = self.applied[i]
722 if a.name == patch:
722 if a.name == patch:
723 return (i, a.rev, a.name)
723 return (i, a.rev, a.name)
724 return None
724 return None
725
725
726 # if the exact patch name does not exist, we try a few
727 # variations. If strict is passed, we try only #1
728 #
729 # 1) a number to indicate an offset in the series file
730 # 2) a unique substring of the patch name was given
731 # 3) patchname[-+]num to indicate an offset in the series file
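# Illustrative sketch (editor's note, not part of mq.py). Assuming a hypothetical
# series of foo.patch, bar.patch, baz.patch with the first two applied:
#
#   self.lookup('2')            -> 'baz.patch'   (offset into the series file)
#   self.lookup('baz')          -> 'baz.patch'   (unique substring of a name)
#   self.lookup('baz.patch-1')  -> 'bar.patch'   (one patch before baz.patch)
#   self.lookup('bar.patch+1')  -> 'baz.patch'   (one patch after bar.patch)
#   self.lookup('qtip')         -> 'bar.patch'   (last applied patch)
#   self.lookup('qbase')        -> 'foo.patch'   (first patch in the series)
#
# The patch names are invented; the behaviour follows the code below.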
732 def lookup(self, patch, strict=False):
732 def lookup(self, patch, strict=False):
733 patch = patch and str(patch)
733 patch = patch and str(patch)
734
734
735 def partial_name(s):
735 def partial_name(s):
736 if s in self.series:
736 if s in self.series:
737 return s
737 return s
738 matches = [x for x in self.series if s in x]
738 matches = [x for x in self.series if s in x]
739 if len(matches) > 1:
739 if len(matches) > 1:
740 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
740 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
741 for m in matches:
741 for m in matches:
742 self.ui.warn(' %s\n' % m)
742 self.ui.warn(' %s\n' % m)
743 return None
743 return None
744 if matches:
744 if matches:
745 return matches[0]
745 return matches[0]
746 if len(self.series) > 0 and len(self.applied) > 0:
746 if len(self.series) > 0 and len(self.applied) > 0:
747 if s == 'qtip':
747 if s == 'qtip':
748 return self.series[self.series_end(True)-1]
748 return self.series[self.series_end(True)-1]
749 if s == 'qbase':
749 if s == 'qbase':
750 return self.series[0]
750 return self.series[0]
751 return None
751 return None
752 if patch == None:
752 if patch == None:
753 return None
753 return None
754
754
755 # we don't want to return a partial match until we make
755 # we don't want to return a partial match until we make
756 # sure the file name passed in does not exist (checked below)
756 # sure the file name passed in does not exist (checked below)
757 res = partial_name(patch)
757 res = partial_name(patch)
758 if res and res == patch:
758 if res and res == patch:
759 return res
759 return res
760
760
761 if not os.path.isfile(self.join(patch)):
761 if not os.path.isfile(self.join(patch)):
762 try:
762 try:
763 sno = int(patch)
763 sno = int(patch)
764 except(ValueError, OverflowError):
764 except(ValueError, OverflowError):
765 pass
765 pass
766 else:
766 else:
767 if sno < len(self.series):
767 if sno < len(self.series):
768 return self.series[sno]
768 return self.series[sno]
769 if not strict:
769 if not strict:
770 # return any partial match made above
770 # return any partial match made above
771 if res:
771 if res:
772 return res
772 return res
773 minus = patch.rfind('-')
773 minus = patch.rfind('-')
774 if minus >= 0:
774 if minus >= 0:
775 res = partial_name(patch[:minus])
775 res = partial_name(patch[:minus])
776 if res:
776 if res:
777 i = self.series.index(res)
777 i = self.series.index(res)
778 try:
778 try:
779 off = int(patch[minus+1:] or 1)
779 off = int(patch[minus+1:] or 1)
780 except(ValueError, OverflowError):
780 except(ValueError, OverflowError):
781 pass
781 pass
782 else:
782 else:
783 if i - off >= 0:
783 if i - off >= 0:
784 return self.series[i - off]
784 return self.series[i - off]
785 plus = patch.rfind('+')
785 plus = patch.rfind('+')
786 if plus >= 0:
786 if plus >= 0:
787 res = partial_name(patch[:plus])
787 res = partial_name(patch[:plus])
788 if res:
788 if res:
789 i = self.series.index(res)
789 i = self.series.index(res)
790 try:
790 try:
791 off = int(patch[plus+1:] or 1)
791 off = int(patch[plus+1:] or 1)
792 except(ValueError, OverflowError):
792 except(ValueError, OverflowError):
793 pass
793 pass
794 else:
794 else:
795 if i + off < len(self.series):
795 if i + off < len(self.series):
796 return self.series[i + off]
796 return self.series[i + off]
797 raise util.Abort(_("patch %s not in series") % patch)
797 raise util.Abort(_("patch %s not in series") % patch)
798
798
799 def push(self, repo, patch=None, force=False, list=False,
799 def push(self, repo, patch=None, force=False, list=False,
800 mergeq=None):
800 mergeq=None):
801 wlock = repo.wlock()
801 wlock = repo.wlock()
802 if repo.dirstate.parents()[0] != repo.changelog.tip():
802 if repo.dirstate.parents()[0] != repo.changelog.tip():
803 self.ui.status(_("(working directory not at tip)\n"))
803 self.ui.status(_("(working directory not at tip)\n"))
804
804
805 try:
805 try:
806 patch = self.lookup(patch)
806 patch = self.lookup(patch)
807 # Suppose our series file is: A B C and the current 'top'
808 # patch is B. qpush C should be performed (moving forward)
809 # qpush B is a NOP (no change) qpush A is an error (can't
810 # go backwards with qpush)
811 if patch:
811 if patch:
812 info = self.isapplied(patch)
812 info = self.isapplied(patch)
813 if info:
813 if info:
814 if info[0] < len(self.applied) - 1:
814 if info[0] < len(self.applied) - 1:
815 raise util.Abort(
815 raise util.Abort(
816 _("cannot push to a previous patch: %s") % patch)
816 _("cannot push to a previous patch: %s") % patch)
817 if info[0] < len(self.series) - 1:
817 if info[0] < len(self.series) - 1:
818 self.ui.warn(
818 self.ui.warn(
819 _('qpush: %s is already at the top\n') % patch)
819 _('qpush: %s is already at the top\n') % patch)
820 else:
820 else:
821 self.ui.warn(_('all patches are currently applied\n'))
821 self.ui.warn(_('all patches are currently applied\n'))
822 return
822 return
823
823
824 # Following the above example, starting at 'top' of B:
825 # qpush should be performed (pushes C), but a subsequent
826 # qpush without an argument is an error (nothing to
827 # apply). This allows a loop of "...while hg qpush..." to
828 # work as it detects an error when done
829 if self.series_end() == len(self.series):
829 if self.series_end() == len(self.series):
830 self.ui.warn(_('patch series already fully applied\n'))
830 self.ui.warn(_('patch series already fully applied\n'))
831 return 1
831 return 1
832 if not force:
832 if not force:
833 self.check_localchanges(repo)
833 self.check_localchanges(repo)
834
834
835 self.applied_dirty = 1;
835 self.applied_dirty = 1;
836 start = self.series_end()
836 start = self.series_end()
837 if start > 0:
837 if start > 0:
838 self.check_toppatch(repo)
838 self.check_toppatch(repo)
839 if not patch:
839 if not patch:
840 patch = self.series[start]
840 patch = self.series[start]
841 end = start + 1
841 end = start + 1
842 else:
842 else:
843 end = self.series.index(patch, start) + 1
843 end = self.series.index(patch, start) + 1
844 s = self.series[start:end]
844 s = self.series[start:end]
845 all_files = {}
845 all_files = {}
846 try:
846 try:
847 if mergeq:
847 if mergeq:
848 ret = self.mergepatch(repo, mergeq, s)
848 ret = self.mergepatch(repo, mergeq, s)
849 else:
849 else:
850 ret = self.apply(repo, s, list, all_files=all_files)
850 ret = self.apply(repo, s, list, all_files=all_files)
851 except:
851 except:
852 self.ui.warn(_('cleaning up working directory...'))
852 self.ui.warn(_('cleaning up working directory...'))
853 node = repo.dirstate.parents()[0]
853 node = repo.dirstate.parents()[0]
854 hg.revert(repo, node, None)
854 hg.revert(repo, node, None)
855 unknown = repo.status()[4]
855 unknown = repo.status()[4]
856 # only remove unknown files that we know we touched or
856 # only remove unknown files that we know we touched or
857 # created while patching
857 # created while patching
858 for f in unknown:
858 for f in unknown:
859 if f in all_files:
859 if f in all_files:
860 util.unlink(repo.wjoin(f))
860 util.unlink(repo.wjoin(f))
861 self.ui.warn(_('done\n'))
861 self.ui.warn(_('done\n'))
862 raise
862 raise
863 top = self.applied[-1].name
863 top = self.applied[-1].name
864 if ret[0]:
864 if ret[0]:
865 self.ui.write(
865 self.ui.write(
866 "Errors during apply, please fix and refresh %s\n" % top)
866 "Errors during apply, please fix and refresh %s\n" % top)
867 else:
867 else:
868 self.ui.write("Now at: %s\n" % top)
868 self.ui.write("Now at: %s\n" % top)
869 return ret[0]
869 return ret[0]
870 finally:
870 finally:
871 del wlock
871 del wlock
872
872
873 def pop(self, repo, patch=None, force=False, update=True, all=False):
873 def pop(self, repo, patch=None, force=False, update=True, all=False):
874 def getfile(f, rev, flags):
874 def getfile(f, rev, flags):
875 t = repo.file(f).read(rev)
875 t = repo.file(f).read(rev)
876 repo.wwrite(f, t, flags)
876 repo.wwrite(f, t, flags)
877
877
878 wlock = repo.wlock()
878 wlock = repo.wlock()
879 try:
879 try:
880 if patch:
880 if patch:
881 # index, rev, patch
881 # index, rev, patch
882 info = self.isapplied(patch)
882 info = self.isapplied(patch)
883 if not info:
883 if not info:
884 patch = self.lookup(patch)
884 patch = self.lookup(patch)
885 info = self.isapplied(patch)
885 info = self.isapplied(patch)
886 if not info:
886 if not info:
887 raise util.Abort(_("patch %s is not applied") % patch)
887 raise util.Abort(_("patch %s is not applied") % patch)
888
888
889 if len(self.applied) == 0:
889 if len(self.applied) == 0:
890 # Allow qpop -a to work repeatedly,
890 # Allow qpop -a to work repeatedly,
891 # but not qpop without an argument
891 # but not qpop without an argument
892 self.ui.warn(_("no patches applied\n"))
892 self.ui.warn(_("no patches applied\n"))
893 return not all
893 return not all
894
894
895 if not update:
895 if not update:
896 parents = repo.dirstate.parents()
896 parents = repo.dirstate.parents()
897 rr = [ revlog.bin(x.rev) for x in self.applied ]
897 rr = [ revlog.bin(x.rev) for x in self.applied ]
898 for p in parents:
898 for p in parents:
899 if p in rr:
899 if p in rr:
900 self.ui.warn("qpop: forcing dirstate update\n")
900 self.ui.warn("qpop: forcing dirstate update\n")
901 update = True
901 update = True
902
902
903 if not force and update:
903 if not force and update:
904 self.check_localchanges(repo)
904 self.check_localchanges(repo)
905
905
906 self.applied_dirty = 1;
906 self.applied_dirty = 1;
907 end = len(self.applied)
907 end = len(self.applied)
908 if not patch:
908 if not patch:
909 if all:
909 if all:
910 popi = 0
910 popi = 0
911 else:
911 else:
912 popi = len(self.applied) - 1
912 popi = len(self.applied) - 1
913 else:
913 else:
914 popi = info[0] + 1
914 popi = info[0] + 1
915 if popi >= end:
915 if popi >= end:
916 self.ui.warn("qpop: %s is already at the top\n" % patch)
916 self.ui.warn("qpop: %s is already at the top\n" % patch)
917 return
917 return
918 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
918 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
919
919
920 start = info[0]
920 start = info[0]
921 rev = revlog.bin(info[1])
921 rev = revlog.bin(info[1])
922
922
923 if update:
923 if update:
924 top = self.check_toppatch(repo)
924 top = self.check_toppatch(repo)
925
925
926 if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
926 if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
927 raise util.Abort("popping would remove a revision not "
927 raise util.Abort("popping would remove a revision not "
928 "managed by this patch queue")
928 "managed by this patch queue")
929
929
930 # we know there are no local changes, so we can make a simplified
930 # we know there are no local changes, so we can make a simplified
931 # form of hg.update.
931 # form of hg.update.
932 if update:
932 if update:
933 qp = self.qparents(repo, rev)
933 qp = self.qparents(repo, rev)
934 changes = repo.changelog.read(qp)
934 changes = repo.changelog.read(qp)
935 mmap = repo.manifest.read(changes[0])
935 mmap = repo.manifest.read(changes[0])
936 m, a, r, d, u = repo.status(qp, top)[:5]
936 m, a, r, d, u = repo.status(qp, top)[:5]
937 if d:
937 if d:
938 raise util.Abort("deletions found between repo revs")
938 raise util.Abort("deletions found between repo revs")
939 for f in m:
939 for f in m:
940 getfile(f, mmap[f], mmap.flags(f))
940 getfile(f, mmap[f], mmap.flags(f))
941 for f in r:
941 for f in r:
942 getfile(f, mmap[f], mmap.flags(f))
942 getfile(f, mmap[f], mmap.flags(f))
943 for f in m + r:
943 for f in m + r:
944 repo.dirstate.normal(f)
944 repo.dirstate.normal(f)
945 for f in a:
945 for f in a:
946 try:
946 try:
947 os.unlink(repo.wjoin(f))
947 os.unlink(repo.wjoin(f))
948 except OSError, e:
948 except OSError, e:
949 if e.errno != errno.ENOENT:
949 if e.errno != errno.ENOENT:
950 raise
950 raise
951 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
951 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
952 except: pass
952 except: pass
953 repo.dirstate.forget(f)
953 repo.dirstate.forget(f)
954 repo.dirstate.setparents(qp, revlog.nullid)
954 repo.dirstate.setparents(qp, revlog.nullid)
955 del self.applied[start:end]
955 del self.applied[start:end]
956 self.strip(repo, rev, update=False, backup='strip')
956 self.strip(repo, rev, update=False, backup='strip')
957 if len(self.applied):
957 if len(self.applied):
958 self.ui.write("Now at: %s\n" % self.applied[-1].name)
958 self.ui.write("Now at: %s\n" % self.applied[-1].name)
959 else:
959 else:
960 self.ui.write("Patch queue now empty\n")
960 self.ui.write("Patch queue now empty\n")
961 finally:
961 finally:
962 del wlock
962 del wlock
963
963
964 def diff(self, repo, pats, opts):
964 def diff(self, repo, pats, opts):
965 top = self.check_toppatch(repo)
965 top = self.check_toppatch(repo)
966 if not top:
966 if not top:
967 self.ui.write("No patches applied\n")
967 self.ui.write("No patches applied\n")
968 return
968 return
969 qp = self.qparents(repo, top)
969 qp = self.qparents(repo, top)
970 self._diffopts = patch.diffopts(self.ui, opts)
970 self._diffopts = patch.diffopts(self.ui, opts)
971 self.printdiff(repo, qp, files=pats, opts=opts)
971 self.printdiff(repo, qp, files=pats, opts=opts)
972
972
973 def refresh(self, repo, pats=None, **opts):
973 def refresh(self, repo, pats=None, **opts):
974 if len(self.applied) == 0:
974 if len(self.applied) == 0:
975 self.ui.write("No patches applied\n")
975 self.ui.write("No patches applied\n")
976 return 1
976 return 1
977 newdate = opts.get('date')
977 newdate = opts.get('date')
978 if newdate:
978 if newdate:
979 newdate = '%d %d' % util.parsedate(newdate)
979 newdate = '%d %d' % util.parsedate(newdate)
980 wlock = repo.wlock()
980 wlock = repo.wlock()
981 try:
981 try:
982 self.check_toppatch(repo)
982 self.check_toppatch(repo)
983 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
983 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
984 top = revlog.bin(top)
984 top = revlog.bin(top)
985 if repo.changelog.heads(top) != [top]:
985 if repo.changelog.heads(top) != [top]:
986 raise util.Abort("cannot refresh a revision with children")
986 raise util.Abort("cannot refresh a revision with children")
987 cparents = repo.changelog.parents(top)
987 cparents = repo.changelog.parents(top)
988 patchparent = self.qparents(repo, top)
988 patchparent = self.qparents(repo, top)
989 message, comments, user, date, patchfound = self.readheaders(patchfn)
989 message, comments, user, date, patchfound = self.readheaders(patchfn)
990
990
991 patchf = self.opener(patchfn, 'r+')
991 patchf = self.opener(patchfn, 'r+')
992
992
993 # if the patch was a git patch, refresh it as a git patch
993 # if the patch was a git patch, refresh it as a git patch
994 for line in patchf:
994 for line in patchf:
995 if line.startswith('diff --git'):
995 if line.startswith('diff --git'):
996 self.diffopts().git = True
996 self.diffopts().git = True
997 break
997 break
998
998
999 msg = opts.get('msg', '').rstrip()
999 msg = opts.get('msg', '').rstrip()
1000 if msg and comments:
1000 if msg and comments:
1001 # Remove existing message, keeping the rest of the comments
1001 # Remove existing message, keeping the rest of the comments
1002 # fields.
1002 # fields.
1003 # If comments contains 'subject: ', message will prepend
1003 # If comments contains 'subject: ', message will prepend
1004 # the field and a blank line.
1004 # the field and a blank line.
1005 if message:
1005 if message:
1006 subj = 'subject: ' + message[0].lower()
1006 subj = 'subject: ' + message[0].lower()
1007 for i in xrange(len(comments)):
1007 for i in xrange(len(comments)):
1008 if subj == comments[i].lower():
1008 if subj == comments[i].lower():
1009 del comments[i]
1009 del comments[i]
1010 message = message[2:]
1010 message = message[2:]
1011 break
1011 break
1012 ci = 0
1012 ci = 0
1013 for mi in xrange(len(message)):
1013 for mi in xrange(len(message)):
1014 while message[mi] != comments[ci]:
1014 while message[mi] != comments[ci]:
1015 ci += 1
1015 ci += 1
1016 del comments[ci]
1016 del comments[ci]
1017
1017
1018 def setheaderfield(comments, prefixes, new):
1019 # Update all references to a field in the patch header.
1020 # If none found, add it email style.
1021 res = False
1022 for prefix in prefixes:
1023 for i in xrange(len(comments)):
1024 if comments[i].startswith(prefix):
1025 comments[i] = prefix + new
1026 res = True
1027 break
1028 return res
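# Illustrative sketch (editor's note, not part of mq.py): given hypothetical
# comments = ['# HG changeset patch', '# User old@example.com', ''], the call
# setheaderfield(comments, ['From: ', '# User '], 'new@example.com') rewrites the
# second entry to '# User new@example.com' and returns True; if no entry starts
# with one of the prefixes it returns False and the caller adds the field itself
# (see the newuser handling below).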
1029
1029
1030 newuser = opts.get('user')
1030 newuser = opts.get('user')
1031 if newuser:
1031 if newuser:
1032 if not setheaderfield(comments, ['From: ', '# User '], newuser):
1032 if not setheaderfield(comments, ['From: ', '# User '], newuser):
1033 try:
1033 try:
1034 patchheaderat = comments.index('# HG changeset patch')
1034 patchheaderat = comments.index('# HG changeset patch')
1035 comments.insert(patchheaderat + 1,'# User ' + newuser)
1035 comments.insert(patchheaderat + 1,'# User ' + newuser)
1036 except ValueError:
1036 except ValueError:
1037 comments = ['From: ' + newuser, ''] + comments
1037 comments = ['From: ' + newuser, ''] + comments
1038 user = newuser
1038 user = newuser
1039
1039
1040 if newdate:
1040 if newdate:
1041 if setheaderfield(comments, ['# Date '], newdate):
1041 if setheaderfield(comments, ['# Date '], newdate):
1042 date = newdate
1042 date = newdate
1043
1043
1044 if msg:
1044 if msg:
1045 comments.append(msg)
1045 comments.append(msg)
1046
1046
1047 patchf.seek(0)
1047 patchf.seek(0)
1048 patchf.truncate()
1048 patchf.truncate()
1049
1049
1050 if comments:
1050 if comments:
1051 comments = "\n".join(comments) + '\n\n'
1051 comments = "\n".join(comments) + '\n\n'
1052 patchf.write(comments)
1052 patchf.write(comments)
1053
1053
1054 if opts.get('git'):
1054 if opts.get('git'):
1055 self.diffopts().git = True
1055 self.diffopts().git = True
1056 matchfn = cmdutil.match(repo, pats, opts)
1056 matchfn = cmdutil.match(repo, pats, opts)
1057 tip = repo.changelog.tip()
1057 tip = repo.changelog.tip()
1058 if top == tip:
1058 if top == tip:
1059 # if the top of our patch queue is also the tip, there is an
1059 # if the top of our patch queue is also the tip, there is an
1060 # optimization here. We update the dirstate in place and strip
1060 # optimization here. We update the dirstate in place and strip
1061 # off the tip commit. Then just commit the current directory
1061 # off the tip commit. Then just commit the current directory
1062 # tree. We can also send repo.commit the list of files
1062 # tree. We can also send repo.commit the list of files
1063 # changed to speed up the diff
1063 # changed to speed up the diff
1064 #
1064 #
1065 # in short mode, we only diff the files included in the
1065 # in short mode, we only diff the files included in the
1066 # patch already
1066 # patch already
1067 #
1067 #
1068 # this should really read:
1068 # this should really read:
1069 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
1069 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
1070 # but we do it backwards to take advantage of manifest/chlog
1070 # but we do it backwards to take advantage of manifest/chlog
1071 # caching against the next repo.status call
1071 # caching against the next repo.status call
1072 #
1072 #
1073 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
1073 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
1074 changes = repo.changelog.read(tip)
1074 changes = repo.changelog.read(tip)
1075 man = repo.manifest.read(changes[0])
1075 man = repo.manifest.read(changes[0])
1076 aaa = aa[:]
1076 aaa = aa[:]
1077 if opts.get('short'):
1077 if opts.get('short'):
1078 match = cmdutil.matchfiles(repo, mm + aa + dd)
1078 match = cmdutil.matchfiles(repo, mm + aa + dd)
1079 else:
1079 else:
1080 match = cmdutil.matchall(repo)
1080 match = cmdutil.matchall(repo)
1081 m, a, r, d, u = repo.status(match=match)[:5]
1081 m, a, r, d, u = repo.status(match=match)[:5]
1082
1082
1083 # we might end up with files that were added between
1083 # we might end up with files that were added between
1084 # tip and the dirstate parent, but then changed in the
1084 # tip and the dirstate parent, but then changed in the
1085 # local dirstate. in this case, we want them to only
1085 # local dirstate. in this case, we want them to only
1086 # show up in the added section
1086 # show up in the added section
1087 for x in m:
1087 for x in m:
1088 if x not in aa:
1088 if x not in aa:
1089 mm.append(x)
1089 mm.append(x)
1090 # we might end up with files added by the local dirstate that
1090 # we might end up with files added by the local dirstate that
1091 # were deleted by the patch. In this case, they should only
1091 # were deleted by the patch. In this case, they should only
1092 # show up in the changed section.
1092 # show up in the changed section.
1093 for x in a:
1093 for x in a:
1094 if x in dd:
1094 if x in dd:
1095 del dd[dd.index(x)]
1095 del dd[dd.index(x)]
1096 mm.append(x)
1096 mm.append(x)
1097 else:
1097 else:
1098 aa.append(x)
1098 aa.append(x)
1099 # make sure any files deleted in the local dirstate
1099 # make sure any files deleted in the local dirstate
1100 # are not in the add or change column of the patch
1100 # are not in the add or change column of the patch
1101 forget = []
1101 forget = []
1102 for x in d + r:
1102 for x in d + r:
1103 if x in aa:
1103 if x in aa:
1104 del aa[aa.index(x)]
1104 del aa[aa.index(x)]
1105 forget.append(x)
1105 forget.append(x)
1106 continue
1106 continue
1107 elif x in mm:
1107 elif x in mm:
1108 del mm[mm.index(x)]
1108 del mm[mm.index(x)]
1109 dd.append(x)
1109 dd.append(x)
1110
1110
1111 m = util.unique(mm)
1111 m = util.unique(mm)
1112 r = util.unique(dd)
1112 r = util.unique(dd)
1113 a = util.unique(aa)
1113 a = util.unique(aa)
1114 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1114 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1115 match = cmdutil.matchfiles(repo, util.unique(c[0] + c[1] + c[2]))
1115 match = cmdutil.matchfiles(repo, util.unique(c[0] + c[1] + c[2]))
1116 patch.diff(repo, patchparent, match=match,
1116 patch.diff(repo, patchparent, match=match,
1117 fp=patchf, changes=c, opts=self.diffopts())
1117 fp=patchf, changes=c, opts=self.diffopts())
1118 patchf.close()
1118 patchf.close()
1119
1119
1120 repo.dirstate.setparents(*cparents)
1120 repo.dirstate.setparents(*cparents)
1121 copies = {}
1121 copies = {}
1122 for dst in a:
1122 for dst in a:
1123 src = repo.dirstate.copied(dst)
1123 src = repo.dirstate.copied(dst)
1124 if src is not None:
1124 if src is not None:
1125 copies.setdefault(src, []).append(dst)
1125 copies.setdefault(src, []).append(dst)
1126 repo.dirstate.add(dst)
1126 repo.dirstate.add(dst)
1127 # remember the copies between patchparent and tip
1127 # remember the copies between patchparent and tip
1128 # this may be slow, so don't do it if we're not tracking copies
1128 # this may be slow, so don't do it if we're not tracking copies
1129 if self.diffopts().git:
1129 if self.diffopts().git:
1130 for dst in aaa:
1130 for dst in aaa:
1131 f = repo.file(dst)
1131 f = repo.file(dst)
1132 src = f.renamed(man[dst])
1132 src = f.renamed(man[dst])
1133 if src:
1133 if src:
1134 copies[src[0]] = copies.get(dst, [])
1134 copies[src[0]] = copies.get(dst, [])
1135 if dst in a:
1135 if dst in a:
1136 copies[src[0]].append(dst)
1136 copies[src[0]].append(dst)
1137 # we can't copy a file created by the patch itself
1137 # we can't copy a file created by the patch itself
1138 if dst in copies:
1138 if dst in copies:
1139 del copies[dst]
1139 del copies[dst]
1140 for src, dsts in copies.iteritems():
1140 for src, dsts in copies.iteritems():
1141 for dst in dsts:
1141 for dst in dsts:
1142 repo.dirstate.copy(src, dst)
1142 repo.dirstate.copy(src, dst)
1143 for f in r:
1143 for f in r:
1144 repo.dirstate.remove(f)
1144 repo.dirstate.remove(f)
1145 # if the patch excludes a modified file, mark that
1145 # if the patch excludes a modified file, mark that
1146 # file with mtime=0 so status can see it.
1146 # file with mtime=0 so status can see it.
1147 mm = []
1147 mm = []
1148 for i in xrange(len(m)-1, -1, -1):
1148 for i in xrange(len(m)-1, -1, -1):
1149 if not matchfn(m[i]):
1149 if not matchfn(m[i]):
1150 mm.append(m[i])
1150 mm.append(m[i])
1151 del m[i]
1151 del m[i]
1152 for f in m:
1152 for f in m:
1153 repo.dirstate.normal(f)
1153 repo.dirstate.normal(f)
1154 for f in mm:
1154 for f in mm:
1155 repo.dirstate.normallookup(f)
1155 repo.dirstate.normallookup(f)
1156 for f in forget:
1156 for f in forget:
1157 repo.dirstate.forget(f)
1157 repo.dirstate.forget(f)
1158
1158
1159 if not msg:
1159 if not msg:
1160 if not message:
1160 if not message:
1161 message = "[mq]: %s\n" % patchfn
1161 message = "[mq]: %s\n" % patchfn
1162 else:
1162 else:
1163 message = "\n".join(message)
1163 message = "\n".join(message)
1164 else:
1164 else:
1165 message = msg
1165 message = msg
1166
1166
1167 if not user:
1167 if not user:
1168 user = changes[1]
1168 user = changes[1]
1169
1169
1170 self.applied.pop()
1170 self.applied.pop()
1171 self.applied_dirty = 1
1171 self.applied_dirty = 1
1172 self.strip(repo, top, update=False,
1172 self.strip(repo, top, update=False,
1173 backup='strip')
1173 backup='strip')
1174 n = repo.commit(match.files(), message, user, date, match=match,
1174 n = repo.commit(match.files(), message, user, date, match=match,
1175 force=1)
1175 force=1)
1176 self.applied.append(statusentry(revlog.hex(n), patchfn))
1176 self.applied.append(statusentry(revlog.hex(n), patchfn))
1177 self.removeundo(repo)
1177 self.removeundo(repo)
1178 else:
1178 else:
1179 self.printdiff(repo, patchparent, fp=patchf)
1179 self.printdiff(repo, patchparent, fp=patchf)
1180 patchf.close()
1180 patchf.close()
1181 added = repo.status()[1]
1181 added = repo.status()[1]
1182 for a in added:
1182 for a in added:
1183 f = repo.wjoin(a)
1183 f = repo.wjoin(a)
1184 try:
1184 try:
1185 os.unlink(f)
1185 os.unlink(f)
1186 except OSError, e:
1186 except OSError, e:
1187 if e.errno != errno.ENOENT:
1187 if e.errno != errno.ENOENT:
1188 raise
1188 raise
1189 try: os.removedirs(os.path.dirname(f))
1189 try: os.removedirs(os.path.dirname(f))
1190 except: pass
1190 except: pass
1191 # forget the file copies in the dirstate
1191 # forget the file copies in the dirstate
1192 # push should readd the files later on
1192 # push should readd the files later on
1193 repo.dirstate.forget(a)
1193 repo.dirstate.forget(a)
1194 self.pop(repo, force=True)
1194 self.pop(repo, force=True)
1195 self.push(repo, force=True)
1195 self.push(repo, force=True)
1196 finally:
1196 finally:
1197 del wlock
1197 del wlock
1198
1198
1199 def init(self, repo, create=False):
1199 def init(self, repo, create=False):
1200 if not create and os.path.isdir(self.path):
1200 if not create and os.path.isdir(self.path):
1201 raise util.Abort(_("patch queue directory already exists"))
1201 raise util.Abort(_("patch queue directory already exists"))
1202 try:
1202 try:
1203 os.mkdir(self.path)
1203 os.mkdir(self.path)
1204 except OSError, inst:
1204 except OSError, inst:
1205 if inst.errno != errno.EEXIST or not create:
1205 if inst.errno != errno.EEXIST or not create:
1206 raise
1206 raise
1207 if create:
1207 if create:
1208 return self.qrepo(create=True)
1208 return self.qrepo(create=True)
1209
1209
1210 def unapplied(self, repo, patch=None):
1210 def unapplied(self, repo, patch=None):
1211 if patch and patch not in self.series:
1211 if patch and patch not in self.series:
1212 raise util.Abort(_("patch %s is not in series file") % patch)
1212 raise util.Abort(_("patch %s is not in series file") % patch)
1213 if not patch:
1213 if not patch:
1214 start = self.series_end()
1214 start = self.series_end()
1215 else:
1215 else:
1216 start = self.series.index(patch) + 1
1216 start = self.series.index(patch) + 1
1217 unapplied = []
1217 unapplied = []
1218 for i in xrange(start, len(self.series)):
1218 for i in xrange(start, len(self.series)):
1219 pushable, reason = self.pushable(i)
1219 pushable, reason = self.pushable(i)
1220 if pushable:
1220 if pushable:
1221 unapplied.append((i, self.series[i]))
1221 unapplied.append((i, self.series[i]))
1222 self.explain_pushable(i)
1222 self.explain_pushable(i)
1223 return unapplied
1223 return unapplied
1224
1224
1225 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1225 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1226 summary=False):
1226 summary=False):
1227 def displayname(patchname):
1227 def displayname(patchname):
1228 if summary:
1228 if summary:
1229 msg = self.readheaders(patchname)[0]
1229 msg = self.readheaders(patchname)[0]
1230 msg = msg and ': ' + msg[0] or ': '
1230 msg = msg and ': ' + msg[0] or ': '
1231 else:
1231 else:
1232 msg = ''
1232 msg = ''
1233 return '%s%s' % (patchname, msg)
1233 return '%s%s' % (patchname, msg)
1234
1234
1235 applied = dict.fromkeys([p.name for p in self.applied])
1235 applied = dict.fromkeys([p.name for p in self.applied])
1236 if length is None:
1236 if length is None:
1237 length = len(self.series) - start
1237 length = len(self.series) - start
1238 if not missing:
1238 if not missing:
1239 for i in xrange(start, start+length):
1239 for i in xrange(start, start+length):
1240 patch = self.series[i]
1240 patch = self.series[i]
1241 if patch in applied:
1241 if patch in applied:
1242 stat = 'A'
1242 stat = 'A'
1243 elif self.pushable(i)[0]:
1243 elif self.pushable(i)[0]:
1244 stat = 'U'
1244 stat = 'U'
1245 else:
1245 else:
1246 stat = 'G'
1246 stat = 'G'
1247 pfx = ''
1247 pfx = ''
1248 if self.ui.verbose:
1248 if self.ui.verbose:
1249 pfx = '%d %s ' % (i, stat)
1249 pfx = '%d %s ' % (i, stat)
1250 elif status and status != stat:
1250 elif status and status != stat:
1251 continue
1251 continue
1252 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1252 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1253 else:
1253 else:
1254 msng_list = []
1254 msng_list = []
1255 for root, dirs, files in os.walk(self.path):
1255 for root, dirs, files in os.walk(self.path):
1256 d = root[len(self.path) + 1:]
1256 d = root[len(self.path) + 1:]
1257 for f in files:
1257 for f in files:
1258 fl = os.path.join(d, f)
1258 fl = os.path.join(d, f)
1259 if (fl not in self.series and
1259 if (fl not in self.series and
1260 fl not in (self.status_path, self.series_path,
1260 fl not in (self.status_path, self.series_path,
1261 self.guards_path)
1261 self.guards_path)
1262 and not fl.startswith('.')):
1262 and not fl.startswith('.')):
1263 msng_list.append(fl)
1263 msng_list.append(fl)
1264 msng_list.sort()
1264 msng_list.sort()
1265 for x in msng_list:
1265 for x in msng_list:
1266 pfx = self.ui.verbose and ('D ') or ''
1266 pfx = self.ui.verbose and ('D ') or ''
1267 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1267 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1268
1268
1269 def issaveline(self, l):
1269 def issaveline(self, l):
1270 if l.name == '.hg.patches.save.line':
1270 if l.name == '.hg.patches.save.line':
1271 return True
1271 return True
1272
1272
1273 def qrepo(self, create=False):
1273 def qrepo(self, create=False):
1274 if create or os.path.isdir(self.join(".hg")):
1274 if create or os.path.isdir(self.join(".hg")):
1275 return hg.repository(self.ui, path=self.path, create=create)
1275 return hg.repository(self.ui, path=self.path, create=create)
1276
1276
1277 def restore(self, repo, rev, delete=None, qupdate=None):
1277 def restore(self, repo, rev, delete=None, qupdate=None):
1278 c = repo.changelog.read(rev)
1278 c = repo.changelog.read(rev)
1279 desc = c[4].strip()
1279 desc = c[4].strip()
1280 lines = desc.splitlines()
1280 lines = desc.splitlines()
1281 i = 0
1281 i = 0
1282 datastart = None
1282 datastart = None
1283 series = []
1283 series = []
1284 applied = []
1284 applied = []
1285 qpp = None
1285 qpp = None
1286 for i in xrange(0, len(lines)):
1286 for i in xrange(0, len(lines)):
1287 if lines[i] == 'Patch Data:':
1287 if lines[i] == 'Patch Data:':
1288 datastart = i + 1
1288 datastart = i + 1
1289 elif lines[i].startswith('Dirstate:'):
1289 elif lines[i].startswith('Dirstate:'):
1290 l = lines[i].rstrip()
1290 l = lines[i].rstrip()
1291 l = l[10:].split(' ')
1291 l = l[10:].split(' ')
1292 qpp = [ bin(x) for x in l ]
1292 qpp = [ bin(x) for x in l ]
1293 elif datastart != None:
1293 elif datastart != None:
1294 l = lines[i].rstrip()
1294 l = lines[i].rstrip()
1295 se = statusentry(l)
1295 se = statusentry(l)
1296 file_ = se.name
1296 file_ = se.name
1297 if se.rev:
1297 if se.rev:
1298 applied.append(se)
1298 applied.append(se)
1299 else:
1299 else:
1300 series.append(file_)
1300 series.append(file_)
1301 if datastart == None:
1301 if datastart == None:
1302 self.ui.warn("No saved patch data found\n")
1302 self.ui.warn("No saved patch data found\n")
1303 return 1
1303 return 1
1304 self.ui.warn("restoring status: %s\n" % lines[0])
1304 self.ui.warn("restoring status: %s\n" % lines[0])
1305 self.full_series = series
1305 self.full_series = series
1306 self.applied = applied
1306 self.applied = applied
1307 self.parse_series()
1307 self.parse_series()
1308 self.series_dirty = 1
1308 self.series_dirty = 1
1309 self.applied_dirty = 1
1309 self.applied_dirty = 1
1310 heads = repo.changelog.heads()
1310 heads = repo.changelog.heads()
1311 if delete:
1311 if delete:
1312 if rev not in heads:
1312 if rev not in heads:
1313 self.ui.warn("save entry has children, leaving it alone\n")
1313 self.ui.warn("save entry has children, leaving it alone\n")
1314 else:
1314 else:
1315 self.ui.warn("removing save entry %s\n" % short(rev))
1315 self.ui.warn("removing save entry %s\n" % short(rev))
1316 pp = repo.dirstate.parents()
1316 pp = repo.dirstate.parents()
1317 if rev in pp:
1317 if rev in pp:
1318 update = True
1318 update = True
1319 else:
1319 else:
1320 update = False
1320 update = False
1321 self.strip(repo, rev, update=update, backup='strip')
1321 self.strip(repo, rev, update=update, backup='strip')
1322 if qpp:
1322 if qpp:
1323 self.ui.warn("saved queue repository parents: %s %s\n" %
1323 self.ui.warn("saved queue repository parents: %s %s\n" %
1324 (short(qpp[0]), short(qpp[1])))
1324 (short(qpp[0]), short(qpp[1])))
1325 if qupdate:
1325 if qupdate:
1326 self.ui.status(_("queue directory updating\n"))
1326 self.ui.status(_("queue directory updating\n"))
1327 r = self.qrepo()
1327 r = self.qrepo()
1328 if not r:
1328 if not r:
1329 self.ui.warn("Unable to load queue repository\n")
1329 self.ui.warn("Unable to load queue repository\n")
1330 return 1
1330 return 1
1331 hg.clean(r, qpp[0])
1331 hg.clean(r, qpp[0])
1332
1332
1333 def save(self, repo, msg=None):
1333 def save(self, repo, msg=None):
1334 if len(self.applied) == 0:
1334 if len(self.applied) == 0:
1335 self.ui.warn("save: no patches applied, exiting\n")
1335 self.ui.warn("save: no patches applied, exiting\n")
1336 return 1
1336 return 1
1337 if self.issaveline(self.applied[-1]):
1337 if self.issaveline(self.applied[-1]):
1338 self.ui.warn("status is already saved\n")
1338 self.ui.warn("status is already saved\n")
1339 return 1
1339 return 1
1340
1340
1341 ar = [ ':' + x for x in self.full_series ]
1341 ar = [ ':' + x for x in self.full_series ]
1342 if not msg:
1342 if not msg:
1343 msg = "hg patches saved state"
1343 msg = "hg patches saved state"
1344 else:
1344 else:
1345 msg = "hg patches: " + msg.rstrip('\r\n')
1345 msg = "hg patches: " + msg.rstrip('\r\n')
1346 r = self.qrepo()
1346 r = self.qrepo()
1347 if r:
1347 if r:
1348 pp = r.dirstate.parents()
1348 pp = r.dirstate.parents()
1349 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1349 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1350 msg += "\n\nPatch Data:\n"
1350 msg += "\n\nPatch Data:\n"
1351 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1351 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1352 "\n".join(ar) + '\n' or "")
1352 "\n".join(ar) + '\n' or "")
1353 n = repo.commit(None, text, user=None, force=1)
1353 n = repo.commit(None, text, user=None, force=1)
1354 if not n:
1354 if not n:
1355 self.ui.warn("repo commit failed\n")
1355 self.ui.warn("repo commit failed\n")
1356 return 1
1356 return 1
1357 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1357 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1358 self.applied_dirty = 1
1358 self.applied_dirty = 1
1359 self.removeundo(repo)
1359 self.removeundo(repo)
1360
1360
1361 def full_series_end(self):
1361 def full_series_end(self):
1362 if len(self.applied) > 0:
1362 if len(self.applied) > 0:
1363 p = self.applied[-1].name
1363 p = self.applied[-1].name
1364 end = self.find_series(p)
1364 end = self.find_series(p)
1365 if end == None:
1365 if end == None:
1366 return len(self.full_series)
1366 return len(self.full_series)
1367 return end + 1
1367 return end + 1
1368 return 0
1368 return 0
1369
1369
1370 def series_end(self, all_patches=False):
1370 def series_end(self, all_patches=False):
1371 """If all_patches is False, return the index of the next pushable patch
1371 """If all_patches is False, return the index of the next pushable patch
1372 in the series, or the series length. If all_patches is True, return the
1372 in the series, or the series length. If all_patches is True, return the
1373 index of the first patch past the last applied one.
1373 index of the first patch past the last applied one.
1374 """
1374 """
1375 end = 0
1375 end = 0
1376 def next(start):
1376 def next(start):
1377 if all_patches:
1377 if all_patches:
1378 return start
1378 return start
1379 i = start
1379 i = start
1380 while i < len(self.series):
1380 while i < len(self.series):
1381 p, reason = self.pushable(i)
1381 p, reason = self.pushable(i)
1382 if p:
1382 if p:
1383 break
1383 break
1384 self.explain_pushable(i)
1384 self.explain_pushable(i)
1385 i += 1
1385 i += 1
1386 return i
1386 return i
1387 if len(self.applied) > 0:
1387 if len(self.applied) > 0:
1388 p = self.applied[-1].name
1388 p = self.applied[-1].name
1389 try:
1389 try:
1390 end = self.series.index(p)
1390 end = self.series.index(p)
1391 except ValueError:
1391 except ValueError:
1392 return 0
1392 return 0
1393 return next(end + 1)
1393 return next(end + 1)
1394 return next(end)
1394 return next(end)
1395
1395
1396 def appliedname(self, index):
1396 def appliedname(self, index):
1397 pname = self.applied[index].name
1397 pname = self.applied[index].name
1398 if not self.ui.verbose:
1398 if not self.ui.verbose:
1399 p = pname
1399 p = pname
1400 else:
1400 else:
1401 p = str(self.series.index(pname)) + " " + pname
1401 p = str(self.series.index(pname)) + " " + pname
1402 return p
1402 return p
1403
1403
1404 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1404 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1405 force=None, git=False):
1405 force=None, git=False):
1406 def checkseries(patchname):
1406 def checkseries(patchname):
1407 if patchname in self.series:
1407 if patchname in self.series:
1408 raise util.Abort(_('patch %s is already in the series file')
1408 raise util.Abort(_('patch %s is already in the series file')
1409 % patchname)
1409 % patchname)
1410 def checkfile(patchname):
1410 def checkfile(patchname):
1411 if not force and os.path.exists(self.join(patchname)):
1411 if not force and os.path.exists(self.join(patchname)):
1412 raise util.Abort(_('patch "%s" already exists')
1412 raise util.Abort(_('patch "%s" already exists')
1413 % patchname)
1413 % patchname)
1414
1414
1415 if rev:
1415 if rev:
1416 if files:
1416 if files:
1417 raise util.Abort(_('option "-r" not valid when importing '
1417 raise util.Abort(_('option "-r" not valid when importing '
1418 'files'))
1418 'files'))
1419 rev = cmdutil.revrange(repo, rev)
1419 rev = cmdutil.revrange(repo, rev)
1420 rev.sort(lambda x, y: cmp(y, x))
1420 rev.sort(lambda x, y: cmp(y, x))
1421 if (len(files) > 1 or len(rev) > 1) and patchname:
1421 if (len(files) > 1 or len(rev) > 1) and patchname:
1422 raise util.Abort(_('option "-n" not valid when importing multiple '
1422 raise util.Abort(_('option "-n" not valid when importing multiple '
1423 'patches'))
1423 'patches'))
1424 i = 0
1424 i = 0
1425 added = []
1425 added = []
1426 if rev:
1426 if rev:
1427 # If mq patches are applied, we can only import revisions
1427 # If mq patches are applied, we can only import revisions
1428 # that form a linear path to qbase.
1428 # that form a linear path to qbase.
1429 # Otherwise, they should form a linear path to a head.
1429 # Otherwise, they should form a linear path to a head.
1430 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1430 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1431 if len(heads) > 1:
1431 if len(heads) > 1:
1432 raise util.Abort(_('revision %d is the root of more than one '
1432 raise util.Abort(_('revision %d is the root of more than one '
1433 'branch') % rev[-1])
1433 'branch') % rev[-1])
1434 if self.applied:
1434 if self.applied:
1435 base = revlog.hex(repo.changelog.node(rev[0]))
1435 base = revlog.hex(repo.changelog.node(rev[0]))
1436 if base in [n.rev for n in self.applied]:
1436 if base in [n.rev for n in self.applied]:
1437 raise util.Abort(_('revision %d is already managed')
1437 raise util.Abort(_('revision %d is already managed')
1438 % rev[0])
1438 % rev[0])
1439 if heads != [revlog.bin(self.applied[-1].rev)]:
1439 if heads != [revlog.bin(self.applied[-1].rev)]:
1440 raise util.Abort(_('revision %d is not the parent of '
1440 raise util.Abort(_('revision %d is not the parent of '
1441 'the queue') % rev[0])
1441 'the queue') % rev[0])
1442 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1442 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1443 lastparent = repo.changelog.parentrevs(base)[0]
1443 lastparent = repo.changelog.parentrevs(base)[0]
1444 else:
1444 else:
1445 if heads != [repo.changelog.node(rev[0])]:
1445 if heads != [repo.changelog.node(rev[0])]:
1446 raise util.Abort(_('revision %d has unmanaged children')
1446 raise util.Abort(_('revision %d has unmanaged children')
1447 % rev[0])
1447 % rev[0])
1448 lastparent = None
1448 lastparent = None
1449
1449
1450 if git:
1450 if git:
1451 self.diffopts().git = True
1451 self.diffopts().git = True
1452
1452
1453 for r in rev:
1453 for r in rev:
1454 p1, p2 = repo.changelog.parentrevs(r)
1454 p1, p2 = repo.changelog.parentrevs(r)
1455 n = repo.changelog.node(r)
1455 n = repo.changelog.node(r)
1456 if p2 != revlog.nullrev:
1456 if p2 != revlog.nullrev:
1457 raise util.Abort(_('cannot import merge revision %d') % r)
1457 raise util.Abort(_('cannot import merge revision %d') % r)
1458 if lastparent and lastparent != r:
1458 if lastparent and lastparent != r:
1459 raise util.Abort(_('revision %d is not the parent of %d')
1459 raise util.Abort(_('revision %d is not the parent of %d')
1460 % (r, lastparent))
1460 % (r, lastparent))
1461 lastparent = p1
1461 lastparent = p1
1462
1462
1463 if not patchname:
1463 if not patchname:
1464 patchname = normname('%d.diff' % r)
1464 patchname = normname('%d.diff' % r)
1465 self.check_reserved_name(patchname)
1465 self.check_reserved_name(patchname)
1466 checkseries(patchname)
1466 checkseries(patchname)
1467 checkfile(patchname)
1467 checkfile(patchname)
1468 self.full_series.insert(0, patchname)
1468 self.full_series.insert(0, patchname)
1469
1469
1470 patchf = self.opener(patchname, "w")
1470 patchf = self.opener(patchname, "w")
1471 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1471 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1472 patchf.close()
1472 patchf.close()
1473
1473
1474 se = statusentry(revlog.hex(n), patchname)
1474 se = statusentry(revlog.hex(n), patchname)
1475 self.applied.insert(0, se)
1475 self.applied.insert(0, se)
1476
1476
1477 added.append(patchname)
1477 added.append(patchname)
1478 patchname = None
1478 patchname = None
1479 self.parse_series()
1479 self.parse_series()
1480 self.applied_dirty = 1
1480 self.applied_dirty = 1
1481
1481
1482 for filename in files:
1482 for filename in files:
1483 if existing:
1483 if existing:
1484 if filename == '-':
1484 if filename == '-':
1485 raise util.Abort(_('-e is incompatible with import from -'))
1485 raise util.Abort(_('-e is incompatible with import from -'))
1486 if not patchname:
1486 if not patchname:
1487 patchname = normname(filename)
1487 patchname = normname(filename)
1488 self.check_reserved_name(patchname)
1488 self.check_reserved_name(patchname)
1489 if not os.path.isfile(self.join(patchname)):
1489 if not os.path.isfile(self.join(patchname)):
1490 raise util.Abort(_("patch %s does not exist") % patchname)
1490 raise util.Abort(_("patch %s does not exist") % patchname)
1491 else:
1491 else:
1492 try:
1492 try:
1493 if filename == '-':
1493 if filename == '-':
1494 if not patchname:
1494 if not patchname:
1495 raise util.Abort(_('need --name to import a patch from -'))
1495 raise util.Abort(_('need --name to import a patch from -'))
1496 text = sys.stdin.read()
1496 text = sys.stdin.read()
1497 else:
1497 else:
1498 text = file(filename, 'rb').read()
1498 text = file(filename, 'rb').read()
1499 except IOError:
1499 except IOError:
1500 raise util.Abort(_("unable to read %s") % patchname)
1500 raise util.Abort(_("unable to read %s") % patchname)
1501 if not patchname:
1501 if not patchname:
1502 patchname = normname(os.path.basename(filename))
1502 patchname = normname(os.path.basename(filename))
1503 self.check_reserved_name(patchname)
1503 self.check_reserved_name(patchname)
1504 checkfile(patchname)
1504 checkfile(patchname)
1505 patchf = self.opener(patchname, "w")
1505 patchf = self.opener(patchname, "w")
1506 patchf.write(text)
1506 patchf.write(text)
1507 checkseries(patchname)
1507 checkseries(patchname)
1508 index = self.full_series_end() + i
1508 index = self.full_series_end() + i
1509 self.full_series[index:index] = [patchname]
1509 self.full_series[index:index] = [patchname]
1510 self.parse_series()
1510 self.parse_series()
1511 self.ui.warn("adding %s to series file\n" % patchname)
1511 self.ui.warn("adding %s to series file\n" % patchname)
1512 i += 1
1512 i += 1
1513 added.append(patchname)
1513 added.append(patchname)
1514 patchname = None
1514 patchname = None
1515 self.series_dirty = 1
1515 self.series_dirty = 1
1516 qrepo = self.qrepo()
1516 qrepo = self.qrepo()
1517 if qrepo:
1517 if qrepo:
1518 qrepo.add(added)
1518 qrepo.add(added)
1519
1519
1520 def delete(ui, repo, *patches, **opts):
1521 """remove patches from queue
1522
1523 The patches must not be applied, unless they are arguments to
1524 the --rev parameter. At least one patch or revision is required.
1525
1526 With --rev, mq will stop managing the named revisions (converting
1527 them to regular mercurial changesets). The qfinish command should be
1528 used as an alternative for qdel -r, as the latter option is deprecated.
1529
1530 With --keep, the patch files are preserved in the patch directory."""
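# Illustrative usage (editor's note, not part of mq.py); the patch names are
# hypothetical:
#
#   hg qdelete obsolete-fix.patch    # remove an unapplied patch and its file
#   hg qdelete --keep old.patch      # drop it from the series, keep the file
#   hg qdelete --rev qbase:qtip      # stop managing applied changesets
#                                    # (deprecated; prefer qfinish)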
1531 q = repo.mq
1531 q = repo.mq
1532 q.delete(repo, patches, opts)
1532 q.delete(repo, patches, opts)
1533 q.save_dirty()
1533 q.save_dirty()
1534 return 0
1534 return 0
1535
1535
1536 def applied(ui, repo, patch=None, **opts):
1536 def applied(ui, repo, patch=None, **opts):
1537 """print the patches already applied"""
1537 """print the patches already applied"""
1538 q = repo.mq
1538 q = repo.mq
1539 if patch:
1539 if patch:
1540 if patch not in q.series:
1540 if patch not in q.series:
1541 raise util.Abort(_("patch %s is not in series file") % patch)
1541 raise util.Abort(_("patch %s is not in series file") % patch)
1542 end = q.series.index(patch) + 1
1542 end = q.series.index(patch) + 1
1543 else:
1543 else:
1544 end = q.series_end(True)
1544 end = q.series_end(True)
1545 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1545 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1546
1546
1547 def unapplied(ui, repo, patch=None, **opts):
1547 def unapplied(ui, repo, patch=None, **opts):
1548 """print the patches not yet applied"""
1548 """print the patches not yet applied"""
1549 q = repo.mq
1549 q = repo.mq
1550 if patch:
1550 if patch:
1551 if patch not in q.series:
1551 if patch not in q.series:
1552 raise util.Abort(_("patch %s is not in series file") % patch)
1552 raise util.Abort(_("patch %s is not in series file") % patch)
1553 start = q.series.index(patch) + 1
1553 start = q.series.index(patch) + 1
1554 else:
1554 else:
1555 start = q.series_end(True)
1555 start = q.series_end(True)
1556 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1556 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1557
1557
1558 def qimport(ui, repo, *filename, **opts):
1559 """import a patch
1560
1561 The patch is inserted into the series after the last applied patch.
1562 If no patches have been applied, qimport prepends the patch
1563 to the series.
1564
1565 The patch will have the same name as its source file unless you
1566 give it a new one with --name.
1567
1568 You can register an existing patch inside the patch directory
1569 with the --existing flag.
1570
1571 With --force, an existing patch of the same name will be overwritten.
1572
1573 An existing changeset may be placed under mq control with --rev
1574 (e.g. qimport --rev tip -n patch will place tip under mq control).
1575 With --git, patches imported with --rev will use the git diff
1576 format.
1577 """
1578 q = repo.mq
1578 q = repo.mq
1579 q.qimport(repo, filename, patchname=opts['name'],
1579 q.qimport(repo, filename, patchname=opts['name'],
1580 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1580 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1581 git=opts['git'])
1581 git=opts['git'])
1582 q.save_dirty()
1582 q.save_dirty()
1583 return 0
1583 return 0
1584
1584
1585 def init(ui, repo, **opts):
1586 """init a new queue repository
1587
1588 The queue repository is unversioned by default. If -c is
1589 specified, qinit will create a separate nested repository
1590 for patches (qinit -c may also be run later to convert
1591 an unversioned patch repository into a versioned one).
1592 You can use qcommit to commit changes to this queue repository."""
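# Illustrative usage (editor's note, not part of mq.py):
#
#   hg qinit       # create an unversioned .hg/patches directory
#   hg qinit -c    # create (or later convert to) a versioned patch repository,
#                  # so queue changes can be committed with "hg qcommit"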
1593 q = repo.mq
1593 q = repo.mq
1594 r = q.init(repo, create=opts['create_repo'])
1594 r = q.init(repo, create=opts['create_repo'])
1595 q.save_dirty()
1595 q.save_dirty()
1596 if r:
1596 if r:
1597 if not os.path.exists(r.wjoin('.hgignore')):
1597 if not os.path.exists(r.wjoin('.hgignore')):
1598 fp = r.wopener('.hgignore', 'w')
1598 fp = r.wopener('.hgignore', 'w')
1599 fp.write('^\\.hg\n')
1599 fp.write('^\\.hg\n')
1600 fp.write('^\\.mq\n')
1600 fp.write('^\\.mq\n')
1601 fp.write('syntax: glob\n')
1601 fp.write('syntax: glob\n')
1602 fp.write('status\n')
1602 fp.write('status\n')
1603 fp.write('guards\n')
1603 fp.write('guards\n')
1604 fp.close()
1604 fp.close()
1605 if not os.path.exists(r.wjoin('series')):
1605 if not os.path.exists(r.wjoin('series')):
1606 r.wopener('series', 'w').close()
1606 r.wopener('series', 'w').close()
1607 r.add(['.hgignore', 'series'])
1607 r.add(['.hgignore', 'series'])
1608 commands.add(ui, r)
1608 commands.add(ui, r)
1609 return 0
1609 return 0
1610
1610
1611 def clone(ui, source, dest=None, **opts):
1612 '''clone the main and patch repository at the same time
1613
1614 If source is local, the destination will have no patches applied. If
1615 source is remote, this command cannot check whether patches are
1616 applied in source, so it cannot guarantee that patches are not
1617 applied in the destination. If you clone a remote repository, make
1618 sure it has no patches applied before doing so.
1619
1620 The source patch repository is looked for in <src>/.hg/patches by
1621 default. Use -p <url> to change it.
1622
1623 The patch directory must be a nested mercurial repository, as
1624 would be created by qinit -c.
1625 '''
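# Illustrative usage (editor's note, not part of mq.py); the URLs are
# hypothetical:
#
#   hg qclone http://hg.example.com/proj             # clone repo plus <src>/.hg/patches
#   hg qclone -p http://hg.example.com/proj-mq proj  # take the patch repo from another URL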
1626 def patchdir(repo):
1626 def patchdir(repo):
1627 url = repo.url()
1627 url = repo.url()
1628 if url.endswith('/'):
1628 if url.endswith('/'):
1629 url = url[:-1]
1629 url = url[:-1]
1630 return url + '/.hg/patches'
1630 return url + '/.hg/patches'
1631 cmdutil.setremoteconfig(ui, opts)
1631 cmdutil.setremoteconfig(ui, opts)
1632 if dest is None:
1632 if dest is None:
1633 dest = hg.defaultdest(source)
1633 dest = hg.defaultdest(source)
1634 sr = hg.repository(ui, ui.expandpath(source))
1634 sr = hg.repository(ui, ui.expandpath(source))
1635 patchespath = opts['patches'] or patchdir(sr)
1635 patchespath = opts['patches'] or patchdir(sr)
1636 try:
1636 try:
1637 pr = hg.repository(ui, patchespath)
1637 pr = hg.repository(ui, patchespath)
1638 except RepoError:
1638 except RepoError:
1639 raise util.Abort(_('versioned patch repository not found'
1639 raise util.Abort(_('versioned patch repository not found'
1640 ' (see qinit -c)'))
1640 ' (see qinit -c)'))
1641 qbase, destrev = None, None
1641 qbase, destrev = None, None
1642 if sr.local():
1642 if sr.local():
1643 if sr.mq.applied:
1643 if sr.mq.applied:
1644 qbase = revlog.bin(sr.mq.applied[0].rev)
1644 qbase = revlog.bin(sr.mq.applied[0].rev)
1645 if not hg.islocal(dest):
1645 if not hg.islocal(dest):
1646 heads = dict.fromkeys(sr.heads())
1646 heads = dict.fromkeys(sr.heads())
1647 for h in sr.heads(qbase):
1647 for h in sr.heads(qbase):
1648 del heads[h]
1648 del heads[h]
1649 destrev = heads.keys()
1649 destrev = heads.keys()
1650 destrev.append(sr.changelog.parents(qbase)[0])
1650 destrev.append(sr.changelog.parents(qbase)[0])
1651 elif sr.capable('lookup'):
1651 elif sr.capable('lookup'):
1652 try:
1652 try:
1653 qbase = sr.lookup('qbase')
1653 qbase = sr.lookup('qbase')
1654 except RepoError:
1654 except RepoError:
1655 pass
1655 pass
1656 ui.note(_('cloning main repo\n'))
1656 ui.note(_('cloning main repo\n'))
1657 sr, dr = hg.clone(ui, sr.url(), dest,
1657 sr, dr = hg.clone(ui, sr.url(), dest,
1658 pull=opts['pull'],
1658 pull=opts['pull'],
1659 rev=destrev,
1659 rev=destrev,
1660 update=False,
1660 update=False,
1661 stream=opts['uncompressed'])
1661 stream=opts['uncompressed'])
1662 ui.note(_('cloning patch repo\n'))
1662 ui.note(_('cloning patch repo\n'))
1663 spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1663 spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1664 pull=opts['pull'], update=not opts['noupdate'],
1664 pull=opts['pull'], update=not opts['noupdate'],
1665 stream=opts['uncompressed'])
1665 stream=opts['uncompressed'])
1666 if dr.local():
1666 if dr.local():
1667 if qbase:
1667 if qbase:
1668 ui.note(_('stripping applied patches from destination repo\n'))
1668 ui.note(_('stripping applied patches from destination repo\n'))
1669 dr.mq.strip(dr, qbase, update=False, backup=None)
1669 dr.mq.strip(dr, qbase, update=False, backup=None)
1670 if not opts['noupdate']:
1670 if not opts['noupdate']:
1671 ui.note(_('updating destination repo\n'))
1671 ui.note(_('updating destination repo\n'))
1672 hg.update(dr, dr.changelog.tip())
1672 hg.update(dr, dr.changelog.tip())
1673
1673
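# Illustrative behaviour (not part of mq) of the patchdir() helper inside
# qclone above: the default patch repository lives in <repo>/.hg/patches,
# with a single trailing slash on the source URL stripped first.
def _demo_patchdir(url):
    if url.endswith('/'):
        url = url[:-1]
    return url + '/.hg/patches'

# _demo_patchdir('http://example.com/repo/') -> 'http://example.com/repo/.hg/patches'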
1674 def commit(ui, repo, *pats, **opts):
1674 def commit(ui, repo, *pats, **opts):
1675 """commit changes in the queue repository"""
1675 """commit changes in the queue repository"""
1676 q = repo.mq
1676 q = repo.mq
1677 r = q.qrepo()
1677 r = q.qrepo()
1678 if not r: raise util.Abort('no queue repository')
1678 if not r: raise util.Abort('no queue repository')
1679 commands.commit(r.ui, r, *pats, **opts)
1679 commands.commit(r.ui, r, *pats, **opts)
1680
1680
1681 def series(ui, repo, **opts):
1681 def series(ui, repo, **opts):
1682 """print the entire series file"""
1682 """print the entire series file"""
1683 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1683 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1684 return 0
1684 return 0
1685
1685
1686 def top(ui, repo, **opts):
1686 def top(ui, repo, **opts):
1687 """print the name of the current patch"""
1687 """print the name of the current patch"""
1688 q = repo.mq
1688 q = repo.mq
1689 t = q.applied and q.series_end(True) or 0
1689 t = q.applied and q.series_end(True) or 0
1690 if t:
1690 if t:
1691 return q.qseries(repo, start=t-1, length=1, status='A',
1691 return q.qseries(repo, start=t-1, length=1, status='A',
1692 summary=opts.get('summary'))
1692 summary=opts.get('summary'))
1693 else:
1693 else:
1694 ui.write("No patches applied\n")
1694 ui.write("No patches applied\n")
1695 return 1
1695 return 1
1696
1696
1697 def next(ui, repo, **opts):
1697 def next(ui, repo, **opts):
1698 """print the name of the next patch"""
1698 """print the name of the next patch"""
1699 q = repo.mq
1699 q = repo.mq
1700 end = q.series_end()
1700 end = q.series_end()
1701 if end == len(q.series):
1701 if end == len(q.series):
1702 ui.write("All patches applied\n")
1702 ui.write("All patches applied\n")
1703 return 1
1703 return 1
1704 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1704 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1705
1705
1706 def prev(ui, repo, **opts):
1706 def prev(ui, repo, **opts):
1707 """print the name of the previous patch"""
1707 """print the name of the previous patch"""
1708 q = repo.mq
1708 q = repo.mq
1709 l = len(q.applied)
1709 l = len(q.applied)
1710 if l == 1:
1710 if l == 1:
1711 ui.write("Only one patch applied\n")
1711 ui.write("Only one patch applied\n")
1712 return 1
1712 return 1
1713 if not l:
1713 if not l:
1714 ui.write("No patches applied\n")
1714 ui.write("No patches applied\n")
1715 return 1
1715 return 1
1716 return q.qseries(repo, start=l-2, length=1, status='A',
1716 return q.qseries(repo, start=l-2, length=1, status='A',
1717 summary=opts.get('summary'))
1717 summary=opts.get('summary'))
1718
1718
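# Illustrative sketch (not part of mq) of the index arithmetic behind qtop,
# qnext and qprev above, for a hypothetical series list and count of applied
# patches. Guards are ignored here; the real commands go through
# q.series_end(), which also accounts for guarded patches.
def _demo_top_next_prev(series, applied_count):
    top = next_patch = prev_patch = None
    if applied_count:
        top = series[applied_count - 1]
    if applied_count < len(series):
        next_patch = series[applied_count]
    if applied_count > 1:
        prev_patch = series[applied_count - 2]
    return top, next_patch, prev_patch

# _demo_top_next_prev(['a.patch', 'b.patch', 'c.patch'], 2)
# -> ('b.patch', 'c.patch', 'a.patch')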
1719 def setupheaderopts(ui, opts):
1719 def setupheaderopts(ui, opts):
1720 def do(opt,val):
1720 def do(opt,val):
1721 if not opts[opt] and opts['current' + opt]:
1721 if not opts[opt] and opts['current' + opt]:
1722 opts[opt] = val
1722 opts[opt] = val
1723 do('user', ui.username())
1723 do('user', ui.username())
1724 do('date', "%d %d" % util.makedate())
1724 do('date', "%d %d" % util.makedate())
1725
1725
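# Minimal sketch (not part of mq) of the defaulting rule setupheaderopts()
# implements above: an explicit --user/--date value wins, otherwise
# --currentuser/--currentdate fill in the caller-supplied defaults. The
# argument names below are hypothetical stand-ins for the real opts dict.
def _demo_fill_header(opts, username, now):
    for opt, val in (('user', username), ('date', now)):
        if not opts.get(opt) and opts.get('current' + opt):
            opts[opt] = val
    return opts

# _demo_fill_header({'user': '', 'currentuser': True,
#                    'date': '', 'currentdate': None}, 'alice', '1200000000 0')
# fills in 'user' -> 'alice' but leaves 'date' empty, because --currentdate
# was not given.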
1726 def new(ui, repo, patch, *args, **opts):
1726 def new(ui, repo, patch, *args, **opts):
1727 """create a new patch
1727 """create a new patch
1728
1728
1729 qnew creates a new patch on top of the currently applied patch
1730 (if any). It will refuse to run if there are any outstanding
1731 changes unless -f is specified, in which case the patch will
1732 be initialised with them. You may also use -I, -X, and/or a list of
1733 files after the patch name to add only the changes to matching files
1734 to the new patch, leaving the rest as uncommitted modifications.
1735
1736 -e, -m, or -l sets the patch header as well as the commit message.
1737 If none is specified, the patch header is left empty and the
1738 commit message is '[mq]: PATCH'."""
1739 q = repo.mq
1739 q = repo.mq
1740 message = cmdutil.logmessage(opts)
1740 message = cmdutil.logmessage(opts)
1741 if opts['edit']:
1741 if opts['edit']:
1742 message = ui.edit(message, ui.username())
1742 message = ui.edit(message, ui.username())
1743 opts['msg'] = message
1743 opts['msg'] = message
1744 setupheaderopts(ui, opts)
1744 setupheaderopts(ui, opts)
1745 q.new(repo, patch, *args, **opts)
1745 q.new(repo, patch, *args, **opts)
1746 q.save_dirty()
1746 q.save_dirty()
1747 return 0
1747 return 0
1748
1748
1749 def refresh(ui, repo, *pats, **opts):
1749 def refresh(ui, repo, *pats, **opts):
1750 """update the current patch
1750 """update the current patch
1751
1751
1752 If any file patterns are provided, the refreshed patch will contain only
1752 If any file patterns are provided, the refreshed patch will contain only
1753 the modifications that match those patterns; the remaining modifications
1753 the modifications that match those patterns; the remaining modifications
1754 will remain in the working directory.
1754 will remain in the working directory.
1755
1755
1756 hg add/remove/copy/rename work as usual, though you might want to use
1756 hg add/remove/copy/rename work as usual, though you might want to use
1757 git-style patches (--git or [diff] git=1) to track copies and renames.
1757 git-style patches (--git or [diff] git=1) to track copies and renames.
1758 """
1758 """
1759 q = repo.mq
1759 q = repo.mq
1760 message = cmdutil.logmessage(opts)
1760 message = cmdutil.logmessage(opts)
1761 if opts['edit']:
1761 if opts['edit']:
1762 if not q.applied:
1762 if not q.applied:
1763 ui.write(_("No patches applied\n"))
1763 ui.write(_("No patches applied\n"))
1764 return 1
1764 return 1
1765 if message:
1765 if message:
1766 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1766 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1767 patch = q.applied[-1].name
1767 patch = q.applied[-1].name
1768 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1768 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1769 message = ui.edit('\n'.join(message), user or ui.username())
1769 message = ui.edit('\n'.join(message), user or ui.username())
1770 setupheaderopts(ui, opts)
1770 setupheaderopts(ui, opts)
1771 ret = q.refresh(repo, pats, msg=message, **opts)
1771 ret = q.refresh(repo, pats, msg=message, **opts)
1772 q.save_dirty()
1772 q.save_dirty()
1773 return ret
1773 return ret
1774
1774
1775 def diff(ui, repo, *pats, **opts):
1775 def diff(ui, repo, *pats, **opts):
1776 """diff of the current patch and subsequent modifications
1776 """diff of the current patch and subsequent modifications
1777
1777
1778 Shows a diff which includes the current patch as well as any changes which
1778 Shows a diff which includes the current patch as well as any changes which
1779 have been made in the working directory since the last refresh (thus
1779 have been made in the working directory since the last refresh (thus
1780 showing what the current patch would become after a qrefresh).
1780 showing what the current patch would become after a qrefresh).
1781
1781
1782 Use 'hg diff' if you only want to see the changes made since the last
1782 Use 'hg diff' if you only want to see the changes made since the last
1783 qrefresh, or 'hg export qtip' if you want to see changes made by the
1783 qrefresh, or 'hg export qtip' if you want to see changes made by the
1784 current patch without including changes made since the qrefresh.
1784 current patch without including changes made since the qrefresh.
1785 """
1785 """
1786 repo.mq.diff(repo, pats, opts)
1786 repo.mq.diff(repo, pats, opts)
1787 return 0
1787 return 0
1788
1788
1789 def fold(ui, repo, *files, **opts):
1789 def fold(ui, repo, *files, **opts):
1790 """fold the named patches into the current patch
1790 """fold the named patches into the current patch
1791
1791
1792 Patches must not yet be applied. Each patch will be successively
1792 Patches must not yet be applied. Each patch will be successively
1793 applied to the current patch in the order given. If all the
1793 applied to the current patch in the order given. If all the
1794 patches apply successfully, the current patch will be refreshed
1794 patches apply successfully, the current patch will be refreshed
1795 with the new cumulative patch, and the folded patches will
1795 with the new cumulative patch, and the folded patches will
1796 be deleted. With -k/--keep, the folded patch files will not
1796 be deleted. With -k/--keep, the folded patch files will not
1797 be removed afterwards.
1797 be removed afterwards.
1798
1798
1799 The header for each folded patch will be concatenated with
1799 The header for each folded patch will be concatenated with
1800 the current patch header, separated by a line of '* * *'."""
1800 the current patch header, separated by a line of '* * *'."""
1801
1801
1802 q = repo.mq
1802 q = repo.mq
1803
1803
1804 if not files:
1804 if not files:
1805 raise util.Abort(_('qfold requires at least one patch name'))
1805 raise util.Abort(_('qfold requires at least one patch name'))
1806 if not q.check_toppatch(repo):
1806 if not q.check_toppatch(repo):
1807 raise util.Abort(_('No patches applied'))
1807 raise util.Abort(_('No patches applied'))
1808
1808
1809 message = cmdutil.logmessage(opts)
1809 message = cmdutil.logmessage(opts)
1810 if opts['edit']:
1810 if opts['edit']:
1811 if message:
1811 if message:
1812 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1812 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1813
1813
1814 parent = q.lookup('qtip')
1814 parent = q.lookup('qtip')
1815 patches = []
1815 patches = []
1816 messages = []
1816 messages = []
1817 for f in files:
1817 for f in files:
1818 p = q.lookup(f)
1818 p = q.lookup(f)
1819 if p in patches or p == parent:
1819 if p in patches or p == parent:
1820 ui.warn(_('Skipping already folded patch %s\n') % p)
1821 if q.isapplied(p):
1821 if q.isapplied(p):
1822 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1822 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1823 patches.append(p)
1823 patches.append(p)
1824
1824
1825 for p in patches:
1825 for p in patches:
1826 if not message:
1826 if not message:
1827 messages.append(q.readheaders(p)[0])
1827 messages.append(q.readheaders(p)[0])
1828 pf = q.join(p)
1828 pf = q.join(p)
1829 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1829 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1830 if not patchsuccess:
1830 if not patchsuccess:
1831 raise util.Abort(_('Error folding patch %s') % p)
1831 raise util.Abort(_('Error folding patch %s') % p)
1832 patch.updatedir(ui, repo, files)
1832 patch.updatedir(ui, repo, files)
1833
1833
1834 if not message:
1834 if not message:
1835 message, comments, user = q.readheaders(parent)[0:3]
1835 message, comments, user = q.readheaders(parent)[0:3]
1836 for msg in messages:
1836 for msg in messages:
1837 message.append('* * *')
1837 message.append('* * *')
1838 message.extend(msg)
1838 message.extend(msg)
1839 message = '\n'.join(message)
1839 message = '\n'.join(message)
1840
1840
1841 if opts['edit']:
1841 if opts['edit']:
1842 message = ui.edit(message, user or ui.username())
1842 message = ui.edit(message, user or ui.username())
1843
1843
1844 q.refresh(repo, msg=message)
1844 q.refresh(repo, msg=message)
1845 q.delete(repo, patches, opts)
1845 q.delete(repo, patches, opts)
1846 q.save_dirty()
1846 q.save_dirty()
1847
1847
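# Sketch (not part of mq) of how qfold above assembles the new commit
# message: the current patch's message lines, then each folded patch's
# message, separated by a '* * *' line.
def _demo_fold_messages(current_lines, folded_messages):
    lines = list(current_lines)
    for msg in folded_messages:
        lines.append('* * *')
        lines.extend(msg)
    return '\n'.join(lines)

# _demo_fold_messages(['fix A'], [['fix B'], ['fix C']])
# -> 'fix A\n* * *\nfix B\n* * *\nfix C'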
1848 def goto(ui, repo, patch, **opts):
1848 def goto(ui, repo, patch, **opts):
1849 '''push or pop patches until named patch is at top of stack'''
1849 '''push or pop patches until named patch is at top of stack'''
1850 q = repo.mq
1850 q = repo.mq
1851 patch = q.lookup(patch)
1851 patch = q.lookup(patch)
1852 if q.isapplied(patch):
1852 if q.isapplied(patch):
1853 ret = q.pop(repo, patch, force=opts['force'])
1853 ret = q.pop(repo, patch, force=opts['force'])
1854 else:
1854 else:
1855 ret = q.push(repo, patch, force=opts['force'])
1855 ret = q.push(repo, patch, force=opts['force'])
1856 q.save_dirty()
1856 q.save_dirty()
1857 return ret
1857 return ret
1858
1858
1859 def guard(ui, repo, *args, **opts):
1859 def guard(ui, repo, *args, **opts):
1860 '''set or print guards for a patch
1860 '''set or print guards for a patch
1861
1861
1862 Guards control whether a patch can be pushed. A patch with no
1863 guards is always pushed. A patch with a positive guard ("+foo") is
1864 pushed only if the qselect command has activated that guard. A patch
1865 with a negative guard ("-foo") is never pushed while the qselect
1866 command has that guard activated.
1867
1867
1868 With no arguments, print the currently active guards.
1868 With no arguments, print the currently active guards.
1869 With arguments, set guards for the named patch.
1869 With arguments, set guards for the named patch.
1870
1870
1871 To set a negative guard "-foo" on the topmost patch ("--" is needed so
1872 hg will not interpret "-foo" as an option):
1872 hg will not interpret "-foo" as an option):
1873 hg qguard -- -foo
1873 hg qguard -- -foo
1874
1874
1875 To set guards on another patch:
1875 To set guards on another patch:
1876 hg qguard other.patch +2.6.17 -stable
1876 hg qguard other.patch +2.6.17 -stable
1877 '''
1877 '''
1878 def status(idx):
1878 def status(idx):
1879 guards = q.series_guards[idx] or ['unguarded']
1879 guards = q.series_guards[idx] or ['unguarded']
1880 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1880 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1881 q = repo.mq
1881 q = repo.mq
1882 patch = None
1882 patch = None
1883 args = list(args)
1883 args = list(args)
1884 if opts['list']:
1884 if opts['list']:
1885 if args or opts['none']:
1885 if args or opts['none']:
1886 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1886 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1887 for i in xrange(len(q.series)):
1887 for i in xrange(len(q.series)):
1888 status(i)
1888 status(i)
1889 return
1889 return
1890 if not args or args[0][0:1] in '-+':
1890 if not args or args[0][0:1] in '-+':
1891 if not q.applied:
1891 if not q.applied:
1892 raise util.Abort(_('no patches applied'))
1892 raise util.Abort(_('no patches applied'))
1893 patch = q.applied[-1].name
1893 patch = q.applied[-1].name
1894 if patch is None and args[0][0:1] not in '-+':
1894 if patch is None and args[0][0:1] not in '-+':
1895 patch = args.pop(0)
1895 patch = args.pop(0)
1896 if patch is None:
1896 if patch is None:
1897 raise util.Abort(_('no patch to work with'))
1897 raise util.Abort(_('no patch to work with'))
1898 if args or opts['none']:
1898 if args or opts['none']:
1899 idx = q.find_series(patch)
1899 idx = q.find_series(patch)
1900 if idx is None:
1900 if idx is None:
1901 raise util.Abort(_('no patch named %s') % patch)
1901 raise util.Abort(_('no patch named %s') % patch)
1902 q.set_guards(idx, args)
1902 q.set_guards(idx, args)
1903 q.save_dirty()
1903 q.save_dirty()
1904 else:
1904 else:
1905 status(q.series.index(q.lookup(patch)))
1905 status(q.series.index(q.lookup(patch)))
1906
1906
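# Illustrative parse (not part of mq) of one series-file line into a patch
# name and its guards, matching the " #+guard"/" #-guard" convention that
# qguard prints and qselect consumes. The pattern below is a simplified
# assumption; the real parsing lives in queue.parse_series()/guard_re, and
# 're' is already imported at the top of this module.
def _demo_parse_series_line(line):
    name = line.split('#', 1)[0].strip()
    guards = re.findall(r'#([-+]\S+)', line)
    return name, guards

# _demo_parse_series_line('foo.patch #+2.6.17 #-stable')
# -> ('foo.patch', ['+2.6.17', '-stable'])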
1907 def header(ui, repo, patch=None):
1907 def header(ui, repo, patch=None):
1908 """Print the header of the topmost or specified patch"""
1908 """Print the header of the topmost or specified patch"""
1909 q = repo.mq
1909 q = repo.mq
1910
1910
1911 if patch:
1911 if patch:
1912 patch = q.lookup(patch)
1912 patch = q.lookup(patch)
1913 else:
1913 else:
1914 if not q.applied:
1914 if not q.applied:
1915 ui.write('No patches applied\n')
1915 ui.write('No patches applied\n')
1916 return 1
1916 return 1
1917 patch = q.lookup('qtip')
1917 patch = q.lookup('qtip')
1918 message = repo.mq.readheaders(patch)[0]
1918 message = repo.mq.readheaders(patch)[0]
1919
1919
1920 ui.write('\n'.join(message) + '\n')
1920 ui.write('\n'.join(message) + '\n')
1921
1921
1922 def lastsavename(path):
1922 def lastsavename(path):
1923 (directory, base) = os.path.split(path)
1923 (directory, base) = os.path.split(path)
1924 names = os.listdir(directory)
1924 names = os.listdir(directory)
1925 namere = re.compile("%s\\.([0-9]+)" % base)
1926 maxindex = None
1926 maxindex = None
1927 maxname = None
1927 maxname = None
1928 for f in names:
1928 for f in names:
1929 m = namere.match(f)
1929 m = namere.match(f)
1930 if m:
1930 if m:
1931 index = int(m.group(1))
1931 index = int(m.group(1))
1932 if maxindex is None or index > maxindex:
1933 maxindex = index
1933 maxindex = index
1934 maxname = f
1934 maxname = f
1935 if maxname:
1935 if maxname:
1936 return (os.path.join(directory, maxname), maxindex)
1936 return (os.path.join(directory, maxname), maxindex)
1937 return (None, None)
1937 return (None, None)
1938
1938
1939 def savename(path):
1939 def savename(path):
1940 (last, index) = lastsavename(path)
1940 (last, index) = lastsavename(path)
1941 if last is None:
1941 if last is None:
1942 index = 0
1942 index = 0
1943 newpath = path + ".%d" % (index + 1)
1943 newpath = path + ".%d" % (index + 1)
1944 return newpath
1944 return newpath
1945
1945
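# Illustrative sketch (not part of mq) of the ".N" numbering scheme used by
# lastsavename()/savename() above. 'existing' stands in for os.listdir() on
# the queue's parent directory; unlike the real code, the pattern here is
# escaped and anchored. 'os' and 're' come from this module's imports.
def _demo_next_savename(path, existing):
    base = os.path.basename(path)
    namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    indexes = [int(m.group(1)) for m in map(namere.match, existing) if m]
    if indexes:
        return "%s.%d" % (path, max(indexes) + 1)
    return "%s.%d" % (path, 1)

# _demo_next_savename('.hg/patches', ['patches.1', 'patches.2', 'status'])
# -> '.hg/patches.3'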
1946 def push(ui, repo, patch=None, **opts):
1946 def push(ui, repo, patch=None, **opts):
1947 """push the next patch onto the stack
1947 """push the next patch onto the stack
1948
1948
1949 When --force is applied, all local changes in patched files will be lost.
1949 When --force is applied, all local changes in patched files will be lost.
1950 """
1950 """
1951 q = repo.mq
1951 q = repo.mq
1952 mergeq = None
1952 mergeq = None
1953
1953
1954 if opts['all']:
1954 if opts['all']:
1955 if not q.series:
1955 if not q.series:
1956 ui.warn(_('no patches in series\n'))
1956 ui.warn(_('no patches in series\n'))
1957 return 0
1957 return 0
1958 patch = q.series[-1]
1958 patch = q.series[-1]
1959 if opts['merge']:
1959 if opts['merge']:
1960 if opts['name']:
1960 if opts['name']:
1961 newpath = repo.join(opts['name'])
1961 newpath = repo.join(opts['name'])
1962 else:
1962 else:
1963 newpath, i = lastsavename(q.path)
1963 newpath, i = lastsavename(q.path)
1964 if not newpath:
1964 if not newpath:
1965 ui.warn("no saved queues found, please use -n\n")
1965 ui.warn("no saved queues found, please use -n\n")
1966 return 1
1966 return 1
1967 mergeq = queue(ui, repo.join(""), newpath)
1967 mergeq = queue(ui, repo.join(""), newpath)
1968 ui.warn("merging with queue at: %s\n" % mergeq.path)
1968 ui.warn("merging with queue at: %s\n" % mergeq.path)
1969 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1969 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1970 mergeq=mergeq)
1970 mergeq=mergeq)
1971 return ret
1971 return ret
1972
1972
1973 def pop(ui, repo, patch=None, **opts):
1973 def pop(ui, repo, patch=None, **opts):
1974 """pop the current patch off the stack
1974 """pop the current patch off the stack
1975
1975
1976 By default, pops off the top of the patch stack. If given a patch name,
1976 By default, pops off the top of the patch stack. If given a patch name,
1977 keeps popping off patches until the named patch is at the top of the stack.
1977 keeps popping off patches until the named patch is at the top of the stack.
1978 """
1978 """
1979 localupdate = True
1979 localupdate = True
1980 if opts['name']:
1980 if opts['name']:
1981 q = queue(ui, repo.join(""), repo.join(opts['name']))
1981 q = queue(ui, repo.join(""), repo.join(opts['name']))
1982 ui.warn('using patch queue: %s\n' % q.path)
1982 ui.warn('using patch queue: %s\n' % q.path)
1983 localupdate = False
1983 localupdate = False
1984 else:
1984 else:
1985 q = repo.mq
1985 q = repo.mq
1986 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1986 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1987 all=opts['all'])
1987 all=opts['all'])
1988 q.save_dirty()
1988 q.save_dirty()
1989 return ret
1989 return ret
1990
1990
1991 def rename(ui, repo, patch, name=None, **opts):
1991 def rename(ui, repo, patch, name=None, **opts):
1992 """rename a patch
1992 """rename a patch
1993
1993
1994 With one argument, renames the current patch to PATCH1.
1994 With one argument, renames the current patch to PATCH1.
1995 With two arguments, renames PATCH1 to PATCH2."""
1995 With two arguments, renames PATCH1 to PATCH2."""
1996
1996
1997 q = repo.mq
1997 q = repo.mq
1998
1998
1999 if not name:
1999 if not name:
2000 name = patch
2000 name = patch
2001 patch = None
2001 patch = None
2002
2002
2003 if patch:
2003 if patch:
2004 patch = q.lookup(patch)
2004 patch = q.lookup(patch)
2005 else:
2005 else:
2006 if not q.applied:
2006 if not q.applied:
2007 ui.write(_('No patches applied\n'))
2007 ui.write(_('No patches applied\n'))
2008 return
2008 return
2009 patch = q.lookup('qtip')
2009 patch = q.lookup('qtip')
2010 absdest = q.join(name)
2010 absdest = q.join(name)
2011 if os.path.isdir(absdest):
2011 if os.path.isdir(absdest):
2012 name = normname(os.path.join(name, os.path.basename(patch)))
2012 name = normname(os.path.join(name, os.path.basename(patch)))
2013 absdest = q.join(name)
2013 absdest = q.join(name)
2014 if os.path.exists(absdest):
2014 if os.path.exists(absdest):
2015 raise util.Abort(_('%s already exists') % absdest)
2015 raise util.Abort(_('%s already exists') % absdest)
2016
2016
2017 if name in q.series:
2017 if name in q.series:
2018 raise util.Abort(_('A patch named %s already exists in the series file') % name)
2018 raise util.Abort(_('A patch named %s already exists in the series file') % name)
2019
2019
2020 if ui.verbose:
2020 if ui.verbose:
2021 ui.write('Renaming %s to %s\n' % (patch, name))
2021 ui.write('Renaming %s to %s\n' % (patch, name))
2022 i = q.find_series(patch)
2022 i = q.find_series(patch)
2023 guards = q.guard_re.findall(q.full_series[i])
2023 guards = q.guard_re.findall(q.full_series[i])
2024 q.full_series[i] = name + ''.join([' #' + g for g in guards])
2024 q.full_series[i] = name + ''.join([' #' + g for g in guards])
2025 q.parse_series()
2025 q.parse_series()
2026 q.series_dirty = 1
2026 q.series_dirty = 1
2027
2027
2028 info = q.isapplied(patch)
2028 info = q.isapplied(patch)
2029 if info:
2029 if info:
2030 q.applied[info[0]] = statusentry(info[1], name)
2030 q.applied[info[0]] = statusentry(info[1], name)
2031 q.applied_dirty = 1
2031 q.applied_dirty = 1
2032
2032
2033 util.rename(q.join(patch), absdest)
2033 util.rename(q.join(patch), absdest)
2034 r = q.qrepo()
2034 r = q.qrepo()
2035 if r:
2035 if r:
2036 wlock = r.wlock()
2036 wlock = r.wlock()
2037 try:
2037 try:
2038 if r.dirstate[patch] == 'a':
2038 if r.dirstate[patch] == 'a':
2039 r.dirstate.forget(patch)
2039 r.dirstate.forget(patch)
2040 r.dirstate.add(name)
2040 r.dirstate.add(name)
2041 else:
2041 else:
2042 if r.dirstate[name] == 'r':
2042 if r.dirstate[name] == 'r':
2043 r.undelete([name])
2043 r.undelete([name])
2044 r.copy(patch, name)
2044 r.copy(patch, name)
2045 r.remove([patch], False)
2045 r.remove([patch], False)
2046 finally:
2046 finally:
2047 del wlock
2047 del wlock
2048
2048
2049 q.save_dirty()
2049 q.save_dirty()
2050
2050
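# Sketch (not part of mq) of the series-line rewrite qrename performs above
# so that guards survive a rename. The simplified pattern below is an
# assumption standing in for the real q.guard_re, which is defined elsewhere
# in this file; 're' is imported at the top of the module.
_demo_guard_re = re.compile(r'\s?#([-+]\S+)')

def _demo_rename_series_line(line, newname):
    guards = _demo_guard_re.findall(line)
    return newname + ''.join([' #' + g for g in guards])

# _demo_rename_series_line('old.patch #+stable #-broken', 'new.patch')
# -> 'new.patch #+stable #-broken'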
2051 def restore(ui, repo, rev, **opts):
2051 def restore(ui, repo, rev, **opts):
2052 """restore the queue state saved by a rev"""
2052 """restore the queue state saved by a rev"""
2053 rev = repo.lookup(rev)
2053 rev = repo.lookup(rev)
2054 q = repo.mq
2054 q = repo.mq
2055 q.restore(repo, rev, delete=opts['delete'],
2055 q.restore(repo, rev, delete=opts['delete'],
2056 qupdate=opts['update'])
2056 qupdate=opts['update'])
2057 q.save_dirty()
2057 q.save_dirty()
2058 return 0
2058 return 0
2059
2059
2060 def save(ui, repo, **opts):
2060 def save(ui, repo, **opts):
2061 """save current queue state"""
2061 """save current queue state"""
2062 q = repo.mq
2062 q = repo.mq
2063 message = cmdutil.logmessage(opts)
2063 message = cmdutil.logmessage(opts)
2064 ret = q.save(repo, msg=message)
2064 ret = q.save(repo, msg=message)
2065 if ret:
2065 if ret:
2066 return ret
2066 return ret
2067 q.save_dirty()
2067 q.save_dirty()
2068 if opts['copy']:
2068 if opts['copy']:
2069 path = q.path
2069 path = q.path
2070 if opts['name']:
2070 if opts['name']:
2071 newpath = os.path.join(q.basepath, opts['name'])
2071 newpath = os.path.join(q.basepath, opts['name'])
2072 if os.path.exists(newpath):
2072 if os.path.exists(newpath):
2073 if not os.path.isdir(newpath):
2073 if not os.path.isdir(newpath):
2074 raise util.Abort(_('destination %s exists and is not '
2074 raise util.Abort(_('destination %s exists and is not '
2075 'a directory') % newpath)
2075 'a directory') % newpath)
2076 if not opts['force']:
2076 if not opts['force']:
2077 raise util.Abort(_('destination %s exists, '
2077 raise util.Abort(_('destination %s exists, '
2078 'use -f to force') % newpath)
2078 'use -f to force') % newpath)
2079 else:
2079 else:
2080 newpath = savename(path)
2080 newpath = savename(path)
2081 ui.warn("copy %s to %s\n" % (path, newpath))
2081 ui.warn("copy %s to %s\n" % (path, newpath))
2082 util.copyfiles(path, newpath)
2082 util.copyfiles(path, newpath)
2083 if opts['empty']:
2083 if opts['empty']:
2084 try:
2084 try:
2085 os.unlink(q.join(q.status_path))
2085 os.unlink(q.join(q.status_path))
2086 except OSError:
2087 pass
2087 pass
2088 return 0
2088 return 0
2089
2089
2090 def strip(ui, repo, rev, **opts):
2090 def strip(ui, repo, rev, **opts):
2091 """strip a revision and all its descendants from the repository
2091 """strip a revision and all its descendants from the repository
2092
2092
2093 If one of the working directory's parent revisions is stripped, the
2094 working directory will be updated to the parent of the stripped revision.
2095 """
2095 """
2096 backup = 'all'
2096 backup = 'all'
2097 if opts['backup']:
2097 if opts['backup']:
2098 backup = 'strip'
2098 backup = 'strip'
2099 elif opts['nobackup']:
2099 elif opts['nobackup']:
2100 backup = 'none'
2100 backup = 'none'
2101
2101
2102 rev = repo.lookup(rev)
2102 rev = repo.lookup(rev)
2103 p = repo.dirstate.parents()
2103 p = repo.dirstate.parents()
2104 cl = repo.changelog
2104 cl = repo.changelog
2105 update = True
2105 update = True
2106 if p[0] == revlog.nullid:
2106 if p[0] == revlog.nullid:
2107 update = False
2107 update = False
2108 elif p[1] == revlog.nullid and rev != cl.ancestor(p[0], rev):
2108 elif p[1] == revlog.nullid and rev != cl.ancestor(p[0], rev):
2109 update = False
2109 update = False
2110 elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
2110 elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
2111 update = False
2111 update = False
2112
2112
2113 repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
2113 repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
2114 return 0
2114 return 0
2115
2115
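# Sketch (not part of mq) of how the strip command above maps its flags onto
# the backup mode handed to repo.mq.strip(): no flag -> 'all',
# -b/--backup -> 'strip' (bundle unrelated changesets, per its option help),
# -n/--nobackup -> 'none'.
def _demo_strip_backup_mode(opts):
    if opts.get('backup'):
        return 'strip'
    if opts.get('nobackup'):
        return 'none'
    return 'all'

# _demo_strip_backup_mode({'backup': None, 'nobackup': True}) -> 'none'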
2116 def select(ui, repo, *args, **opts):
2116 def select(ui, repo, *args, **opts):
2117 '''set or print guarded patches to push
2117 '''set or print guarded patches to push
2118
2118
2119 Use the qguard command to set or print guards on a patch, then use
2120 qselect to tell mq which guards to use. A patch will be pushed if it
2121 has no guards or if any of its positive guards is currently selected;
2122 it will not be pushed if any of its negative guards is selected.
2123 For example:
2123 For example:
2124
2124
2125 qguard foo.patch -stable (negative guard)
2125 qguard foo.patch -stable (negative guard)
2126 qguard bar.patch +stable (positive guard)
2126 qguard bar.patch +stable (positive guard)
2127 qselect stable
2127 qselect stable
2128
2128
2129 This activates the "stable" guard. mq will skip foo.patch (because
2129 This activates the "stable" guard. mq will skip foo.patch (because
2130 it has a negative match) but push bar.patch (because it
2130 it has a negative match) but push bar.patch (because it
2131 has a positive match).
2131 has a positive match).
2132
2132
2133 With no arguments, prints the currently active guards.
2133 With no arguments, prints the currently active guards.
2134 With one argument, sets the active guard.
2134 With one argument, sets the active guard.
2135
2135
2136 Use -n/--none to deactivate guards (no other arguments needed).
2136 Use -n/--none to deactivate guards (no other arguments needed).
2137 When no guards are active, patches with positive guards are skipped
2137 When no guards are active, patches with positive guards are skipped
2138 and patches with negative guards are pushed.
2138 and patches with negative guards are pushed.
2139
2139
2140 qselect can change the guards on applied patches. It does not pop
2140 qselect can change the guards on applied patches. It does not pop
2141 guarded patches by default. Use --pop to pop back to the last applied
2141 guarded patches by default. Use --pop to pop back to the last applied
2142 patch that is not guarded. Use --reapply (which implies --pop) to push
2142 patch that is not guarded. Use --reapply (which implies --pop) to push
2143 back to the current patch afterwards, but skip guarded patches.
2143 back to the current patch afterwards, but skip guarded patches.
2144
2144
2145 Use -s/--series to print a list of all guards in the series file (no
2145 Use -s/--series to print a list of all guards in the series file (no
2146 other arguments needed). Use -v for more information.'''
2146 other arguments needed). Use -v for more information.'''
2147
2147
2148 q = repo.mq
2148 q = repo.mq
2149 guards = q.active()
2149 guards = q.active()
2150 if args or opts['none']:
2150 if args or opts['none']:
2151 old_unapplied = q.unapplied(repo)
2151 old_unapplied = q.unapplied(repo)
2152 old_guarded = [i for i in xrange(len(q.applied)) if
2152 old_guarded = [i for i in xrange(len(q.applied)) if
2153 not q.pushable(i)[0]]
2153 not q.pushable(i)[0]]
2154 q.set_active(args)
2154 q.set_active(args)
2155 q.save_dirty()
2155 q.save_dirty()
2156 if not args:
2156 if not args:
2157 ui.status(_('guards deactivated\n'))
2157 ui.status(_('guards deactivated\n'))
2158 if not opts['pop'] and not opts['reapply']:
2158 if not opts['pop'] and not opts['reapply']:
2159 unapplied = q.unapplied(repo)
2159 unapplied = q.unapplied(repo)
2160 guarded = [i for i in xrange(len(q.applied))
2160 guarded = [i for i in xrange(len(q.applied))
2161 if not q.pushable(i)[0]]
2161 if not q.pushable(i)[0]]
2162 if len(unapplied) != len(old_unapplied):
2162 if len(unapplied) != len(old_unapplied):
2163 ui.status(_('number of unguarded, unapplied patches has '
2163 ui.status(_('number of unguarded, unapplied patches has '
2164 'changed from %d to %d\n') %
2164 'changed from %d to %d\n') %
2165 (len(old_unapplied), len(unapplied)))
2165 (len(old_unapplied), len(unapplied)))
2166 if len(guarded) != len(old_guarded):
2166 if len(guarded) != len(old_guarded):
2167 ui.status(_('number of guarded, applied patches has changed '
2167 ui.status(_('number of guarded, applied patches has changed '
2168 'from %d to %d\n') %
2168 'from %d to %d\n') %
2169 (len(old_guarded), len(guarded)))
2169 (len(old_guarded), len(guarded)))
2170 elif opts['series']:
2170 elif opts['series']:
2171 guards = {}
2171 guards = {}
2172 noguards = 0
2172 noguards = 0
2173 for gs in q.series_guards:
2173 for gs in q.series_guards:
2174 if not gs:
2174 if not gs:
2175 noguards += 1
2175 noguards += 1
2176 for g in gs:
2176 for g in gs:
2177 guards.setdefault(g, 0)
2177 guards.setdefault(g, 0)
2178 guards[g] += 1
2178 guards[g] += 1
2179 if ui.verbose:
2179 if ui.verbose:
2180 guards['NONE'] = noguards
2180 guards['NONE'] = noguards
2181 guards = guards.items()
2181 guards = guards.items()
2182 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2182 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2183 if guards:
2183 if guards:
2184 ui.note(_('guards in series file:\n'))
2184 ui.note(_('guards in series file:\n'))
2185 for guard, count in guards:
2185 for guard, count in guards:
2186 ui.note('%2d ' % count)
2186 ui.note('%2d ' % count)
2187 ui.write(guard, '\n')
2187 ui.write(guard, '\n')
2188 else:
2188 else:
2189 ui.note(_('no guards in series file\n'))
2189 ui.note(_('no guards in series file\n'))
2190 else:
2190 else:
2191 if guards:
2191 if guards:
2192 ui.note(_('active guards:\n'))
2192 ui.note(_('active guards:\n'))
2193 for g in guards:
2193 for g in guards:
2194 ui.write(g, '\n')
2194 ui.write(g, '\n')
2195 else:
2195 else:
2196 ui.write(_('no active guards\n'))
2196 ui.write(_('no active guards\n'))
2197 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2197 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2198 popped = False
2198 popped = False
2199 if opts['pop'] or opts['reapply']:
2199 if opts['pop'] or opts['reapply']:
2200 for i in xrange(len(q.applied)):
2200 for i in xrange(len(q.applied)):
2201 pushable, reason = q.pushable(i)
2201 pushable, reason = q.pushable(i)
2202 if not pushable:
2202 if not pushable:
2203 ui.status(_('popping guarded patches\n'))
2203 ui.status(_('popping guarded patches\n'))
2204 popped = True
2204 popped = True
2205 if i == 0:
2205 if i == 0:
2206 q.pop(repo, all=True)
2206 q.pop(repo, all=True)
2207 else:
2207 else:
2208 q.pop(repo, i-1)
2208 q.pop(repo, i-1)
2209 break
2209 break
2210 if popped:
2210 if popped:
2211 try:
2211 try:
2212 if reapply:
2212 if reapply:
2213 ui.status(_('reapplying unguarded patches\n'))
2213 ui.status(_('reapplying unguarded patches\n'))
2214 q.push(repo, reapply)
2214 q.push(repo, reapply)
2215 finally:
2215 finally:
2216 q.save_dirty()
2216 q.save_dirty()
2217
2217
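# Minimal sketch (not part of mq) of the pushability rule qselect documents
# above: no guards -> always pushable; an active negative guard blocks the
# patch; with positive guards, at least one must be active.
def _demo_pushable(patch_guards, active_guards):
    if not patch_guards:
        return True
    for g in patch_guards:
        if g.startswith('-') and g[1:] in active_guards:
            return False
    positives = [g[1:] for g in patch_guards if g.startswith('+')]
    if positives:
        return bool([g for g in positives if g in active_guards])
    return True

# _demo_pushable(['-stable'], ['stable'])  -> False
# _demo_pushable(['+stable'], ['stable'])  -> True
# _demo_pushable(['+stable'], [])          -> False  (skipped when no guard active)
# _demo_pushable(['-stable'], [])          -> True   (pushed when no guard active)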
2218 def finish(ui, repo, *revrange, **opts):
2218 def finish(ui, repo, *revrange, **opts):
2219 """move applied patches into repository history
2219 """move applied patches into repository history
2220
2220
2221 Finishes the specified revisions (corresponding to applied patches) by
2221 Finishes the specified revisions (corresponding to applied patches) by
2222 moving them out of mq control into regular repository history.
2222 moving them out of mq control into regular repository history.
2223
2223
2224 Accepts a revision range or the --all option. If --all is specified, all
2224 Accepts a revision range or the --all option. If --all is specified, all
2225 applied mq revisions are removed from mq control. Otherwise, the given
2225 applied mq revisions are removed from mq control. Otherwise, the given
2226 revisions must be at the base of the stack of applied patches.
2226 revisions must be at the base of the stack of applied patches.
2227
2227
2228 This can be especially useful if your changes have been applied to an
2229 upstream repository, or if you are about to push your changes upstream.
2230 """
2230 """
2231 if not opts['applied'] and not revrange:
2231 if not opts['applied'] and not revrange:
2232 raise util.Abort(_('no revisions specified'))
2232 raise util.Abort(_('no revisions specified'))
2233 elif opts['applied']:
2233 elif opts['applied']:
2234 revrange = ('qbase:qtip',) + revrange
2234 revrange = ('qbase:qtip',) + revrange
2235
2235
2236 q = repo.mq
2236 q = repo.mq
2237 if not q.applied:
2237 if not q.applied:
2238 ui.status(_('no patches applied\n'))
2238 ui.status(_('no patches applied\n'))
2239 return 0
2239 return 0
2240
2240
2241 revs = cmdutil.revrange(repo, revrange)
2241 revs = cmdutil.revrange(repo, revrange)
2242 q.finish(repo, revs)
2242 q.finish(repo, revs)
2243 q.save_dirty()
2243 q.save_dirty()
2244 return 0
2244 return 0
2245
2245
2246 def reposetup(ui, repo):
2246 def reposetup(ui, repo):
2247 class mqrepo(repo.__class__):
2247 class mqrepo(repo.__class__):
2248 def abort_if_wdir_patched(self, errmsg, force=False):
2248 def abort_if_wdir_patched(self, errmsg, force=False):
2249 if self.mq.applied and not force:
2249 if self.mq.applied and not force:
2250 parent = revlog.hex(self.dirstate.parents()[0])
2250 parent = revlog.hex(self.dirstate.parents()[0])
2251 if parent in [s.rev for s in self.mq.applied]:
2251 if parent in [s.rev for s in self.mq.applied]:
2252 raise util.Abort(errmsg)
2252 raise util.Abort(errmsg)
2253
2253
2254 def commit(self, *args, **opts):
2254 def commit(self, *args, **opts):
2255 if len(args) >= 6:
2255 if len(args) >= 6:
2256 force = args[5]
2256 force = args[5]
2257 else:
2257 else:
2258 force = opts.get('force')
2258 force = opts.get('force')
2259 self.abort_if_wdir_patched(
2259 self.abort_if_wdir_patched(
2260 _('cannot commit over an applied mq patch'),
2260 _('cannot commit over an applied mq patch'),
2261 force)
2261 force)
2262
2262
2263 return super(mqrepo, self).commit(*args, **opts)
2263 return super(mqrepo, self).commit(*args, **opts)
2264
2264
2265 def push(self, remote, force=False, revs=None):
2265 def push(self, remote, force=False, revs=None):
2266 if self.mq.applied and not force and not revs:
2266 if self.mq.applied and not force and not revs:
2267 raise util.Abort(_('source has mq patches applied'))
2267 raise util.Abort(_('source has mq patches applied'))
2268 return super(mqrepo, self).push(remote, force, revs)
2268 return super(mqrepo, self).push(remote, force, revs)
2269
2269
2270 def tags(self):
2270 def tags(self):
2271 if self.tagscache:
2271 if self.tagscache:
2272 return self.tagscache
2272 return self.tagscache
2273
2273
2274 tagscache = super(mqrepo, self).tags()
2274 tagscache = super(mqrepo, self).tags()
2275
2275
2276 q = self.mq
2276 q = self.mq
2277 if not q.applied:
2277 if not q.applied:
2278 return tagscache
2278 return tagscache
2279
2279
2280 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2280 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2281
2281
2282 if mqtags[-1][0] not in self.changelog.nodemap:
2282 if mqtags[-1][0] not in self.changelog.nodemap:
2283 self.ui.warn('mq status file refers to unknown node %s\n'
2283 self.ui.warn('mq status file refers to unknown node %s\n'
2284 % revlog.short(mqtags[-1][0]))
2284 % revlog.short(mqtags[-1][0]))
2285 return tagscache
2285 return tagscache
2286
2286
2287 mqtags.append((mqtags[-1][0], 'qtip'))
2287 mqtags.append((mqtags[-1][0], 'qtip'))
2288 mqtags.append((mqtags[0][0], 'qbase'))
2288 mqtags.append((mqtags[0][0], 'qbase'))
2289 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2289 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2290 for patch in mqtags:
2290 for patch in mqtags:
2291 if patch[1] in tagscache:
2291 if patch[1] in tagscache:
2292 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2292 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2293 else:
2293 else:
2294 tagscache[patch[1]] = patch[0]
2294 tagscache[patch[1]] = patch[0]
2295
2295
2296 return tagscache
2296 return tagscache
2297
2297
2298 def _branchtags(self, partial, lrev):
2298 def _branchtags(self, partial, lrev):
2299 q = self.mq
2299 q = self.mq
2300 if not q.applied:
2300 if not q.applied:
2301 return super(mqrepo, self)._branchtags(partial, lrev)
2301 return super(mqrepo, self)._branchtags(partial, lrev)
2302
2302
2303 cl = self.changelog
2303 cl = self.changelog
2304 qbasenode = revlog.bin(q.applied[0].rev)
2304 qbasenode = revlog.bin(q.applied[0].rev)
2305 if qbasenode not in cl.nodemap:
2305 if qbasenode not in cl.nodemap:
2306 self.ui.warn('mq status file refers to unknown node %s\n'
2306 self.ui.warn('mq status file refers to unknown node %s\n'
2307 % revlog.short(qbasenode))
2307 % revlog.short(qbasenode))
2308 return super(mqrepo, self)._branchtags(partial, lrev)
2308 return super(mqrepo, self)._branchtags(partial, lrev)
2309
2309
2310 qbase = cl.rev(qbasenode)
2310 qbase = cl.rev(qbasenode)
2311 start = lrev + 1
2311 start = lrev + 1
2312 if start < qbase:
2312 if start < qbase:
2313 # update the cache (excluding the patches) and save it
2313 # update the cache (excluding the patches) and save it
2314 self._updatebranchcache(partial, lrev+1, qbase)
2314 self._updatebranchcache(partial, lrev+1, qbase)
2315 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2315 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2316 start = qbase
2316 start = qbase
2317 # if start = qbase, the cache is as updated as it should be.
2317 # if start = qbase, the cache is as updated as it should be.
2318 # if start > qbase, the cache includes (part of) the patches.
2318 # if start > qbase, the cache includes (part of) the patches.
2319 # we might as well use it, but we won't save it.
2319 # we might as well use it, but we won't save it.
2320
2320
2321 # update the cache up to the tip
2321 # update the cache up to the tip
2322 self._updatebranchcache(partial, start, cl.count())
2322 self._updatebranchcache(partial, start, cl.count())
2323
2323
2324 return partial
2324 return partial
2325
2325
2326 if repo.local():
2326 if repo.local():
2327 repo.__class__ = mqrepo
2327 repo.__class__ = mqrepo
2328 repo.mq = queue(ui, repo.join(""))
2328 repo.mq = queue(ui, repo.join(""))
2329
2329
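# Illustrative sketch (not part of mq) of the virtual tags the mqrepo.tags()
# override above layers on top of the regular tags: one tag per applied patch
# plus qtip, qbase and qparent. 'applied' is a hypothetical list of
# (node, patch name) pairs and 'parent_of' a hypothetical node -> parent map;
# the real code also warns about name collisions, which is omitted here.
def _demo_mq_tags(applied, parent_of):
    tags = {}
    for node, name in applied:
        tags[name] = node
    tags['qtip'] = applied[-1][0]
    tags['qbase'] = applied[0][0]
    tags['qparent'] = parent_of[applied[0][0]]
    return tags

# _demo_mq_tags([('n1', 'a.patch'), ('n2', 'b.patch')], {'n1': 'n0'})
# -> {'a.patch': 'n1', 'b.patch': 'n2', 'qtip': 'n2', 'qbase': 'n1', 'qparent': 'n0'}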
2330 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2330 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2331
2331
2332 headeropts = [
2332 headeropts = [
2333 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2333 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2334 ('u', 'user', '', _('add "From: <given user>" to patch')),
2334 ('u', 'user', '', _('add "From: <given user>" to patch')),
2335 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2335 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2336 ('d', 'date', '', _('add "Date: <given date>" to patch'))]
2336 ('d', 'date', '', _('add "Date: <given date>" to patch'))]
2337
2337
2338 cmdtable = {
2338 cmdtable = {
2339 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2339 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2340 "qclone":
2340 "qclone":
2341 (clone,
2341 (clone,
2342 [('', 'pull', None, _('use pull protocol to copy metadata')),
2342 [('', 'pull', None, _('use pull protocol to copy metadata')),
2343 ('U', 'noupdate', None, _('do not update the new working directories')),
2343 ('U', 'noupdate', None, _('do not update the new working directories')),
2344 ('', 'uncompressed', None,
2344 ('', 'uncompressed', None,
2345 _('use uncompressed transfer (fast over LAN)')),
2345 _('use uncompressed transfer (fast over LAN)')),
2346 ('p', 'patches', '', _('location of source patch repo')),
2346 ('p', 'patches', '', _('location of source patch repo')),
2347 ] + commands.remoteopts,
2347 ] + commands.remoteopts,
2348 _('hg qclone [OPTION]... SOURCE [DEST]')),
2348 _('hg qclone [OPTION]... SOURCE [DEST]')),
2349 "qcommit|qci":
2349 "qcommit|qci":
2350 (commit,
2350 (commit,
2351 commands.table["^commit|ci"][1],
2351 commands.table["^commit|ci"][1],
2352 _('hg qcommit [OPTION]... [FILE]...')),
2352 _('hg qcommit [OPTION]... [FILE]...')),
2353 "^qdiff":
2353 "^qdiff":
2354 (diff,
2354 (diff,
2355 commands.diffopts + commands.diffopts2 + commands.walkopts,
2355 commands.diffopts + commands.diffopts2 + commands.walkopts,
2356 _('hg qdiff [OPTION]... [FILE]...')),
2356 _('hg qdiff [OPTION]... [FILE]...')),
2357 "qdelete|qremove|qrm":
2357 "qdelete|qremove|qrm":
2358 (delete,
2358 (delete,
2359 [('k', 'keep', None, _('keep patch file')),
2359 [('k', 'keep', None, _('keep patch file')),
2360 ('r', 'rev', [], _('stop managing a revision'))],
2360 ('r', 'rev', [], _('stop managing a revision'))],
2361 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2361 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2362 'qfold':
2362 'qfold':
2363 (fold,
2363 (fold,
2364 [('e', 'edit', None, _('edit patch header')),
2364 [('e', 'edit', None, _('edit patch header')),
2365 ('k', 'keep', None, _('keep folded patch files')),
2365 ('k', 'keep', None, _('keep folded patch files')),
2366 ] + commands.commitopts,
2366 ] + commands.commitopts,
2367 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2367 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2368 'qgoto':
2368 'qgoto':
2369 (goto,
2369 (goto,
2370 [('f', 'force', None, _('overwrite any local changes'))],
2370 [('f', 'force', None, _('overwrite any local changes'))],
2371 _('hg qgoto [OPTION]... PATCH')),
2371 _('hg qgoto [OPTION]... PATCH')),
2372 'qguard':
2372 'qguard':
2373 (guard,
2373 (guard,
2374 [('l', 'list', None, _('list all patches and guards')),
2374 [('l', 'list', None, _('list all patches and guards')),
2375 ('n', 'none', None, _('drop all guards'))],
2375 ('n', 'none', None, _('drop all guards'))],
2376 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2376 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2377 'qheader': (header, [], _('hg qheader [PATCH]')),
2377 'qheader': (header, [], _('hg qheader [PATCH]')),
2378 "^qimport":
2378 "^qimport":
2379 (qimport,
2379 (qimport,
2380 [('e', 'existing', None, 'import file in patch dir'),
2380 [('e', 'existing', None, 'import file in patch dir'),
2381 ('n', 'name', '', 'patch file name'),
2381 ('n', 'name', '', 'patch file name'),
2382 ('f', 'force', None, 'overwrite existing files'),
2382 ('f', 'force', None, 'overwrite existing files'),
2383 ('r', 'rev', [], 'place existing revisions under mq control'),
2383 ('r', 'rev', [], 'place existing revisions under mq control'),
2384 ('g', 'git', None, _('use git extended diff format'))],
2384 ('g', 'git', None, _('use git extended diff format'))],
2385 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2385 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2386 "^qinit":
2386 "^qinit":
2387 (init,
2387 (init,
2388 [('c', 'create-repo', None, 'create queue repository')],
2388 [('c', 'create-repo', None, 'create queue repository')],
2389 _('hg qinit [-c]')),
2389 _('hg qinit [-c]')),
2390 "qnew":
2390 "qnew":
2391 (new,
2391 (new,
2392 [('e', 'edit', None, _('edit commit message')),
2392 [('e', 'edit', None, _('edit commit message')),
2393 ('f', 'force', None, _('import uncommitted changes into patch')),
2393 ('f', 'force', None, _('import uncommitted changes into patch')),
2394 ('g', 'git', None, _('use git extended diff format')),
2394 ('g', 'git', None, _('use git extended diff format')),
2395 ] + commands.walkopts + commands.commitopts + headeropts,
2395 ] + commands.walkopts + commands.commitopts + headeropts,
2396 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2396 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2397 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2397 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2398 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2398 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2399 "^qpop":
2399 "^qpop":
2400 (pop,
2400 (pop,
2401 [('a', 'all', None, _('pop all patches')),
2401 [('a', 'all', None, _('pop all patches')),
2402 ('n', 'name', '', _('queue name to pop')),
2402 ('n', 'name', '', _('queue name to pop')),
2403 ('f', 'force', None, _('forget any local changes'))],
2403 ('f', 'force', None, _('forget any local changes'))],
2404 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2404 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2405 "^qpush":
2405 "^qpush":
2406 (push,
2406 (push,
2407 [('f', 'force', None, _('apply if the patch has rejects')),
2407 [('f', 'force', None, _('apply if the patch has rejects')),
2408 ('l', 'list', None, _('list patch name in commit text')),
2408 ('l', 'list', None, _('list patch name in commit text')),
2409 ('a', 'all', None, _('apply all patches')),
2409 ('a', 'all', None, _('apply all patches')),
2410 ('m', 'merge', None, _('merge from another queue')),
2410 ('m', 'merge', None, _('merge from another queue')),
2411 ('n', 'name', '', _('merge queue name'))],
2411 ('n', 'name', '', _('merge queue name'))],
2412 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2412 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2413 "^qrefresh":
2413 "^qrefresh":
2414 (refresh,
2414 (refresh,
2415 [('e', 'edit', None, _('edit commit message')),
2415 [('e', 'edit', None, _('edit commit message')),
2416 ('g', 'git', None, _('use git extended diff format')),
2416 ('g', 'git', None, _('use git extended diff format')),
2417 ('s', 'short', None, _('refresh only files already in the patch')),
2417 ('s', 'short', None, _('refresh only files already in the patch')),
2418 ] + commands.walkopts + commands.commitopts + headeropts,
2418 ] + commands.walkopts + commands.commitopts + headeropts,
2419 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2419 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2420 'qrename|qmv':
2420 'qrename|qmv':
2421 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2421 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2422 "qrestore":
2422 "qrestore":
2423 (restore,
2423 (restore,
2424 [('d', 'delete', None, _('delete save entry')),
2424 [('d', 'delete', None, _('delete save entry')),
2425 ('u', 'update', None, _('update queue working dir'))],
2425 ('u', 'update', None, _('update queue working dir'))],
2426 _('hg qrestore [-d] [-u] REV')),
2426 _('hg qrestore [-d] [-u] REV')),
2427 "qsave":
2427 "qsave":
2428 (save,
2428 (save,
2429 [('c', 'copy', None, _('copy patch directory')),
2429 [('c', 'copy', None, _('copy patch directory')),
2430 ('n', 'name', '', _('copy directory name')),
2430 ('n', 'name', '', _('copy directory name')),
2431 ('e', 'empty', None, _('clear queue status file')),
2431 ('e', 'empty', None, _('clear queue status file')),
2432 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2432 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2433 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2433 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2434 "qselect":
2434 "qselect":
2435 (select,
2435 (select,
2436 [('n', 'none', None, _('disable all guards')),
2436 [('n', 'none', None, _('disable all guards')),
2437 ('s', 'series', None, _('list all guards in series file')),
2437 ('s', 'series', None, _('list all guards in series file')),
2438 ('', 'pop', None, _('pop to before first guarded applied patch')),
2438 ('', 'pop', None, _('pop to before first guarded applied patch')),
2439 ('', 'reapply', None, _('pop, then reapply patches'))],
2439 ('', 'reapply', None, _('pop, then reapply patches'))],
2440 _('hg qselect [OPTION]... [GUARD]...')),
2440 _('hg qselect [OPTION]... [GUARD]...')),
2441 "qseries":
2441 "qseries":
2442 (series,
2442 (series,
2443 [('m', 'missing', None, _('print patches not in series')),
2443 [('m', 'missing', None, _('print patches not in series')),
2444 ] + seriesopts,
2444 ] + seriesopts,
2445 _('hg qseries [-ms]')),
2445 _('hg qseries [-ms]')),
2446 "^strip":
2446 "^strip":
2447 (strip,
2447 (strip,
2448 [('f', 'force', None, _('force removal with local changes')),
2448 [('f', 'force', None, _('force removal with local changes')),
2449 ('b', 'backup', None, _('bundle unrelated changesets')),
2449 ('b', 'backup', None, _('bundle unrelated changesets')),
2450 ('n', 'nobackup', None, _('no backups'))],
2450 ('n', 'nobackup', None, _('no backups'))],
2451 _('hg strip [-f] [-b] [-n] REV')),
2451 _('hg strip [-f] [-b] [-n] REV')),
2452 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2452 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2453 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2453 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2454 "qfinish":
2454 "qfinish":
2455 (finish,
2455 (finish,
2456 [('a', 'applied', None, _('finish all applied changesets'))],
2456 [('a', 'applied', None, _('finish all applied changesets'))],
2457 _('hg qfinish [-a] [REV...]')),
2457 _('hg qfinish [-a] [REV...]')),
2458 }
2458 }
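# For orientation (not part of mq): each cmdtable entry above is a
# (function, options, synopsis) triple, where every option is a
# (short flag, long flag, default, help) tuple. The command below is a
# hypothetical, self-contained illustration of that shape only.
def _demo_hello(ui, repo, **opts):
    ui.write('hello, %s\n' % (opts.get('name') or 'world'))

_demo_cmdtable = {
    "demohello":
        (_demo_hello,
         [('n', 'name', '', 'who to greet')],
         'hg demohello [-n NAME]'),
}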
@@ -1,144 +1,144 b''
1 # win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users
1 # win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users
2 #
2 #
3 # This software may be used and distributed according to the terms
3 # This software may be used and distributed according to the terms
4 # of the GNU General Public License, incorporated herein by reference.
4 # of the GNU General Public License, incorporated herein by reference.
5 #
5 #
6 # To perform automatic newline conversion, use:
6 # To perform automatic newline conversion, use:
7 #
7 #
8 # [extensions]
8 # [extensions]
9 # hgext.win32text =
9 # hgext.win32text =
10 # [encode]
10 # [encode]
11 # ** = cleverencode:
11 # ** = cleverencode:
12 # # or ** = macencode:
12 # # or ** = macencode:
13 # [decode]
13 # [decode]
14 # ** = cleverdecode:
14 # ** = cleverdecode:
15 # # or ** = macdecode:
15 # # or ** = macdecode:
16 #
16 #
17 # If not doing conversion, to make sure you do not commit CRLF/CR by accident:
17 # If not doing conversion, to make sure you do not commit CRLF/CR by accident:
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
20 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
21 # # or pretxncommit.cr = python:hgext.win32text.forbidcr
21 # # or pretxncommit.cr = python:hgext.win32text.forbidcr
22 #
22 #
23 # To do the same check on a server to prevent CRLF/CR from being pushed or
23 # To do the same check on a server to prevent CRLF/CR from being pushed or
24 # pulled:
24 # pulled:
25 #
25 #
26 # [hooks]
26 # [hooks]
27 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
27 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
28 # # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
28 # # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
29
29
30 from mercurial.i18n import gettext as _
30 from mercurial.i18n import gettext as _
31 from mercurial.node import bin, short
31 from mercurial.node import bin, short
32 from mercurial import util
32 from mercurial import util
33 import re
33 import re
34
34
35 # regexp for single LF without CR preceding.
35 # regexp for single LF without CR preceding.
36 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
36 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
37
37
38 newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
38 newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
39 filterstr = {'\r\n': 'clever', '\r': 'mac'}
39 filterstr = {'\r\n': 'clever', '\r': 'mac'}
40
40
41 def checknewline(s, newline, ui=None, repo=None, filename=None):
41 def checknewline(s, newline, ui=None, repo=None, filename=None):
42 # warn if already has 'newline' in repository.
42 # warn if already has 'newline' in repository.
43 # it might cause unexpected eol conversion.
43 # it might cause unexpected eol conversion.
44 # see issue 302:
44 # see issue 302:
45 # http://www.selenic.com/mercurial/bts/issue302
45 # http://www.selenic.com/mercurial/bts/issue302
46 if newline in s and ui and filename and repo:
46 if newline in s and ui and filename and repo:
47 ui.warn(_('WARNING: %s already has %s line endings\n'
47 ui.warn(_('WARNING: %s already has %s line endings\n'
48 'and does not need EOL conversion by the win32text plugin.\n'
48 'and does not need EOL conversion by the win32text plugin.\n'
49 'Before your next commit, please reconsider your '
49 'Before your next commit, please reconsider your '
50 'encode/decode settings in \nMercurial.ini or %s.\n') %
50 'encode/decode settings in \nMercurial.ini or %s.\n') %
51 (filename, newlinestr[newline], repo.join('hgrc')))
51 (filename, newlinestr[newline], repo.join('hgrc')))
52
52
53 def dumbdecode(s, cmd, **kwargs):
53 def dumbdecode(s, cmd, **kwargs):
54 checknewline(s, '\r\n', **kwargs)
54 checknewline(s, '\r\n', **kwargs)
55 # replace single LF to CRLF
55 # replace single LF to CRLF
56 return re_single_lf.sub('\\1\r\n', s)
56 return re_single_lf.sub('\\1\r\n', s)
57
57
58 def dumbencode(s, cmd):
58 def dumbencode(s, cmd):
59 return s.replace('\r\n', '\n')
59 return s.replace('\r\n', '\n')
60
60
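
As a quick standalone illustration of what the two dumb filters above do, with the regular expression repeated here so the snippet runs on its own:

    import re

    re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)

    working = 'one\r\ntwo\r\n'                      # CRLF text from a Windows editor
    stored = working.replace('\r\n', '\n')          # what dumbencode commits
    assert stored == 'one\ntwo\n'
    restored = re_single_lf.sub('\\1\r\n', stored)  # what dumbdecode checks out
    assert restored == working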
61 def macdumbdecode(s, cmd, **kwargs):
61 def macdumbdecode(s, cmd, **kwargs):
62 checknewline(s, '\r', **kwargs)
62 checknewline(s, '\r', **kwargs)
63 return s.replace('\n', '\r')
63 return s.replace('\n', '\r')
64
64
65 def macdumbencode(s, cmd):
65 def macdumbencode(s, cmd):
66 return s.replace('\r', '\n')
66 return s.replace('\r', '\n')
67
67
68 def cleverdecode(s, cmd, **kwargs):
68 def cleverdecode(s, cmd, **kwargs):
69 if not util.binary(s):
69 if not util.binary(s):
70 return dumbdecode(s, cmd, **kwargs)
70 return dumbdecode(s, cmd, **kwargs)
71 return s
71 return s
72
72
73 def cleverencode(s, cmd):
73 def cleverencode(s, cmd):
74 if not util.binary(s):
74 if not util.binary(s):
75 return dumbencode(s, cmd)
75 return dumbencode(s, cmd)
76 return s
76 return s
77
77
78 def macdecode(s, cmd, **kwargs):
78 def macdecode(s, cmd, **kwargs):
79 if not util.binary(s):
79 if not util.binary(s):
80 return macdumbdecode(s, cmd, **kwargs)
80 return macdumbdecode(s, cmd, **kwargs)
81 return s
81 return s
82
82
83 def macencode(s, cmd):
83 def macencode(s, cmd):
84 if not util.binary(s):
84 if not util.binary(s):
85 return macdumbencode(s, cmd)
85 return macdumbencode(s, cmd)
86 return s
86 return s
87
87
88 _filters = {
88 _filters = {
89 'dumbdecode:': dumbdecode,
89 'dumbdecode:': dumbdecode,
90 'dumbencode:': dumbencode,
90 'dumbencode:': dumbencode,
91 'cleverdecode:': cleverdecode,
91 'cleverdecode:': cleverdecode,
92 'cleverencode:': cleverencode,
92 'cleverencode:': cleverencode,
93 'macdumbdecode:': macdumbdecode,
93 'macdumbdecode:': macdumbdecode,
94 'macdumbencode:': macdumbencode,
94 'macdumbencode:': macdumbencode,
95 'macdecode:': macdecode,
95 'macdecode:': macdecode,
96 'macencode:': macencode,
96 'macencode:': macencode,
97 }
97 }
98
98
99 def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
99 def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
100 halt = False
100 halt = False
101 for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
101 for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
102 c = repo.changectx(rev)
102 c = repo[rev]
103 for f in c.files():
103 for f in c.files():
104 if f not in c:
104 if f not in c:
105 continue
105 continue
106 data = c[f].data()
106 data = c[f].data()
107 if not util.binary(data) and newline in data:
107 if not util.binary(data) and newline in data:
108 if not halt:
108 if not halt:
109 ui.warn(_('Attempt to commit or push text file(s) '
109 ui.warn(_('Attempt to commit or push text file(s) '
110 'using %s line endings\n') %
110 'using %s line endings\n') %
111 newlinestr[newline])
111 newlinestr[newline])
112 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
112 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
113 halt = True
113 halt = True
114 if halt and hooktype == 'pretxnchangegroup':
114 if halt and hooktype == 'pretxnchangegroup':
115 crlf = newlinestr[newline].lower()
115 crlf = newlinestr[newline].lower()
116 filter = filterstr[newline]
116 filter = filterstr[newline]
117 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
117 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
118 'add to Mercurial.ini or .hg/hgrc:\n'
118 'add to Mercurial.ini or .hg/hgrc:\n'
119 '\n'
119 '\n'
120 '[hooks]\n'
120 '[hooks]\n'
121 'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
121 'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
122 '\n'
122 '\n'
123 'and also consider adding:\n'
123 'and also consider adding:\n'
124 '\n'
124 '\n'
125 '[extensions]\n'
125 '[extensions]\n'
126 'hgext.win32text =\n'
126 'hgext.win32text =\n'
127 '[encode]\n'
127 '[encode]\n'
128 '** = %sencode:\n'
128 '** = %sencode:\n'
129 '[decode]\n'
129 '[decode]\n'
130 '** = %sdecode:\n') % (crlf, crlf, filter, filter))
130 '** = %sdecode:\n') % (crlf, crlf, filter, filter))
131 return halt
131 return halt
132
132
133 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
133 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
134 return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
134 return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
135
135
136 def forbidcr(ui, repo, hooktype, node, **kwargs):
136 def forbidcr(ui, repo, hooktype, node, **kwargs):
137 return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
137 return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
138
138
139 def reposetup(ui, repo):
139 def reposetup(ui, repo):
140 if not repo.local():
140 if not repo.local():
141 return
141 return
142 for name, fn in _filters.iteritems():
142 for name, fn in _filters.iteritems():
143 repo.adddatafilter(name, fn)
143 repo.adddatafilter(name, fn)
144
144
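
The only functional change in this file is at line 102, where repo.changectx(rev) becomes repo[rev], the indexing style this changeset adopts. A toy sketch of that protocol, using stand-in classes rather than Mercurial's actual localrepository or changectx:

    class toyctx(object):
        def __init__(self, repo, changeid):
            self.changeid = changeid

    class toyrepo(object):
        def __getitem__(self, changeid):
            # repo[rev], repo[node] and repo['.'] all resolve to a context here
            return toyctx(self, changeid)

    repo = toyrepo()
    assert repo[0].changeid == 0        # by revision number
    assert repo['.'].changeid == '.'    # working directory parent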
@@ -1,225 +1,225 @@
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of
5 # This software may be used and distributed according to the terms of
6 # the GNU General Public License, incorporated herein by reference.
6 # the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex
9 from node import hex
10 import cStringIO, os, stat, tarfile, time, util, zipfile
10 import cStringIO, os, stat, tarfile, time, util, zipfile
11 import zlib, gzip
11 import zlib, gzip
12
12
13 def tidyprefix(dest, prefix, suffixes):
13 def tidyprefix(dest, prefix, suffixes):
14 '''choose prefix to use for names in archive. make sure prefix is
14 '''choose prefix to use for names in archive. make sure prefix is
15 safe for consumers.'''
15 safe for consumers.'''
16
16
17 if prefix:
17 if prefix:
18 prefix = util.normpath(prefix)
18 prefix = util.normpath(prefix)
19 else:
19 else:
20 if not isinstance(dest, str):
20 if not isinstance(dest, str):
21 raise ValueError('dest must be string if no prefix')
21 raise ValueError('dest must be string if no prefix')
22 prefix = os.path.basename(dest)
22 prefix = os.path.basename(dest)
23 lower = prefix.lower()
23 lower = prefix.lower()
24 for sfx in suffixes:
24 for sfx in suffixes:
25 if lower.endswith(sfx):
25 if lower.endswith(sfx):
26 prefix = prefix[:-len(sfx)]
26 prefix = prefix[:-len(sfx)]
27 break
27 break
28 lpfx = os.path.normpath(util.localpath(prefix))
28 lpfx = os.path.normpath(util.localpath(prefix))
29 prefix = util.pconvert(lpfx)
29 prefix = util.pconvert(lpfx)
30 if not prefix.endswith('/'):
30 if not prefix.endswith('/'):
31 prefix += '/'
31 prefix += '/'
32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 raise util.Abort(_('archive prefix contains illegal components'))
33 raise util.Abort(_('archive prefix contains illegal components'))
34 return prefix
34 return prefix
35
35
36 class tarit:
36 class tarit:
37 '''write archive to tar file or stream. can write uncompressed,
37 '''write archive to tar file or stream. can write uncompressed,
38 or compress with gzip or bzip2.'''
38 or compress with gzip or bzip2.'''
39
39
40 class GzipFileWithTime(gzip.GzipFile):
40 class GzipFileWithTime(gzip.GzipFile):
41
41
42 def __init__(self, *args, **kw):
42 def __init__(self, *args, **kw):
43 timestamp = None
43 timestamp = None
44 if 'timestamp' in kw:
44 if 'timestamp' in kw:
45 timestamp = kw.pop('timestamp')
45 timestamp = kw.pop('timestamp')
46 if timestamp == None:
46 if timestamp == None:
47 self.timestamp = time.time()
47 self.timestamp = time.time()
48 else:
48 else:
49 self.timestamp = timestamp
49 self.timestamp = timestamp
50 gzip.GzipFile.__init__(self, *args, **kw)
50 gzip.GzipFile.__init__(self, *args, **kw)
51
51
52 def _write_gzip_header(self):
52 def _write_gzip_header(self):
53 self.fileobj.write('\037\213') # magic header
53 self.fileobj.write('\037\213') # magic header
54 self.fileobj.write('\010') # compression method
54 self.fileobj.write('\010') # compression method
55 # Python 2.6 deprecates self.filename
55 # Python 2.6 deprecates self.filename
56 fname = getattr(self, 'name', None) or self.filename
56 fname = getattr(self, 'name', None) or self.filename
57 flags = 0
57 flags = 0
58 if fname:
58 if fname:
59 flags = gzip.FNAME
59 flags = gzip.FNAME
60 self.fileobj.write(chr(flags))
60 self.fileobj.write(chr(flags))
61 gzip.write32u(self.fileobj, long(self.timestamp))
61 gzip.write32u(self.fileobj, long(self.timestamp))
62 self.fileobj.write('\002')
62 self.fileobj.write('\002')
63 self.fileobj.write('\377')
63 self.fileobj.write('\377')
64 if fname:
64 if fname:
65 self.fileobj.write(fname + '\000')
65 self.fileobj.write(fname + '\000')
66
66
67 def __init__(self, dest, prefix, mtime, kind=''):
67 def __init__(self, dest, prefix, mtime, kind=''):
68 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
68 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
69 '.tgz', '.tbz2'])
69 '.tgz', '.tbz2'])
70 self.mtime = mtime
70 self.mtime = mtime
71
71
72 def taropen(name, mode, fileobj=None):
72 def taropen(name, mode, fileobj=None):
73 if kind == 'gz':
73 if kind == 'gz':
74 mode = mode[0]
74 mode = mode[0]
75 if not fileobj:
75 if not fileobj:
76 fileobj = open(name, mode + 'b')
76 fileobj = open(name, mode + 'b')
77 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
77 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
78 zlib.Z_BEST_COMPRESSION,
78 zlib.Z_BEST_COMPRESSION,
79 fileobj, timestamp=mtime)
79 fileobj, timestamp=mtime)
80 return tarfile.TarFile.taropen(name, mode, gzfileobj)
80 return tarfile.TarFile.taropen(name, mode, gzfileobj)
81 else:
81 else:
82 return tarfile.open(name, mode + kind, fileobj)
82 return tarfile.open(name, mode + kind, fileobj)
83
83
84 if isinstance(dest, str):
84 if isinstance(dest, str):
85 self.z = taropen(dest, mode='w:')
85 self.z = taropen(dest, mode='w:')
86 else:
86 else:
87 # Python 2.5-2.5.1 have a regression that requires a name arg
87 # Python 2.5-2.5.1 have a regression that requires a name arg
88 self.z = taropen(name='', mode='w|', fileobj=dest)
88 self.z = taropen(name='', mode='w|', fileobj=dest)
89
89
90 def addfile(self, name, mode, islink, data):
90 def addfile(self, name, mode, islink, data):
91 i = tarfile.TarInfo(self.prefix + name)
91 i = tarfile.TarInfo(self.prefix + name)
92 i.mtime = self.mtime
92 i.mtime = self.mtime
93 i.size = len(data)
93 i.size = len(data)
94 if islink:
94 if islink:
95 i.type = tarfile.SYMTYPE
95 i.type = tarfile.SYMTYPE
96 i.mode = 0777
96 i.mode = 0777
97 i.linkname = data
97 i.linkname = data
98 data = None
98 data = None
99 else:
99 else:
100 i.mode = mode
100 i.mode = mode
101 data = cStringIO.StringIO(data)
101 data = cStringIO.StringIO(data)
102 self.z.addfile(i, data)
102 self.z.addfile(i, data)
103
103
104 def done(self):
104 def done(self):
105 self.z.close()
105 self.z.close()
106
106
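
GzipFileWithTime above exists to pin the timestamp written into the gzip header so archives are reproducible. Newer Python versions (2.7 and later) expose the same control directly through GzipFile's mtime argument; a standalone equivalent, with a hypothetical file name:

    import gzip

    f = gzip.GzipFile('demo.gz', 'wb', mtime=1200000000)  # fixed header timestamp
    f.write(b'payload')
    f.close()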
107 class tellable:
107 class tellable:
108 '''provide tell method for zipfile.ZipFile when writing to http
108 '''provide tell method for zipfile.ZipFile when writing to http
109 response file object.'''
109 response file object.'''
110
110
111 def __init__(self, fp):
111 def __init__(self, fp):
112 self.fp = fp
112 self.fp = fp
113 self.offset = 0
113 self.offset = 0
114
114
115 def __getattr__(self, key):
115 def __getattr__(self, key):
116 return getattr(self.fp, key)
116 return getattr(self.fp, key)
117
117
118 def write(self, s):
118 def write(self, s):
119 self.fp.write(s)
119 self.fp.write(s)
120 self.offset += len(s)
120 self.offset += len(s)
121
121
122 def tell(self):
122 def tell(self):
123 return self.offset
123 return self.offset
124
124
125 class zipit:
125 class zipit:
126 '''write archive to zip file or stream. can write uncompressed,
126 '''write archive to zip file or stream. can write uncompressed,
127 or compressed with deflate.'''
127 or compressed with deflate.'''
128
128
129 def __init__(self, dest, prefix, mtime, compress=True):
129 def __init__(self, dest, prefix, mtime, compress=True):
130 self.prefix = tidyprefix(dest, prefix, ('.zip',))
130 self.prefix = tidyprefix(dest, prefix, ('.zip',))
131 if not isinstance(dest, str):
131 if not isinstance(dest, str):
132 try:
132 try:
133 dest.tell()
133 dest.tell()
134 except (AttributeError, IOError):
134 except (AttributeError, IOError):
135 dest = tellable(dest)
135 dest = tellable(dest)
136 self.z = zipfile.ZipFile(dest, 'w',
136 self.z = zipfile.ZipFile(dest, 'w',
137 compress and zipfile.ZIP_DEFLATED or
137 compress and zipfile.ZIP_DEFLATED or
138 zipfile.ZIP_STORED)
138 zipfile.ZIP_STORED)
139 self.date_time = time.gmtime(mtime)[:6]
139 self.date_time = time.gmtime(mtime)[:6]
140
140
141 def addfile(self, name, mode, islink, data):
141 def addfile(self, name, mode, islink, data):
142 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
142 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
143 i.compress_type = self.z.compression
143 i.compress_type = self.z.compression
144 # unzip will not honor unix file modes unless file creator is
144 # unzip will not honor unix file modes unless file creator is
145 # set to unix (id 3).
145 # set to unix (id 3).
146 i.create_system = 3
146 i.create_system = 3
147 ftype = stat.S_IFREG
147 ftype = stat.S_IFREG
148 if islink:
148 if islink:
149 mode = 0777
149 mode = 0777
150 ftype = stat.S_IFLNK
150 ftype = stat.S_IFLNK
151 i.external_attr = (mode | ftype) << 16L
151 i.external_attr = (mode | ftype) << 16L
152 self.z.writestr(i, data)
152 self.z.writestr(i, data)
153
153
154 def done(self):
154 def done(self):
155 self.z.close()
155 self.z.close()
156
156
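
The create_system and external_attr handling in zipit.addfile is the standard trick for making unzip restore unix permissions: mark the creator as unix (3) and put the mode bits in the high 16 bits of external_attr. A self-contained version, with hypothetical file names:

    import stat, time, zipfile

    zf = zipfile.ZipFile('demo.zip', 'w', zipfile.ZIP_DEFLATED)
    info = zipfile.ZipInfo('prefix/hello.txt', time.gmtime(1200000000)[:6])
    info.create_system = 3                             # unix, so modes are honored
    info.external_attr = (0o755 | stat.S_IFREG) << 16  # permission bits in high word
    zf.writestr(info, 'hello\n')
    zf.close()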
157 class fileit:
157 class fileit:
158 '''write archive as files in directory.'''
158 '''write archive as files in directory.'''
159
159
160 def __init__(self, name, prefix, mtime):
160 def __init__(self, name, prefix, mtime):
161 if prefix:
161 if prefix:
162 raise util.Abort(_('cannot give prefix when archiving to files'))
162 raise util.Abort(_('cannot give prefix when archiving to files'))
163 self.basedir = name
163 self.basedir = name
164 self.opener = util.opener(self.basedir)
164 self.opener = util.opener(self.basedir)
165
165
166 def addfile(self, name, mode, islink, data):
166 def addfile(self, name, mode, islink, data):
167 if islink:
167 if islink:
168 self.opener.symlink(data, name)
168 self.opener.symlink(data, name)
169 return
169 return
170 f = self.opener(name, "w", atomictemp=True)
170 f = self.opener(name, "w", atomictemp=True)
171 f.write(data)
171 f.write(data)
172 f.rename()
172 f.rename()
173 destfile = os.path.join(self.basedir, name)
173 destfile = os.path.join(self.basedir, name)
174 os.chmod(destfile, mode)
174 os.chmod(destfile, mode)
175
175
176 def done(self):
176 def done(self):
177 pass
177 pass
178
178
179 archivers = {
179 archivers = {
180 'files': fileit,
180 'files': fileit,
181 'tar': tarit,
181 'tar': tarit,
182 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
182 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
183 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
183 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
184 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
184 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
185 'zip': zipit,
185 'zip': zipit,
186 }
186 }
187
187
188 def archive(repo, dest, node, kind, decode=True, matchfn=None,
188 def archive(repo, dest, node, kind, decode=True, matchfn=None,
189 prefix=None, mtime=None):
189 prefix=None, mtime=None):
190 '''create archive of repo as it was at node.
190 '''create archive of repo as it was at node.
191
191
192 dest can be name of directory, name of archive file, or file
192 dest can be name of directory, name of archive file, or file
193 object to write archive to.
193 object to write archive to.
194
194
195 kind is type of archive to create.
195 kind is type of archive to create.
196
196
197 decode tells whether to put files through decode filters from
197 decode tells whether to put files through decode filters from
198 hgrc.
198 hgrc.
199
199
200 matchfn is function to filter names of files to write to archive.
200 matchfn is function to filter names of files to write to archive.
201
201
202 prefix is name of path to put before every archive member.'''
202 prefix is name of path to put before every archive member.'''
203
203
204 def write(name, mode, islink, getdata):
204 def write(name, mode, islink, getdata):
205 if matchfn and not matchfn(name): return
205 if matchfn and not matchfn(name): return
206 data = getdata()
206 data = getdata()
207 if decode:
207 if decode:
208 data = repo.wwritedata(name, data)
208 data = repo.wwritedata(name, data)
209 archiver.addfile(name, mode, islink, data)
209 archiver.addfile(name, mode, islink, data)
210
210
211 ctx = repo.changectx(node)
211 ctx = repo[node]
212 if kind not in archivers:
212 if kind not in archivers:
213 raise util.Abort(_("unknown archive type '%s'" % kind))
213 raise util.Abort(_("unknown archive type '%s'" % kind))
214 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
214 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
215 m = ctx.manifest()
215 m = ctx.manifest()
216 items = m.items()
216 items = m.items()
217 items.sort()
217 items.sort()
218 if repo.ui.configbool("ui", "archivemeta", True):
218 if repo.ui.configbool("ui", "archivemeta", True):
219 write('.hg_archival.txt', 0644, False,
219 write('.hg_archival.txt', 0644, False,
220 lambda: 'repo: %s\nnode: %s\n' % (
220 lambda: 'repo: %s\nnode: %s\n' % (
221 hex(repo.changelog.node(0)), hex(node)))
221 hex(repo.changelog.node(0)), hex(node)))
222 for filename, filenode in items:
222 for filename, filenode in items:
223 write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
223 write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
224 lambda: repo.file(filename).read(filenode))
224 lambda: repo.file(filename).read(filenode))
225 archiver.done()
225 archiver.done()
@@ -1,1194 +1,1194 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat
10 import os, sys, bisect, stat
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 class UnknownCommand(Exception):
16 class UnknownCommand(Exception):
17 """Exception raised if command is not in the command table."""
17 """Exception raised if command is not in the command table."""
18 class AmbiguousCommand(Exception):
18 class AmbiguousCommand(Exception):
19 """Exception raised if command shortcut matches more than one command."""
19 """Exception raised if command shortcut matches more than one command."""
20
20
21 def findpossible(ui, cmd, table):
21 def findpossible(ui, cmd, table):
22 """
22 """
23 Return cmd -> (aliases, command table entry)
23 Return cmd -> (aliases, command table entry)
24 for each matching command.
24 for each matching command.
25 Return debug commands (or their aliases) only if no normal command matches.
25 Return debug commands (or their aliases) only if no normal command matches.
26 """
26 """
27 choice = {}
27 choice = {}
28 debugchoice = {}
28 debugchoice = {}
29 for e in table.keys():
29 for e in table.keys():
30 aliases = e.lstrip("^").split("|")
30 aliases = e.lstrip("^").split("|")
31 found = None
31 found = None
32 if cmd in aliases:
32 if cmd in aliases:
33 found = cmd
33 found = cmd
34 elif not ui.config("ui", "strict"):
34 elif not ui.config("ui", "strict"):
35 for a in aliases:
35 for a in aliases:
36 if a.startswith(cmd):
36 if a.startswith(cmd):
37 found = a
37 found = a
38 break
38 break
39 if found is not None:
39 if found is not None:
40 if aliases[0].startswith("debug") or found.startswith("debug"):
40 if aliases[0].startswith("debug") or found.startswith("debug"):
41 debugchoice[found] = (aliases, table[e])
41 debugchoice[found] = (aliases, table[e])
42 else:
42 else:
43 choice[found] = (aliases, table[e])
43 choice[found] = (aliases, table[e])
44
44
45 if not choice and debugchoice:
45 if not choice and debugchoice:
46 choice = debugchoice
46 choice = debugchoice
47
47
48 return choice
48 return choice
49
49
50 def findcmd(ui, cmd, table):
50 def findcmd(ui, cmd, table):
51 """Return (aliases, command table entry) for command string."""
51 """Return (aliases, command table entry) for command string."""
52 choice = findpossible(ui, cmd, table)
52 choice = findpossible(ui, cmd, table)
53
53
54 if cmd in choice:
54 if cmd in choice:
55 return choice[cmd]
55 return choice[cmd]
56
56
57 if len(choice) > 1:
57 if len(choice) > 1:
58 clist = choice.keys()
58 clist = choice.keys()
59 clist.sort()
59 clist.sort()
60 raise AmbiguousCommand(cmd, clist)
60 raise AmbiguousCommand(cmd, clist)
61
61
62 if choice:
62 if choice:
63 return choice.values()[0]
63 return choice.values()[0]
64
64
65 raise UnknownCommand(cmd)
65 raise UnknownCommand(cmd)
66
66
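
findpossible is what lets abbreviated command names work on the command line. The core of the matching is small enough to demonstrate standalone; the table below is a toy, not Mercurial's real command table:

    table = {"^qseries": None, "qselect": None, "^strip": None}

    def candidates(cmd):
        found = []
        for key in table:
            aliases = key.lstrip("^").split("|")
            if cmd in aliases:
                return [cmd]                  # an exact name or alias wins outright
            found.extend(a for a in aliases if a.startswith(cmd))
        return found

    assert candidates('qseries') == ['qseries']
    assert sorted(candidates('qse')) == ['qselect', 'qseries']   # ambiguous prefix
    assert candidates('st') == ['strip']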
67 def bail_if_changed(repo):
67 def bail_if_changed(repo):
68 if repo.dirstate.parents()[1] != nullid:
68 if repo.dirstate.parents()[1] != nullid:
69 raise util.Abort(_('outstanding uncommitted merge'))
69 raise util.Abort(_('outstanding uncommitted merge'))
70 modified, added, removed, deleted = repo.status()[:4]
70 modified, added, removed, deleted = repo.status()[:4]
71 if modified or added or removed or deleted:
71 if modified or added or removed or deleted:
72 raise util.Abort(_("outstanding uncommitted changes"))
72 raise util.Abort(_("outstanding uncommitted changes"))
73
73
74 def logmessage(opts):
74 def logmessage(opts):
75 """ get the log message according to -m and -l option """
75 """ get the log message according to -m and -l option """
76 message = opts['message']
76 message = opts['message']
77 logfile = opts['logfile']
77 logfile = opts['logfile']
78
78
79 if message and logfile:
79 if message and logfile:
80 raise util.Abort(_('options --message and --logfile are mutually '
80 raise util.Abort(_('options --message and --logfile are mutually '
81 'exclusive'))
81 'exclusive'))
82 if not message and logfile:
82 if not message and logfile:
83 try:
83 try:
84 if logfile == '-':
84 if logfile == '-':
85 message = sys.stdin.read()
85 message = sys.stdin.read()
86 else:
86 else:
87 message = open(logfile).read()
87 message = open(logfile).read()
88 except IOError, inst:
88 except IOError, inst:
89 raise util.Abort(_("can't read commit message '%s': %s") %
89 raise util.Abort(_("can't read commit message '%s': %s") %
90 (logfile, inst.strerror))
90 (logfile, inst.strerror))
91 return message
91 return message
92
92
93 def loglimit(opts):
93 def loglimit(opts):
94 """get the log limit according to option -l/--limit"""
94 """get the log limit according to option -l/--limit"""
95 limit = opts.get('limit')
95 limit = opts.get('limit')
96 if limit:
96 if limit:
97 try:
97 try:
98 limit = int(limit)
98 limit = int(limit)
99 except ValueError:
99 except ValueError:
100 raise util.Abort(_('limit must be a positive integer'))
100 raise util.Abort(_('limit must be a positive integer'))
101 if limit <= 0: raise util.Abort(_('limit must be positive'))
101 if limit <= 0: raise util.Abort(_('limit must be positive'))
102 else:
102 else:
103 limit = sys.maxint
103 limit = sys.maxint
104 return limit
104 return limit
105
105
106 def setremoteconfig(ui, opts):
106 def setremoteconfig(ui, opts):
107 "copy remote options to ui tree"
107 "copy remote options to ui tree"
108 if opts.get('ssh'):
108 if opts.get('ssh'):
109 ui.setconfig("ui", "ssh", opts['ssh'])
109 ui.setconfig("ui", "ssh", opts['ssh'])
110 if opts.get('remotecmd'):
110 if opts.get('remotecmd'):
111 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
111 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
112
112
113 def revpair(repo, revs):
113 def revpair(repo, revs):
114 '''return pair of nodes, given list of revisions. second item can
114 '''return pair of nodes, given list of revisions. second item can
115 be None, meaning use working dir.'''
115 be None, meaning use working dir.'''
116
116
117 def revfix(repo, val, defval):
117 def revfix(repo, val, defval):
118 if not val and val != 0 and defval is not None:
118 if not val and val != 0 and defval is not None:
119 val = defval
119 val = defval
120 return repo.lookup(val)
120 return repo.lookup(val)
121
121
122 if not revs:
122 if not revs:
123 return repo.dirstate.parents()[0], None
123 return repo.dirstate.parents()[0], None
124 end = None
124 end = None
125 if len(revs) == 1:
125 if len(revs) == 1:
126 if revrangesep in revs[0]:
126 if revrangesep in revs[0]:
127 start, end = revs[0].split(revrangesep, 1)
127 start, end = revs[0].split(revrangesep, 1)
128 start = revfix(repo, start, 0)
128 start = revfix(repo, start, 0)
129 end = revfix(repo, end, repo.changelog.count() - 1)
129 end = revfix(repo, end, repo.changelog.count() - 1)
130 else:
130 else:
131 start = revfix(repo, revs[0], None)
131 start = revfix(repo, revs[0], None)
132 elif len(revs) == 2:
132 elif len(revs) == 2:
133 if revrangesep in revs[0] or revrangesep in revs[1]:
133 if revrangesep in revs[0] or revrangesep in revs[1]:
134 raise util.Abort(_('too many revisions specified'))
134 raise util.Abort(_('too many revisions specified'))
135 start = revfix(repo, revs[0], None)
135 start = revfix(repo, revs[0], None)
136 end = revfix(repo, revs[1], None)
136 end = revfix(repo, revs[1], None)
137 else:
137 else:
138 raise util.Abort(_('too many revisions specified'))
138 raise util.Abort(_('too many revisions specified'))
139 return start, end
139 return start, end
140
140
141 def revrange(repo, revs):
141 def revrange(repo, revs):
142 """Yield revision as strings from a list of revision specifications."""
142 """Yield revision as strings from a list of revision specifications."""
143
143
144 def revfix(repo, val, defval):
144 def revfix(repo, val, defval):
145 if not val and val != 0 and defval is not None:
145 if not val and val != 0 and defval is not None:
146 return defval
146 return defval
147 return repo.changelog.rev(repo.lookup(val))
147 return repo.changelog.rev(repo.lookup(val))
148
148
149 seen, l = {}, []
149 seen, l = {}, []
150 for spec in revs:
150 for spec in revs:
151 if revrangesep in spec:
151 if revrangesep in spec:
152 start, end = spec.split(revrangesep, 1)
152 start, end = spec.split(revrangesep, 1)
153 start = revfix(repo, start, 0)
153 start = revfix(repo, start, 0)
154 end = revfix(repo, end, repo.changelog.count() - 1)
154 end = revfix(repo, end, repo.changelog.count() - 1)
155 step = start > end and -1 or 1
155 step = start > end and -1 or 1
156 for rev in xrange(start, end+step, step):
156 for rev in xrange(start, end+step, step):
157 if rev in seen:
157 if rev in seen:
158 continue
158 continue
159 seen[rev] = 1
159 seen[rev] = 1
160 l.append(rev)
160 l.append(rev)
161 else:
161 else:
162 rev = revfix(repo, spec, None)
162 rev = revfix(repo, spec, None)
163 if rev in seen:
163 if rev in seen:
164 continue
164 continue
165 seen[rev] = 1
165 seen[rev] = 1
166 l.append(rev)
166 l.append(rev)
167
167
168 return l
168 return l
169
169
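
A toy version of the spec expansion performed by revrange above, with plain integers standing in for the repo.lookup calls so the snippet runs without a repository:

    def toyrevrange(specs, tiprev):
        seen, out = set(), []
        for spec in specs:
            if ':' in spec:
                start, end = spec.split(':', 1)
                start = int(start) if start else 0
                end = int(end) if end else tiprev
                step = start > end and -1 or 1
                revs = range(start, end + step, step)
            else:
                revs = [int(spec)]
            for rev in revs:
                if rev not in seen:
                    seen.add(rev)
                    out.append(rev)
        return out

    assert toyrevrange(['2:5', '4', ':1'], tiprev=9) == [2, 3, 4, 5, 0, 1]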
170 def make_filename(repo, pat, node,
170 def make_filename(repo, pat, node,
171 total=None, seqno=None, revwidth=None, pathname=None):
171 total=None, seqno=None, revwidth=None, pathname=None):
172 node_expander = {
172 node_expander = {
173 'H': lambda: hex(node),
173 'H': lambda: hex(node),
174 'R': lambda: str(repo.changelog.rev(node)),
174 'R': lambda: str(repo.changelog.rev(node)),
175 'h': lambda: short(node),
175 'h': lambda: short(node),
176 }
176 }
177 expander = {
177 expander = {
178 '%': lambda: '%',
178 '%': lambda: '%',
179 'b': lambda: os.path.basename(repo.root),
179 'b': lambda: os.path.basename(repo.root),
180 }
180 }
181
181
182 try:
182 try:
183 if node:
183 if node:
184 expander.update(node_expander)
184 expander.update(node_expander)
185 if node:
185 if node:
186 expander['r'] = (lambda:
186 expander['r'] = (lambda:
187 str(repo.changelog.rev(node)).zfill(revwidth or 0))
187 str(repo.changelog.rev(node)).zfill(revwidth or 0))
188 if total is not None:
188 if total is not None:
189 expander['N'] = lambda: str(total)
189 expander['N'] = lambda: str(total)
190 if seqno is not None:
190 if seqno is not None:
191 expander['n'] = lambda: str(seqno)
191 expander['n'] = lambda: str(seqno)
192 if total is not None and seqno is not None:
192 if total is not None and seqno is not None:
193 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
193 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
194 if pathname is not None:
194 if pathname is not None:
195 expander['s'] = lambda: os.path.basename(pathname)
195 expander['s'] = lambda: os.path.basename(pathname)
196 expander['d'] = lambda: os.path.dirname(pathname) or '.'
196 expander['d'] = lambda: os.path.dirname(pathname) or '.'
197 expander['p'] = lambda: pathname
197 expander['p'] = lambda: pathname
198
198
199 newname = []
199 newname = []
200 patlen = len(pat)
200 patlen = len(pat)
201 i = 0
201 i = 0
202 while i < patlen:
202 while i < patlen:
203 c = pat[i]
203 c = pat[i]
204 if c == '%':
204 if c == '%':
205 i += 1
205 i += 1
206 c = pat[i]
206 c = pat[i]
207 c = expander[c]()
207 c = expander[c]()
208 newname.append(c)
208 newname.append(c)
209 i += 1
209 i += 1
210 return ''.join(newname)
210 return ''.join(newname)
211 except KeyError, inst:
211 except KeyError, inst:
212 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
212 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
213 inst.args[0])
213 inst.args[0])
214
214
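
A worked example of the '%' expansion loop above, using a hypothetical changeset (rev 42, short hash 1d22ba8e4cd7) and hard-coded values in place of the node and seqno lambdas:

    expander = {
        '%': lambda: '%',
        'R': lambda: '42',
        'h': lambda: '1d22ba8e4cd7',
        'n': lambda: '03',
        'N': lambda: '10',
    }

    def expand(pat):
        out, i = [], 0
        while i < len(pat):
            c = pat[i]
            if c == '%':
                i += 1
                c = expander[pat[i]]()
            out.append(c)
            i += 1
        return ''.join(out)

    assert expand('%R-%h.patch') == '42-1d22ba8e4cd7.patch'
    assert expand('foo-%n-of-%N.diff') == 'foo-03-of-10.diff'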
215 def make_file(repo, pat, node=None,
215 def make_file(repo, pat, node=None,
216 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
216 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
217 if not pat or pat == '-':
217 if not pat or pat == '-':
218 return 'w' in mode and sys.stdout or sys.stdin
218 return 'w' in mode and sys.stdout or sys.stdin
219 if hasattr(pat, 'write') and 'w' in mode:
219 if hasattr(pat, 'write') and 'w' in mode:
220 return pat
220 return pat
221 if hasattr(pat, 'read') and 'r' in mode:
221 if hasattr(pat, 'read') and 'r' in mode:
222 return pat
222 return pat
223 return open(make_filename(repo, pat, node, total, seqno, revwidth,
223 return open(make_filename(repo, pat, node, total, seqno, revwidth,
224 pathname),
224 pathname),
225 mode)
225 mode)
226
226
227 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
227 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
228 if not globbed and default == 'relpath':
228 if not globbed and default == 'relpath':
229 pats = util.expand_glob(pats or [])
229 pats = util.expand_glob(pats or [])
230 m = _match.match(repo.root, repo.getcwd(), pats,
230 m = _match.match(repo.root, repo.getcwd(), pats,
231 opts.get('include'), opts.get('exclude'), default)
231 opts.get('include'), opts.get('exclude'), default)
232 def badfn(f, msg):
232 def badfn(f, msg):
233 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
233 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
234 return False
234 return False
235 m.bad = badfn
235 m.bad = badfn
236 return m
236 return m
237
237
238 def matchall(repo):
238 def matchall(repo):
239 return _match.always(repo.root, repo.getcwd())
239 return _match.always(repo.root, repo.getcwd())
240
240
241 def matchfiles(repo, files):
241 def matchfiles(repo, files):
242 return _match.exact(repo.root, repo.getcwd(), files)
242 return _match.exact(repo.root, repo.getcwd(), files)
243
243
244 def findrenames(repo, added=None, removed=None, threshold=0.5):
244 def findrenames(repo, added=None, removed=None, threshold=0.5):
245 '''find renamed files -- yields (before, after, score) tuples'''
245 '''find renamed files -- yields (before, after, score) tuples'''
246 if added is None or removed is None:
246 if added is None or removed is None:
247 added, removed = repo.status()[1:3]
247 added, removed = repo.status()[1:3]
248 ctx = repo.changectx('.')
248 ctx = repo['.']
249 for a in added:
249 for a in added:
250 aa = repo.wread(a)
250 aa = repo.wread(a)
251 bestname, bestscore = None, threshold
251 bestname, bestscore = None, threshold
252 for r in removed:
252 for r in removed:
253 rr = ctx.filectx(r).data()
253 rr = ctx.filectx(r).data()
254
254
255 # bdiff.blocks() returns blocks of matching lines
255 # bdiff.blocks() returns blocks of matching lines
256 # count the number of bytes in each
256 # count the number of bytes in each
257 equal = 0
257 equal = 0
258 alines = mdiff.splitnewlines(aa)
258 alines = mdiff.splitnewlines(aa)
259 matches = bdiff.blocks(aa, rr)
259 matches = bdiff.blocks(aa, rr)
260 for x1,x2,y1,y2 in matches:
260 for x1,x2,y1,y2 in matches:
261 for line in alines[x1:x2]:
261 for line in alines[x1:x2]:
262 equal += len(line)
262 equal += len(line)
263
263
264 lengths = len(aa) + len(rr)
264 lengths = len(aa) + len(rr)
265 if lengths:
265 if lengths:
266 myscore = equal*2.0 / lengths
266 myscore = equal*2.0 / lengths
267 if myscore >= bestscore:
267 if myscore >= bestscore:
268 bestname, bestscore = r, myscore
268 bestname, bestscore = r, myscore
269 if bestname:
269 if bestname:
270 yield bestname, a, bestscore
270 yield bestname, a, bestscore
271
271
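
The score findrenames computes is twice the number of matching bytes divided by the combined length of the two files. A standalone sketch of the same formula, using difflib (character-level matching) as a stand-in for Mercurial's bdiff module (line-level matching):

    import difflib

    def similarity(old, new):
        matcher = difflib.SequenceMatcher(None, old, new)
        equal = sum(size for _, _, size in matcher.get_matching_blocks())
        lengths = len(old) + len(new)
        return lengths and equal * 2.0 / lengths or 0.0

    old = 'a\nb\nc\nd\n'
    new = 'a\nb\nc\nx\n'
    assert similarity(old, old) == 1.0
    assert 0.5 < similarity(old, new) < 1.0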
272 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
272 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
273 if dry_run is None:
273 if dry_run is None:
274 dry_run = opts.get('dry_run')
274 dry_run = opts.get('dry_run')
275 if similarity is None:
275 if similarity is None:
276 similarity = float(opts.get('similarity') or 0)
276 similarity = float(opts.get('similarity') or 0)
277 add, remove = [], []
277 add, remove = [], []
278 mapping = {}
278 mapping = {}
279 audit_path = util.path_auditor(repo.root)
279 audit_path = util.path_auditor(repo.root)
280 m = match(repo, pats, opts)
280 m = match(repo, pats, opts)
281 for abs in repo.walk(m):
281 for abs in repo.walk(m):
282 target = repo.wjoin(abs)
282 target = repo.wjoin(abs)
283 good = True
283 good = True
284 try:
284 try:
285 audit_path(abs)
285 audit_path(abs)
286 except:
286 except:
287 good = False
287 good = False
288 rel = m.rel(abs)
288 rel = m.rel(abs)
289 exact = m.exact(abs)
289 exact = m.exact(abs)
290 if good and abs not in repo.dirstate:
290 if good and abs not in repo.dirstate:
291 add.append(abs)
291 add.append(abs)
292 mapping[abs] = rel, m.exact(abs)
292 mapping[abs] = rel, m.exact(abs)
293 if repo.ui.verbose or not exact:
293 if repo.ui.verbose or not exact:
294 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
294 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
295 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
295 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
296 or (os.path.isdir(target) and not os.path.islink(target))):
296 or (os.path.isdir(target) and not os.path.islink(target))):
297 remove.append(abs)
297 remove.append(abs)
298 mapping[abs] = rel, exact
298 mapping[abs] = rel, exact
299 if repo.ui.verbose or not exact:
299 if repo.ui.verbose or not exact:
300 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
300 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
301 if not dry_run:
301 if not dry_run:
302 repo.remove(remove)
302 repo.remove(remove)
303 repo.add(add)
303 repo.add(add)
304 if similarity > 0:
304 if similarity > 0:
305 for old, new, score in findrenames(repo, add, remove, similarity):
305 for old, new, score in findrenames(repo, add, remove, similarity):
306 oldrel, oldexact = mapping[old]
306 oldrel, oldexact = mapping[old]
307 newrel, newexact = mapping[new]
307 newrel, newexact = mapping[new]
308 if repo.ui.verbose or not oldexact or not newexact:
308 if repo.ui.verbose or not oldexact or not newexact:
309 repo.ui.status(_('recording removal of %s as rename to %s '
309 repo.ui.status(_('recording removal of %s as rename to %s '
310 '(%d%% similar)\n') %
310 '(%d%% similar)\n') %
311 (oldrel, newrel, score * 100))
311 (oldrel, newrel, score * 100))
312 if not dry_run:
312 if not dry_run:
313 repo.copy(old, new)
313 repo.copy(old, new)
314
314
315 def copy(ui, repo, pats, opts, rename=False):
315 def copy(ui, repo, pats, opts, rename=False):
316 # called with the repo lock held
316 # called with the repo lock held
317 #
317 #
318 # hgsep => pathname that uses "/" to separate directories
318 # hgsep => pathname that uses "/" to separate directories
319 # ossep => pathname that uses os.sep to separate directories
319 # ossep => pathname that uses os.sep to separate directories
320 cwd = repo.getcwd()
320 cwd = repo.getcwd()
321 targets = {}
321 targets = {}
322 after = opts.get("after")
322 after = opts.get("after")
323 dryrun = opts.get("dry_run")
323 dryrun = opts.get("dry_run")
324
324
325 def walkpat(pat):
325 def walkpat(pat):
326 srcs = []
326 srcs = []
327 m = match(repo, [pat], opts, globbed=True)
327 m = match(repo, [pat], opts, globbed=True)
328 for abs in repo.walk(m):
328 for abs in repo.walk(m):
329 state = repo.dirstate[abs]
329 state = repo.dirstate[abs]
330 rel = m.rel(abs)
330 rel = m.rel(abs)
331 exact = m.exact(abs)
331 exact = m.exact(abs)
332 if state in '?r':
332 if state in '?r':
333 if exact and state == '?':
333 if exact and state == '?':
334 ui.warn(_('%s: not copying - file is not managed\n') % rel)
334 ui.warn(_('%s: not copying - file is not managed\n') % rel)
335 if exact and state == 'r':
335 if exact and state == 'r':
336 ui.warn(_('%s: not copying - file has been marked for'
336 ui.warn(_('%s: not copying - file has been marked for'
337 ' remove\n') % rel)
337 ' remove\n') % rel)
338 continue
338 continue
339 # abs: hgsep
339 # abs: hgsep
340 # rel: ossep
340 # rel: ossep
341 srcs.append((abs, rel, exact))
341 srcs.append((abs, rel, exact))
342 return srcs
342 return srcs
343
343
344 # abssrc: hgsep
344 # abssrc: hgsep
345 # relsrc: ossep
345 # relsrc: ossep
346 # otarget: ossep
346 # otarget: ossep
347 def copyfile(abssrc, relsrc, otarget, exact):
347 def copyfile(abssrc, relsrc, otarget, exact):
348 abstarget = util.canonpath(repo.root, cwd, otarget)
348 abstarget = util.canonpath(repo.root, cwd, otarget)
349 reltarget = repo.pathto(abstarget, cwd)
349 reltarget = repo.pathto(abstarget, cwd)
350 target = repo.wjoin(abstarget)
350 target = repo.wjoin(abstarget)
351 src = repo.wjoin(abssrc)
351 src = repo.wjoin(abssrc)
352 state = repo.dirstate[abstarget]
352 state = repo.dirstate[abstarget]
353
353
354 # check for collisions
354 # check for collisions
355 prevsrc = targets.get(abstarget)
355 prevsrc = targets.get(abstarget)
356 if prevsrc is not None:
356 if prevsrc is not None:
357 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
357 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
358 (reltarget, repo.pathto(abssrc, cwd),
358 (reltarget, repo.pathto(abssrc, cwd),
359 repo.pathto(prevsrc, cwd)))
359 repo.pathto(prevsrc, cwd)))
360 return
360 return
361
361
362 # check for overwrites
362 # check for overwrites
363 exists = os.path.exists(target)
363 exists = os.path.exists(target)
364 if (not after and exists or after and state in 'mn'):
364 if (not after and exists or after and state in 'mn'):
365 if not opts['force']:
365 if not opts['force']:
366 ui.warn(_('%s: not overwriting - file exists\n') %
366 ui.warn(_('%s: not overwriting - file exists\n') %
367 reltarget)
367 reltarget)
368 return
368 return
369
369
370 if after:
370 if after:
371 if not exists:
371 if not exists:
372 return
372 return
373 elif not dryrun:
373 elif not dryrun:
374 try:
374 try:
375 if exists:
375 if exists:
376 os.unlink(target)
376 os.unlink(target)
377 targetdir = os.path.dirname(target) or '.'
377 targetdir = os.path.dirname(target) or '.'
378 if not os.path.isdir(targetdir):
378 if not os.path.isdir(targetdir):
379 os.makedirs(targetdir)
379 os.makedirs(targetdir)
380 util.copyfile(src, target)
380 util.copyfile(src, target)
381 except IOError, inst:
381 except IOError, inst:
382 if inst.errno == errno.ENOENT:
382 if inst.errno == errno.ENOENT:
383 ui.warn(_('%s: deleted in working copy\n') % relsrc)
383 ui.warn(_('%s: deleted in working copy\n') % relsrc)
384 else:
384 else:
385 ui.warn(_('%s: cannot copy - %s\n') %
385 ui.warn(_('%s: cannot copy - %s\n') %
386 (relsrc, inst.strerror))
386 (relsrc, inst.strerror))
387 return True # report a failure
387 return True # report a failure
388
388
389 if ui.verbose or not exact:
389 if ui.verbose or not exact:
390 action = rename and "moving" or "copying"
390 action = rename and "moving" or "copying"
391 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
391 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
392
392
393 targets[abstarget] = abssrc
393 targets[abstarget] = abssrc
394
394
395 # fix up dirstate
395 # fix up dirstate
396 origsrc = repo.dirstate.copied(abssrc) or abssrc
396 origsrc = repo.dirstate.copied(abssrc) or abssrc
397 if abstarget == origsrc: # copying back a copy?
397 if abstarget == origsrc: # copying back a copy?
398 if state not in 'mn' and not dryrun:
398 if state not in 'mn' and not dryrun:
399 repo.dirstate.normallookup(abstarget)
399 repo.dirstate.normallookup(abstarget)
400 else:
400 else:
401 if repo.dirstate[origsrc] == 'a':
401 if repo.dirstate[origsrc] == 'a':
402 if not ui.quiet:
402 if not ui.quiet:
403 ui.warn(_("%s has not been committed yet, so no copy "
403 ui.warn(_("%s has not been committed yet, so no copy "
404 "data will be stored for %s.\n")
404 "data will be stored for %s.\n")
405 % (repo.pathto(origsrc, cwd), reltarget))
405 % (repo.pathto(origsrc, cwd), reltarget))
406 if abstarget not in repo.dirstate and not dryrun:
406 if abstarget not in repo.dirstate and not dryrun:
407 repo.add([abstarget])
407 repo.add([abstarget])
408 elif not dryrun:
408 elif not dryrun:
409 repo.copy(origsrc, abstarget)
409 repo.copy(origsrc, abstarget)
410
410
411 if rename and not dryrun:
411 if rename and not dryrun:
412 repo.remove([abssrc], not after)
412 repo.remove([abssrc], not after)
413
413
414 # pat: ossep
414 # pat: ossep
415 # dest ossep
415 # dest ossep
416 # srcs: list of (hgsep, hgsep, ossep, bool)
416 # srcs: list of (hgsep, hgsep, ossep, bool)
417 # return: function that takes hgsep and returns ossep
417 # return: function that takes hgsep and returns ossep
418 def targetpathfn(pat, dest, srcs):
418 def targetpathfn(pat, dest, srcs):
419 if os.path.isdir(pat):
419 if os.path.isdir(pat):
420 abspfx = util.canonpath(repo.root, cwd, pat)
420 abspfx = util.canonpath(repo.root, cwd, pat)
421 abspfx = util.localpath(abspfx)
421 abspfx = util.localpath(abspfx)
422 if destdirexists:
422 if destdirexists:
423 striplen = len(os.path.split(abspfx)[0])
423 striplen = len(os.path.split(abspfx)[0])
424 else:
424 else:
425 striplen = len(abspfx)
425 striplen = len(abspfx)
426 if striplen:
426 if striplen:
427 striplen += len(os.sep)
427 striplen += len(os.sep)
428 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
428 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
429 elif destdirexists:
429 elif destdirexists:
430 res = lambda p: os.path.join(dest,
430 res = lambda p: os.path.join(dest,
431 os.path.basename(util.localpath(p)))
431 os.path.basename(util.localpath(p)))
432 else:
432 else:
433 res = lambda p: dest
433 res = lambda p: dest
434 return res
434 return res
435
435
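
A toy illustration of the striplen arithmetic in targetpathfn above: when the destination directory already exists, only the leading components of the source pattern are stripped, so copying src/sub into an existing dst recreates sub under dst. The canonpath and localpath handling is omitted here for brevity:

    import os

    def maptarget(dest, srcpat, srcfile, destdirexists):
        if destdirexists:
            striplen = len(os.path.split(srcpat)[0])
        else:
            striplen = len(srcpat)
        if striplen:
            striplen += len(os.sep)
        return os.path.join(dest, srcfile[striplen:])

    # destination exists: keep the last component of the pattern
    assert maptarget('dst', 'src/sub', 'src/sub/a.txt', True) == os.path.join('dst', 'sub/a.txt')
    # destination does not exist yet: it becomes the copied directory itself
    assert maptarget('dst', 'src/sub', 'src/sub/a.txt', False) == os.path.join('dst', 'a.txt')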
436 # pat: ossep
436 # pat: ossep
437 # dest ossep
437 # dest ossep
438 # srcs: list of (hgsep, hgsep, ossep, bool)
438 # srcs: list of (hgsep, hgsep, ossep, bool)
439 # return: function that takes hgsep and returns ossep
439 # return: function that takes hgsep and returns ossep
440 def targetpathafterfn(pat, dest, srcs):
440 def targetpathafterfn(pat, dest, srcs):
441 if util.patkind(pat, None)[0]:
441 if util.patkind(pat, None)[0]:
442 # a mercurial pattern
442 # a mercurial pattern
443 res = lambda p: os.path.join(dest,
443 res = lambda p: os.path.join(dest,
444 os.path.basename(util.localpath(p)))
444 os.path.basename(util.localpath(p)))
445 else:
445 else:
446 abspfx = util.canonpath(repo.root, cwd, pat)
446 abspfx = util.canonpath(repo.root, cwd, pat)
447 if len(abspfx) < len(srcs[0][0]):
447 if len(abspfx) < len(srcs[0][0]):
448 # A directory. Either the target path contains the last
448 # A directory. Either the target path contains the last
449 # component of the source path or it does not.
449 # component of the source path or it does not.
450 def evalpath(striplen):
450 def evalpath(striplen):
451 score = 0
451 score = 0
452 for s in srcs:
452 for s in srcs:
453 t = os.path.join(dest, util.localpath(s[0])[striplen:])
453 t = os.path.join(dest, util.localpath(s[0])[striplen:])
454 if os.path.exists(t):
454 if os.path.exists(t):
455 score += 1
455 score += 1
456 return score
456 return score
457
457
458 abspfx = util.localpath(abspfx)
458 abspfx = util.localpath(abspfx)
459 striplen = len(abspfx)
459 striplen = len(abspfx)
460 if striplen:
460 if striplen:
461 striplen += len(os.sep)
461 striplen += len(os.sep)
462 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
462 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
463 score = evalpath(striplen)
463 score = evalpath(striplen)
464 striplen1 = len(os.path.split(abspfx)[0])
464 striplen1 = len(os.path.split(abspfx)[0])
465 if striplen1:
465 if striplen1:
466 striplen1 += len(os.sep)
466 striplen1 += len(os.sep)
467 if evalpath(striplen1) > score:
467 if evalpath(striplen1) > score:
468 striplen = striplen1
468 striplen = striplen1
469 res = lambda p: os.path.join(dest,
469 res = lambda p: os.path.join(dest,
470 util.localpath(p)[striplen:])
470 util.localpath(p)[striplen:])
471 else:
471 else:
472 # a file
472 # a file
473 if destdirexists:
473 if destdirexists:
474 res = lambda p: os.path.join(dest,
474 res = lambda p: os.path.join(dest,
475 os.path.basename(util.localpath(p)))
475 os.path.basename(util.localpath(p)))
476 else:
476 else:
477 res = lambda p: dest
477 res = lambda p: dest
478 return res
478 return res
479
479
480
480
481 pats = util.expand_glob(pats)
481 pats = util.expand_glob(pats)
482 if not pats:
482 if not pats:
483 raise util.Abort(_('no source or destination specified'))
483 raise util.Abort(_('no source or destination specified'))
484 if len(pats) == 1:
484 if len(pats) == 1:
485 raise util.Abort(_('no destination specified'))
485 raise util.Abort(_('no destination specified'))
486 dest = pats.pop()
486 dest = pats.pop()
487 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
487 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
488 if not destdirexists:
488 if not destdirexists:
489 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
489 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
490 raise util.Abort(_('with multiple sources, destination must be an '
490 raise util.Abort(_('with multiple sources, destination must be an '
491 'existing directory'))
491 'existing directory'))
492 if util.endswithsep(dest):
492 if util.endswithsep(dest):
493 raise util.Abort(_('destination %s is not a directory') % dest)
493 raise util.Abort(_('destination %s is not a directory') % dest)
494
494
495 tfn = targetpathfn
495 tfn = targetpathfn
496 if after:
496 if after:
497 tfn = targetpathafterfn
497 tfn = targetpathafterfn
498 copylist = []
498 copylist = []
499 for pat in pats:
499 for pat in pats:
500 srcs = walkpat(pat)
500 srcs = walkpat(pat)
501 if not srcs:
501 if not srcs:
502 continue
502 continue
503 copylist.append((tfn(pat, dest, srcs), srcs))
503 copylist.append((tfn(pat, dest, srcs), srcs))
504 if not copylist:
504 if not copylist:
505 raise util.Abort(_('no files to copy'))
505 raise util.Abort(_('no files to copy'))
506
506
507 errors = 0
507 errors = 0
508 for targetpath, srcs in copylist:
508 for targetpath, srcs in copylist:
509 for abssrc, relsrc, exact in srcs:
509 for abssrc, relsrc, exact in srcs:
510 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
510 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
511 errors += 1
511 errors += 1
512
512
513 if errors:
513 if errors:
514 ui.warn(_('(consider using --after)\n'))
514 ui.warn(_('(consider using --after)\n'))
515
515
516 return errors
516 return errors
517
517
518 def service(opts, parentfn=None, initfn=None, runfn=None):
518 def service(opts, parentfn=None, initfn=None, runfn=None):
519 '''Run a command as a service.'''
519 '''Run a command as a service.'''
520
520
521 if opts['daemon'] and not opts['daemon_pipefds']:
521 if opts['daemon'] and not opts['daemon_pipefds']:
522 rfd, wfd = os.pipe()
522 rfd, wfd = os.pipe()
523 args = sys.argv[:]
523 args = sys.argv[:]
524 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
524 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
525 # Don't pass --cwd to the child process, because we've already
525 # Don't pass --cwd to the child process, because we've already
526 # changed directory.
526 # changed directory.
527 for i in xrange(1,len(args)):
527 for i in xrange(1,len(args)):
528 if args[i].startswith('--cwd='):
528 if args[i].startswith('--cwd='):
529 del args[i]
529 del args[i]
530 break
530 break
531 elif args[i].startswith('--cwd'):
531 elif args[i].startswith('--cwd'):
532 del args[i:i+2]
532 del args[i:i+2]
533 break
533 break
534 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
534 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
535 args[0], args)
535 args[0], args)
536 os.close(wfd)
536 os.close(wfd)
537 os.read(rfd, 1)
537 os.read(rfd, 1)
538 if parentfn:
538 if parentfn:
539 return parentfn(pid)
539 return parentfn(pid)
540 else:
540 else:
541 os._exit(0)
541 os._exit(0)
542
542
543 if initfn:
543 if initfn:
544 initfn()
544 initfn()
545
545
546 if opts['pid_file']:
546 if opts['pid_file']:
547 fp = open(opts['pid_file'], 'w')
547 fp = open(opts['pid_file'], 'w')
548 fp.write(str(os.getpid()) + '\n')
548 fp.write(str(os.getpid()) + '\n')
549 fp.close()
549 fp.close()
550
550
551 if opts['daemon_pipefds']:
551 if opts['daemon_pipefds']:
552 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
552 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
553 os.close(rfd)
553 os.close(rfd)
554 try:
554 try:
555 os.setsid()
555 os.setsid()
556 except AttributeError:
556 except AttributeError:
557 pass
557 pass
558 os.write(wfd, 'y')
558 os.write(wfd, 'y')
559 os.close(wfd)
559 os.close(wfd)
560 sys.stdout.flush()
560 sys.stdout.flush()
561 sys.stderr.flush()
561 sys.stderr.flush()
562 fd = os.open(util.nulldev, os.O_RDWR)
562 fd = os.open(util.nulldev, os.O_RDWR)
563 if fd != 0: os.dup2(fd, 0)
563 if fd != 0: os.dup2(fd, 0)
564 if fd != 1: os.dup2(fd, 1)
564 if fd != 1: os.dup2(fd, 1)
565 if fd != 2: os.dup2(fd, 2)
565 if fd != 2: os.dup2(fd, 2)
566 if fd not in (0, 1, 2): os.close(fd)
566 if fd not in (0, 1, 2): os.close(fd)
567
567
568 if runfn:
568 if runfn:
569 return runfn()
569 return runfn()
570
570
571 class changeset_printer(object):
571 class changeset_printer(object):
572 '''show changeset information when templating not requested.'''
572 '''show changeset information when templating not requested.'''
573
573
574 def __init__(self, ui, repo, patch, buffered):
574 def __init__(self, ui, repo, patch, buffered):
575 self.ui = ui
575 self.ui = ui
576 self.repo = repo
576 self.repo = repo
577 self.buffered = buffered
577 self.buffered = buffered
578 self.patch = patch
578 self.patch = patch
579 self.header = {}
579 self.header = {}
580 self.hunk = {}
580 self.hunk = {}
581 self.lastheader = None
581 self.lastheader = None
582
582
583 def flush(self, rev):
583 def flush(self, rev):
584 if rev in self.header:
584 if rev in self.header:
585 h = self.header[rev]
585 h = self.header[rev]
586 if h != self.lastheader:
586 if h != self.lastheader:
587 self.lastheader = h
587 self.lastheader = h
588 self.ui.write(h)
588 self.ui.write(h)
589 del self.header[rev]
589 del self.header[rev]
590 if rev in self.hunk:
590 if rev in self.hunk:
591 self.ui.write(self.hunk[rev])
591 self.ui.write(self.hunk[rev])
592 del self.hunk[rev]
592 del self.hunk[rev]
593 return 1
593 return 1
594 return 0
594 return 0
595
595
596 def show(self, rev=0, changenode=None, copies=(), **props):
596 def show(self, rev=0, changenode=None, copies=(), **props):
597 if self.buffered:
597 if self.buffered:
598 self.ui.pushbuffer()
598 self.ui.pushbuffer()
599 self._show(rev, changenode, copies, props)
599 self._show(rev, changenode, copies, props)
600 self.hunk[rev] = self.ui.popbuffer()
600 self.hunk[rev] = self.ui.popbuffer()
601 else:
601 else:
602 self._show(rev, changenode, copies, props)
602 self._show(rev, changenode, copies, props)
603
603
604 def _show(self, rev, changenode, copies, props):
604 def _show(self, rev, changenode, copies, props):
605 '''show a single changeset or file revision'''
605 '''show a single changeset or file revision'''
606 log = self.repo.changelog
606 log = self.repo.changelog
607 if changenode is None:
607 if changenode is None:
608 changenode = log.node(rev)
608 changenode = log.node(rev)
609 elif not rev:
609 elif not rev:
610 rev = log.rev(changenode)
610 rev = log.rev(changenode)
611
611
612 if self.ui.quiet:
612 if self.ui.quiet:
613 self.ui.write("%d:%s\n" % (rev, short(changenode)))
613 self.ui.write("%d:%s\n" % (rev, short(changenode)))
614 return
614 return
615
615
616 changes = log.read(changenode)
616 changes = log.read(changenode)
617 date = util.datestr(changes[2])
617 date = util.datestr(changes[2])
618 extra = changes[5]
618 extra = changes[5]
619 branch = extra.get("branch")
619 branch = extra.get("branch")
620
620
621 hexfunc = self.ui.debugflag and hex or short
621 hexfunc = self.ui.debugflag and hex or short
622
622
623 parents = [(p, hexfunc(log.node(p)))
623 parents = [(p, hexfunc(log.node(p)))
624 for p in self._meaningful_parentrevs(log, rev)]
624 for p in self._meaningful_parentrevs(log, rev)]
625
625
626 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
626 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
627
627
628 # don't show the default branch name
628 # don't show the default branch name
629 if branch != 'default':
629 if branch != 'default':
630 branch = util.tolocal(branch)
630 branch = util.tolocal(branch)
631 self.ui.write(_("branch: %s\n") % branch)
631 self.ui.write(_("branch: %s\n") % branch)
632 for tag in self.repo.nodetags(changenode):
632 for tag in self.repo.nodetags(changenode):
633 self.ui.write(_("tag: %s\n") % tag)
633 self.ui.write(_("tag: %s\n") % tag)
634 for parent in parents:
634 for parent in parents:
635 self.ui.write(_("parent: %d:%s\n") % parent)
635 self.ui.write(_("parent: %d:%s\n") % parent)
636
636
637 if self.ui.debugflag:
637 if self.ui.debugflag:
638 self.ui.write(_("manifest: %d:%s\n") %
638 self.ui.write(_("manifest: %d:%s\n") %
639 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
639 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
640 self.ui.write(_("user: %s\n") % changes[1])
640 self.ui.write(_("user: %s\n") % changes[1])
641 self.ui.write(_("date: %s\n") % date)
641 self.ui.write(_("date: %s\n") % date)
642
642
643 if self.ui.debugflag:
643 if self.ui.debugflag:
644 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
644 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
645 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
645 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
646 files):
646 files):
647 if value:
647 if value:
648 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
648 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
649 elif changes[3] and self.ui.verbose:
649 elif changes[3] and self.ui.verbose:
650 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
650 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
651 if copies and self.ui.verbose:
651 if copies and self.ui.verbose:
652 copies = ['%s (%s)' % c for c in copies]
652 copies = ['%s (%s)' % c for c in copies]
653 self.ui.write(_("copies: %s\n") % ' '.join(copies))
653 self.ui.write(_("copies: %s\n") % ' '.join(copies))
654
654
655 if extra and self.ui.debugflag:
655 if extra and self.ui.debugflag:
656 extraitems = extra.items()
656 extraitems = extra.items()
657 extraitems.sort()
657 extraitems.sort()
658 for key, value in extraitems:
658 for key, value in extraitems:
659 self.ui.write(_("extra: %s=%s\n")
659 self.ui.write(_("extra: %s=%s\n")
660 % (key, value.encode('string_escape')))
660 % (key, value.encode('string_escape')))
661
661
662 description = changes[4].strip()
662 description = changes[4].strip()
663 if description:
663 if description:
664 if self.ui.verbose:
664 if self.ui.verbose:
665 self.ui.write(_("description:\n"))
665 self.ui.write(_("description:\n"))
666 self.ui.write(description)
666 self.ui.write(description)
667 self.ui.write("\n\n")
667 self.ui.write("\n\n")
668 else:
668 else:
669 self.ui.write(_("summary: %s\n") %
669 self.ui.write(_("summary: %s\n") %
670 description.splitlines()[0])
670 description.splitlines()[0])
671 self.ui.write("\n")
671 self.ui.write("\n")
672
672
673 self.showpatch(changenode)
673 self.showpatch(changenode)
674
674
675 def showpatch(self, node):
675 def showpatch(self, node):
676 if self.patch:
676 if self.patch:
677 prev = self.repo.changelog.parents(node)[0]
677 prev = self.repo.changelog.parents(node)[0]
678 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
678 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
679 opts=patch.diffopts(self.ui))
679 opts=patch.diffopts(self.ui))
680 self.ui.write("\n")
680 self.ui.write("\n")
681
681
682 def _meaningful_parentrevs(self, log, rev):
682 def _meaningful_parentrevs(self, log, rev):
683 """Return list of meaningful (or all if debug) parentrevs for rev.
683 """Return list of meaningful (or all if debug) parentrevs for rev.
684
684
685 For merges (two non-nullrev revisions) both parents are meaningful.
685 For merges (two non-nullrev revisions) both parents are meaningful.
686 Otherwise the first parent revision is considered meaningful if it
686 Otherwise the first parent revision is considered meaningful if it
687 is not the preceding revision.
687 is not the preceding revision.
688 """
688 """
689 parents = log.parentrevs(rev)
689 parents = log.parentrevs(rev)
690 if not self.ui.debugflag and parents[1] == nullrev:
690 if not self.ui.debugflag and parents[1] == nullrev:
691 if parents[0] >= rev - 1:
691 if parents[0] >= rev - 1:
692 parents = []
692 parents = []
693 else:
693 else:
694 parents = [parents[0]]
694 parents = [parents[0]]
695 return parents
695 return parents
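# Editor's note -- illustrative only, not part of this changeset. With the
# default (non-debug) output the method above elides a parent that is simply
# the preceding revision, so for hypothetical revisions:
#
#   rev 7, parentrevs (6, nullrev)  -> []      first parent is rev - 1, elided
#   rev 7, parentrevs (4, nullrev)  -> [4]     non-consecutive first parent
#   rev 7, parentrevs (5, 6)        -> (5, 6)  merge: both parents reported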
696
696
697
697
698 class changeset_templater(changeset_printer):
698 class changeset_templater(changeset_printer):
699 '''format changeset information.'''
699 '''format changeset information.'''
700
700
701 def __init__(self, ui, repo, patch, mapfile, buffered):
701 def __init__(self, ui, repo, patch, mapfile, buffered):
702 changeset_printer.__init__(self, ui, repo, patch, buffered)
702 changeset_printer.__init__(self, ui, repo, patch, buffered)
703 filters = templatefilters.filters.copy()
703 filters = templatefilters.filters.copy()
704 filters['formatnode'] = (ui.debugflag and (lambda x: x)
704 filters['formatnode'] = (ui.debugflag and (lambda x: x)
705 or (lambda x: x[:12]))
705 or (lambda x: x[:12]))
706 self.t = templater.templater(mapfile, filters,
706 self.t = templater.templater(mapfile, filters,
707 cache={
707 cache={
708 'parent': '{rev}:{node|formatnode} ',
708 'parent': '{rev}:{node|formatnode} ',
709 'manifest': '{rev}:{node|formatnode}',
709 'manifest': '{rev}:{node|formatnode}',
710 'filecopy': '{name} ({source})'})
710 'filecopy': '{name} ({source})'})
711
711
712 def use_template(self, t):
712 def use_template(self, t):
713 '''set template string to use'''
713 '''set template string to use'''
714 self.t.cache['changeset'] = t
714 self.t.cache['changeset'] = t
715
715
716 def _show(self, rev, changenode, copies, props):
716 def _show(self, rev, changenode, copies, props):
717 '''show a single changeset or file revision'''
717 '''show a single changeset or file revision'''
718 log = self.repo.changelog
718 log = self.repo.changelog
719 if changenode is None:
719 if changenode is None:
720 changenode = log.node(rev)
720 changenode = log.node(rev)
721 elif not rev:
721 elif not rev:
722 rev = log.rev(changenode)
722 rev = log.rev(changenode)
723
723
724 changes = log.read(changenode)
724 changes = log.read(changenode)
725
725
726 def showlist(name, values, plural=None, **args):
726 def showlist(name, values, plural=None, **args):
727 '''expand set of values.
727 '''expand set of values.
728 name is name of key in template map.
728 name is name of key in template map.
729 values is list of strings or dicts.
729 values is list of strings or dicts.
730 plural is plural of name, if not simply name + 's'.
730 plural is plural of name, if not simply name + 's'.
731
731
732 expansion works like this, given name 'foo'.
732 expansion works like this, given name 'foo'.
733
733
734 if values is empty, expand 'no_foos'.
734 if values is empty, expand 'no_foos'.
735
735
736 if 'foo' not in template map, return values as a string,
736 if 'foo' not in template map, return values as a string,
737 joined by space.
737 joined by space.
738
738
739 expand 'start_foos'.
739 expand 'start_foos'.
740
740
741 for each value, expand 'foo'. if 'last_foo' in template
741 for each value, expand 'foo'. if 'last_foo' in template
742 map, expand it instead of 'foo' for last key.
742 map, expand it instead of 'foo' for last key.
743
743
744 expand 'end_foos'.
744 expand 'end_foos'.
745 '''
745 '''
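# Editor's note -- an illustrative sketch, not part of this changeset. For a
# hypothetical style map that defines the 'tag' family of entries, a call
# such as showlist('tag', ['1.0', 'stable']) expands 'start_tags' once,
# 'tag' for each value ('last_tag' for the final one when present), then
# 'end_tags'; an empty list would expand 'no_tags' instead:
#
#   start_tags = 'tags: '
#   tag = '{tag}, '
#   last_tag = '{tag}'
#   end_tags = '\n'
#   no_tags = 'tags: --none--\n'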
746 if plural: names = plural
746 if plural: names = plural
747 else: names = name + 's'
747 else: names = name + 's'
748 if not values:
748 if not values:
749 noname = 'no_' + names
749 noname = 'no_' + names
750 if noname in self.t:
750 if noname in self.t:
751 yield self.t(noname, **args)
751 yield self.t(noname, **args)
752 return
752 return
753 if name not in self.t:
753 if name not in self.t:
754 if isinstance(values[0], str):
754 if isinstance(values[0], str):
755 yield ' '.join(values)
755 yield ' '.join(values)
756 else:
756 else:
757 for v in values:
757 for v in values:
758 yield dict(v, **args)
758 yield dict(v, **args)
759 return
759 return
760 startname = 'start_' + names
760 startname = 'start_' + names
761 if startname in self.t:
761 if startname in self.t:
762 yield self.t(startname, **args)
762 yield self.t(startname, **args)
763 vargs = args.copy()
763 vargs = args.copy()
764 def one(v, tag=name):
764 def one(v, tag=name):
765 try:
765 try:
766 vargs.update(v)
766 vargs.update(v)
767 except (AttributeError, ValueError):
767 except (AttributeError, ValueError):
768 try:
768 try:
769 for a, b in v:
769 for a, b in v:
770 vargs[a] = b
770 vargs[a] = b
771 except ValueError:
771 except ValueError:
772 vargs[name] = v
772 vargs[name] = v
773 return self.t(tag, **vargs)
773 return self.t(tag, **vargs)
774 lastname = 'last_' + name
774 lastname = 'last_' + name
775 if lastname in self.t:
775 if lastname in self.t:
776 last = values.pop()
776 last = values.pop()
777 else:
777 else:
778 last = None
778 last = None
779 for v in values:
779 for v in values:
780 yield one(v)
780 yield one(v)
781 if last is not None:
781 if last is not None:
782 yield one(last, tag=lastname)
782 yield one(last, tag=lastname)
783 endname = 'end_' + names
783 endname = 'end_' + names
784 if endname in self.t:
784 if endname in self.t:
785 yield self.t(endname, **args)
785 yield self.t(endname, **args)
786
786
787 def showbranches(**args):
787 def showbranches(**args):
788 branch = changes[5].get("branch")
788 branch = changes[5].get("branch")
789 if branch != 'default':
789 if branch != 'default':
790 branch = util.tolocal(branch)
790 branch = util.tolocal(branch)
791 return showlist('branch', [branch], plural='branches', **args)
791 return showlist('branch', [branch], plural='branches', **args)
792
792
793 def showparents(**args):
793 def showparents(**args):
794 parents = [[('rev', p), ('node', hex(log.node(p)))]
794 parents = [[('rev', p), ('node', hex(log.node(p)))]
795 for p in self._meaningful_parentrevs(log, rev)]
795 for p in self._meaningful_parentrevs(log, rev)]
796 return showlist('parent', parents, **args)
796 return showlist('parent', parents, **args)
797
797
798 def showtags(**args):
798 def showtags(**args):
799 return showlist('tag', self.repo.nodetags(changenode), **args)
799 return showlist('tag', self.repo.nodetags(changenode), **args)
800
800
801 def showextras(**args):
801 def showextras(**args):
802 extras = changes[5].items()
802 extras = changes[5].items()
803 extras.sort()
803 extras.sort()
804 for key, value in extras:
804 for key, value in extras:
805 args = args.copy()
805 args = args.copy()
806 args.update(dict(key=key, value=value))
806 args.update(dict(key=key, value=value))
807 yield self.t('extra', **args)
807 yield self.t('extra', **args)
808
808
809 def showcopies(**args):
809 def showcopies(**args):
810 c = [{'name': x[0], 'source': x[1]} for x in copies]
810 c = [{'name': x[0], 'source': x[1]} for x in copies]
811 return showlist('file_copy', c, plural='file_copies', **args)
811 return showlist('file_copy', c, plural='file_copies', **args)
812
812
813 files = []
813 files = []
814 def getfiles():
814 def getfiles():
815 if not files:
815 if not files:
816 files[:] = self.repo.status(
816 files[:] = self.repo.status(
817 log.parents(changenode)[0], changenode)[:3]
817 log.parents(changenode)[0], changenode)[:3]
818 return files
818 return files
819 def showfiles(**args):
819 def showfiles(**args):
820 return showlist('file', changes[3], **args)
820 return showlist('file', changes[3], **args)
821 def showmods(**args):
821 def showmods(**args):
822 return showlist('file_mod', getfiles()[0], **args)
822 return showlist('file_mod', getfiles()[0], **args)
823 def showadds(**args):
823 def showadds(**args):
824 return showlist('file_add', getfiles()[1], **args)
824 return showlist('file_add', getfiles()[1], **args)
825 def showdels(**args):
825 def showdels(**args):
826 return showlist('file_del', getfiles()[2], **args)
826 return showlist('file_del', getfiles()[2], **args)
827 def showmanifest(**args):
827 def showmanifest(**args):
828 args = args.copy()
828 args = args.copy()
829 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
829 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
830 node=hex(changes[0])))
830 node=hex(changes[0])))
831 return self.t('manifest', **args)
831 return self.t('manifest', **args)
832
832
833 defprops = {
833 defprops = {
834 'author': changes[1],
834 'author': changes[1],
835 'branches': showbranches,
835 'branches': showbranches,
836 'date': changes[2],
836 'date': changes[2],
837 'desc': changes[4].strip(),
837 'desc': changes[4].strip(),
838 'file_adds': showadds,
838 'file_adds': showadds,
839 'file_dels': showdels,
839 'file_dels': showdels,
840 'file_mods': showmods,
840 'file_mods': showmods,
841 'files': showfiles,
841 'files': showfiles,
842 'file_copies': showcopies,
842 'file_copies': showcopies,
843 'manifest': showmanifest,
843 'manifest': showmanifest,
844 'node': hex(changenode),
844 'node': hex(changenode),
845 'parents': showparents,
845 'parents': showparents,
846 'rev': rev,
846 'rev': rev,
847 'tags': showtags,
847 'tags': showtags,
848 'extras': showextras,
848 'extras': showextras,
849 }
849 }
850 props = props.copy()
850 props = props.copy()
851 props.update(defprops)
851 props.update(defprops)
852
852
853 try:
853 try:
854 if self.ui.debugflag and 'header_debug' in self.t:
854 if self.ui.debugflag and 'header_debug' in self.t:
855 key = 'header_debug'
855 key = 'header_debug'
856 elif self.ui.quiet and 'header_quiet' in self.t:
856 elif self.ui.quiet and 'header_quiet' in self.t:
857 key = 'header_quiet'
857 key = 'header_quiet'
858 elif self.ui.verbose and 'header_verbose' in self.t:
858 elif self.ui.verbose and 'header_verbose' in self.t:
859 key = 'header_verbose'
859 key = 'header_verbose'
860 elif 'header' in self.t:
860 elif 'header' in self.t:
861 key = 'header'
861 key = 'header'
862 else:
862 else:
863 key = ''
863 key = ''
864 if key:
864 if key:
865 h = templater.stringify(self.t(key, **props))
865 h = templater.stringify(self.t(key, **props))
866 if self.buffered:
866 if self.buffered:
867 self.header[rev] = h
867 self.header[rev] = h
868 else:
868 else:
869 self.ui.write(h)
869 self.ui.write(h)
870 if self.ui.debugflag and 'changeset_debug' in self.t:
870 if self.ui.debugflag and 'changeset_debug' in self.t:
871 key = 'changeset_debug'
871 key = 'changeset_debug'
872 elif self.ui.quiet and 'changeset_quiet' in self.t:
872 elif self.ui.quiet and 'changeset_quiet' in self.t:
873 key = 'changeset_quiet'
873 key = 'changeset_quiet'
874 elif self.ui.verbose and 'changeset_verbose' in self.t:
874 elif self.ui.verbose and 'changeset_verbose' in self.t:
875 key = 'changeset_verbose'
875 key = 'changeset_verbose'
876 else:
876 else:
877 key = 'changeset'
877 key = 'changeset'
878 self.ui.write(templater.stringify(self.t(key, **props)))
878 self.ui.write(templater.stringify(self.t(key, **props)))
879 self.showpatch(changenode)
879 self.showpatch(changenode)
880 except KeyError, inst:
880 except KeyError, inst:
881 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
881 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
882 inst.args[0]))
882 inst.args[0]))
883 except SyntaxError, inst:
883 except SyntaxError, inst:
884 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
884 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
885
885
886 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
886 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
887 """show one changeset using template or regular display.
887 """show one changeset using template or regular display.
888
888
889 Display format will be the first non-empty hit of:
889 Display format will be the first non-empty hit of:
890 1. option 'template'
890 1. option 'template'
891 2. option 'style'
891 2. option 'style'
892 3. [ui] setting 'logtemplate'
892 3. [ui] setting 'logtemplate'
893 4. [ui] setting 'style'
893 4. [ui] setting 'style'
894 If all of these values are either unset or the empty string,
894 If all of these values are either unset or the empty string,
895 regular display via changeset_printer() is done.
895 regular display via changeset_printer() is done.
896 """
896 """
897 # options
897 # options
898 patch = False
898 patch = False
899 if opts.get('patch'):
899 if opts.get('patch'):
900 patch = matchfn or matchall(repo)
900 patch = matchfn or matchall(repo)
901
901
902 tmpl = opts.get('template')
902 tmpl = opts.get('template')
903 mapfile = None
903 mapfile = None
904 if tmpl:
904 if tmpl:
905 tmpl = templater.parsestring(tmpl, quoted=False)
905 tmpl = templater.parsestring(tmpl, quoted=False)
906 else:
906 else:
907 mapfile = opts.get('style')
907 mapfile = opts.get('style')
908 # ui settings
908 # ui settings
909 if not mapfile:
909 if not mapfile:
910 tmpl = ui.config('ui', 'logtemplate')
910 tmpl = ui.config('ui', 'logtemplate')
911 if tmpl:
911 if tmpl:
912 tmpl = templater.parsestring(tmpl)
912 tmpl = templater.parsestring(tmpl)
913 else:
913 else:
914 mapfile = ui.config('ui', 'style')
914 mapfile = ui.config('ui', 'style')
915
915
916 if tmpl or mapfile:
916 if tmpl or mapfile:
917 if mapfile:
917 if mapfile:
918 if not os.path.split(mapfile)[0]:
918 if not os.path.split(mapfile)[0]:
919 mapname = (templater.templatepath('map-cmdline.' + mapfile)
919 mapname = (templater.templatepath('map-cmdline.' + mapfile)
920 or templater.templatepath(mapfile))
920 or templater.templatepath(mapfile))
921 if mapname: mapfile = mapname
921 if mapname: mapfile = mapname
922 try:
922 try:
923 t = changeset_templater(ui, repo, patch, mapfile, buffered)
923 t = changeset_templater(ui, repo, patch, mapfile, buffered)
924 except SyntaxError, inst:
924 except SyntaxError, inst:
925 raise util.Abort(inst.args[0])
925 raise util.Abort(inst.args[0])
926 if tmpl: t.use_template(tmpl)
926 if tmpl: t.use_template(tmpl)
927 return t
927 return t
928 return changeset_printer(ui, repo, patch, buffered)
928 return changeset_printer(ui, repo, patch, buffered)
929
929
930 def finddate(ui, repo, date):
930 def finddate(ui, repo, date):
931 """Find the tipmost changeset that matches the given date spec"""
931 """Find the tipmost changeset that matches the given date spec"""
932 df = util.matchdate(date)
932 df = util.matchdate(date)
933 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
933 get = util.cachefunc(lambda r: repo[r].changeset())
934 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
934 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
935 results = {}
935 results = {}
936 for st, rev, fns in changeiter:
936 for st, rev, fns in changeiter:
937 if st == 'add':
937 if st == 'add':
938 d = get(rev)[2]
938 d = get(rev)[2]
939 if df(d[0]):
939 if df(d[0]):
940 results[rev] = d
940 results[rev] = d
941 elif st == 'iter':
941 elif st == 'iter':
942 if rev in results:
942 if rev in results:
943 ui.status("Found revision %s from %s\n" %
943 ui.status("Found revision %s from %s\n" %
944 (rev, util.datestr(results[rev])))
944 (rev, util.datestr(results[rev])))
945 return str(rev)
945 return str(rev)
946
946
947 raise util.Abort(_("revision matching date not found"))
947 raise util.Abort(_("revision matching date not found"))
948
948
949 def walkchangerevs(ui, repo, pats, change, opts):
949 def walkchangerevs(ui, repo, pats, change, opts):
950 '''Iterate over files and the revs they changed in.
950 '''Iterate over files and the revs they changed in.
951
951
952 Callers most commonly need to iterate backwards over the history
952 Callers most commonly need to iterate backwards over the history
953 they are interested in. Doing so has awful (quadratic-looking)
953 they are interested in. Doing so has awful (quadratic-looking)
954 performance, so we use iterators in a "windowed" way.
954 performance, so we use iterators in a "windowed" way.
955
955
956 We walk a window of revisions in the desired order. Within the
956 We walk a window of revisions in the desired order. Within the
957 window, we first walk forwards to gather data, then in the desired
957 window, we first walk forwards to gather data, then in the desired
958 order (usually backwards) to display it.
958 order (usually backwards) to display it.
959
959
960 This function returns an (iterator, matchfn) tuple. The iterator
960 This function returns an (iterator, matchfn) tuple. The iterator
961 yields 3-tuples. They will be of one of the following forms:
961 yields 3-tuples. They will be of one of the following forms:
962
962
963 "window", incrementing, lastrev: stepping through a window,
963 "window", incrementing, lastrev: stepping through a window,
964 positive if walking forwards through revs, last rev in the
964 positive if walking forwards through revs, last rev in the
965 sequence iterated over - use to reset state for the current window
965 sequence iterated over - use to reset state for the current window
966
966
967 "add", rev, fns: out-of-order traversal of the given file names
967 "add", rev, fns: out-of-order traversal of the given file names
968 fns, which changed during revision rev - use to gather data for
968 fns, which changed during revision rev - use to gather data for
969 possible display
969 possible display
970
970
971 "iter", rev, None: in-order traversal of the revs earlier iterated
971 "iter", rev, None: in-order traversal of the revs earlier iterated
972 over with "add" - use to display data'''
972 over with "add" - use to display data'''
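# Editor's note -- a minimal hedged sketch of a caller, not part of this
# changeset (finddate() above is a real in-tree example of the pattern):
#
#   changeiter, m = walkchangerevs(ui, repo, pats, change, opts)
#   for st, rev, fns in changeiter:
#       if st == 'window':
#           pass   # here rev is the 'incrementing' flag and fns the last
#                  # rev of the window; reset any per-window state
#       elif st == 'add':
#           pass   # gather data for rev; fns iterates its changed files
#       elif st == 'iter':
#           pass   # display the data previously gathered for rev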
973
973
974 def increasing_windows(start, end, windowsize=8, sizelimit=512):
974 def increasing_windows(start, end, windowsize=8, sizelimit=512):
975 if start < end:
975 if start < end:
976 while start < end:
976 while start < end:
977 yield start, min(windowsize, end-start)
977 yield start, min(windowsize, end-start)
978 start += windowsize
978 start += windowsize
979 if windowsize < sizelimit:
979 if windowsize < sizelimit:
980 windowsize *= 2
980 windowsize *= 2
981 else:
981 else:
982 while start > end:
982 while start > end:
983 yield start, min(windowsize, start-end-1)
983 yield start, min(windowsize, start-end-1)
984 start -= windowsize
984 start -= windowsize
985 if windowsize < sizelimit:
985 if windowsize < sizelimit:
986 windowsize *= 2
986 windowsize *= 2
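# Editor's note -- illustrative only, not part of this changeset: walking
# forwards from 0 the generator above yields (start, size) pairs roughly
# like (0, 8), (8, 16), (24, 32), (56, 64), ... with the window size
# doubling until it is capped at sizelimit (512).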
987
987
988 m = match(repo, pats, opts)
988 m = match(repo, pats, opts)
989 follow = opts.get('follow') or opts.get('follow_first')
989 follow = opts.get('follow') or opts.get('follow_first')
990
990
991 if repo.changelog.count() == 0:
991 if repo.changelog.count() == 0:
992 return [], m
992 return [], m
993
993
994 if follow:
994 if follow:
995 defrange = '%s:0' % repo.changectx('.').rev()
995 defrange = '%s:0' % repo['.'].rev()
996 else:
996 else:
997 defrange = '-1:0'
997 defrange = '-1:0'
998 revs = revrange(repo, opts['rev'] or [defrange])
998 revs = revrange(repo, opts['rev'] or [defrange])
999 wanted = {}
999 wanted = {}
1000 slowpath = m.anypats() or opts.get('removed')
1000 slowpath = m.anypats() or opts.get('removed')
1001 fncache = {}
1001 fncache = {}
1002
1002
1003 if not slowpath and not m.files():
1003 if not slowpath and not m.files():
1004 # No files, no patterns. Display all revs.
1004 # No files, no patterns. Display all revs.
1005 wanted = dict.fromkeys(revs)
1005 wanted = dict.fromkeys(revs)
1006 copies = []
1006 copies = []
1007 if not slowpath:
1007 if not slowpath:
1008 # Only files, no patterns. Check the history of each file.
1008 # Only files, no patterns. Check the history of each file.
1009 def filerevgen(filelog, node):
1009 def filerevgen(filelog, node):
1010 cl_count = repo.changelog.count()
1010 cl_count = repo.changelog.count()
1011 if node is None:
1011 if node is None:
1012 last = filelog.count() - 1
1012 last = filelog.count() - 1
1013 else:
1013 else:
1014 last = filelog.rev(node)
1014 last = filelog.rev(node)
1015 for i, window in increasing_windows(last, nullrev):
1015 for i, window in increasing_windows(last, nullrev):
1016 revs = []
1016 revs = []
1017 for j in xrange(i - window, i + 1):
1017 for j in xrange(i - window, i + 1):
1018 n = filelog.node(j)
1018 n = filelog.node(j)
1019 revs.append((filelog.linkrev(n),
1019 revs.append((filelog.linkrev(n),
1020 follow and filelog.renamed(n)))
1020 follow and filelog.renamed(n)))
1021 revs.reverse()
1021 revs.reverse()
1022 for rev in revs:
1022 for rev in revs:
1023 # only yield revs for which we have the changelog entry; missing
1023 # only yield revs for which we have the changelog entry; missing
1024 # entries can happen while doing "hg log" during a pull or commit
1024 # entries can happen while doing "hg log" during a pull or commit
1025 if rev[0] < cl_count:
1025 if rev[0] < cl_count:
1026 yield rev
1026 yield rev
1027 def iterfiles():
1027 def iterfiles():
1028 for filename in m.files():
1028 for filename in m.files():
1029 yield filename, None
1029 yield filename, None
1030 for filename_node in copies:
1030 for filename_node in copies:
1031 yield filename_node
1031 yield filename_node
1032 minrev, maxrev = min(revs), max(revs)
1032 minrev, maxrev = min(revs), max(revs)
1033 for file_, node in iterfiles():
1033 for file_, node in iterfiles():
1034 filelog = repo.file(file_)
1034 filelog = repo.file(file_)
1035 if filelog.count() == 0:
1035 if filelog.count() == 0:
1036 if node is None:
1036 if node is None:
1037 # A zero count may be a directory or deleted file, so
1037 # A zero count may be a directory or deleted file, so
1038 # try to find matching entries on the slow path.
1038 # try to find matching entries on the slow path.
1039 slowpath = True
1039 slowpath = True
1040 break
1040 break
1041 else:
1041 else:
1042 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1042 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1043 % (file_, short(node)))
1043 % (file_, short(node)))
1044 continue
1044 continue
1045 for rev, copied in filerevgen(filelog, node):
1045 for rev, copied in filerevgen(filelog, node):
1046 if rev <= maxrev:
1046 if rev <= maxrev:
1047 if rev < minrev:
1047 if rev < minrev:
1048 break
1048 break
1049 fncache.setdefault(rev, [])
1049 fncache.setdefault(rev, [])
1050 fncache[rev].append(file_)
1050 fncache[rev].append(file_)
1051 wanted[rev] = 1
1051 wanted[rev] = 1
1052 if follow and copied:
1052 if follow and copied:
1053 copies.append(copied)
1053 copies.append(copied)
1054 if slowpath:
1054 if slowpath:
1055 if follow:
1055 if follow:
1056 raise util.Abort(_('can only follow copies/renames for explicit '
1056 raise util.Abort(_('can only follow copies/renames for explicit '
1057 'file names'))
1057 'file names'))
1058
1058
1059 # The slow path checks files modified in every changeset.
1059 # The slow path checks files modified in every changeset.
1060 def changerevgen():
1060 def changerevgen():
1061 for i, window in increasing_windows(repo.changelog.count()-1,
1061 for i, window in increasing_windows(repo.changelog.count()-1,
1062 nullrev):
1062 nullrev):
1063 for j in xrange(i - window, i + 1):
1063 for j in xrange(i - window, i + 1):
1064 yield j, change(j)[3]
1064 yield j, change(j)[3]
1065
1065
1066 for rev, changefiles in changerevgen():
1066 for rev, changefiles in changerevgen():
1067 matches = filter(m, changefiles)
1067 matches = filter(m, changefiles)
1068 if matches:
1068 if matches:
1069 fncache[rev] = matches
1069 fncache[rev] = matches
1070 wanted[rev] = 1
1070 wanted[rev] = 1
1071
1071
1072 class followfilter:
1072 class followfilter:
1073 def __init__(self, onlyfirst=False):
1073 def __init__(self, onlyfirst=False):
1074 self.startrev = nullrev
1074 self.startrev = nullrev
1075 self.roots = []
1075 self.roots = []
1076 self.onlyfirst = onlyfirst
1076 self.onlyfirst = onlyfirst
1077
1077
1078 def match(self, rev):
1078 def match(self, rev):
1079 def realparents(rev):
1079 def realparents(rev):
1080 if self.onlyfirst:
1080 if self.onlyfirst:
1081 return repo.changelog.parentrevs(rev)[0:1]
1081 return repo.changelog.parentrevs(rev)[0:1]
1082 else:
1082 else:
1083 return filter(lambda x: x != nullrev,
1083 return filter(lambda x: x != nullrev,
1084 repo.changelog.parentrevs(rev))
1084 repo.changelog.parentrevs(rev))
1085
1085
1086 if self.startrev == nullrev:
1086 if self.startrev == nullrev:
1087 self.startrev = rev
1087 self.startrev = rev
1088 return True
1088 return True
1089
1089
1090 if rev > self.startrev:
1090 if rev > self.startrev:
1091 # forward: all descendants
1091 # forward: all descendants
1092 if not self.roots:
1092 if not self.roots:
1093 self.roots.append(self.startrev)
1093 self.roots.append(self.startrev)
1094 for parent in realparents(rev):
1094 for parent in realparents(rev):
1095 if parent in self.roots:
1095 if parent in self.roots:
1096 self.roots.append(rev)
1096 self.roots.append(rev)
1097 return True
1097 return True
1098 else:
1098 else:
1099 # backwards: all parents
1099 # backwards: all parents
1100 if not self.roots:
1100 if not self.roots:
1101 self.roots.extend(realparents(self.startrev))
1101 self.roots.extend(realparents(self.startrev))
1102 if rev in self.roots:
1102 if rev in self.roots:
1103 self.roots.remove(rev)
1103 self.roots.remove(rev)
1104 self.roots.extend(realparents(rev))
1104 self.roots.extend(realparents(rev))
1105 return True
1105 return True
1106
1106
1107 return False
1107 return False
1108
1108
1109 # it might be worthwhile to do this in the iterator if the rev range
1109 # it might be worthwhile to do this in the iterator if the rev range
1110 # is descending and the prune args are all within that range
1110 # is descending and the prune args are all within that range
1111 for rev in opts.get('prune', ()):
1111 for rev in opts.get('prune', ()):
1112 rev = repo.changelog.rev(repo.lookup(rev))
1112 rev = repo.changelog.rev(repo.lookup(rev))
1113 ff = followfilter()
1113 ff = followfilter()
1114 stop = min(revs[0], revs[-1])
1114 stop = min(revs[0], revs[-1])
1115 for x in xrange(rev, stop-1, -1):
1115 for x in xrange(rev, stop-1, -1):
1116 if ff.match(x) and x in wanted:
1116 if ff.match(x) and x in wanted:
1117 del wanted[x]
1117 del wanted[x]
1118
1118
1119 def iterate():
1119 def iterate():
1120 if follow and not m.files():
1120 if follow and not m.files():
1121 ff = followfilter(onlyfirst=opts.get('follow_first'))
1121 ff = followfilter(onlyfirst=opts.get('follow_first'))
1122 def want(rev):
1122 def want(rev):
1123 if ff.match(rev) and rev in wanted:
1123 if ff.match(rev) and rev in wanted:
1124 return True
1124 return True
1125 return False
1125 return False
1126 else:
1126 else:
1127 def want(rev):
1127 def want(rev):
1128 return rev in wanted
1128 return rev in wanted
1129
1129
1130 for i, window in increasing_windows(0, len(revs)):
1130 for i, window in increasing_windows(0, len(revs)):
1131 yield 'window', revs[0] < revs[-1], revs[-1]
1131 yield 'window', revs[0] < revs[-1], revs[-1]
1132 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1132 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1133 srevs = list(nrevs)
1133 srevs = list(nrevs)
1134 srevs.sort()
1134 srevs.sort()
1135 for rev in srevs:
1135 for rev in srevs:
1136 fns = fncache.get(rev)
1136 fns = fncache.get(rev)
1137 if not fns:
1137 if not fns:
1138 def fns_generator():
1138 def fns_generator():
1139 for f in change(rev)[3]:
1139 for f in change(rev)[3]:
1140 if m(f):
1140 if m(f):
1141 yield f
1141 yield f
1142 fns = fns_generator()
1142 fns = fns_generator()
1143 yield 'add', rev, fns
1143 yield 'add', rev, fns
1144 for rev in nrevs:
1144 for rev in nrevs:
1145 yield 'iter', rev, None
1145 yield 'iter', rev, None
1146 return iterate(), m
1146 return iterate(), m
1147
1147
1148 def commit(ui, repo, commitfunc, pats, opts):
1148 def commit(ui, repo, commitfunc, pats, opts):
1149 '''commit the specified files or all outstanding changes'''
1149 '''commit the specified files or all outstanding changes'''
1150 date = opts.get('date')
1150 date = opts.get('date')
1151 if date:
1151 if date:
1152 opts['date'] = util.parsedate(date)
1152 opts['date'] = util.parsedate(date)
1153 message = logmessage(opts)
1153 message = logmessage(opts)
1154
1154
1155 # extract addremove carefully -- this function can be called from a command
1155 # extract addremove carefully -- this function can be called from a command
1156 # that doesn't support addremove
1156 # that doesn't support addremove
1157 if opts.get('addremove'):
1157 if opts.get('addremove'):
1158 addremove(repo, pats, opts)
1158 addremove(repo, pats, opts)
1159
1159
1160 m = match(repo, pats, opts)
1160 m = match(repo, pats, opts)
1161 if pats:
1161 if pats:
1162 status = repo.status(match=m)
1162 status = repo.status(match=m)
1163 modified, added, removed, deleted, unknown = status[:5]
1163 modified, added, removed, deleted, unknown = status[:5]
1164 files = modified + added + removed
1164 files = modified + added + removed
1165 slist = None
1165 slist = None
1166 for f in m.files():
1166 for f in m.files():
1167 if f == '.':
1167 if f == '.':
1168 continue
1168 continue
1169 if f not in files:
1169 if f not in files:
1170 rf = repo.wjoin(f)
1170 rf = repo.wjoin(f)
1171 rel = repo.pathto(f)
1171 rel = repo.pathto(f)
1172 try:
1172 try:
1173 mode = os.lstat(rf)[stat.ST_MODE]
1173 mode = os.lstat(rf)[stat.ST_MODE]
1174 except OSError:
1174 except OSError:
1175 raise util.Abort(_("file %s not found!") % rel)
1175 raise util.Abort(_("file %s not found!") % rel)
1176 if stat.S_ISDIR(mode):
1176 if stat.S_ISDIR(mode):
1177 name = f + '/'
1177 name = f + '/'
1178 if slist is None:
1178 if slist is None:
1179 slist = list(files)
1179 slist = list(files)
1180 slist.sort()
1180 slist.sort()
1181 i = bisect.bisect(slist, name)
1181 i = bisect.bisect(slist, name)
1182 if i >= len(slist) or not slist[i].startswith(name):
1182 if i >= len(slist) or not slist[i].startswith(name):
1183 raise util.Abort(_("no match under directory %s!")
1183 raise util.Abort(_("no match under directory %s!")
1184 % rel)
1184 % rel)
1185 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1185 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1186 raise util.Abort(_("can't commit %s: "
1186 raise util.Abort(_("can't commit %s: "
1187 "unsupported file type!") % rel)
1187 "unsupported file type!") % rel)
1188 elif f not in repo.dirstate:
1188 elif f not in repo.dirstate:
1189 raise util.Abort(_("file %s not tracked!") % rel)
1189 raise util.Abort(_("file %s not tracked!") % rel)
1190 m = matchfiles(repo, files)
1190 m = matchfiles(repo, files)
1191 try:
1191 try:
1192 return commitfunc(ui, repo, message, m, opts)
1192 return commitfunc(ui, repo, message, m, opts)
1193 except ValueError, inst:
1193 except ValueError, inst:
1194 raise util.Abort(str(inst))
1194 raise util.Abort(str(inst))
@@ -1,3339 +1,3336 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from repo import RepoError, NoCapability
9 from repo import RepoError, NoCapability
10 from i18n import _
10 from i18n import _
11 import os, re, sys, urllib
11 import os, re, sys, urllib
12 import hg, util, revlog, bundlerepo, extensions, copies
12 import hg, util, revlog, bundlerepo, extensions, copies
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import version, socket
14 import version, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
16 import merge as merge_
16 import merge as merge_
17
17
18 # Commands start here, listed alphabetically
18 # Commands start here, listed alphabetically
19
19
20 def add(ui, repo, *pats, **opts):
20 def add(ui, repo, *pats, **opts):
21 """add the specified files on the next commit
21 """add the specified files on the next commit
22
22
23 Schedule files to be version controlled and added to the repository.
23 Schedule files to be version controlled and added to the repository.
24
24
25 The files will be added to the repository at the next commit. To
25 The files will be added to the repository at the next commit. To
26 undo an add before that, see hg revert.
26 undo an add before that, see hg revert.
27
27
28 If no names are given, add all files in the repository.
28 If no names are given, add all files in the repository.
29 """
29 """
30
30
31 rejected = None
31 rejected = None
32 exacts = {}
32 exacts = {}
33 names = []
33 names = []
34 m = cmdutil.match(repo, pats, opts)
34 m = cmdutil.match(repo, pats, opts)
35 m.bad = lambda x,y: True
35 m.bad = lambda x,y: True
36 for abs in repo.walk(m):
36 for abs in repo.walk(m):
37 if m.exact(abs):
37 if m.exact(abs):
38 if ui.verbose:
38 if ui.verbose:
39 ui.status(_('adding %s\n') % m.rel(abs))
39 ui.status(_('adding %s\n') % m.rel(abs))
40 names.append(abs)
40 names.append(abs)
41 exacts[abs] = 1
41 exacts[abs] = 1
42 elif abs not in repo.dirstate:
42 elif abs not in repo.dirstate:
43 ui.status(_('adding %s\n') % m.rel(abs))
43 ui.status(_('adding %s\n') % m.rel(abs))
44 names.append(abs)
44 names.append(abs)
45 if not opts.get('dry_run'):
45 if not opts.get('dry_run'):
46 rejected = repo.add(names)
46 rejected = repo.add(names)
47 rejected = [p for p in rejected if p in exacts]
47 rejected = [p for p in rejected if p in exacts]
48 return rejected and 1 or 0
48 return rejected and 1 or 0
49
49
50 def addremove(ui, repo, *pats, **opts):
50 def addremove(ui, repo, *pats, **opts):
51 """add all new files, delete all missing files
51 """add all new files, delete all missing files
52
52
53 Add all new files and remove all missing files from the repository.
53 Add all new files and remove all missing files from the repository.
54
54
55 New files are ignored if they match any of the patterns in .hgignore. As
55 New files are ignored if they match any of the patterns in .hgignore. As
56 with add, these changes take effect at the next commit.
56 with add, these changes take effect at the next commit.
57
57
58 Use the -s option to detect renamed files. With a parameter > 0,
58 Use the -s option to detect renamed files. With a parameter > 0,
59 this compares every removed file with every added file and records
59 this compares every removed file with every added file and records
60 those similar enough as renames. This option takes a percentage
60 those similar enough as renames. This option takes a percentage
61 between 0 (disabled) and 100 (files must be identical) as its
61 between 0 (disabled) and 100 (files must be identical) as its
62 parameter. Detecting renamed files this way can be expensive.
62 parameter. Detecting renamed files this way can be expensive.
63 """
63 """
64 try:
64 try:
65 sim = float(opts.get('similarity') or 0)
65 sim = float(opts.get('similarity') or 0)
66 except ValueError:
66 except ValueError:
67 raise util.Abort(_('similarity must be a number'))
67 raise util.Abort(_('similarity must be a number'))
68 if sim < 0 or sim > 100:
68 if sim < 0 or sim > 100:
69 raise util.Abort(_('similarity must be between 0 and 100'))
69 raise util.Abort(_('similarity must be between 0 and 100'))
70 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
70 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
71
71
72 def annotate(ui, repo, *pats, **opts):
72 def annotate(ui, repo, *pats, **opts):
73 """show changeset information per file line
73 """show changeset information per file line
74
74
75 List changes in files, showing the revision id responsible for each line.
75 List changes in files, showing the revision id responsible for each line.
76
76
77 This command is useful to discover who did a change or when a change took
77 This command is useful to discover who did a change or when a change took
78 place.
78 place.
79
79
80 Without the -a option, annotate will avoid processing files it
80 Without the -a option, annotate will avoid processing files it
81 detects as binary. With -a, annotate will generate an annotation
81 detects as binary. With -a, annotate will generate an annotation
82 anyway, probably with undesirable results.
82 anyway, probably with undesirable results.
83 """
83 """
84 datefunc = ui.quiet and util.shortdate or util.datestr
84 datefunc = ui.quiet and util.shortdate or util.datestr
85 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
85 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
86
86
87 if not pats:
87 if not pats:
88 raise util.Abort(_('at least one file name or pattern required'))
88 raise util.Abort(_('at least one file name or pattern required'))
89
89
90 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
90 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
91 ('number', lambda x: str(x[0].rev())),
91 ('number', lambda x: str(x[0].rev())),
92 ('changeset', lambda x: short(x[0].node())),
92 ('changeset', lambda x: short(x[0].node())),
93 ('date', getdate),
93 ('date', getdate),
94 ('follow', lambda x: x[0].path()),
94 ('follow', lambda x: x[0].path()),
95 ]
95 ]
96
96
97 if (not opts['user'] and not opts['changeset'] and not opts['date']
97 if (not opts['user'] and not opts['changeset'] and not opts['date']
98 and not opts['follow']):
98 and not opts['follow']):
99 opts['number'] = 1
99 opts['number'] = 1
100
100
101 linenumber = opts.get('line_number') is not None
101 linenumber = opts.get('line_number') is not None
102 if (linenumber and (not opts['changeset']) and (not opts['number'])):
102 if (linenumber and (not opts['changeset']) and (not opts['number'])):
103 raise util.Abort(_('at least one of -n/-c is required for -l'))
103 raise util.Abort(_('at least one of -n/-c is required for -l'))
104
104
105 funcmap = [func for op, func in opmap if opts.get(op)]
105 funcmap = [func for op, func in opmap if opts.get(op)]
106 if linenumber:
106 if linenumber:
107 lastfunc = funcmap[-1]
107 lastfunc = funcmap[-1]
108 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
108 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
109
109
110 ctx = repo.changectx(opts['rev'])
110 ctx = repo[opts['rev']]
111
111
112 m = cmdutil.match(repo, pats, opts)
112 m = cmdutil.match(repo, pats, opts)
113 for abs in repo.walk(m, ctx.node()):
113 for abs in repo.walk(m, ctx.node()):
114 fctx = ctx.filectx(abs)
114 fctx = ctx.filectx(abs)
115 if not opts['text'] and util.binary(fctx.data()):
115 if not opts['text'] and util.binary(fctx.data()):
116 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
116 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
117 continue
117 continue
118
118
119 lines = fctx.annotate(follow=opts.get('follow'),
119 lines = fctx.annotate(follow=opts.get('follow'),
120 linenumber=linenumber)
120 linenumber=linenumber)
121 pieces = []
121 pieces = []
122
122
123 for f in funcmap:
123 for f in funcmap:
124 l = [f(n) for n, dummy in lines]
124 l = [f(n) for n, dummy in lines]
125 if l:
125 if l:
126 m = max(map(len, l))
126 m = max(map(len, l))
127 pieces.append(["%*s" % (m, x) for x in l])
127 pieces.append(["%*s" % (m, x) for x in l])
128
128
129 if pieces:
129 if pieces:
130 for p, l in zip(zip(*pieces), lines):
130 for p, l in zip(zip(*pieces), lines):
131 ui.write("%s: %s" % (" ".join(p), l[1]))
131 ui.write("%s: %s" % (" ".join(p), l[1]))
132
132
133 def archive(ui, repo, dest, **opts):
133 def archive(ui, repo, dest, **opts):
134 '''create unversioned archive of a repository revision
134 '''create unversioned archive of a repository revision
135
135
136 By default, the revision used is the parent of the working
136 By default, the revision used is the parent of the working
137 directory; use "-r" to specify a different revision.
137 directory; use "-r" to specify a different revision.
138
138
139 To specify the type of archive to create, use "-t". Valid
139 To specify the type of archive to create, use "-t". Valid
140 types are:
140 types are:
141
141
142 "files" (default): a directory full of files
142 "files" (default): a directory full of files
143 "tar": tar archive, uncompressed
143 "tar": tar archive, uncompressed
144 "tbz2": tar archive, compressed using bzip2
144 "tbz2": tar archive, compressed using bzip2
145 "tgz": tar archive, compressed using gzip
145 "tgz": tar archive, compressed using gzip
146 "uzip": zip archive, uncompressed
146 "uzip": zip archive, uncompressed
147 "zip": zip archive, compressed using deflate
147 "zip": zip archive, compressed using deflate
148
148
149 The exact name of the destination archive or directory is given
149 The exact name of the destination archive or directory is given
150 using a format string; see "hg help export" for details.
150 using a format string; see "hg help export" for details.
151
151
152 Each member added to an archive file has a directory prefix
152 Each member added to an archive file has a directory prefix
153 prepended. Use "-p" to specify a format string for the prefix.
153 prepended. Use "-p" to specify a format string for the prefix.
154 The default is the basename of the archive, with suffixes removed.
154 The default is the basename of the archive, with suffixes removed.
155 '''
155 '''
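# Editor's note -- illustrative usage, not part of this changeset (paths and
# revision are placeholders; -r/-t/-p are described in the docstring above):
#   hg archive -r 1000 -t tgz ../myproject-1000.tgz
#   hg archive -t zip -p myproject-%h ../snapshot.zip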
156
156
157 ctx = repo.changectx(opts['rev'])
157 ctx = repo[opts['rev']]
158 if not ctx:
158 if not ctx:
159 raise util.Abort(_('repository has no revisions'))
159 raise util.Abort(_('repository has no revisions'))
160 node = ctx.node()
160 node = ctx.node()
161 dest = cmdutil.make_filename(repo, dest, node)
161 dest = cmdutil.make_filename(repo, dest, node)
162 if os.path.realpath(dest) == repo.root:
162 if os.path.realpath(dest) == repo.root:
163 raise util.Abort(_('repository root cannot be destination'))
163 raise util.Abort(_('repository root cannot be destination'))
164 matchfn = cmdutil.match(repo, [], opts)
164 matchfn = cmdutil.match(repo, [], opts)
165 kind = opts.get('type') or 'files'
165 kind = opts.get('type') or 'files'
166 prefix = opts['prefix']
166 prefix = opts['prefix']
167 if dest == '-':
167 if dest == '-':
168 if kind == 'files':
168 if kind == 'files':
169 raise util.Abort(_('cannot archive plain files to stdout'))
169 raise util.Abort(_('cannot archive plain files to stdout'))
170 dest = sys.stdout
170 dest = sys.stdout
171 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
171 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
172 prefix = cmdutil.make_filename(repo, prefix, node)
172 prefix = cmdutil.make_filename(repo, prefix, node)
173 archival.archive(repo, dest, node, kind, not opts['no_decode'],
173 archival.archive(repo, dest, node, kind, not opts['no_decode'],
174 matchfn, prefix)
174 matchfn, prefix)
175
175
176 def backout(ui, repo, node=None, rev=None, **opts):
176 def backout(ui, repo, node=None, rev=None, **opts):
177 '''reverse effect of earlier changeset
177 '''reverse effect of earlier changeset
178
178
179 Commit the backed out changes as a new changeset. The new
179 Commit the backed out changes as a new changeset. The new
180 changeset is a child of the backed out changeset.
180 changeset is a child of the backed out changeset.
181
181
182 If you back out a changeset other than the tip, a new head is
182 If you back out a changeset other than the tip, a new head is
183 created. This head will be the new tip and you should merge this
183 created. This head will be the new tip and you should merge this
184 backout changeset with another head (current one by default).
184 backout changeset with another head (current one by default).
185
185
186 The --merge option remembers the parent of the working directory
186 The --merge option remembers the parent of the working directory
187 before starting the backout, then merges the new head with that
187 before starting the backout, then merges the new head with that
188 changeset afterwards. This saves you from doing the merge by
188 changeset afterwards. This saves you from doing the merge by
189 hand. The result of this merge is not committed, as for a normal
189 hand. The result of this merge is not committed, as for a normal
190 merge.
190 merge.
191
191
192 See \'hg help dates\' for a list of formats valid for -d/--date.
192 See \'hg help dates\' for a list of formats valid for -d/--date.
193 '''
193 '''
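# Editor's note -- illustrative usage, not part of this changeset (the
# revision number is a placeholder):
#   hg backout -r 1234             # commit a new changeset reversing rev 1234
#   hg backout -r 1234 --merge     # ...then merge the backout head with the
#                                  # previous working directory parent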
194 if rev and node:
194 if rev and node:
195 raise util.Abort(_("please specify just one revision"))
195 raise util.Abort(_("please specify just one revision"))
196
196
197 if not rev:
197 if not rev:
198 rev = node
198 rev = node
199
199
200 if not rev:
200 if not rev:
201 raise util.Abort(_("please specify a revision to backout"))
201 raise util.Abort(_("please specify a revision to backout"))
202
202
203 date = opts.get('date')
203 date = opts.get('date')
204 if date:
204 if date:
205 opts['date'] = util.parsedate(date)
205 opts['date'] = util.parsedate(date)
206
206
207 cmdutil.bail_if_changed(repo)
207 cmdutil.bail_if_changed(repo)
208 node = repo.lookup(rev)
208 node = repo.lookup(rev)
209
209
210 op1, op2 = repo.dirstate.parents()
210 op1, op2 = repo.dirstate.parents()
211 a = repo.changelog.ancestor(op1, node)
211 a = repo.changelog.ancestor(op1, node)
212 if a != node:
212 if a != node:
213 raise util.Abort(_('cannot back out change on a different branch'))
213 raise util.Abort(_('cannot back out change on a different branch'))
214
214
215 p1, p2 = repo.changelog.parents(node)
215 p1, p2 = repo.changelog.parents(node)
216 if p1 == nullid:
216 if p1 == nullid:
217 raise util.Abort(_('cannot back out a change with no parents'))
217 raise util.Abort(_('cannot back out a change with no parents'))
218 if p2 != nullid:
218 if p2 != nullid:
219 if not opts['parent']:
219 if not opts['parent']:
220 raise util.Abort(_('cannot back out a merge changeset without '
220 raise util.Abort(_('cannot back out a merge changeset without '
221 '--parent'))
221 '--parent'))
222 p = repo.lookup(opts['parent'])
222 p = repo.lookup(opts['parent'])
223 if p not in (p1, p2):
223 if p not in (p1, p2):
224 raise util.Abort(_('%s is not a parent of %s') %
224 raise util.Abort(_('%s is not a parent of %s') %
225 (short(p), short(node)))
225 (short(p), short(node)))
226 parent = p
226 parent = p
227 else:
227 else:
228 if opts['parent']:
228 if opts['parent']:
229 raise util.Abort(_('cannot use --parent on non-merge changeset'))
229 raise util.Abort(_('cannot use --parent on non-merge changeset'))
230 parent = p1
230 parent = p1
231
231
232 # the backout should appear on the same branch
232 # the backout should appear on the same branch
233 branch = repo.dirstate.branch()
233 branch = repo.dirstate.branch()
234 hg.clean(repo, node, show_stats=False)
234 hg.clean(repo, node, show_stats=False)
235 repo.dirstate.setbranch(branch)
235 repo.dirstate.setbranch(branch)
236 revert_opts = opts.copy()
236 revert_opts = opts.copy()
237 revert_opts['date'] = None
237 revert_opts['date'] = None
238 revert_opts['all'] = True
238 revert_opts['all'] = True
239 revert_opts['rev'] = hex(parent)
239 revert_opts['rev'] = hex(parent)
240 revert_opts['no_backup'] = None
240 revert_opts['no_backup'] = None
241 revert(ui, repo, **revert_opts)
241 revert(ui, repo, **revert_opts)
242 commit_opts = opts.copy()
242 commit_opts = opts.copy()
243 commit_opts['addremove'] = False
243 commit_opts['addremove'] = False
244 if not commit_opts['message'] and not commit_opts['logfile']:
244 if not commit_opts['message'] and not commit_opts['logfile']:
245 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
245 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
246 commit_opts['force_editor'] = True
246 commit_opts['force_editor'] = True
247 commit(ui, repo, **commit_opts)
247 commit(ui, repo, **commit_opts)
248 def nice(node):
248 def nice(node):
249 return '%d:%s' % (repo.changelog.rev(node), short(node))
249 return '%d:%s' % (repo.changelog.rev(node), short(node))
250 ui.status(_('changeset %s backs out changeset %s\n') %
250 ui.status(_('changeset %s backs out changeset %s\n') %
251 (nice(repo.changelog.tip()), nice(node)))
251 (nice(repo.changelog.tip()), nice(node)))
252 if op1 != node:
252 if op1 != node:
253 hg.clean(repo, op1, show_stats=False)
253 hg.clean(repo, op1, show_stats=False)
254 if opts['merge']:
254 if opts['merge']:
255 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
255 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
256 hg.merge(repo, hex(repo.changelog.tip()))
256 hg.merge(repo, hex(repo.changelog.tip()))
257 else:
257 else:
258 ui.status(_('the backout changeset is a new head - '
258 ui.status(_('the backout changeset is a new head - '
259 'do not forget to merge\n'))
259 'do not forget to merge\n'))
260 ui.status(_('(use "backout --merge" '
260 ui.status(_('(use "backout --merge" '
261 'if you want to auto-merge)\n'))
261 'if you want to auto-merge)\n'))
262
262
263 def bisect(ui, repo, rev=None, extra=None,
263 def bisect(ui, repo, rev=None, extra=None,
264 reset=None, good=None, bad=None, skip=None, noupdate=None):
264 reset=None, good=None, bad=None, skip=None, noupdate=None):
265 """subdivision search of changesets
265 """subdivision search of changesets
266
266
267 This command helps to find changesets which introduce problems.
267 This command helps to find changesets which introduce problems.
268 To use, mark the earliest changeset you know exhibits the problem
268 To use, mark the earliest changeset you know exhibits the problem
269 as bad, then mark the latest changeset which is free from the
269 as bad, then mark the latest changeset which is free from the
270 problem as good. Bisect will update your working directory to a
270 problem as good. Bisect will update your working directory to a
271 revision for testing. Once you have performed tests, mark the
271 revision for testing. Once you have performed tests, mark the
272 working directory as bad or good and bisect will either update to
272 working directory as bad or good and bisect will either update to
273 another candidate changeset or announce that it has found the bad
273 another candidate changeset or announce that it has found the bad
274 revision.
274 revision.
275 """
275 """
276 # backward compatibility
276 # backward compatibility
277 if rev in "good bad reset init".split():
277 if rev in "good bad reset init".split():
278 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
278 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
279 cmd, rev, extra = rev, extra, None
279 cmd, rev, extra = rev, extra, None
280 if cmd == "good":
280 if cmd == "good":
281 good = True
281 good = True
282 elif cmd == "bad":
282 elif cmd == "bad":
283 bad = True
283 bad = True
284 else:
284 else:
285 reset = True
285 reset = True
286 elif extra or good + bad + skip + reset > 1:
286 elif extra or good + bad + skip + reset > 1:
287 raise util.Abort("Incompatible arguments")
287 raise util.Abort("Incompatible arguments")
288
288
289 if reset:
289 if reset:
290 p = repo.join("bisect.state")
290 p = repo.join("bisect.state")
291 if os.path.exists(p):
291 if os.path.exists(p):
292 os.unlink(p)
292 os.unlink(p)
293 return
293 return
294
294
295 # load state
295 # load state
296 state = {'good': [], 'bad': [], 'skip': []}
296 state = {'good': [], 'bad': [], 'skip': []}
297 if os.path.exists(repo.join("bisect.state")):
297 if os.path.exists(repo.join("bisect.state")):
298 for l in repo.opener("bisect.state"):
298 for l in repo.opener("bisect.state"):
299 kind, node = l[:-1].split()
299 kind, node = l[:-1].split()
300 node = repo.lookup(node)
300 node = repo.lookup(node)
301 if kind not in state:
301 if kind not in state:
302 raise util.Abort(_("unknown bisect kind %s") % kind)
302 raise util.Abort(_("unknown bisect kind %s") % kind)
303 state[kind].append(node)
303 state[kind].append(node)
304
304
305 # update state
305 # update state
306 node = repo.lookup(rev or '.')
306 node = repo.lookup(rev or '.')
307 if good:
307 if good:
308 state['good'].append(node)
308 state['good'].append(node)
309 elif bad:
309 elif bad:
310 state['bad'].append(node)
310 state['bad'].append(node)
311 elif skip:
311 elif skip:
312 state['skip'].append(node)
312 state['skip'].append(node)
313
313
314 # save state
314 # save state
315 f = repo.opener("bisect.state", "w", atomictemp=True)
315 f = repo.opener("bisect.state", "w", atomictemp=True)
316 wlock = repo.wlock()
316 wlock = repo.wlock()
317 try:
317 try:
318 for kind in state:
318 for kind in state:
319 for node in state[kind]:
319 for node in state[kind]:
320 f.write("%s %s\n" % (kind, hex(node)))
320 f.write("%s %s\n" % (kind, hex(node)))
321 f.rename()
321 f.rename()
322 finally:
322 finally:
323 del wlock
323 del wlock
324
324
325 if not state['good'] or not state['bad']:
325 if not state['good'] or not state['bad']:
326 return
326 return
327
327
328 # actually bisect
328 # actually bisect
329 node, changesets, good = hbisect.bisect(repo.changelog, state)
329 node, changesets, good = hbisect.bisect(repo.changelog, state)
330 if changesets == 0:
330 if changesets == 0:
331 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
331 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
332 displayer = cmdutil.show_changeset(ui, repo, {})
332 displayer = cmdutil.show_changeset(ui, repo, {})
333 displayer.show(changenode=node)
333 displayer.show(changenode=node)
334 elif node is not None:
334 elif node is not None:
335 # compute the approximate number of remaining tests
335 # compute the approximate number of remaining tests
336 tests, size = 0, 2
336 tests, size = 0, 2
337 while size <= changesets:
337 while size <= changesets:
338 tests, size = tests + 1, size * 2
338 tests, size = tests + 1, size * 2
339 rev = repo.changelog.rev(node)
339 rev = repo.changelog.rev(node)
340 ui.write(_("Testing changeset %s:%s "
340 ui.write(_("Testing changeset %s:%s "
341 "(%s changesets remaining, ~%s tests)\n")
341 "(%s changesets remaining, ~%s tests)\n")
342 % (rev, short(node), changesets, tests))
342 % (rev, short(node), changesets, tests))
343 if not noupdate:
343 if not noupdate:
344 cmdutil.bail_if_changed(repo)
344 cmdutil.bail_if_changed(repo)
345 return hg.clean(repo, node)
345 return hg.clean(repo, node)
346
346
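# The loop above sizes the remaining bisection: it counts how many times the
# candidate set can still be halved, roughly log2(changesets).  A minimal
# standalone sketch of that arithmetic, assuming only the candidate count is
# known (helper name is illustrative):
def _estimate_tests(changesets):
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests

assert _estimate_tests(1) == 0
assert _estimate_tests(2) == 1
assert _estimate_tests(100) == 6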
347 def branch(ui, repo, label=None, **opts):
347 def branch(ui, repo, label=None, **opts):
348 """set or show the current branch name
348 """set or show the current branch name
349
349
350 With no argument, show the current branch name. With one argument,
350 With no argument, show the current branch name. With one argument,
351 set the working directory branch name (the branch does not exist in
351 set the working directory branch name (the branch does not exist in
352 the repository until the next commit).
352 the repository until the next commit).
353
353
354 Unless --force is specified, branch will not let you set a
354 Unless --force is specified, branch will not let you set a
355 branch name that shadows an existing branch.
355 branch name that shadows an existing branch.
356
356
357 Use the command 'hg update' to switch to an existing branch.
357 Use the command 'hg update' to switch to an existing branch.
358 """
358 """
359
359
360 if label:
360 if label:
361 if not opts.get('force') and label in repo.branchtags():
361 if not opts.get('force') and label in repo.branchtags():
362 if label not in [p.branch() for p in repo.changectx(None).parents()]:
362 if label not in [p.branch() for p in repo.parents()]:
363 raise util.Abort(_('a branch of the same name already exists'
363 raise util.Abort(_('a branch of the same name already exists'
364 ' (use --force to override)'))
364 ' (use --force to override)'))
365 repo.dirstate.setbranch(util.fromlocal(label))
365 repo.dirstate.setbranch(util.fromlocal(label))
366 ui.status(_('marked working directory as branch %s\n') % label)
366 ui.status(_('marked working directory as branch %s\n') % label)
367 else:
367 else:
368 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
368 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
369
369
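# The guard above only refuses an existing branch name when --force is absent
# and the name does not already belong to one of the working directory's
# parents.  A sketch of that decision over plain collections (the helper name
# and arguments are illustrative):
def _may_set_branch(label, force, existing_branches, parent_branches):
    if not force and label in existing_branches:
        return label in parent_branches
    return True

assert _may_set_branch('stable', False, ['default', 'stable'], ['default']) is False
assert _may_set_branch('stable', True, ['default', 'stable'], ['default']) is True
assert _may_set_branch('topic', False, ['default'], ['default']) is True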
370 def branches(ui, repo, active=False):
370 def branches(ui, repo, active=False):
371 """list repository named branches
371 """list repository named branches
372
372
373 List the repository's named branches, indicating which ones are
373 List the repository's named branches, indicating which ones are
374 inactive. If active is specified, only show active branches.
374 inactive. If active is specified, only show active branches.
375
375
376 A branch is considered active if it contains repository heads.
376 A branch is considered active if it contains repository heads.
377
377
378 Use the command 'hg update' to switch to an existing branch.
378 Use the command 'hg update' to switch to an existing branch.
379 """
379 """
380 hexfunc = ui.debugflag and hex or short
380 hexfunc = ui.debugflag and hex or short
381 activebranches = [util.tolocal(repo.changectx(n).branch())
381 activebranches = [util.tolocal(repo[n].branch())
382 for n in repo.heads()]
382 for n in repo.heads()]
383 branches = [(tag in activebranches, repo.changelog.rev(node), tag)
383 branches = [(tag in activebranches, repo.changelog.rev(node), tag)
384 for tag, node in repo.branchtags().items()]
384 for tag, node in repo.branchtags().items()]
385 branches.sort()
385 branches.sort()
386 branches.reverse()
386 branches.reverse()
387
387
388 for isactive, node, tag in branches:
388 for isactive, node, tag in branches:
389 if (not active) or isactive:
389 if (not active) or isactive:
390 if ui.quiet:
390 if ui.quiet:
391 ui.write("%s\n" % tag)
391 ui.write("%s\n" % tag)
392 else:
392 else:
393 rev = str(node).rjust(32 - util.locallen(tag))
393 rev = str(node).rjust(32 - util.locallen(tag))
394 isinactive = ((not isactive) and " (inactive)") or ''
394 isinactive = ((not isactive) and " (inactive)") or ''
395 data = tag, rev, hexfunc(repo.lookup(node)), isinactive
395 data = tag, rev, hexfunc(repo.lookup(node)), isinactive
396 ui.write("%s%s:%s%s\n" % data)
396 ui.write("%s%s:%s%s\n" % data)
397
397
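# branches() above marks a branch active when at least one repository head
# carries it, then sorts so active branches end up first once the list is
# reversed.  A sketch of just the classification step, with head branch names
# passed in as plain strings (toy data only):
def _classify_branches(branchtags, head_branches):
    active = set(head_branches)
    return sorted((name in active, name) for name in branchtags)

table = {'default': 'n1', 'stable': 'n2', 'old-feature': 'n3'}
assert _classify_branches(table, ['default', 'stable']) == [
    (False, 'old-feature'), (True, 'default'), (True, 'stable')]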
398 def bundle(ui, repo, fname, dest=None, **opts):
398 def bundle(ui, repo, fname, dest=None, **opts):
399 """create a changegroup file
399 """create a changegroup file
400
400
401 Generate a compressed changegroup file collecting changesets not
401 Generate a compressed changegroup file collecting changesets not
402 found in the other repository.
402 found in the other repository.
403
403
404 If no destination repository is specified the destination is
404 If no destination repository is specified the destination is
405 assumed to have all the nodes specified by one or more --base
405 assumed to have all the nodes specified by one or more --base
406 parameters. To create a bundle containing all changesets, use
406 parameters. To create a bundle containing all changesets, use
407 --all (or --base null). To change the compression method applied,
407 --all (or --base null). To change the compression method applied,
408 use the -t option (by default, bundles are compressed using bz2).
408 use the -t option (by default, bundles are compressed using bz2).
409
409
410 The bundle file can then be transferred using conventional means and
410 The bundle file can then be transferred using conventional means and
411 applied to another repository with the unbundle or pull command.
411 applied to another repository with the unbundle or pull command.
412 This is useful when direct push and pull are not available or when
412 This is useful when direct push and pull are not available or when
413 exporting an entire repository is undesirable.
413 exporting an entire repository is undesirable.
414
414
415 Applying bundles preserves all changeset contents including
415 Applying bundles preserves all changeset contents including
416 permissions, copy/rename information, and revision history.
416 permissions, copy/rename information, and revision history.
417 """
417 """
418 revs = opts.get('rev') or None
418 revs = opts.get('rev') or None
419 if revs:
419 if revs:
420 revs = [repo.lookup(rev) for rev in revs]
420 revs = [repo.lookup(rev) for rev in revs]
421 if opts.get('all'):
421 if opts.get('all'):
422 base = ['null']
422 base = ['null']
423 else:
423 else:
424 base = opts.get('base')
424 base = opts.get('base')
425 if base:
425 if base:
426 if dest:
426 if dest:
427 raise util.Abort(_("--base is incompatible with specifying "
427 raise util.Abort(_("--base is incompatible with specifying "
427 raise util.Abort(_("--base is incompatible with specifying "
427 raise util.Abort(_("--base is incompatible with specifying "
428 "a destination"))
428 "a destination"))
429 base = [repo.lookup(rev) for rev in base]
429 base = [repo.lookup(rev) for rev in base]
430 # create the right base
430 # create the right base
431 # XXX: nodesbetween / changegroup* should be "fixed" instead
431 # XXX: nodesbetween / changegroup* should be "fixed" instead
432 o = []
432 o = []
433 has = {nullid: None}
433 has = {nullid: None}
434 for n in base:
434 for n in base:
435 has.update(repo.changelog.reachable(n))
435 has.update(repo.changelog.reachable(n))
436 if revs:
436 if revs:
437 visit = list(revs)
437 visit = list(revs)
438 else:
438 else:
439 visit = repo.changelog.heads()
439 visit = repo.changelog.heads()
440 seen = {}
440 seen = {}
441 while visit:
441 while visit:
442 n = visit.pop(0)
442 n = visit.pop(0)
443 parents = [p for p in repo.changelog.parents(n) if p not in has]
443 parents = [p for p in repo.changelog.parents(n) if p not in has]
444 if len(parents) == 0:
444 if len(parents) == 0:
445 o.insert(0, n)
445 o.insert(0, n)
446 else:
446 else:
447 for p in parents:
447 for p in parents:
448 if p not in seen:
448 if p not in seen:
449 seen[p] = 1
449 seen[p] = 1
450 visit.append(p)
450 visit.append(p)
451 else:
451 else:
452 cmdutil.setremoteconfig(ui, opts)
452 cmdutil.setremoteconfig(ui, opts)
453 dest, revs, checkout = hg.parseurl(
453 dest, revs, checkout = hg.parseurl(
454 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
454 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
455 other = hg.repository(ui, dest)
455 other = hg.repository(ui, dest)
456 o = repo.findoutgoing(other, force=opts['force'])
456 o = repo.findoutgoing(other, force=opts['force'])
457
457
458 if revs:
458 if revs:
459 cg = repo.changegroupsubset(o, revs, 'bundle')
459 cg = repo.changegroupsubset(o, revs, 'bundle')
460 else:
460 else:
461 cg = repo.changegroup(o, 'bundle')
461 cg = repo.changegroup(o, 'bundle')
462
462
463 bundletype = opts.get('type', 'bzip2').lower()
463 bundletype = opts.get('type', 'bzip2').lower()
464 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
464 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
465 bundletype = btypes.get(bundletype)
465 bundletype = btypes.get(bundletype)
466 if bundletype not in changegroup.bundletypes:
466 if bundletype not in changegroup.bundletypes:
467 raise util.Abort(_('unknown bundle type specified with --type'))
467 raise util.Abort(_('unknown bundle type specified with --type'))
468
468
469 changegroup.writebundle(cg, fname, bundletype)
469 changegroup.writebundle(cg, fname, bundletype)
470
470
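# With --base, the walk above starts from the requested heads and moves toward
# the known base, keeping the first changesets whose parents are all already
# present on the receiving side; those become the bases handed to
# changegroupsubset.  A sketch of the same walk over a toy DAG, using a plain
# parent map instead of the changelog (node names are made up):
def _bundle_bases(parents, heads, has):
    bases, seen, visit = [], {}, list(heads)
    while visit:
        n = visit.pop(0)
        missing = [p for p in parents.get(n, []) if p not in has]
        if not missing:
            bases.insert(0, n)
        else:
            for p in missing:
                if p not in seen:
                    seen[p] = 1
                    visit.append(p)
    return bases

dag = {'d': ['c'], 'c': ['b'], 'b': ['a'], 'a': []}
assert _bundle_bases(dag, ['d'], {'b': None, 'a': None}) == ['c']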
471 def cat(ui, repo, file1, *pats, **opts):
471 def cat(ui, repo, file1, *pats, **opts):
472 """output the current or given revision of files
472 """output the current or given revision of files
473
473
474 Print the specified files as they were at the given revision.
474 Print the specified files as they were at the given revision.
475 If no revision is given, the parent of the working directory is used,
475 If no revision is given, the parent of the working directory is used,
476 or tip if no revision is checked out.
476 or tip if no revision is checked out.
477
477
478 Output may be to a file, in which case the name of the file is
478 Output may be to a file, in which case the name of the file is
479 given using a format string. The formatting rules are the same as
479 given using a format string. The formatting rules are the same as
480 for the export command, with the following additions:
480 for the export command, with the following additions:
481
481
482 %s basename of file being printed
482 %s basename of file being printed
483 %d dirname of file being printed, or '.' if in repo root
483 %d dirname of file being printed, or '.' if in repo root
484 %p root-relative path name of file being printed
484 %p root-relative path name of file being printed
485 """
485 """
486 ctx = repo.changectx(opts['rev'])
486 ctx = repo[opts['rev']]
487 err = 1
487 err = 1
488 m = cmdutil.match(repo, (file1,) + pats, opts)
488 m = cmdutil.match(repo, (file1,) + pats, opts)
489 for abs in repo.walk(m, ctx.node()):
489 for abs in repo.walk(m, ctx.node()):
490 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
490 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
491 data = ctx.filectx(abs).data()
491 data = ctx.filectx(abs).data()
492 if opts.get('decode'):
492 if opts.get('decode'):
493 data = repo.wwritedata(abs, data)
493 data = repo.wwritedata(abs, data)
494 fp.write(data)
494 fp.write(data)
495 err = 0
495 err = 0
496 return err
496 return err
497
497
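# cat's --output format string supports %s, %d and %p as listed in the
# docstring above (on top of the export keys).  A hedged sketch of just that
# expansion for one path, independent of cmdutil.make_file:
import os

def _expand_output(fmt, pathname):
    expand = {'%': '%',
              's': os.path.basename(pathname),
              'd': os.path.dirname(pathname) or '.',
              'p': pathname}
    out, i = [], 0
    while i < len(fmt):
        c = fmt[i]
        if c == '%' and i + 1 < len(fmt) and fmt[i + 1] in expand:
            out.append(expand[fmt[i + 1]])
            i += 2
        else:
            out.append(c)
            i += 1
    return ''.join(out)

assert _expand_output('%d/%s.orig', 'docs/readme.txt') == 'docs/readme.txt.orig'
assert _expand_output('out/%p', 'readme.txt') == 'out/readme.txt'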
498 def clone(ui, source, dest=None, **opts):
498 def clone(ui, source, dest=None, **opts):
499 """make a copy of an existing repository
499 """make a copy of an existing repository
500
500
501 Create a copy of an existing repository in a new directory.
501 Create a copy of an existing repository in a new directory.
502
502
503 If no destination directory name is specified, it defaults to the
503 If no destination directory name is specified, it defaults to the
504 basename of the source.
504 basename of the source.
505
505
506 The location of the source is added to the new repository's
506 The location of the source is added to the new repository's
507 .hg/hgrc file, as the default to be used for future pulls.
507 .hg/hgrc file, as the default to be used for future pulls.
508
508
509 For efficiency, hardlinks are used for cloning whenever the source
509 For efficiency, hardlinks are used for cloning whenever the source
510 and destination are on the same filesystem (note this applies only
510 and destination are on the same filesystem (note this applies only
511 to the repository data, not to the checked out files). Some
511 to the repository data, not to the checked out files). Some
512 filesystems, such as AFS, implement hardlinking incorrectly, but
512 filesystems, such as AFS, implement hardlinking incorrectly, but
513 do not report errors. In these cases, use the --pull option to
513 do not report errors. In these cases, use the --pull option to
514 avoid hardlinking.
514 avoid hardlinking.
515
515
516 In some cases, you can clone repositories and checked out files
516 In some cases, you can clone repositories and checked out files
517 using full hardlinks with
517 using full hardlinks with
518
518
519 $ cp -al REPO REPOCLONE
519 $ cp -al REPO REPOCLONE
520
520
521 This is the fastest way to clone, but it is not always safe. The
521 This is the fastest way to clone, but it is not always safe. The
522 operation is not atomic (making sure REPO is not modified during
522 operation is not atomic (making sure REPO is not modified during
523 the operation is up to you) and you have to make sure your editor
523 the operation is up to you) and you have to make sure your editor
524 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
524 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
525 this is not compatible with certain extensions that place their
525 this is not compatible with certain extensions that place their
526 metadata under the .hg directory, such as mq.
526 metadata under the .hg directory, such as mq.
527
527
528 If you use the -r option to clone up to a specific revision, no
528 If you use the -r option to clone up to a specific revision, no
529 subsequent revisions will be present in the cloned repository.
529 subsequent revisions will be present in the cloned repository.
530 This option implies --pull, even on local repositories.
530 This option implies --pull, even on local repositories.
531
531
532 If the -U option is used, the new clone will contain only a repository
532 If the -U option is used, the new clone will contain only a repository
533 (.hg) and no working copy (the working copy parent is the null revision).
533 (.hg) and no working copy (the working copy parent is the null revision).
534
534
535 See pull for valid source format details.
535 See pull for valid source format details.
536
536
537 It is possible to specify an ssh:// URL as the destination, but no
537 It is possible to specify an ssh:// URL as the destination, but no
538 .hg/hgrc and working directory will be created on the remote side.
538 .hg/hgrc and working directory will be created on the remote side.
539 Look at the help text for the pull command for important details
539 Look at the help text for the pull command for important details
540 about ssh:// URLs.
540 about ssh:// URLs.
541 """
541 """
542 cmdutil.setremoteconfig(ui, opts)
542 cmdutil.setremoteconfig(ui, opts)
543 hg.clone(ui, source, dest,
543 hg.clone(ui, source, dest,
544 pull=opts['pull'],
544 pull=opts['pull'],
545 stream=opts['uncompressed'],
545 stream=opts['uncompressed'],
546 rev=opts['rev'],
546 rev=opts['rev'],
547 update=not opts['noupdate'])
547 update=not opts['noupdate'])
548
548
549 def commit(ui, repo, *pats, **opts):
549 def commit(ui, repo, *pats, **opts):
550 """commit the specified files or all outstanding changes
550 """commit the specified files or all outstanding changes
551
551
552 Commit changes to the given files into the repository.
552 Commit changes to the given files into the repository.
553
553
554 If a list of files is omitted, all changes reported by "hg status"
554 If a list of files is omitted, all changes reported by "hg status"
555 will be committed.
555 will be committed.
556
556
557 If you are committing the result of a merge, do not provide any
557 If you are committing the result of a merge, do not provide any
558 file names or -I/-X filters.
558 file names or -I/-X filters.
559
559
560 If no commit message is specified, the configured editor is started to
560 If no commit message is specified, the configured editor is started to
561 enter a message.
561 enter a message.
562
562
563 See 'hg help dates' for a list of formats valid for -d/--date.
563 See 'hg help dates' for a list of formats valid for -d/--date.
564 """
564 """
565 def commitfunc(ui, repo, message, match, opts):
565 def commitfunc(ui, repo, message, match, opts):
566 return repo.commit(match.files(), message, opts['user'], opts['date'],
566 return repo.commit(match.files(), message, opts['user'], opts['date'],
567 match, force_editor=opts.get('force_editor'))
567 match, force_editor=opts.get('force_editor'))
568
568
569 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
569 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
570 if not node:
570 if not node:
571 return
571 return
572 cl = repo.changelog
572 cl = repo.changelog
573 rev = cl.rev(node)
573 rev = cl.rev(node)
574 parents = cl.parentrevs(rev)
574 parents = cl.parentrevs(rev)
575 if rev - 1 in parents:
575 if rev - 1 in parents:
576 # one of the parents was the old tip
576 # one of the parents was the old tip
577 return
577 return
578 if (parents == (nullrev, nullrev) or
578 if (parents == (nullrev, nullrev) or
579 len(cl.heads(cl.node(parents[0]))) > 1 and
579 len(cl.heads(cl.node(parents[0]))) > 1 and
580 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
580 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
581 ui.status(_('created new head\n'))
581 ui.status(_('created new head\n'))
582
582
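# After committing, the check above decides whether to print 'created new
# head': never when one parent was the previous tip, otherwise when the commit
# starts a new root or when each of its parents already had more than one head
# above it.  A sketch of that predicate with the changelog abstracted into a
# heads-counting callback (nullrev and the helper names are stand-ins):
nullrev = -1

def _creates_new_head(rev, parents, nheads):
    # parents: (p1, p2) revision numbers; nheads(p): heads descending from p.
    if rev - 1 in parents:
        return False  # extended the previous tip
    return bool(parents == (nullrev, nullrev) or
                nheads(parents[0]) > 1 and
                (parents[1] == nullrev or nheads(parents[1]) > 1))

assert _creates_new_head(5, (2, nullrev), lambda p: 2) is True
assert _creates_new_head(5, (4, nullrev), lambda p: 1) is False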
583 def copy(ui, repo, *pats, **opts):
583 def copy(ui, repo, *pats, **opts):
584 """mark files as copied for the next commit
584 """mark files as copied for the next commit
585
585
586 Mark dest as having copies of source files. If dest is a
586 Mark dest as having copies of source files. If dest is a
587 directory, copies are put in that directory. If dest is a file,
587 directory, copies are put in that directory. If dest is a file,
588 there can only be one source.
588 there can only be one source.
589
589
590 By default, this command copies the contents of files as they
590 By default, this command copies the contents of files as they
591 stand in the working directory. If invoked with --after, the
591 stand in the working directory. If invoked with --after, the
592 operation is recorded, but no copying is performed.
592 operation is recorded, but no copying is performed.
593
593
594 This command takes effect in the next commit. To undo a copy
594 This command takes effect in the next commit. To undo a copy
595 before that, see hg revert.
595 before that, see hg revert.
596 """
596 """
597 wlock = repo.wlock(False)
597 wlock = repo.wlock(False)
598 try:
598 try:
599 return cmdutil.copy(ui, repo, pats, opts)
599 return cmdutil.copy(ui, repo, pats, opts)
600 finally:
600 finally:
601 del wlock
601 del wlock
602
602
603 def debugancestor(ui, repo, *args):
603 def debugancestor(ui, repo, *args):
604 """find the ancestor revision of two revisions in a given index"""
604 """find the ancestor revision of two revisions in a given index"""
605 if len(args) == 3:
605 if len(args) == 3:
606 index, rev1, rev2 = args
606 index, rev1, rev2 = args
607 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
607 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
608 lookup = r.lookup
608 lookup = r.lookup
609 elif len(args) == 2:
609 elif len(args) == 2:
610 if not repo:
610 if not repo:
611 raise util.Abort(_("There is no Mercurial repository here "
611 raise util.Abort(_("There is no Mercurial repository here "
612 "(.hg not found)"))
612 "(.hg not found)"))
613 rev1, rev2 = args
613 rev1, rev2 = args
614 r = repo.changelog
614 r = repo.changelog
615 lookup = repo.lookup
615 lookup = repo.lookup
616 else:
616 else:
617 raise util.Abort(_('either two or three arguments required'))
617 raise util.Abort(_('either two or three arguments required'))
618 a = r.ancestor(lookup(rev1), lookup(rev2))
618 a = r.ancestor(lookup(rev1), lookup(rev2))
619 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
619 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
620
620
621 def debugcomplete(ui, cmd='', **opts):
621 def debugcomplete(ui, cmd='', **opts):
622 """returns the completion list associated with the given command"""
622 """returns the completion list associated with the given command"""
623
623
624 if opts['options']:
624 if opts['options']:
625 options = []
625 options = []
626 otables = [globalopts]
626 otables = [globalopts]
627 if cmd:
627 if cmd:
628 aliases, entry = cmdutil.findcmd(ui, cmd, table)
628 aliases, entry = cmdutil.findcmd(ui, cmd, table)
629 otables.append(entry[1])
629 otables.append(entry[1])
630 for t in otables:
630 for t in otables:
631 for o in t:
631 for o in t:
632 if o[0]:
632 if o[0]:
633 options.append('-%s' % o[0])
633 options.append('-%s' % o[0])
634 options.append('--%s' % o[1])
634 options.append('--%s' % o[1])
635 ui.write("%s\n" % "\n".join(options))
635 ui.write("%s\n" % "\n".join(options))
636 return
636 return
637
637
638 clist = cmdutil.findpossible(ui, cmd, table).keys()
638 clist = cmdutil.findpossible(ui, cmd, table).keys()
639 clist.sort()
639 clist.sort()
640 ui.write("%s\n" % "\n".join(clist))
640 ui.write("%s\n" % "\n".join(clist))
641
641
642 def debugfsinfo(ui, path = "."):
642 def debugfsinfo(ui, path = "."):
643 file('.debugfsinfo', 'w').write('')
643 file('.debugfsinfo', 'w').write('')
644 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
644 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
645 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
645 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
646 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
646 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
647 and 'yes' or 'no'))
647 and 'yes' or 'no'))
648 os.unlink('.debugfsinfo')
648 os.unlink('.debugfsinfo')
649
649
650 def debugrebuildstate(ui, repo, rev=""):
650 def debugrebuildstate(ui, repo, rev="tip"):
651 """rebuild the dirstate as it would look like for the given revision"""
651 """rebuild the dirstate as it would look like for the given revision"""
652 if rev == "":
652 ctx = repo[rev]
653 rev = repo.changelog.tip()
654 ctx = repo.changectx(rev)
655 files = ctx.manifest()
656 wlock = repo.wlock()
653 wlock = repo.wlock()
657 try:
654 try:
658 repo.dirstate.rebuild(rev, files)
655 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
659 finally:
656 finally:
660 del wlock
657 del wlock
661
658
662 def debugcheckstate(ui, repo):
659 def debugcheckstate(ui, repo):
663 """validate the correctness of the current dirstate"""
660 """validate the correctness of the current dirstate"""
664 parent1, parent2 = repo.dirstate.parents()
661 parent1, parent2 = repo.dirstate.parents()
665 m1 = repo.changectx(parent1).manifest()
662 m1 = repo[parent1].manifest()
666 m2 = repo.changectx(parent2).manifest()
663 m2 = repo[parent2].manifest()
667 errors = 0
664 errors = 0
668 for f in repo.dirstate:
665 for f in repo.dirstate:
669 state = repo.dirstate[f]
666 state = repo.dirstate[f]
670 if state in "nr" and f not in m1:
667 if state in "nr" and f not in m1:
671 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
668 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
672 errors += 1
669 errors += 1
673 if state in "a" and f in m1:
670 if state in "a" and f in m1:
674 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
671 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
675 errors += 1
672 errors += 1
676 if state in "m" and f not in m1 and f not in m2:
673 if state in "m" and f not in m1 and f not in m2:
677 ui.warn(_("%s in state %s, but not in either manifest\n") %
674 ui.warn(_("%s in state %s, but not in either manifest\n") %
678 (f, state))
675 (f, state))
679 errors += 1
676 errors += 1
680 for f in m1:
677 for f in m1:
681 state = repo.dirstate[f]
678 state = repo.dirstate[f]
682 if state not in "nrm":
679 if state not in "nrm":
683 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
680 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
684 errors += 1
681 errors += 1
685 if errors:
682 if errors:
686 error = _(".hg/dirstate inconsistent with current parent's manifest")
683 error = _(".hg/dirstate inconsistent with current parent's manifest")
687 raise util.Abort(error)
684 raise util.Abort(error)
688
685
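# debugcheckstate enforces a few invariants between dirstate entries and the
# parent manifests: 'n'/'r' files must appear in manifest1, 'a' files must
# not, 'm' files must appear in at least one manifest, and every manifest1
# file must be tracked as 'n', 'r' or 'm'.  A sketch of the same checks over
# plain dicts:
def _check_state(dirstate, m1, m2):
    errors = []
    for f, state in sorted(dirstate.items()):
        if state in "nr" and f not in m1:
            errors.append("%s in state %s, but not in manifest1" % (f, state))
        if state in "a" and f in m1:
            errors.append("%s in state %s, but also in manifest1" % (f, state))
        if state in "m" and f not in m1 and f not in m2:
            errors.append("%s in state %s, but not in either manifest" % (f, state))
    for f in sorted(m1):
        if dirstate.get(f, '?') not in "nrm":
            errors.append("%s in manifest1, but listed as state %s"
                          % (f, dirstate.get(f, '?')))
    return errors

assert _check_state({'a.txt': 'n'}, {'a.txt': 1}, {}) == []
assert _check_state({'b.txt': 'n'}, {}, {}) == [
    "b.txt in state n, but not in manifest1"]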
689 def showconfig(ui, repo, *values, **opts):
686 def showconfig(ui, repo, *values, **opts):
690 """show combined config settings from all hgrc files
687 """show combined config settings from all hgrc files
691
688
692 With no args, print names and values of all config items.
689 With no args, print names and values of all config items.
693
690
694 With one arg of the form section.name, print just the value of
691 With one arg of the form section.name, print just the value of
695 that config item.
692 that config item.
696
693
697 With multiple args, print names and values of all config items
694 With multiple args, print names and values of all config items
698 with matching section names."""
695 with matching section names."""
699
696
700 untrusted = bool(opts.get('untrusted'))
697 untrusted = bool(opts.get('untrusted'))
701 if values:
698 if values:
702 if len([v for v in values if '.' in v]) > 1:
699 if len([v for v in values if '.' in v]) > 1:
703 raise util.Abort(_('only one config item permitted'))
700 raise util.Abort(_('only one config item permitted'))
704 for section, name, value in ui.walkconfig(untrusted=untrusted):
701 for section, name, value in ui.walkconfig(untrusted=untrusted):
705 sectname = section + '.' + name
702 sectname = section + '.' + name
706 if values:
703 if values:
707 for v in values:
704 for v in values:
708 if v == section:
705 if v == section:
709 ui.write('%s=%s\n' % (sectname, value))
706 ui.write('%s=%s\n' % (sectname, value))
710 elif v == sectname:
707 elif v == sectname:
711 ui.write(value, '\n')
708 ui.write(value, '\n')
712 else:
709 else:
713 ui.write('%s=%s\n' % (sectname, value))
710 ui.write('%s=%s\n' % (sectname, value))
714
711
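# showconfig filters the walkconfig() items by its arguments: a bare section
# name matches every item in that section, while section.name prints a single
# value on its own.  A sketch of that matching over a flat list of settings:
def _filter_config(items, values):
    # items: (section, name, value) triples; values: the command arguments.
    out = []
    for section, name, value in items:
        sectname = section + '.' + name
        if not values:
            out.append('%s=%s' % (sectname, value))
            continue
        for v in values:
            if v == section:
                out.append('%s=%s' % (sectname, value))
            elif v == sectname:
                out.append(value)
    return out

items = [('ui', 'username', 'alice'), ('ui', 'verbose', 'False')]
assert _filter_config(items, ['ui']) == ['ui.username=alice', 'ui.verbose=False']
assert _filter_config(items, ['ui.username']) == ['alice']
assert _filter_config(items, []) == ['ui.username=alice', 'ui.verbose=False']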
715 def debugsetparents(ui, repo, rev1, rev2=None):
712 def debugsetparents(ui, repo, rev1, rev2=None):
716 """manually set the parents of the current working directory
713 """manually set the parents of the current working directory
717
714
718 This is useful for writing repository conversion tools, but should
715 This is useful for writing repository conversion tools, but should
719 be used with care.
716 be used with care.
720 """
717 """
721
718
722 if not rev2:
719 if not rev2:
723 rev2 = hex(nullid)
720 rev2 = hex(nullid)
724
721
725 wlock = repo.wlock()
722 wlock = repo.wlock()
726 try:
723 try:
727 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
724 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
728 finally:
725 finally:
729 del wlock
726 del wlock
730
727
731 def debugstate(ui, repo, nodates=None):
728 def debugstate(ui, repo, nodates=None):
732 """show the contents of the current dirstate"""
729 """show the contents of the current dirstate"""
733 k = repo.dirstate._map.items()
730 k = repo.dirstate._map.items()
734 k.sort()
731 k.sort()
735 timestr = ""
732 timestr = ""
736 showdate = not nodates
733 showdate = not nodates
737 for file_, ent in k:
734 for file_, ent in k:
738 if showdate:
735 if showdate:
739 if ent[3] == -1:
736 if ent[3] == -1:
740 # Pad or slice to locale representation
737 # Pad or slice to locale representation
741 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
738 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
742 timestr = 'unset'
739 timestr = 'unset'
743 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
740 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
744 else:
741 else:
745 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
742 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
746 if ent[1] & 020000:
743 if ent[1] & 020000:
747 mode = 'lnk'
744 mode = 'lnk'
748 else:
745 else:
749 mode = '%3o' % (ent[1] & 0777)
746 mode = '%3o' % (ent[1] & 0777)
750 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
747 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
751 for f in repo.dirstate.copies():
748 for f in repo.dirstate.copies():
752 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
749 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
753
750
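# debugstate renders each entry's mode as 'lnk' for symlinks (the 020000 bit
# tested above) or as three octal permission digits otherwise.  A sketch of
# that formatting, written with 0o literals so it runs on current Pythons:
def _fmt_mode(mode):
    if mode & 0o020000:
        return 'lnk'
    return '%3o' % (mode & 0o777)

assert _fmt_mode(0o120777) == 'lnk'
assert _fmt_mode(0o100644) == '644'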
754 def debugdata(ui, file_, rev):
751 def debugdata(ui, file_, rev):
755 """dump the contents of a data file revision"""
752 """dump the contents of a data file revision"""
756 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
753 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
757 try:
754 try:
758 ui.write(r.revision(r.lookup(rev)))
755 ui.write(r.revision(r.lookup(rev)))
759 except KeyError:
756 except KeyError:
760 raise util.Abort(_('invalid revision identifier %s') % rev)
757 raise util.Abort(_('invalid revision identifier %s') % rev)
761
758
762 def debugdate(ui, date, range=None, **opts):
759 def debugdate(ui, date, range=None, **opts):
763 """parse and display a date"""
760 """parse and display a date"""
764 if opts["extended"]:
761 if opts["extended"]:
765 d = util.parsedate(date, util.extendeddateformats)
762 d = util.parsedate(date, util.extendeddateformats)
766 else:
763 else:
767 d = util.parsedate(date)
764 d = util.parsedate(date)
768 ui.write("internal: %s %s\n" % d)
765 ui.write("internal: %s %s\n" % d)
769 ui.write("standard: %s\n" % util.datestr(d))
766 ui.write("standard: %s\n" % util.datestr(d))
770 if range:
767 if range:
771 m = util.matchdate(range)
768 m = util.matchdate(range)
772 ui.write("match: %s\n" % m(d[0]))
769 ui.write("match: %s\n" % m(d[0]))
773
770
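# debugdate prints Mercurial's internal date form: a unix timestamp paired
# with the timezone offset in seconds west of UTC.  A sketch of how such a
# pair maps back to a local calendar time, without util.datestr:
import datetime

def _internal_to_local(timestamp, tzoffset):
    # local time = UTC time minus the west-of-UTC offset
    local = datetime.datetime.utcfromtimestamp(timestamp - tzoffset)
    return local.strftime('%Y-%m-%d %H:%M:%S')

# 2008-06-26 12:00:00 UTC seen from UTC+2 (offset -7200 seconds):
assert _internal_to_local(1214481600, -7200) == '2008-06-26 14:00:00'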
774 def debugindex(ui, file_):
771 def debugindex(ui, file_):
775 """dump the contents of an index file"""
772 """dump the contents of an index file"""
776 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
773 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
777 ui.write(" rev offset length base linkrev" +
774 ui.write(" rev offset length base linkrev" +
778 " nodeid p1 p2\n")
775 " nodeid p1 p2\n")
779 for i in xrange(r.count()):
776 for i in xrange(r.count()):
780 node = r.node(i)
777 node = r.node(i)
781 try:
778 try:
782 pp = r.parents(node)
779 pp = r.parents(node)
783 except:
780 except:
784 pp = [nullid, nullid]
781 pp = [nullid, nullid]
785 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
782 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
786 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
783 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
787 short(node), short(pp[0]), short(pp[1])))
784 short(node), short(pp[0]), short(pp[1])))
788
785
789 def debugindexdot(ui, file_):
786 def debugindexdot(ui, file_):
790 """dump an index DAG as a .dot file"""
787 """dump an index DAG as a .dot file"""
791 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
788 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
792 ui.write("digraph G {\n")
789 ui.write("digraph G {\n")
793 for i in xrange(r.count()):
790 for i in xrange(r.count()):
794 node = r.node(i)
791 node = r.node(i)
795 pp = r.parents(node)
792 pp = r.parents(node)
796 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
793 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
797 if pp[1] != nullid:
794 if pp[1] != nullid:
798 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
795 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
799 ui.write("}\n")
796 ui.write("}\n")
800
797
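# debugindexdot above emits one "parent -> child" edge per revision (two when
# the revision is a merge), wrapped in a digraph block.  A sketch producing
# the same shape of output from a toy rev -> parents table:
def _dot_from_parents(parentmap):
    lines = ["digraph G {"]
    for rev in sorted(parentmap):
        for p in parentmap[rev]:
            lines.append("\t%d -> %d" % (p, rev))
    lines.append("}")
    return "\n".join(lines)

assert _dot_from_parents({1: [0], 2: [0], 3: [1, 2]}) == (
    "digraph G {\n\t0 -> 1\n\t0 -> 2\n\t1 -> 3\n\t2 -> 3\n}")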
801 def debuginstall(ui):
798 def debuginstall(ui):
802 '''test Mercurial installation'''
799 '''test Mercurial installation'''
803
800
804 def writetemp(contents):
801 def writetemp(contents):
805 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
802 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
806 f = os.fdopen(fd, "wb")
803 f = os.fdopen(fd, "wb")
807 f.write(contents)
804 f.write(contents)
808 f.close()
805 f.close()
809 return name
806 return name
810
807
811 problems = 0
808 problems = 0
812
809
813 # encoding
810 # encoding
814 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
811 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
815 try:
812 try:
816 util.fromlocal("test")
813 util.fromlocal("test")
817 except util.Abort, inst:
814 except util.Abort, inst:
818 ui.write(" %s\n" % inst)
815 ui.write(" %s\n" % inst)
819 ui.write(_(" (check that your locale is properly set)\n"))
816 ui.write(_(" (check that your locale is properly set)\n"))
820 problems += 1
817 problems += 1
821
818
822 # compiled modules
819 # compiled modules
823 ui.status(_("Checking extensions...\n"))
820 ui.status(_("Checking extensions...\n"))
824 try:
821 try:
825 import bdiff, mpatch, base85
822 import bdiff, mpatch, base85
826 except Exception, inst:
823 except Exception, inst:
827 ui.write(" %s\n" % inst)
824 ui.write(" %s\n" % inst)
828 ui.write(_(" One or more extensions could not be found"))
825 ui.write(_(" One or more extensions could not be found"))
829 ui.write(_(" (check that you compiled the extensions)\n"))
826 ui.write(_(" (check that you compiled the extensions)\n"))
830 problems += 1
827 problems += 1
831
828
832 # templates
829 # templates
833 ui.status(_("Checking templates...\n"))
830 ui.status(_("Checking templates...\n"))
834 try:
831 try:
835 import templater
832 import templater
836 t = templater.templater(templater.templatepath("map-cmdline.default"))
833 t = templater.templater(templater.templatepath("map-cmdline.default"))
837 except Exception, inst:
834 except Exception, inst:
838 ui.write(" %s\n" % inst)
835 ui.write(" %s\n" % inst)
839 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
836 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
840 problems += 1
837 problems += 1
841
838
842 # patch
839 # patch
843 ui.status(_("Checking patch...\n"))
840 ui.status(_("Checking patch...\n"))
844 patchproblems = 0
841 patchproblems = 0
845 a = "1\n2\n3\n4\n"
842 a = "1\n2\n3\n4\n"
846 b = "1\n2\n3\ninsert\n4\n"
843 b = "1\n2\n3\ninsert\n4\n"
847 fa = writetemp(a)
844 fa = writetemp(a)
848 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
845 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
849 os.path.basename(fa))
846 os.path.basename(fa))
850 fd = writetemp(d)
847 fd = writetemp(d)
851
848
852 files = {}
849 files = {}
853 try:
850 try:
854 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
851 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
855 except util.Abort, e:
852 except util.Abort, e:
856 ui.write(_(" patch call failed:\n"))
853 ui.write(_(" patch call failed:\n"))
857 ui.write(" " + str(e) + "\n")
854 ui.write(" " + str(e) + "\n")
858 patchproblems += 1
855 patchproblems += 1
859 else:
856 else:
860 if list(files) != [os.path.basename(fa)]:
857 if list(files) != [os.path.basename(fa)]:
861 ui.write(_(" unexpected patch output!\n"))
858 ui.write(_(" unexpected patch output!\n"))
862 patchproblems += 1
859 patchproblems += 1
863 a = file(fa).read()
860 a = file(fa).read()
864 if a != b:
861 if a != b:
865 ui.write(_(" patch test failed!\n"))
862 ui.write(_(" patch test failed!\n"))
866 patchproblems += 1
863 patchproblems += 1
867
864
868 if patchproblems:
865 if patchproblems:
869 if ui.config('ui', 'patch'):
866 if ui.config('ui', 'patch'):
870 ui.write(_(" (Current patch tool may be incompatible with patch,"
867 ui.write(_(" (Current patch tool may be incompatible with patch,"
871 " or misconfigured. Please check your .hgrc file)\n"))
868 " or misconfigured. Please check your .hgrc file)\n"))
872 else:
869 else:
873 ui.write(_(" Internal patcher failure, please report this error"
870 ui.write(_(" Internal patcher failure, please report this error"
874 " to http://www.selenic.com/mercurial/bts\n"))
871 " to http://www.selenic.com/mercurial/bts\n"))
875 problems += patchproblems
872 problems += patchproblems
876
873
877 os.unlink(fa)
874 os.unlink(fa)
878 os.unlink(fd)
875 os.unlink(fd)
879
876
880 # editor
877 # editor
881 ui.status(_("Checking commit editor...\n"))
878 ui.status(_("Checking commit editor...\n"))
882 editor = ui.geteditor()
879 editor = ui.geteditor()
883 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
880 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
884 if not cmdpath:
881 if not cmdpath:
885 if editor == 'vi':
882 if editor == 'vi':
886 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
883 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
887 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
884 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
888 else:
885 else:
889 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
886 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
890 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
887 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
891 problems += 1
888 problems += 1
892
889
893 # check username
890 # check username
894 ui.status(_("Checking username...\n"))
891 ui.status(_("Checking username...\n"))
895 user = os.environ.get("HGUSER")
892 user = os.environ.get("HGUSER")
896 if user is None:
893 if user is None:
897 user = ui.config("ui", "username")
894 user = ui.config("ui", "username")
898 if user is None:
895 if user is None:
899 user = os.environ.get("EMAIL")
896 user = os.environ.get("EMAIL")
900 if not user:
897 if not user:
901 ui.warn(" ")
898 ui.warn(" ")
902 ui.username()
899 ui.username()
903 ui.write(_(" (specify a username in your .hgrc file)\n"))
900 ui.write(_(" (specify a username in your .hgrc file)\n"))
904
901
905 if not problems:
902 if not problems:
906 ui.status(_("No problems detected\n"))
903 ui.status(_("No problems detected\n"))
907 else:
904 else:
908 ui.write(_("%s problems detected,"
905 ui.write(_("%s problems detected,"
909 " please check your install!\n") % problems)
906 " please check your install!\n") % problems)
910
907
911 return problems
908 return problems
912
909
913 def debugrename(ui, repo, file1, *pats, **opts):
910 def debugrename(ui, repo, file1, *pats, **opts):
914 """dump rename information"""
911 """dump rename information"""
915
912
916 ctx = repo.changectx(opts.get('rev', 'tip'))
913 ctx = repo[opts.get('rev', 'tip')]
917 m = cmdutil.match(repo, (file1,) + pats, opts)
914 m = cmdutil.match(repo, (file1,) + pats, opts)
918 for abs in repo.walk(m, ctx.node()):
915 for abs in repo.walk(m, ctx.node()):
919 fctx = ctx.filectx(abs)
916 fctx = ctx.filectx(abs)
920 o = fctx.filelog().renamed(fctx.filenode())
917 o = fctx.filelog().renamed(fctx.filenode())
921 rel = m.rel(abs)
918 rel = m.rel(abs)
922 if o:
919 if o:
923 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
920 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
924 else:
921 else:
925 ui.write(_("%s not renamed\n") % rel)
922 ui.write(_("%s not renamed\n") % rel)
926
923
927 def debugwalk(ui, repo, *pats, **opts):
924 def debugwalk(ui, repo, *pats, **opts):
928 """show how files match on given patterns"""
925 """show how files match on given patterns"""
929 m = cmdutil.match(repo, pats, opts)
926 m = cmdutil.match(repo, pats, opts)
930 items = list(repo.walk(m))
927 items = list(repo.walk(m))
931 if not items:
928 if not items:
932 return
929 return
933 fmt = 'f %%-%ds %%-%ds %%s' % (
930 fmt = 'f %%-%ds %%-%ds %%s' % (
934 max([len(abs) for abs in items]),
931 max([len(abs) for abs in items]),
935 max([len(m.rel(abs)) for abs in items]))
932 max([len(m.rel(abs)) for abs in items]))
936 for abs in items:
933 for abs in items:
937 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
934 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
938 ui.write("%s\n" % line.rstrip())
935 ui.write("%s\n" % line.rstrip())
939
936
940 def diff(ui, repo, *pats, **opts):
937 def diff(ui, repo, *pats, **opts):
941 """diff repository (or selected files)
938 """diff repository (or selected files)
942
939
943 Show differences between revisions for the specified files.
940 Show differences between revisions for the specified files.
944
941
945 Differences between files are shown using the unified diff format.
942 Differences between files are shown using the unified diff format.
946
943
947 NOTE: diff may generate unexpected results for merges, as it will
944 NOTE: diff may generate unexpected results for merges, as it will
948 default to comparing against the working directory's first parent
945 default to comparing against the working directory's first parent
949 changeset if no revisions are specified.
946 changeset if no revisions are specified.
950
947
951 When two revision arguments are given, then changes are shown
948 When two revision arguments are given, then changes are shown
952 between those revisions. If only one revision is specified then
949 between those revisions. If only one revision is specified then
953 that revision is compared to the working directory, and, when no
950 that revision is compared to the working directory, and, when no
954 revisions are specified, the working directory files are compared
951 revisions are specified, the working directory files are compared
955 to its parent.
952 to its parent.
956
953
957 Without the -a option, diff will avoid generating diffs of files
954 Without the -a option, diff will avoid generating diffs of files
958 it detects as binary. With -a, diff will generate a diff anyway,
955 it detects as binary. With -a, diff will generate a diff anyway,
959 probably with undesirable results.
956 probably with undesirable results.
960 """
957 """
961 node1, node2 = cmdutil.revpair(repo, opts['rev'])
958 node1, node2 = cmdutil.revpair(repo, opts['rev'])
962
959
963 m = cmdutil.match(repo, pats, opts)
960 m = cmdutil.match(repo, pats, opts)
964 patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
961 patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
965
962
966 def export(ui, repo, *changesets, **opts):
963 def export(ui, repo, *changesets, **opts):
967 """dump the header and diffs for one or more changesets
964 """dump the header and diffs for one or more changesets
968
965
969 Print the changeset header and diffs for one or more revisions.
966 Print the changeset header and diffs for one or more revisions.
970
967
971 The information shown in the changeset header is: author,
968 The information shown in the changeset header is: author,
972 changeset hash, parent(s) and commit comment.
969 changeset hash, parent(s) and commit comment.
973
970
974 NOTE: export may generate unexpected diff output for merge changesets,
971 NOTE: export may generate unexpected diff output for merge changesets,
975 as it will compare the merge changeset against its first parent only.
972 as it will compare the merge changeset against its first parent only.
976
973
977 Output may be to a file, in which case the name of the file is
974 Output may be to a file, in which case the name of the file is
978 given using a format string. The formatting rules are as follows:
975 given using a format string. The formatting rules are as follows:
979
976
980 %% literal "%" character
977 %% literal "%" character
981 %H changeset hash (40 bytes of hexadecimal)
978 %H changeset hash (40 bytes of hexadecimal)
982 %N number of patches being generated
979 %N number of patches being generated
983 %R changeset revision number
980 %R changeset revision number
984 %b basename of the exporting repository
981 %b basename of the exporting repository
985 %h short-form changeset hash (12 bytes of hexadecimal)
982 %h short-form changeset hash (12 bytes of hexadecimal)
986 %n zero-padded sequence number, starting at 1
983 %n zero-padded sequence number, starting at 1
987 %r zero-padded changeset revision number
984 %r zero-padded changeset revision number
988
985
989 Without the -a option, export will avoid generating diffs of files
986 Without the -a option, export will avoid generating diffs of files
990 it detects as binary. With -a, export will generate a diff anyway,
987 it detects as binary. With -a, export will generate a diff anyway,
991 probably with undesirable results.
988 probably with undesirable results.
992
989
993 With the --switch-parent option, the diff will be against the second
990 With the --switch-parent option, the diff will be against the second
994 parent. It can be useful to review a merge.
991 parent. It can be useful to review a merge.
995 """
992 """
996 if not changesets:
993 if not changesets:
997 raise util.Abort(_("export requires at least one changeset"))
994 raise util.Abort(_("export requires at least one changeset"))
998 revs = cmdutil.revrange(repo, changesets)
995 revs = cmdutil.revrange(repo, changesets)
999 if len(revs) > 1:
996 if len(revs) > 1:
1000 ui.note(_('exporting patches:\n'))
997 ui.note(_('exporting patches:\n'))
1001 else:
998 else:
1002 ui.note(_('exporting patch:\n'))
999 ui.note(_('exporting patch:\n'))
1003 patch.export(repo, revs, template=opts['output'],
1000 patch.export(repo, revs, template=opts['output'],
1004 switch_parent=opts['switch_parent'],
1001 switch_parent=opts['switch_parent'],
1005 opts=patch.diffopts(ui, opts))
1002 opts=patch.diffopts(ui, opts))
1006
1003
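# export's %n key is described above as a zero-padded sequence number starting
# at 1; the pad width tracks the total patch count (%N).  A hedged sketch of
# one way such padding works (the exact width rule is an assumption here):
def _seqno(i, total):
    return '%0*d' % (len(str(total)), i)

assert [_seqno(i, 12) for i in (1, 2, 12)] == ['01', '02', '12']
assert _seqno(3, 5) == '3'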
1007 def grep(ui, repo, pattern, *pats, **opts):
1004 def grep(ui, repo, pattern, *pats, **opts):
1008 """search for a pattern in specified files and revisions
1005 """search for a pattern in specified files and revisions
1009
1006
1010 Search revisions of files for a regular expression.
1007 Search revisions of files for a regular expression.
1011
1008
1012 This command behaves differently than Unix grep. It only accepts
1009 This command behaves differently than Unix grep. It only accepts
1013 Python/Perl regexps. It searches repository history, not the
1010 Python/Perl regexps. It searches repository history, not the
1014 working directory. It always prints the revision number in which
1011 working directory. It always prints the revision number in which
1015 a match appears.
1012 a match appears.
1016
1013
1017 By default, grep only prints output for the first revision of a
1014 By default, grep only prints output for the first revision of a
1018 file in which it finds a match. To get it to print every revision
1015 file in which it finds a match. To get it to print every revision
1019 that contains a change in match status ("-" for a match that
1016 that contains a change in match status ("-" for a match that
1020 becomes a non-match, or "+" for a non-match that becomes a match),
1017 becomes a non-match, or "+" for a non-match that becomes a match),
1021 use the --all flag.
1018 use the --all flag.
1022 """
1019 """
1023 reflags = 0
1020 reflags = 0
1024 if opts['ignore_case']:
1021 if opts['ignore_case']:
1025 reflags |= re.I
1022 reflags |= re.I
1026 try:
1023 try:
1027 regexp = re.compile(pattern, reflags)
1024 regexp = re.compile(pattern, reflags)
1028 except Exception, inst:
1025 except Exception, inst:
1029 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1026 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1030 return None
1027 return None
1031 sep, eol = ':', '\n'
1028 sep, eol = ':', '\n'
1032 if opts['print0']:
1029 if opts['print0']:
1033 sep = eol = '\0'
1030 sep = eol = '\0'
1034
1031
1035 fcache = {}
1032 fcache = {}
1036 def getfile(fn):
1033 def getfile(fn):
1037 if fn not in fcache:
1034 if fn not in fcache:
1038 fcache[fn] = repo.file(fn)
1035 fcache[fn] = repo.file(fn)
1039 return fcache[fn]
1036 return fcache[fn]
1040
1037
1041 def matchlines(body):
1038 def matchlines(body):
1042 begin = 0
1039 begin = 0
1043 linenum = 0
1040 linenum = 0
1044 while True:
1041 while True:
1045 match = regexp.search(body, begin)
1042 match = regexp.search(body, begin)
1046 if not match:
1043 if not match:
1047 break
1044 break
1048 mstart, mend = match.span()
1045 mstart, mend = match.span()
1049 linenum += body.count('\n', begin, mstart) + 1
1046 linenum += body.count('\n', begin, mstart) + 1
1050 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1047 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1051 lend = body.find('\n', mend)
1048 lend = body.find('\n', mend)
1052 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1049 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1053 begin = lend + 1
1050 begin = lend + 1
1054
1051
1055 class linestate(object):
1052 class linestate(object):
1056 def __init__(self, line, linenum, colstart, colend):
1053 def __init__(self, line, linenum, colstart, colend):
1057 self.line = line
1054 self.line = line
1058 self.linenum = linenum
1055 self.linenum = linenum
1059 self.colstart = colstart
1056 self.colstart = colstart
1060 self.colend = colend
1057 self.colend = colend
1061
1058
1062 def __hash__(self):
1059 def __hash__(self):
1063 return hash((self.linenum, self.line))
1060 return hash((self.linenum, self.line))
1064
1061
1065 def __eq__(self, other):
1062 def __eq__(self, other):
1066 return self.line == other.line
1063 return self.line == other.line
1067
1064
1068 matches = {}
1065 matches = {}
1069 copies = {}
1066 copies = {}
1070 def grepbody(fn, rev, body):
1067 def grepbody(fn, rev, body):
1071 matches[rev].setdefault(fn, [])
1068 matches[rev].setdefault(fn, [])
1072 m = matches[rev][fn]
1069 m = matches[rev][fn]
1073 for lnum, cstart, cend, line in matchlines(body):
1070 for lnum, cstart, cend, line in matchlines(body):
1074 s = linestate(line, lnum, cstart, cend)
1071 s = linestate(line, lnum, cstart, cend)
1075 m.append(s)
1072 m.append(s)
1076
1073
1077 def difflinestates(a, b):
1074 def difflinestates(a, b):
1078 sm = difflib.SequenceMatcher(None, a, b)
1075 sm = difflib.SequenceMatcher(None, a, b)
1079 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1076 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1080 if tag == 'insert':
1077 if tag == 'insert':
1081 for i in xrange(blo, bhi):
1078 for i in xrange(blo, bhi):
1082 yield ('+', b[i])
1079 yield ('+', b[i])
1083 elif tag == 'delete':
1080 elif tag == 'delete':
1084 for i in xrange(alo, ahi):
1081 for i in xrange(alo, ahi):
1085 yield ('-', a[i])
1082 yield ('-', a[i])
1086 elif tag == 'replace':
1083 elif tag == 'replace':
1087 for i in xrange(alo, ahi):
1084 for i in xrange(alo, ahi):
1088 yield ('-', a[i])
1085 yield ('-', a[i])
1089 for i in xrange(blo, bhi):
1086 for i in xrange(blo, bhi):
1090 yield ('+', b[i])
1087 yield ('+', b[i])
1091
1088
1092 prev = {}
1089 prev = {}
1093 def display(fn, rev, states, prevstates):
1090 def display(fn, rev, states, prevstates):
1094 datefunc = ui.quiet and util.shortdate or util.datestr
1091 datefunc = ui.quiet and util.shortdate or util.datestr
1095 found = False
1092 found = False
1096 filerevmatches = {}
1093 filerevmatches = {}
1097 r = prev.get(fn, -1)
1094 r = prev.get(fn, -1)
1098 if opts['all']:
1095 if opts['all']:
1099 iter = difflinestates(states, prevstates)
1096 iter = difflinestates(states, prevstates)
1100 else:
1097 else:
1101 iter = [('', l) for l in prevstates]
1098 iter = [('', l) for l in prevstates]
1102 for change, l in iter:
1099 for change, l in iter:
1103 cols = [fn, str(r)]
1100 cols = [fn, str(r)]
1104 if opts['line_number']:
1101 if opts['line_number']:
1105 cols.append(str(l.linenum))
1102 cols.append(str(l.linenum))
1106 if opts['all']:
1103 if opts['all']:
1107 cols.append(change)
1104 cols.append(change)
1108 if opts['user']:
1105 if opts['user']:
1109 cols.append(ui.shortuser(get(r)[1]))
1106 cols.append(ui.shortuser(get(r)[1]))
1110 if opts.get('date'):
1107 if opts.get('date'):
1111 cols.append(datefunc(get(r)[2]))
1108 cols.append(datefunc(get(r)[2]))
1112 if opts['files_with_matches']:
1109 if opts['files_with_matches']:
1113 c = (fn, r)
1110 c = (fn, r)
1114 if c in filerevmatches:
1111 if c in filerevmatches:
1115 continue
1112 continue
1116 filerevmatches[c] = 1
1113 filerevmatches[c] = 1
1117 else:
1114 else:
1118 cols.append(l.line)
1115 cols.append(l.line)
1119 ui.write(sep.join(cols), eol)
1116 ui.write(sep.join(cols), eol)
1120 found = True
1117 found = True
1121 return found
1118 return found
1122
1119
1123 fstate = {}
1120 fstate = {}
1124 skip = {}
1121 skip = {}
1125 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1122 get = util.cachefunc(lambda r: repo[r].changeset())
1126 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1123 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1127 found = False
1124 found = False
1128 follow = opts.get('follow')
1125 follow = opts.get('follow')
1129 for st, rev, fns in changeiter:
1126 for st, rev, fns in changeiter:
1130 if st == 'window':
1127 if st == 'window':
1131 matches.clear()
1128 matches.clear()
1132 elif st == 'add':
1129 elif st == 'add':
1133 ctx = repo.changectx(rev)
1130 ctx = repo[rev]
1134 matches[rev] = {}
1131 matches[rev] = {}
1135 for fn in fns:
1132 for fn in fns:
1136 if fn in skip:
1133 if fn in skip:
1137 continue
1134 continue
1138 try:
1135 try:
1139 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1136 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1140 fstate.setdefault(fn, [])
1137 fstate.setdefault(fn, [])
1141 if follow:
1138 if follow:
1142 copied = getfile(fn).renamed(ctx.filenode(fn))
1139 copied = getfile(fn).renamed(ctx.filenode(fn))
1143 if copied:
1140 if copied:
1144 copies.setdefault(rev, {})[fn] = copied[0]
1141 copies.setdefault(rev, {})[fn] = copied[0]
1145 except revlog.LookupError:
1142 except revlog.LookupError:
1146 pass
1143 pass
1147 elif st == 'iter':
1144 elif st == 'iter':
1148 states = matches[rev].items()
1145 states = matches[rev].items()
1149 states.sort()
1146 states.sort()
1150 for fn, m in states:
1147 for fn, m in states:
1151 copy = copies.get(rev, {}).get(fn)
1148 copy = copies.get(rev, {}).get(fn)
1152 if fn in skip:
1149 if fn in skip:
1153 if copy:
1150 if copy:
1154 skip[copy] = True
1151 skip[copy] = True
1155 continue
1152 continue
1156 if fn in prev or fstate[fn]:
1153 if fn in prev or fstate[fn]:
1157 r = display(fn, rev, m, fstate[fn])
1154 r = display(fn, rev, m, fstate[fn])
1158 found = found or r
1155 found = found or r
1159 if r and not opts['all']:
1156 if r and not opts['all']:
1160 skip[fn] = True
1157 skip[fn] = True
1161 if copy:
1158 if copy:
1162 skip[copy] = True
1159 skip[copy] = True
1163 fstate[fn] = m
1160 fstate[fn] = m
1164 if copy:
1161 if copy:
1165 fstate[copy] = m
1162 fstate[copy] = m
1166 prev[fn] = rev
1163 prev[fn] = rev
1167
1164
1168 fstate = fstate.items()
1165 fstate = fstate.items()
1169 fstate.sort()
1166 fstate.sort()
1170 for fn, state in fstate:
1167 for fn, state in fstate:
1171 if fn in skip:
1168 if fn in skip:
1172 continue
1169 continue
1173 if fn not in copies.get(prev[fn], {}):
1170 if fn not in copies.get(prev[fn], {}):
1174 found = display(fn, rev, {}, state) or found
1171 found = display(fn, rev, {}, state) or found
1175 return (not found and 1) or 0
1172 return (not found and 1) or 0
1176
1173
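# A minimal sketch, not taken from the changeset itself: the point of this
# diff is that a changectx is now obtained by indexing the repository object.
# Assuming an already-opened localrepository `repo` (hypothetical setup), the
# new spelling used throughout the hunks above and below is:
def _example_changectx(repo, rev):
    ctx = repo[rev]      # replaces the old repo.changectx(rev) calls
    wctx = repo[None]    # working-directory context, as identify() uses below
    return ctx, wctx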
1177 def heads(ui, repo, *branchrevs, **opts):
1174 def heads(ui, repo, *branchrevs, **opts):
1178 """show current repository heads or show branch heads
1175 """show current repository heads or show branch heads
1179
1176
1180 With no arguments, show all repository head changesets.
1177 With no arguments, show all repository head changesets.
1181
1178
1182 If branch or revision names are given this will show the heads of
1179 If branch or revision names are given this will show the heads of
1183 the specified branches or the branches those revisions are tagged
1180 the specified branches or the branches those revisions are tagged
1184 with.
1181 with.
1185
1182
1186 Repository "heads" are changesets that don't have child
1183 Repository "heads" are changesets that don't have child
1187 changesets. They are where development generally takes place and
1184 changesets. They are where development generally takes place and
1188 are the usual targets for update and merge operations.
1185 are the usual targets for update and merge operations.
1189
1186
1190 Branch heads are changesets that have a given branch tag, but have
1187 Branch heads are changesets that have a given branch tag, but have
1191 no child changesets with that tag. They are usually where
1188 no child changesets with that tag. They are usually where
1192 development on the given branch takes place.
1189 development on the given branch takes place.
1193 """
1190 """
1194 if opts['rev']:
1191 if opts['rev']:
1195 start = repo.lookup(opts['rev'])
1192 start = repo.lookup(opts['rev'])
1196 else:
1193 else:
1197 start = None
1194 start = None
1198 if not branchrevs:
1195 if not branchrevs:
1199 # Assume we're looking repo-wide heads if no revs were specified.
1196 # Assume we're looking repo-wide heads if no revs were specified.
1200 heads = repo.heads(start)
1197 heads = repo.heads(start)
1201 else:
1198 else:
1202 heads = []
1199 heads = []
1203 visitedset = util.set()
1200 visitedset = util.set()
1204 for branchrev in branchrevs:
1201 for branchrev in branchrevs:
1205 branch = repo.changectx(branchrev).branch()
1202 branch = repo[branchrev].branch()
1206 if branch in visitedset:
1203 if branch in visitedset:
1207 continue
1204 continue
1208 visitedset.add(branch)
1205 visitedset.add(branch)
1209 bheads = repo.branchheads(branch, start)
1206 bheads = repo.branchheads(branch, start)
1210 if not bheads:
1207 if not bheads:
1211 if branch != branchrev:
1208 if branch != branchrev:
1212 ui.warn(_("no changes on branch %s containing %s are "
1209 ui.warn(_("no changes on branch %s containing %s are "
1213 "reachable from %s\n")
1210 "reachable from %s\n")
1214 % (branch, branchrev, opts['rev']))
1211 % (branch, branchrev, opts['rev']))
1215 else:
1212 else:
1216 ui.warn(_("no changes on branch %s are reachable from %s\n")
1213 ui.warn(_("no changes on branch %s are reachable from %s\n")
1217 % (branch, opts['rev']))
1214 % (branch, opts['rev']))
1218 heads.extend(bheads)
1215 heads.extend(bheads)
1219 if not heads:
1216 if not heads:
1220 return 1
1217 return 1
1221 displayer = cmdutil.show_changeset(ui, repo, opts)
1218 displayer = cmdutil.show_changeset(ui, repo, opts)
1222 for n in heads:
1219 for n in heads:
1223 displayer.show(changenode=n)
1220 displayer.show(changenode=n)
1224
1221
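# A minimal sketch of the per-branch loop in heads() above, not part of the
# changeset: each requested revision is mapped to its branch name, each branch
# is queried only once, and its heads are collected.  `repo` is assumed to be
# an open localrepository; every call below also appears in heads() itself.
def _branch_heads(repo, branchrevs, start=None):
    seen, result = set(), []
    for branchrev in branchrevs:
        branch = repo[branchrev].branch()   # rev -> branch name
        if branch in seen:                  # ask about each branch only once
            continue
        seen.add(branch)
        result.extend(repo.branchheads(branch, start))
    return result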
1225 def help_(ui, name=None, with_version=False):
1222 def help_(ui, name=None, with_version=False):
1226 """show help for a command, extension, or list of commands
1223 """show help for a command, extension, or list of commands
1227
1224
1228 With no arguments, print a list of commands and short help.
1225 With no arguments, print a list of commands and short help.
1229
1226
1230 Given a command name, print help for that command.
1227 Given a command name, print help for that command.
1231
1228
1232 Given an extension name, print help for that extension, and the
1229 Given an extension name, print help for that extension, and the
1233 commands it provides."""
1230 commands it provides."""
1234 option_lists = []
1231 option_lists = []
1235
1232
1236 def addglobalopts(aliases):
1233 def addglobalopts(aliases):
1237 if ui.verbose:
1234 if ui.verbose:
1238 option_lists.append((_("global options:"), globalopts))
1235 option_lists.append((_("global options:"), globalopts))
1239 if name == 'shortlist':
1236 if name == 'shortlist':
1240 option_lists.append((_('use "hg help" for the full list '
1237 option_lists.append((_('use "hg help" for the full list '
1241 'of commands'), ()))
1238 'of commands'), ()))
1242 else:
1239 else:
1243 if name == 'shortlist':
1240 if name == 'shortlist':
1244 msg = _('use "hg help" for the full list of commands '
1241 msg = _('use "hg help" for the full list of commands '
1245 'or "hg -v" for details')
1242 'or "hg -v" for details')
1246 elif aliases:
1243 elif aliases:
1247 msg = _('use "hg -v help%s" to show aliases and '
1244 msg = _('use "hg -v help%s" to show aliases and '
1248 'global options') % (name and " " + name or "")
1245 'global options') % (name and " " + name or "")
1249 else:
1246 else:
1250 msg = _('use "hg -v help %s" to show global options') % name
1247 msg = _('use "hg -v help %s" to show global options') % name
1251 option_lists.append((msg, ()))
1248 option_lists.append((msg, ()))
1252
1249
1253 def helpcmd(name):
1250 def helpcmd(name):
1254 if with_version:
1251 if with_version:
1255 version_(ui)
1252 version_(ui)
1256 ui.write('\n')
1253 ui.write('\n')
1257
1254
1258 try:
1255 try:
1259 aliases, i = cmdutil.findcmd(ui, name, table)
1256 aliases, i = cmdutil.findcmd(ui, name, table)
1260 except cmdutil.AmbiguousCommand, inst:
1257 except cmdutil.AmbiguousCommand, inst:
1261 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1258 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1262 helplist(_('list of commands:\n\n'), select)
1259 helplist(_('list of commands:\n\n'), select)
1263 return
1260 return
1264
1261
1265 # synopsis
1262 # synopsis
1266 ui.write("%s\n" % i[2])
1263 ui.write("%s\n" % i[2])
1267
1264
1268 # aliases
1265 # aliases
1269 if not ui.quiet and len(aliases) > 1:
1266 if not ui.quiet and len(aliases) > 1:
1270 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1267 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1271
1268
1272 # description
1269 # description
1273 doc = i[0].__doc__
1270 doc = i[0].__doc__
1274 if not doc:
1271 if not doc:
1275 doc = _("(No help text available)")
1272 doc = _("(No help text available)")
1276 if ui.quiet:
1273 if ui.quiet:
1277 doc = doc.splitlines(0)[0]
1274 doc = doc.splitlines(0)[0]
1278 ui.write("\n%s\n" % doc.rstrip())
1275 ui.write("\n%s\n" % doc.rstrip())
1279
1276
1280 if not ui.quiet:
1277 if not ui.quiet:
1281 # options
1278 # options
1282 if i[1]:
1279 if i[1]:
1283 option_lists.append((_("options:\n"), i[1]))
1280 option_lists.append((_("options:\n"), i[1]))
1284
1281
1285 addglobalopts(False)
1282 addglobalopts(False)
1286
1283
1287 def helplist(header, select=None):
1284 def helplist(header, select=None):
1288 h = {}
1285 h = {}
1289 cmds = {}
1286 cmds = {}
1290 for c, e in table.items():
1287 for c, e in table.items():
1291 f = c.split("|", 1)[0]
1288 f = c.split("|", 1)[0]
1292 if select and not select(f):
1289 if select and not select(f):
1293 continue
1290 continue
1294 if name == "shortlist" and not f.startswith("^"):
1291 if name == "shortlist" and not f.startswith("^"):
1295 continue
1292 continue
1296 f = f.lstrip("^")
1293 f = f.lstrip("^")
1297 if not ui.debugflag and f.startswith("debug"):
1294 if not ui.debugflag and f.startswith("debug"):
1298 continue
1295 continue
1299 doc = e[0].__doc__
1296 doc = e[0].__doc__
1300 if not doc:
1297 if not doc:
1301 doc = _("(No help text available)")
1298 doc = _("(No help text available)")
1302 h[f] = doc.splitlines(0)[0].rstrip()
1299 h[f] = doc.splitlines(0)[0].rstrip()
1303 cmds[f] = c.lstrip("^")
1300 cmds[f] = c.lstrip("^")
1304
1301
1305 if not h:
1302 if not h:
1306 ui.status(_('no commands defined\n'))
1303 ui.status(_('no commands defined\n'))
1307 return
1304 return
1308
1305
1309 ui.status(header)
1306 ui.status(header)
1310 fns = h.keys()
1307 fns = h.keys()
1311 fns.sort()
1308 fns.sort()
1312 m = max(map(len, fns))
1309 m = max(map(len, fns))
1313 for f in fns:
1310 for f in fns:
1314 if ui.verbose:
1311 if ui.verbose:
1315 commands = cmds[f].replace("|",", ")
1312 commands = cmds[f].replace("|",", ")
1316 ui.write(" %s:\n %s\n"%(commands, h[f]))
1313 ui.write(" %s:\n %s\n"%(commands, h[f]))
1317 else:
1314 else:
1318 ui.write(' %-*s %s\n' % (m, f, h[f]))
1315 ui.write(' %-*s %s\n' % (m, f, h[f]))
1319
1316
1320 if not ui.quiet:
1317 if not ui.quiet:
1321 addglobalopts(True)
1318 addglobalopts(True)
1322
1319
1323 def helptopic(name):
1320 def helptopic(name):
1324 v = None
1321 v = None
1325 for i, d in help.helptable:
1322 for i, d in help.helptable:
1326 l = i.split('|')
1323 l = i.split('|')
1327 if name in l:
1324 if name in l:
1328 v = i
1325 v = i
1329 header = l[-1]
1326 header = l[-1]
1330 doc = d
1327 doc = d
1331 if not v:
1328 if not v:
1332 raise cmdutil.UnknownCommand(name)
1329 raise cmdutil.UnknownCommand(name)
1333
1330
1334 # description
1331 # description
1335 if not doc:
1332 if not doc:
1336 doc = _("(No help text available)")
1333 doc = _("(No help text available)")
1337 if callable(doc):
1334 if callable(doc):
1338 doc = doc()
1335 doc = doc()
1339
1336
1340 ui.write("%s\n" % header)
1337 ui.write("%s\n" % header)
1341 ui.write("%s\n" % doc.rstrip())
1338 ui.write("%s\n" % doc.rstrip())
1342
1339
1343 def helpext(name):
1340 def helpext(name):
1344 try:
1341 try:
1345 mod = extensions.find(name)
1342 mod = extensions.find(name)
1346 except KeyError:
1343 except KeyError:
1347 raise cmdutil.UnknownCommand(name)
1344 raise cmdutil.UnknownCommand(name)
1348
1345
1349 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1346 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1350 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1347 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1351 for d in doc[1:]:
1348 for d in doc[1:]:
1352 ui.write(d, '\n')
1349 ui.write(d, '\n')
1353
1350
1354 ui.status('\n')
1351 ui.status('\n')
1355
1352
1356 try:
1353 try:
1357 ct = mod.cmdtable
1354 ct = mod.cmdtable
1358 except AttributeError:
1355 except AttributeError:
1359 ct = {}
1356 ct = {}
1360
1357
1361 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1358 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1362 helplist(_('list of commands:\n\n'), modcmds.has_key)
1359 helplist(_('list of commands:\n\n'), modcmds.has_key)
1363
1360
1364 if name and name != 'shortlist':
1361 if name and name != 'shortlist':
1365 i = None
1362 i = None
1366 for f in (helpcmd, helptopic, helpext):
1363 for f in (helpcmd, helptopic, helpext):
1367 try:
1364 try:
1368 f(name)
1365 f(name)
1369 i = None
1366 i = None
1370 break
1367 break
1371 except cmdutil.UnknownCommand, inst:
1368 except cmdutil.UnknownCommand, inst:
1372 i = inst
1369 i = inst
1373 if i:
1370 if i:
1374 raise i
1371 raise i
1375
1372
1376 else:
1373 else:
1377 # program name
1374 # program name
1378 if ui.verbose or with_version:
1375 if ui.verbose or with_version:
1379 version_(ui)
1376 version_(ui)
1380 else:
1377 else:
1381 ui.status(_("Mercurial Distributed SCM\n"))
1378 ui.status(_("Mercurial Distributed SCM\n"))
1382 ui.status('\n')
1379 ui.status('\n')
1383
1380
1384 # list of commands
1381 # list of commands
1385 if name == "shortlist":
1382 if name == "shortlist":
1386 header = _('basic commands:\n\n')
1383 header = _('basic commands:\n\n')
1387 else:
1384 else:
1388 header = _('list of commands:\n\n')
1385 header = _('list of commands:\n\n')
1389
1386
1390 helplist(header)
1387 helplist(header)
1391
1388
1392 # list all option lists
1389 # list all option lists
1393 opt_output = []
1390 opt_output = []
1394 for title, options in option_lists:
1391 for title, options in option_lists:
1395 opt_output.append(("\n%s" % title, None))
1392 opt_output.append(("\n%s" % title, None))
1396 for shortopt, longopt, default, desc in options:
1393 for shortopt, longopt, default, desc in options:
1397 if "DEPRECATED" in desc and not ui.verbose: continue
1394 if "DEPRECATED" in desc and not ui.verbose: continue
1398 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1395 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1399 longopt and " --%s" % longopt),
1396 longopt and " --%s" % longopt),
1400 "%s%s" % (desc,
1397 "%s%s" % (desc,
1401 default
1398 default
1402 and _(" (default: %s)") % default
1399 and _(" (default: %s)") % default
1403 or "")))
1400 or "")))
1404
1401
1405 if ui.verbose:
1402 if ui.verbose:
1406 ui.write(_("\nspecial help topics:\n"))
1403 ui.write(_("\nspecial help topics:\n"))
1407 topics = []
1404 topics = []
1408 for i, d in help.helptable:
1405 for i, d in help.helptable:
1409 l = i.split('|')
1406 l = i.split('|')
1410 topics.append((", ".join(l[:-1]), l[-1]))
1407 topics.append((", ".join(l[:-1]), l[-1]))
1411 topics_len = max([len(s[0]) for s in topics])
1408 topics_len = max([len(s[0]) for s in topics])
1412 for t, desc in topics:
1409 for t, desc in topics:
1413 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1410 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1414
1411
1415 if opt_output:
1412 if opt_output:
1416 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1413 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1417 for first, second in opt_output:
1414 for first, second in opt_output:
1418 if second:
1415 if second:
1419 ui.write(" %-*s %s\n" % (opts_len, first, second))
1416 ui.write(" %-*s %s\n" % (opts_len, first, second))
1420 else:
1417 else:
1421 ui.write("%s\n" % first)
1418 ui.write("%s\n" % first)
1422
1419
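# A self-contained sketch (not part of the changeset) of the column layout that
# helplist() and the topic/option tables above rely on: pad the first column to
# the longest entry with the "%-*s" format.
def _columns(rows):
    # rows: list of (name, description) pairs
    width = max(len(name) for name, _ in rows)
    return "\n".join(" %-*s  %s" % (width, name, desc) for name, desc in rows)

# _columns([("add", "add files"), ("annotate", "show per-line revision info")])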
1423 def identify(ui, repo, source=None,
1420 def identify(ui, repo, source=None,
1424 rev=None, num=None, id=None, branch=None, tags=None):
1421 rev=None, num=None, id=None, branch=None, tags=None):
1425 """identify the working copy or specified revision
1422 """identify the working copy or specified revision
1426
1423
1427 With no revision, print a summary of the current state of the repo.
1424 With no revision, print a summary of the current state of the repo.
1428
1425
1429 With a path, do a lookup in another repository.
1426 With a path, do a lookup in another repository.
1430
1427
1431 This summary identifies the repository state using one or two parent
1428 This summary identifies the repository state using one or two parent
1432 hash identifiers, followed by a "+" if there are uncommitted changes
1429 hash identifiers, followed by a "+" if there are uncommitted changes
1433 in the working directory, a list of tags for this revision and a branch
1430 in the working directory, a list of tags for this revision and a branch
1434 name for non-default branches.
1431 name for non-default branches.
1435 """
1432 """
1436
1433
1437 if not repo and not source:
1434 if not repo and not source:
1438 raise util.Abort(_("There is no Mercurial repository here "
1435 raise util.Abort(_("There is no Mercurial repository here "
1439 "(.hg not found)"))
1436 "(.hg not found)"))
1440
1437
1441 hexfunc = ui.debugflag and hex or short
1438 hexfunc = ui.debugflag and hex or short
1442 default = not (num or id or branch or tags)
1439 default = not (num or id or branch or tags)
1443 output = []
1440 output = []
1444
1441
1445 if source:
1442 if source:
1446 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1443 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1447 srepo = hg.repository(ui, source)
1444 srepo = hg.repository(ui, source)
1448 if not rev and revs:
1445 if not rev and revs:
1449 rev = revs[0]
1446 rev = revs[0]
1450 if not rev:
1447 if not rev:
1451 rev = "tip"
1448 rev = "tip"
1452 if num or branch or tags:
1449 if num or branch or tags:
1453 raise util.Abort(
1450 raise util.Abort(
1454 "can't query remote revision number, branch, or tags")
1451 "can't query remote revision number, branch, or tags")
1455 output = [hexfunc(srepo.lookup(rev))]
1452 output = [hexfunc(srepo.lookup(rev))]
1456 elif not rev:
1453 elif not rev:
1457 ctx = repo.changectx(None)
1454 ctx = repo[None]
1458 parents = ctx.parents()
1455 parents = ctx.parents()
1459 changed = False
1456 changed = False
1460 if default or id or num:
1457 if default or id or num:
1461 changed = ctx.files() + ctx.deleted()
1458 changed = ctx.files() + ctx.deleted()
1462 if default or id:
1459 if default or id:
1463 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1460 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1464 (changed) and "+" or "")]
1461 (changed) and "+" or "")]
1465 if num:
1462 if num:
1466 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1463 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1467 (changed) and "+" or ""))
1464 (changed) and "+" or ""))
1468 else:
1465 else:
1469 ctx = repo.changectx(rev)
1466 ctx = repo[rev]
1470 if default or id:
1467 if default or id:
1471 output = [hexfunc(ctx.node())]
1468 output = [hexfunc(ctx.node())]
1472 if num:
1469 if num:
1473 output.append(str(ctx.rev()))
1470 output.append(str(ctx.rev()))
1474
1471
1475 if not source and default and not ui.quiet:
1472 if not source and default and not ui.quiet:
1476 b = util.tolocal(ctx.branch())
1473 b = util.tolocal(ctx.branch())
1477 if b != 'default':
1474 if b != 'default':
1478 output.append("(%s)" % b)
1475 output.append("(%s)" % b)
1479
1476
1480 # multiple tags for a single parent separated by '/'
1477 # multiple tags for a single parent separated by '/'
1481 t = "/".join(ctx.tags())
1478 t = "/".join(ctx.tags())
1482 if t:
1479 if t:
1483 output.append(t)
1480 output.append(t)
1484
1481
1485 if branch:
1482 if branch:
1486 output.append(util.tolocal(ctx.branch()))
1483 output.append(util.tolocal(ctx.branch()))
1487
1484
1488 if tags:
1485 if tags:
1489 output.extend(ctx.tags())
1486 output.extend(ctx.tags())
1490
1487
1491 ui.write("%s\n" % ' '.join(output))
1488 ui.write("%s\n" % ' '.join(output))
1492
1489
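# A self-contained sketch (not part of the changeset) of the id string that
# identify() builds above: parent hashes joined by "+", with a trailing "+"
# when the working directory has uncommitted changes.
def _ident_string(parent_hashes, dirty):
    # parent_hashes: list of (short) hex strings; dirty: bool
    return "%s%s" % ("+".join(parent_hashes), dirty and "+" or "")

# _ident_string(["f6c00b17"], True) -> 'f6c00b17+'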
1493 def import_(ui, repo, patch1, *patches, **opts):
1490 def import_(ui, repo, patch1, *patches, **opts):
1494 """import an ordered set of patches
1491 """import an ordered set of patches
1495
1492
1496 Import a list of patches and commit them individually.
1493 Import a list of patches and commit them individually.
1497
1494
1498 If there are outstanding changes in the working directory, import
1495 If there are outstanding changes in the working directory, import
1499 will abort unless given the -f flag.
1496 will abort unless given the -f flag.
1500
1497
1501 You can import a patch straight from a mail message. Even patches
1498 You can import a patch straight from a mail message. Even patches
1502 as attachments work (body part must be type text/plain or
1499 as attachments work (body part must be type text/plain or
1503 text/x-patch to be used). The From and Subject headers of the email
1500 text/x-patch to be used). The From and Subject headers of the email
1504 message are used as the default committer and commit message. All
1501 message are used as the default committer and commit message. All
1505 text/plain body parts before the first diff are added to the commit
1502 text/plain body parts before the first diff are added to the commit
1506 message.
1503 message.
1507
1504
1508 If the imported patch was generated by hg export, user and description
1505 If the imported patch was generated by hg export, user and description
1509 from patch override values from message headers and body. Values
1506 from patch override values from message headers and body. Values
1510 given on command line with -m and -u override these.
1507 given on command line with -m and -u override these.
1511
1508
1512 If --exact is specified, import will set the working directory
1509 If --exact is specified, import will set the working directory
1513 to the parent of each patch before applying it, and will abort
1510 to the parent of each patch before applying it, and will abort
1514 if the resulting changeset has a different ID than the one
1511 if the resulting changeset has a different ID than the one
1515 recorded in the patch. This may happen due to character set
1512 recorded in the patch. This may happen due to character set
1516 problems or other deficiencies in the text patch format.
1513 problems or other deficiencies in the text patch format.
1517
1514
1518 To read a patch from standard input, use patch name "-".
1515 To read a patch from standard input, use patch name "-".
1519 See 'hg help dates' for a list of formats valid for -d/--date.
1516 See 'hg help dates' for a list of formats valid for -d/--date.
1520 """
1517 """
1521 patches = (patch1,) + patches
1518 patches = (patch1,) + patches
1522
1519
1523 date = opts.get('date')
1520 date = opts.get('date')
1524 if date:
1521 if date:
1525 opts['date'] = util.parsedate(date)
1522 opts['date'] = util.parsedate(date)
1526
1523
1527 if opts.get('exact') or not opts['force']:
1524 if opts.get('exact') or not opts['force']:
1528 cmdutil.bail_if_changed(repo)
1525 cmdutil.bail_if_changed(repo)
1529
1526
1530 d = opts["base"]
1527 d = opts["base"]
1531 strip = opts["strip"]
1528 strip = opts["strip"]
1532 wlock = lock = None
1529 wlock = lock = None
1533 try:
1530 try:
1534 wlock = repo.wlock()
1531 wlock = repo.wlock()
1535 lock = repo.lock()
1532 lock = repo.lock()
1536 for p in patches:
1533 for p in patches:
1537 pf = os.path.join(d, p)
1534 pf = os.path.join(d, p)
1538
1535
1539 if pf == '-':
1536 if pf == '-':
1540 ui.status(_("applying patch from stdin\n"))
1537 ui.status(_("applying patch from stdin\n"))
1541 data = patch.extract(ui, sys.stdin)
1538 data = patch.extract(ui, sys.stdin)
1542 else:
1539 else:
1543 ui.status(_("applying %s\n") % p)
1540 ui.status(_("applying %s\n") % p)
1544 if os.path.exists(pf):
1541 if os.path.exists(pf):
1545 data = patch.extract(ui, file(pf, 'rb'))
1542 data = patch.extract(ui, file(pf, 'rb'))
1546 else:
1543 else:
1547 data = patch.extract(ui, urllib.urlopen(pf))
1544 data = patch.extract(ui, urllib.urlopen(pf))
1548 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1545 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1549
1546
1550 if tmpname is None:
1547 if tmpname is None:
1551 raise util.Abort(_('no diffs found'))
1548 raise util.Abort(_('no diffs found'))
1552
1549
1553 try:
1550 try:
1554 cmdline_message = cmdutil.logmessage(opts)
1551 cmdline_message = cmdutil.logmessage(opts)
1555 if cmdline_message:
1552 if cmdline_message:
1556 # pickup the cmdline msg
1553 # pickup the cmdline msg
1557 message = cmdline_message
1554 message = cmdline_message
1558 elif message:
1555 elif message:
1559 # pickup the patch msg
1556 # pickup the patch msg
1560 message = message.strip()
1557 message = message.strip()
1561 else:
1558 else:
1562 # launch the editor
1559 # launch the editor
1563 message = None
1560 message = None
1564 ui.debug(_('message:\n%s\n') % message)
1561 ui.debug(_('message:\n%s\n') % message)
1565
1562
1566 wp = repo.changectx(None).parents()
1563 wp = repo.parents()
1567 if opts.get('exact'):
1564 if opts.get('exact'):
1568 if not nodeid or not p1:
1565 if not nodeid or not p1:
1569 raise util.Abort(_('not a mercurial patch'))
1566 raise util.Abort(_('not a mercurial patch'))
1570 p1 = repo.lookup(p1)
1567 p1 = repo.lookup(p1)
1571 p2 = repo.lookup(p2 or hex(nullid))
1568 p2 = repo.lookup(p2 or hex(nullid))
1572
1569
1573 if p1 != wp[0].node():
1570 if p1 != wp[0].node():
1574 hg.clean(repo, p1)
1571 hg.clean(repo, p1)
1575 repo.dirstate.setparents(p1, p2)
1572 repo.dirstate.setparents(p1, p2)
1576 elif p2:
1573 elif p2:
1577 try:
1574 try:
1578 p1 = repo.lookup(p1)
1575 p1 = repo.lookup(p1)
1579 p2 = repo.lookup(p2)
1576 p2 = repo.lookup(p2)
1580 if p1 == wp[0].node():
1577 if p1 == wp[0].node():
1581 repo.dirstate.setparents(p1, p2)
1578 repo.dirstate.setparents(p1, p2)
1582 except RepoError:
1579 except RepoError:
1583 pass
1580 pass
1584 if opts.get('exact') or opts.get('import_branch'):
1581 if opts.get('exact') or opts.get('import_branch'):
1585 repo.dirstate.setbranch(branch or 'default')
1582 repo.dirstate.setbranch(branch or 'default')
1586
1583
1587 files = {}
1584 files = {}
1588 try:
1585 try:
1589 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1586 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1590 files=files)
1587 files=files)
1591 finally:
1588 finally:
1592 files = patch.updatedir(ui, repo, files)
1589 files = patch.updatedir(ui, repo, files)
1593 if not opts.get('no_commit'):
1590 if not opts.get('no_commit'):
1594 n = repo.commit(files, message, opts.get('user') or user,
1591 n = repo.commit(files, message, opts.get('user') or user,
1595 opts.get('date') or date)
1592 opts.get('date') or date)
1596 if opts.get('exact'):
1593 if opts.get('exact'):
1597 if hex(n) != nodeid:
1594 if hex(n) != nodeid:
1598 repo.rollback()
1595 repo.rollback()
1599 raise util.Abort(_('patch is damaged'
1596 raise util.Abort(_('patch is damaged'
1600 ' or loses information'))
1597 ' or loses information'))
1601 # Force a dirstate write so that the next transaction
1598 # Force a dirstate write so that the next transaction
1602 # backs up an up-to-date file.
1599 # backs up an up-to-date file.
1603 repo.dirstate.write()
1600 repo.dirstate.write()
1604 finally:
1601 finally:
1605 os.unlink(tmpname)
1602 os.unlink(tmpname)
1606 finally:
1603 finally:
1607 del lock, wlock
1604 del lock, wlock
1608
1605
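# A self-contained sketch (not part of the changeset) of the patch-source
# dispatch used by import_() above: "-" means standard input, an existing path
# is read as a local file, anything else is fetched as a URL.
# (Python 2 urllib, matching the surrounding code.)
import os, sys, urllib

def _open_patch(pf):
    if pf == '-':
        return sys.stdin          # "hg import -" reads the patch from stdin
    if os.path.exists(pf):
        return open(pf, 'rb')     # local patch file
    return urllib.urlopen(pf)     # otherwise treat the name as a URL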
1609 def incoming(ui, repo, source="default", **opts):
1606 def incoming(ui, repo, source="default", **opts):
1610 """show new changesets found in source
1607 """show new changesets found in source
1611
1608
1612 Show new changesets found in the specified path/URL or the default
1609 Show new changesets found in the specified path/URL or the default
1613 pull location. These are the changesets that would be pulled if a pull
1610 pull location. These are the changesets that would be pulled if a pull
1614 was requested.
1611 was requested.
1615
1612
1616 For a remote repository, using --bundle avoids downloading the changesets
1613 For a remote repository, using --bundle avoids downloading the changesets
1617 twice if the incoming is followed by a pull.
1614 twice if the incoming is followed by a pull.
1618
1615
1619 See pull for valid source format details.
1616 See pull for valid source format details.
1620 """
1617 """
1621 limit = cmdutil.loglimit(opts)
1618 limit = cmdutil.loglimit(opts)
1622 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1619 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1623 cmdutil.setremoteconfig(ui, opts)
1620 cmdutil.setremoteconfig(ui, opts)
1624
1621
1625 other = hg.repository(ui, source)
1622 other = hg.repository(ui, source)
1626 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1623 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1627 if revs:
1624 if revs:
1628 revs = [other.lookup(rev) for rev in revs]
1625 revs = [other.lookup(rev) for rev in revs]
1629 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1626 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1630 if not incoming:
1627 if not incoming:
1631 try:
1628 try:
1632 os.unlink(opts["bundle"])
1629 os.unlink(opts["bundle"])
1633 except:
1630 except:
1634 pass
1631 pass
1635 ui.status(_("no changes found\n"))
1632 ui.status(_("no changes found\n"))
1636 return 1
1633 return 1
1637
1634
1638 cleanup = None
1635 cleanup = None
1639 try:
1636 try:
1640 fname = opts["bundle"]
1637 fname = opts["bundle"]
1641 if fname or not other.local():
1638 if fname or not other.local():
1642 # create a bundle (uncompressed if other repo is not local)
1639 # create a bundle (uncompressed if other repo is not local)
1643 if revs is None:
1640 if revs is None:
1644 cg = other.changegroup(incoming, "incoming")
1641 cg = other.changegroup(incoming, "incoming")
1645 else:
1642 else:
1646 cg = other.changegroupsubset(incoming, revs, 'incoming')
1643 cg = other.changegroupsubset(incoming, revs, 'incoming')
1647 bundletype = other.local() and "HG10BZ" or "HG10UN"
1644 bundletype = other.local() and "HG10BZ" or "HG10UN"
1648 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1645 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1649 # keep written bundle?
1646 # keep written bundle?
1650 if opts["bundle"]:
1647 if opts["bundle"]:
1651 cleanup = None
1648 cleanup = None
1652 if not other.local():
1649 if not other.local():
1653 # use the created uncompressed bundlerepo
1650 # use the created uncompressed bundlerepo
1654 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1651 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1655
1652
1656 o = other.changelog.nodesbetween(incoming, revs)[0]
1653 o = other.changelog.nodesbetween(incoming, revs)[0]
1657 if opts['newest_first']:
1654 if opts['newest_first']:
1658 o.reverse()
1655 o.reverse()
1659 displayer = cmdutil.show_changeset(ui, other, opts)
1656 displayer = cmdutil.show_changeset(ui, other, opts)
1660 count = 0
1657 count = 0
1661 for n in o:
1658 for n in o:
1662 if count >= limit:
1659 if count >= limit:
1663 break
1660 break
1664 parents = [p for p in other.changelog.parents(n) if p != nullid]
1661 parents = [p for p in other.changelog.parents(n) if p != nullid]
1665 if opts['no_merges'] and len(parents) == 2:
1662 if opts['no_merges'] and len(parents) == 2:
1666 continue
1663 continue
1667 count += 1
1664 count += 1
1668 displayer.show(changenode=n)
1665 displayer.show(changenode=n)
1669 finally:
1666 finally:
1670 if hasattr(other, 'close'):
1667 if hasattr(other, 'close'):
1671 other.close()
1668 other.close()
1672 if cleanup:
1669 if cleanup:
1673 os.unlink(cleanup)
1670 os.unlink(cleanup)
1674
1671
1675 def init(ui, dest=".", **opts):
1672 def init(ui, dest=".", **opts):
1676 """create a new repository in the given directory
1673 """create a new repository in the given directory
1677
1674
1678 Initialize a new repository in the given directory. If the given
1675 Initialize a new repository in the given directory. If the given
1679 directory does not exist, it is created.
1676 directory does not exist, it is created.
1680
1677
1681 If no directory is given, the current directory is used.
1678 If no directory is given, the current directory is used.
1682
1679
1683 It is possible to specify an ssh:// URL as the destination.
1680 It is possible to specify an ssh:// URL as the destination.
1684 Look at the help text for the pull command for important details
1681 Look at the help text for the pull command for important details
1685 about ssh:// URLs.
1682 about ssh:// URLs.
1686 """
1683 """
1687 cmdutil.setremoteconfig(ui, opts)
1684 cmdutil.setremoteconfig(ui, opts)
1688 hg.repository(ui, dest, create=1)
1685 hg.repository(ui, dest, create=1)
1689
1686
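# A minimal sketch, not part of the changeset: init() above boils down to a
# single hg.repository(..., create=1) call.  The ui construction here is an
# assumption (a default ui with no extra configuration).
from mercurial import hg, ui as uimod

def _make_repo(path):
    u = uimod.ui()                             # assumed default ui object
    return hg.repository(u, path, create=1)    # the same call init() makes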
1690 def locate(ui, repo, *pats, **opts):
1687 def locate(ui, repo, *pats, **opts):
1691 """locate files matching specific patterns
1688 """locate files matching specific patterns
1692
1689
1693 Print all files under Mercurial control whose names match the
1690 Print all files under Mercurial control whose names match the
1694 given patterns.
1691 given patterns.
1695
1692
1696 This command searches the entire repository by default. To search
1693 This command searches the entire repository by default. To search
1697 just the current directory and its subdirectories, use
1694 just the current directory and its subdirectories, use
1698 "--include .".
1695 "--include .".
1699
1696
1700 If no patterns are given to match, this command prints all file
1697 If no patterns are given to match, this command prints all file
1701 names.
1698 names.
1702
1699
1703 If you want to feed the output of this command into the "xargs"
1700 If you want to feed the output of this command into the "xargs"
1704 command, use the "-0" option to both this command and "xargs".
1701 command, use the "-0" option to both this command and "xargs".
1705 This will avoid the problem of "xargs" treating single filenames
1702 This will avoid the problem of "xargs" treating single filenames
1706 that contain white space as multiple filenames.
1703 that contain white space as multiple filenames.
1707 """
1704 """
1708 end = opts['print0'] and '\0' or '\n'
1705 end = opts['print0'] and '\0' or '\n'
1709 rev = opts['rev']
1706 rev = opts['rev']
1710 if rev:
1707 if rev:
1711 node = repo.lookup(rev)
1708 node = repo.lookup(rev)
1712 else:
1709 else:
1713 node = None
1710 node = None
1714
1711
1715 ret = 1
1712 ret = 1
1716 m = cmdutil.match(repo, pats, opts, default='relglob')
1713 m = cmdutil.match(repo, pats, opts, default='relglob')
1717 m.bad = lambda x,y: False
1714 m.bad = lambda x,y: False
1718 for abs in repo.walk(m, node):
1715 for abs in repo.walk(m, node):
1719 if not node and abs not in repo.dirstate:
1716 if not node and abs not in repo.dirstate:
1720 continue
1717 continue
1721 if opts['fullpath']:
1718 if opts['fullpath']:
1722 ui.write(os.path.join(repo.root, abs), end)
1719 ui.write(os.path.join(repo.root, abs), end)
1723 else:
1720 else:
1724 ui.write(((pats and m.rel(abs)) or abs), end)
1721 ui.write(((pats and m.rel(abs)) or abs), end)
1725 ret = 0
1722 ret = 0
1726
1723
1727 return ret
1724 return ret
1728
1725
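# A self-contained sketch (not part of the changeset) of the -0/--print0
# behaviour in locate() above: switch the record separator to NUL so names
# containing spaces survive a pipe into "xargs -0".
def _join_names(names, print0=False):
    end = print0 and '\0' or '\n'
    return end.join(names) + end

# _join_names(["a file.txt", "b.txt"], print0=True) keeps "a file.txt" intact
# when the output is consumed by "xargs -0".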
1729 def log(ui, repo, *pats, **opts):
1726 def log(ui, repo, *pats, **opts):
1730 """show revision history of entire repository or files
1727 """show revision history of entire repository or files
1731
1728
1732 Print the revision history of the specified files or the entire
1729 Print the revision history of the specified files or the entire
1733 project.
1730 project.
1734
1731
1735 File history is shown without following rename or copy history of
1732 File history is shown without following rename or copy history of
1736 files. Use -f/--follow with a file name to follow history across
1733 files. Use -f/--follow with a file name to follow history across
1737 renames and copies. --follow without a file name will only show
1734 renames and copies. --follow without a file name will only show
1738 ancestors or descendants of the starting revision. --follow-first
1735 ancestors or descendants of the starting revision. --follow-first
1739 only follows the first parent of merge revisions.
1736 only follows the first parent of merge revisions.
1740
1737
1741 If no revision range is specified, the default is tip:0 unless
1738 If no revision range is specified, the default is tip:0 unless
1742 --follow is set, in which case the working directory parent is
1739 --follow is set, in which case the working directory parent is
1743 used as the starting revision.
1740 used as the starting revision.
1744
1741
1745 See 'hg help dates' for a list of formats valid for -d/--date.
1742 See 'hg help dates' for a list of formats valid for -d/--date.
1746
1743
1747 By default this command outputs: changeset id and hash, tags,
1744 By default this command outputs: changeset id and hash, tags,
1748 non-trivial parents, user, date and time, and a summary for each
1745 non-trivial parents, user, date and time, and a summary for each
1749 commit. When the -v/--verbose switch is used, the list of changed
1746 commit. When the -v/--verbose switch is used, the list of changed
1750 files and full commit message is shown.
1747 files and full commit message is shown.
1751
1748
1752 NOTE: log -p may generate unexpected diff output for merge
1749 NOTE: log -p may generate unexpected diff output for merge
1753 changesets, as it will compare the merge changeset against its
1750 changesets, as it will compare the merge changeset against its
1754 first parent only. Also, the files: list will only reflect files
1751 first parent only. Also, the files: list will only reflect files
1755 that are different from BOTH parents.
1752 that are different from BOTH parents.
1756
1753
1757 """
1754 """
1758
1755
1759 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1756 get = util.cachefunc(lambda r: repo[r].changeset())
1760 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1757 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1761
1758
1762 limit = cmdutil.loglimit(opts)
1759 limit = cmdutil.loglimit(opts)
1763 count = 0
1760 count = 0
1764
1761
1765 if opts['copies'] and opts['rev']:
1762 if opts['copies'] and opts['rev']:
1766 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1763 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1767 else:
1764 else:
1768 endrev = repo.changelog.count()
1765 endrev = repo.changelog.count()
1769 rcache = {}
1766 rcache = {}
1770 ncache = {}
1767 ncache = {}
1771 def getrenamed(fn, rev):
1768 def getrenamed(fn, rev):
1772 '''looks up all renames for a file (up to endrev) the first
1769 '''looks up all renames for a file (up to endrev) the first
1773 time the file is given. It indexes on the changerev and only
1770 time the file is given. It indexes on the changerev and only
1774 parses the manifest if linkrev != changerev.
1771 parses the manifest if linkrev != changerev.
1775 Returns rename info for fn at changerev rev.'''
1772 Returns rename info for fn at changerev rev.'''
1776 if fn not in rcache:
1773 if fn not in rcache:
1777 rcache[fn] = {}
1774 rcache[fn] = {}
1778 ncache[fn] = {}
1775 ncache[fn] = {}
1779 fl = repo.file(fn)
1776 fl = repo.file(fn)
1780 for i in xrange(fl.count()):
1777 for i in xrange(fl.count()):
1781 node = fl.node(i)
1778 node = fl.node(i)
1782 lr = fl.linkrev(node)
1779 lr = fl.linkrev(node)
1783 renamed = fl.renamed(node)
1780 renamed = fl.renamed(node)
1784 rcache[fn][lr] = renamed
1781 rcache[fn][lr] = renamed
1785 if renamed:
1782 if renamed:
1786 ncache[fn][node] = renamed
1783 ncache[fn][node] = renamed
1787 if lr >= endrev:
1784 if lr >= endrev:
1788 break
1785 break
1789 if rev in rcache[fn]:
1786 if rev in rcache[fn]:
1790 return rcache[fn][rev]
1787 return rcache[fn][rev]
1791
1788
1792 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1789 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1793 # filectx logic.
1790 # filectx logic.
1794
1791
1795 try:
1792 try:
1796 return repo.changectx(rev).filectx(fn).renamed()
1793 return repo[rev][fn].renamed()
1797 except revlog.LookupError:
1794 except revlog.LookupError:
1798 pass
1795 pass
1799 return None
1796 return None
1800
1797
1801 df = False
1798 df = False
1802 if opts["date"]:
1799 if opts["date"]:
1803 df = util.matchdate(opts["date"])
1800 df = util.matchdate(opts["date"])
1804
1801
1805 only_branches = opts['only_branch']
1802 only_branches = opts['only_branch']
1806
1803
1807 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1804 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1808 for st, rev, fns in changeiter:
1805 for st, rev, fns in changeiter:
1809 if st == 'add':
1806 if st == 'add':
1810 changenode = repo.changelog.node(rev)
1807 changenode = repo.changelog.node(rev)
1811 parents = [p for p in repo.changelog.parentrevs(rev)
1808 parents = [p for p in repo.changelog.parentrevs(rev)
1812 if p != nullrev]
1809 if p != nullrev]
1813 if opts['no_merges'] and len(parents) == 2:
1810 if opts['no_merges'] and len(parents) == 2:
1814 continue
1811 continue
1815 if opts['only_merges'] and len(parents) != 2:
1812 if opts['only_merges'] and len(parents) != 2:
1816 continue
1813 continue
1817
1814
1818 if only_branches:
1815 if only_branches:
1819 revbranch = get(rev)[5]['branch']
1816 revbranch = get(rev)[5]['branch']
1820 if revbranch not in only_branches:
1817 if revbranch not in only_branches:
1821 continue
1818 continue
1822
1819
1823 if df:
1820 if df:
1824 changes = get(rev)
1821 changes = get(rev)
1825 if not df(changes[2][0]):
1822 if not df(changes[2][0]):
1826 continue
1823 continue
1827
1824
1828 if opts['keyword']:
1825 if opts['keyword']:
1829 changes = get(rev)
1826 changes = get(rev)
1830 miss = 0
1827 miss = 0
1831 for k in [kw.lower() for kw in opts['keyword']]:
1828 for k in [kw.lower() for kw in opts['keyword']]:
1832 if not (k in changes[1].lower() or
1829 if not (k in changes[1].lower() or
1833 k in changes[4].lower() or
1830 k in changes[4].lower() or
1834 k in " ".join(changes[3]).lower()):
1831 k in " ".join(changes[3]).lower()):
1835 miss = 1
1832 miss = 1
1836 break
1833 break
1837 if miss:
1834 if miss:
1838 continue
1835 continue
1839
1836
1840 copies = []
1837 copies = []
1841 if opts.get('copies') and rev:
1838 if opts.get('copies') and rev:
1842 for fn in get(rev)[3]:
1839 for fn in get(rev)[3]:
1843 rename = getrenamed(fn, rev)
1840 rename = getrenamed(fn, rev)
1844 if rename:
1841 if rename:
1845 copies.append((fn, rename[0]))
1842 copies.append((fn, rename[0]))
1846 displayer.show(rev, changenode, copies=copies)
1843 displayer.show(rev, changenode, copies=copies)
1847 elif st == 'iter':
1844 elif st == 'iter':
1848 if count == limit: break
1845 if count == limit: break
1849 if displayer.flush(rev):
1846 if displayer.flush(rev):
1850 count += 1
1847 count += 1
1851
1848
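# A self-contained sketch (not part of the changeset) of the -k/--keyword test
# in log() above: a revision is kept only if every keyword occurs in its user,
# description or file list, compared case-insensitively.
def _matches_keywords(keywords, user, description, files):
    haystacks = (user.lower(), description.lower(), " ".join(files).lower())
    return all(any(k.lower() in h for h in haystacks) for k in keywords)

# _matches_keywords(["bug"], "alice", "fix Bug in grep", ["commands.py"]) -> True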
1852 def manifest(ui, repo, node=None, rev=None):
1849 def manifest(ui, repo, node=None, rev=None):
1853 """output the current or given revision of the project manifest
1850 """output the current or given revision of the project manifest
1854
1851
1855 Print a list of version controlled files for the given revision.
1852 Print a list of version controlled files for the given revision.
1856 If no revision is given, the parent of the working directory is used,
1853 If no revision is given, the parent of the working directory is used,
1857 or tip if no revision is checked out.
1854 or tip if no revision is checked out.
1858
1855
1859 The manifest is the list of files being version controlled. If no revision
1856 The manifest is the list of files being version controlled. If no revision
1860 is given then the first parent of the working directory is used.
1857 is given then the first parent of the working directory is used.
1861
1858
1862 With -v flag, print file permissions, symlink and executable bits. With
1859 With -v flag, print file permissions, symlink and executable bits. With
1863 --debug flag, print file revision hashes.
1860 --debug flag, print file revision hashes.
1864 """
1861 """
1865
1862
1866 if rev and node:
1863 if rev and node:
1867 raise util.Abort(_("please specify just one revision"))
1864 raise util.Abort(_("please specify just one revision"))
1868
1865
1869 if not node:
1866 if not node:
1870 node = rev
1867 node = rev
1871
1868
1872 m = repo.changectx(node).manifest()
1869 m = repo[node].manifest()
1873 files = m.keys()
1870 files = m.keys()
1874 files.sort()
1871 files.sort()
1875
1872
1876 for f in files:
1873 for f in files:
1877 if ui.debugflag:
1874 if ui.debugflag:
1878 ui.write("%40s " % hex(m[f]))
1875 ui.write("%40s " % hex(m[f]))
1879 if ui.verbose:
1876 if ui.verbose:
1880 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1877 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1881 perm = m.execf(f) and "755" or "644"
1878 perm = m.execf(f) and "755" or "644"
1882 ui.write("%3s %1s " % (perm, type))
1879 ui.write("%3s %1s " % (perm, type))
1883 ui.write("%s\n" % f)
1880 ui.write("%s\n" % f)
1884
1881
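# A self-contained sketch (not part of the changeset) of the verbose manifest
# columns above: a permission and a one-character type flag derived from the
# executable and symlink bits.
def _manifest_flags(is_exec, is_link):
    ftype = is_exec and "*" or is_link and "@" or " "
    perm = is_exec and "755" or "644"
    return "%3s %1s" % (perm, ftype)

# _manifest_flags(False, True) -> '644 @', _manifest_flags(True, False) -> '755 *'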
1885 def merge(ui, repo, node=None, force=None, rev=None):
1882 def merge(ui, repo, node=None, force=None, rev=None):
1886 """merge working directory with another revision
1883 """merge working directory with another revision
1887
1884
1888 Merge the contents of the current working directory and the
1885 Merge the contents of the current working directory and the
1889 requested revision. Files that changed between either parent are
1886 requested revision. Files that changed between either parent are
1890 marked as changed for the next commit and a commit must be
1887 marked as changed for the next commit and a commit must be
1891 performed before any further updates are allowed.
1888 performed before any further updates are allowed.
1892
1889
1893 If no revision is specified, the working directory's parent is a
1890 If no revision is specified, the working directory's parent is a
1894 head revision, and the current branch contains exactly one other head,
1891 head revision, and the current branch contains exactly one other head,
1895 that other head is merged by default. Otherwise, an explicit
1892 that other head is merged by default. Otherwise, an explicit
1896 revision to merge with must be provided.
1893 revision to merge with must be provided.
1897 """
1894 """
1898
1895
1899 if rev and node:
1896 if rev and node:
1900 raise util.Abort(_("please specify just one revision"))
1897 raise util.Abort(_("please specify just one revision"))
1901 if not node:
1898 if not node:
1902 node = rev
1899 node = rev
1903
1900
1904 if not node:
1901 if not node:
1905 branch = repo.changectx(None).branch()
1902 branch = repo.changectx(None).branch()
1906 bheads = repo.branchheads()
1903 bheads = repo.branchheads()
1907 if len(bheads) > 2:
1904 if len(bheads) > 2:
1908 raise util.Abort(_("branch '%s' has %d heads - "
1905 raise util.Abort(_("branch '%s' has %d heads - "
1909 "please merge with an explicit rev") %
1906 "please merge with an explicit rev") %
1910 (branch, len(bheads)))
1907 (branch, len(bheads)))
1911
1908
1912 parent = repo.dirstate.parents()[0]
1909 parent = repo.dirstate.parents()[0]
1913 if len(bheads) == 1:
1910 if len(bheads) == 1:
1914 if len(repo.heads()) > 1:
1911 if len(repo.heads()) > 1:
1915 raise util.Abort(_("branch '%s' has one head - "
1912 raise util.Abort(_("branch '%s' has one head - "
1916 "please merge with an explicit rev") %
1913 "please merge with an explicit rev") %
1917 branch)
1914 branch)
1918 msg = _('there is nothing to merge')
1915 msg = _('there is nothing to merge')
1919 if parent != repo.lookup(repo.changectx(None).branch()):
1916 if parent != repo.lookup(repo[None].branch()):
1920 msg = _('%s - use "hg update" instead') % msg
1917 msg = _('%s - use "hg update" instead') % msg
1921 raise util.Abort(msg)
1918 raise util.Abort(msg)
1922
1919
1923 if parent not in bheads:
1920 if parent not in bheads:
1924 raise util.Abort(_('working dir not at a head rev - '
1921 raise util.Abort(_('working dir not at a head rev - '
1925 'use "hg update" or merge with an explicit rev'))
1922 'use "hg update" or merge with an explicit rev'))
1926 node = parent == bheads[0] and bheads[-1] or bheads[0]
1923 node = parent == bheads[0] and bheads[-1] or bheads[0]
1927 return hg.merge(repo, node, force=force)
1924 return hg.merge(repo, node, force=force)
1928
1925
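# A self-contained sketch (not part of the changeset) of how merge() above
# picks its default target when the branch has exactly two heads: whichever
# head is not the working directory parent.
def _other_head(parent, bheads):
    # bheads: the branch heads; parent is expected to be one of them
    return parent == bheads[0] and bheads[-1] or bheads[0]

# _other_head("a", ["a", "b"]) -> 'b'; _other_head("b", ["a", "b"]) -> 'a'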
1929 def outgoing(ui, repo, dest=None, **opts):
1926 def outgoing(ui, repo, dest=None, **opts):
1930 """show changesets not found in destination
1927 """show changesets not found in destination
1931
1928
1932 Show changesets not found in the specified destination repository or
1929 Show changesets not found in the specified destination repository or
1933 the default push location. These are the changesets that would be pushed
1930 the default push location. These are the changesets that would be pushed
1934 if a push was requested.
1931 if a push was requested.
1935
1932
1936 See pull for valid destination format details.
1933 See pull for valid destination format details.
1937 """
1934 """
1938 limit = cmdutil.loglimit(opts)
1935 limit = cmdutil.loglimit(opts)
1939 dest, revs, checkout = hg.parseurl(
1936 dest, revs, checkout = hg.parseurl(
1940 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1937 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1941 cmdutil.setremoteconfig(ui, opts)
1938 cmdutil.setremoteconfig(ui, opts)
1942 if revs:
1939 if revs:
1943 revs = [repo.lookup(rev) for rev in revs]
1940 revs = [repo.lookup(rev) for rev in revs]
1944
1941
1945 other = hg.repository(ui, dest)
1942 other = hg.repository(ui, dest)
1946 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1943 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1947 o = repo.findoutgoing(other, force=opts['force'])
1944 o = repo.findoutgoing(other, force=opts['force'])
1948 if not o:
1945 if not o:
1949 ui.status(_("no changes found\n"))
1946 ui.status(_("no changes found\n"))
1950 return 1
1947 return 1
1951 o = repo.changelog.nodesbetween(o, revs)[0]
1948 o = repo.changelog.nodesbetween(o, revs)[0]
1952 if opts['newest_first']:
1949 if opts['newest_first']:
1953 o.reverse()
1950 o.reverse()
1954 displayer = cmdutil.show_changeset(ui, repo, opts)
1951 displayer = cmdutil.show_changeset(ui, repo, opts)
1955 count = 0
1952 count = 0
1956 for n in o:
1953 for n in o:
1957 if count >= limit:
1954 if count >= limit:
1958 break
1955 break
1959 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1956 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1960 if opts['no_merges'] and len(parents) == 2:
1957 if opts['no_merges'] and len(parents) == 2:
1961 continue
1958 continue
1962 count += 1
1959 count += 1
1963 displayer.show(changenode=n)
1960 displayer.show(changenode=n)
1964
1961
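# A self-contained sketch (not part of the changeset) of the display loop that
# incoming() and outgoing() above share: stop once --limit changesets have been
# selected and drop merges (two parents) when --no-merges is given.
def _select_changesets(nodes, parents_of, limit, no_merges):
    # parents_of(node) -> parent nodes with null parents already filtered out
    selected = []
    for n in nodes:
        if len(selected) >= limit:
            break
        if no_merges and len(parents_of(n)) == 2:
            continue
        selected.append(n)
    return selected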
1965 def parents(ui, repo, file_=None, **opts):
1962 def parents(ui, repo, file_=None, **opts):
1966 """show the parents of the working dir or revision
1963 """show the parents of the working dir or revision
1967
1964
1968 Print the working directory's parent revisions. If a
1965 Print the working directory's parent revisions. If a
1969 revision is given via --rev, the parent of that revision
1966 revision is given via --rev, the parent of that revision
1970 will be printed. If a file argument is given, the revision in
1967 will be printed. If a file argument is given, the revision in
1971 which the file was last changed (before the working directory
1968 which the file was last changed (before the working directory
1972 revision or the argument to --rev if given) is printed.
1969 revision or the argument to --rev if given) is printed.
1973 """
1970 """
1974 rev = opts.get('rev')
1971 rev = opts.get('rev')
1975 if rev:
1972 if rev:
1976 ctx = repo.changectx(rev)
1973 ctx = repo[rev]
1977 else:
1974 else:
1978 ctx = repo.changectx(None)
1975 ctx = repo[None]
1979
1976
1980 if file_:
1977 if file_:
1981 m = cmdutil.match(repo, (file_,), opts)
1978 m = cmdutil.match(repo, (file_,), opts)
1982 if m.anypats() or len(m.files()) != 1:
1979 if m.anypats() or len(m.files()) != 1:
1983 raise util.Abort(_('can only specify an explicit file name'))
1980 raise util.Abort(_('can only specify an explicit file name'))
1984 file_ = m.files()[0]
1981 file_ = m.files()[0]
1985 filenodes = []
1982 filenodes = []
1986 for cp in ctx.parents():
1983 for cp in ctx.parents():
1987 if not cp:
1984 if not cp:
1988 continue
1985 continue
1989 try:
1986 try:
1990 filenodes.append(cp.filenode(file_))
1987 filenodes.append(cp.filenode(file_))
1991 except revlog.LookupError:
1988 except revlog.LookupError:
1992 pass
1989 pass
1993 if not filenodes:
1990 if not filenodes:
1994 raise util.Abort(_("'%s' not found in manifest!") % file_)
1991 raise util.Abort(_("'%s' not found in manifest!") % file_)
1995 fl = repo.file(file_)
1992 fl = repo.file(file_)
1996 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1993 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1997 else:
1994 else:
1998 p = [cp.node() for cp in ctx.parents()]
1995 p = [cp.node() for cp in ctx.parents()]
1999
1996
2000 displayer = cmdutil.show_changeset(ui, repo, opts)
1997 displayer = cmdutil.show_changeset(ui, repo, opts)
2001 for n in p:
1998 for n in p:
2002 if n != nullid:
1999 if n != nullid:
2003 displayer.show(changenode=n)
2000 displayer.show(changenode=n)
2004
2001
2005 def paths(ui, repo, search=None):
2002 def paths(ui, repo, search=None):
2006 """show definition of symbolic path names
2003 """show definition of symbolic path names
2007
2004
2008 Show definition of symbolic path name NAME. If no name is given, show
2005 Show definition of symbolic path name NAME. If no name is given, show
2009 definition of available names.
2006 definition of available names.
2010
2007
2011 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2008 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2012 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2009 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2013 """
2010 """
2014 if search:
2011 if search:
2015 for name, path in ui.configitems("paths"):
2012 for name, path in ui.configitems("paths"):
2016 if name == search:
2013 if name == search:
2017 ui.write("%s\n" % util.hidepassword(path))
2014 ui.write("%s\n" % util.hidepassword(path))
2018 return
2015 return
2019 ui.warn(_("not found!\n"))
2016 ui.warn(_("not found!\n"))
2020 return 1
2017 return 1
2021 else:
2018 else:
2022 for name, path in ui.configitems("paths"):
2019 for name, path in ui.configitems("paths"):
2023 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
2020 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
2024
2021
2025 def postincoming(ui, repo, modheads, optupdate, checkout):
2022 def postincoming(ui, repo, modheads, optupdate, checkout):
2026 if modheads == 0:
2023 if modheads == 0:
2027 return
2024 return
2028 if optupdate:
2025 if optupdate:
2029 if modheads <= 1 or checkout:
2026 if modheads <= 1 or checkout:
2030 return hg.update(repo, checkout)
2027 return hg.update(repo, checkout)
2031 else:
2028 else:
2032 ui.status(_("not updating, since new heads added\n"))
2029 ui.status(_("not updating, since new heads added\n"))
2033 if modheads > 1:
2030 if modheads > 1:
2034 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2031 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2035 else:
2032 else:
2036 ui.status(_("(run 'hg update' to get a working copy)\n"))
2033 ui.status(_("(run 'hg update' to get a working copy)\n"))
2037
2034
2038 def pull(ui, repo, source="default", **opts):
2035 def pull(ui, repo, source="default", **opts):
2039 """pull changes from the specified source
2036 """pull changes from the specified source
2040
2037
2041 Pull changes from a remote repository to a local one.
2038 Pull changes from a remote repository to a local one.
2042
2039
2043 This finds all changes from the repository at the specified path
2040 This finds all changes from the repository at the specified path
2044 or URL and adds them to the local repository. By default, this
2041 or URL and adds them to the local repository. By default, this
2045 does not update the copy of the project in the working directory.
2042 does not update the copy of the project in the working directory.
2046
2043
2047 Valid URLs are of the form:
2044 Valid URLs are of the form:
2048
2045
2049 local/filesystem/path (or file://local/filesystem/path)
2046 local/filesystem/path (or file://local/filesystem/path)
2050 http://[user@]host[:port]/[path]
2047 http://[user@]host[:port]/[path]
2051 https://[user@]host[:port]/[path]
2048 https://[user@]host[:port]/[path]
2052 ssh://[user@]host[:port]/[path]
2049 ssh://[user@]host[:port]/[path]
2053 static-http://host[:port]/[path]
2050 static-http://host[:port]/[path]
2054
2051
2055 Paths in the local filesystem can either point to Mercurial
2052 Paths in the local filesystem can either point to Mercurial
2056 repositories or to bundle files (as created by 'hg bundle' or
2053 repositories or to bundle files (as created by 'hg bundle' or
2057 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2054 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2058 allows access to a Mercurial repository where you simply use a web
2055 allows access to a Mercurial repository where you simply use a web
2059 server to publish the .hg directory as static content.
2056 server to publish the .hg directory as static content.
2060
2057
2061 An optional identifier after # indicates a particular branch, tag,
2058 An optional identifier after # indicates a particular branch, tag,
2062 or changeset to pull.
2059 or changeset to pull.
2063
2060
2064 Some notes about using SSH with Mercurial:
2061 Some notes about using SSH with Mercurial:
2065 - SSH requires an accessible shell account on the destination machine
2062 - SSH requires an accessible shell account on the destination machine
2066 and a copy of hg in the remote path or specified with remotecmd.
2063 and a copy of hg in the remote path or specified with remotecmd.
2067 - path is relative to the remote user's home directory by default.
2064 - path is relative to the remote user's home directory by default.
2068 Use an extra slash at the start of a path to specify an absolute path:
2065 Use an extra slash at the start of a path to specify an absolute path:
2069 ssh://example.com//tmp/repository
2066 ssh://example.com//tmp/repository
2070 - Mercurial doesn't use its own compression via SSH; the right thing
2067 - Mercurial doesn't use its own compression via SSH; the right thing
2071 to do is to configure it in your ~/.ssh/config, e.g.:
2068 to do is to configure it in your ~/.ssh/config, e.g.:
2072 Host *.mylocalnetwork.example.com
2069 Host *.mylocalnetwork.example.com
2073 Compression no
2070 Compression no
2074 Host *
2071 Host *
2075 Compression yes
2072 Compression yes
2076 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2073 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2077 with the --ssh command line option.
2074 with the --ssh command line option.
2078 """
2075 """
2079 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
2076 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
2080 cmdutil.setremoteconfig(ui, opts)
2077 cmdutil.setremoteconfig(ui, opts)
2081
2078
2082 other = hg.repository(ui, source)
2079 other = hg.repository(ui, source)
2083 ui.status(_('pulling from %s\n') % util.hidepassword(source))
2080 ui.status(_('pulling from %s\n') % util.hidepassword(source))
2084 if revs:
2081 if revs:
2085 try:
2082 try:
2086 revs = [other.lookup(rev) for rev in revs]
2083 revs = [other.lookup(rev) for rev in revs]
2087 except NoCapability:
2084 except NoCapability:
2088 error = _("Other repository doesn't support revision lookup, "
2085 error = _("Other repository doesn't support revision lookup, "
2089 "so a rev cannot be specified.")
2086 "so a rev cannot be specified.")
2090 raise util.Abort(error)
2087 raise util.Abort(error)
2091
2088
2092 modheads = repo.pull(other, heads=revs, force=opts['force'])
2089 modheads = repo.pull(other, heads=revs, force=opts['force'])
2093 return postincoming(ui, repo, modheads, opts['update'], checkout)
2090 return postincoming(ui, repo, modheads, opts['update'], checkout)
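# --- illustrative sketch (not part of commands.py) ----------------------
# The pull help text above notes that an optional identifier after '#'
# names a branch, tag or changeset to pull.  The real parsing is done by
# hg.parseurl; the hypothetical helper below only illustrates the
# URL#identifier convention itself.
def split_fragment_sketch(source):
    """Split 'URL#identifier' into (URL, identifier or None)."""
    if '#' in source:
        url, ident = source.rsplit('#', 1)
        return url, ident
    return source, None

# example:
#   split_fragment_sketch('http://example.com/repo#stable')
#   -> ('http://example.com/repo', 'stable')
# -------------------------------------------------------------------------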
2094
2091
2095 def push(ui, repo, dest=None, **opts):
2092 def push(ui, repo, dest=None, **opts):
2096 """push changes to the specified destination
2093 """push changes to the specified destination
2097
2094
2098 Push changes from the local repository to the given destination.
2095 Push changes from the local repository to the given destination.
2099
2096
2100 This is the symmetrical operation for pull. It moves
2097 This is the symmetrical operation for pull. It moves
2101 changes from the current repository to a different one. If the
2098 changes from the current repository to a different one. If the
2102 destination is local this is identical to a pull in that directory
2099 destination is local this is identical to a pull in that directory
2103 from the current one.
2100 from the current one.
2104
2101
2105 By default, push will refuse to run if it detects the result would
2102 By default, push will refuse to run if it detects the result would
2106 increase the number of remote heads. This generally indicates that
2103 increase the number of remote heads. This generally indicates that
2107 the client has forgotten to pull and merge before pushing.
2104 the client has forgotten to pull and merge before pushing.
2108
2105
2109 Valid URLs are of the form:
2106 Valid URLs are of the form:
2110
2107
2111 local/filesystem/path (or file://local/filesystem/path)
2108 local/filesystem/path (or file://local/filesystem/path)
2112 ssh://[user@]host[:port]/[path]
2109 ssh://[user@]host[:port]/[path]
2113 http://[user@]host[:port]/[path]
2110 http://[user@]host[:port]/[path]
2114 https://[user@]host[:port]/[path]
2111 https://[user@]host[:port]/[path]
2115
2112
2116 An optional identifier after # indicates a particular branch, tag,
2113 An optional identifier after # indicates a particular branch, tag,
2117 or changeset to push. If -r is used, the named changeset and all its
2114 or changeset to push. If -r is used, the named changeset and all its
2118 ancestors will be pushed to the remote repository.
2115 ancestors will be pushed to the remote repository.
2119
2116
2120 Look at the help text for the pull command for important details
2117 Look at the help text for the pull command for important details
2121 about ssh:// URLs.
2118 about ssh:// URLs.
2122
2119
2123 Pushing to http:// and https:// URLs is only possible if this
2120 Pushing to http:// and https:// URLs is only possible if this
2124 feature is explicitly enabled on the remote Mercurial server.
2121 feature is explicitly enabled on the remote Mercurial server.
2125 """
2122 """
2126 dest, revs, checkout = hg.parseurl(
2123 dest, revs, checkout = hg.parseurl(
2127 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2124 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2128 cmdutil.setremoteconfig(ui, opts)
2125 cmdutil.setremoteconfig(ui, opts)
2129
2126
2130 other = hg.repository(ui, dest)
2127 other = hg.repository(ui, dest)
2131 ui.status('pushing to %s\n' % util.hidepassword(dest))
2128 ui.status('pushing to %s\n' % util.hidepassword(dest))
2132 if revs:
2129 if revs:
2133 revs = [repo.lookup(rev) for rev in revs]
2130 revs = [repo.lookup(rev) for rev in revs]
2134 r = repo.push(other, opts['force'], revs=revs)
2131 r = repo.push(other, opts['force'], revs=revs)
2135 return r == 0
2132 return r == 0
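# --- illustrative sketch (not part of commands.py) ----------------------
# The push help text above says push refuses to run when the result would
# increase the number of remote heads.  A toy restatement of that guard
# over plain collections of head identifiers; the real check lives in the
# repository layer and is considerably more involved.
def would_add_heads_sketch(remote_heads, heads_after_push):
    """Return True if pushing would leave the remote with more heads."""
    return len(set(heads_after_push)) > len(set(remote_heads))

# example:
#   would_add_heads_sketch(['a'], ['a', 'b'])  -> True   (refuse; pull/merge first)
#   would_add_heads_sketch(['a'], ['b'])       -> False
# -------------------------------------------------------------------------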
2136
2133
2137 def rawcommit(ui, repo, *pats, **opts):
2134 def rawcommit(ui, repo, *pats, **opts):
2138 """raw commit interface (DEPRECATED)
2135 """raw commit interface (DEPRECATED)
2139
2136
2140 (DEPRECATED)
2137 (DEPRECATED)
2141 Lowlevel commit, for use in helper scripts.
2138 Lowlevel commit, for use in helper scripts.
2142
2139
2143 This command is not intended to be used by normal users, as it is
2140 This command is not intended to be used by normal users, as it is
2144 primarily useful for importing from other SCMs.
2141 primarily useful for importing from other SCMs.
2145
2142
2146 This command is now deprecated and will be removed in a future
2143 This command is now deprecated and will be removed in a future
2147 release; please use debugsetparents and commit instead.
2144 release; please use debugsetparents and commit instead.
2148 """
2145 """
2149
2146
2150 ui.warn(_("(the rawcommit command is deprecated)\n"))
2147 ui.warn(_("(the rawcommit command is deprecated)\n"))
2151
2148
2152 message = cmdutil.logmessage(opts)
2149 message = cmdutil.logmessage(opts)
2153
2150
2154 files = cmdutil.match(repo, pats, opts).files()
2151 files = cmdutil.match(repo, pats, opts).files()
2155 if opts['files']:
2152 if opts['files']:
2156 files += open(opts['files']).read().splitlines()
2153 files += open(opts['files']).read().splitlines()
2157
2154
2158 parents = [repo.lookup(p) for p in opts['parent']]
2155 parents = [repo.lookup(p) for p in opts['parent']]
2159
2156
2160 try:
2157 try:
2161 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2158 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2162 except ValueError, inst:
2159 except ValueError, inst:
2163 raise util.Abort(str(inst))
2160 raise util.Abort(str(inst))
2164
2161
2165 def recover(ui, repo):
2162 def recover(ui, repo):
2166 """roll back an interrupted transaction
2163 """roll back an interrupted transaction
2167
2164
2168 Recover from an interrupted commit or pull.
2165 Recover from an interrupted commit or pull.
2169
2166
2170 This command tries to fix the repository status after an interrupted
2167 This command tries to fix the repository status after an interrupted
2171 operation. It should only be necessary when Mercurial suggests it.
2168 operation. It should only be necessary when Mercurial suggests it.
2172 """
2169 """
2173 if repo.recover():
2170 if repo.recover():
2174 return hg.verify(repo)
2171 return hg.verify(repo)
2175 return 1
2172 return 1
2176
2173
2177 def remove(ui, repo, *pats, **opts):
2174 def remove(ui, repo, *pats, **opts):
2178 """remove the specified files on the next commit
2175 """remove the specified files on the next commit
2179
2176
2180 Schedule the indicated files for removal from the repository.
2177 Schedule the indicated files for removal from the repository.
2181
2178
2182 This only removes files from the current branch, not from the entire
2179 This only removes files from the current branch, not from the entire
2183 project history. -A can be used to remove only files that have already
2180 project history. -A can be used to remove only files that have already
2184 been deleted, -f can be used to force deletion, and -Af can be used
2181 been deleted, -f can be used to force deletion, and -Af can be used
2185 to remove files from the next revision without deleting them.
2182 to remove files from the next revision without deleting them.
2186
2183
2187 The following table details the behavior of remove for different file
2184 The following table details the behavior of remove for different file
2188 states (columns) and option combinations (rows). The file states are
2185 states (columns) and option combinations (rows). The file states are
2189 Added, Clean, Modified and Missing (as reported by hg status). The
2186 Added, Clean, Modified and Missing (as reported by hg status). The
2190 actions are Warn, Remove (from branch) and Delete (from disk).
2187 actions are Warn, Remove (from branch) and Delete (from disk).
2191
2188
2192 A C M !
2189 A C M !
2193 none W RD W R
2190 none W RD W R
2194 -f R RD RD R
2191 -f R RD RD R
2195 -A W W W R
2192 -A W W W R
2196 -Af R R R R
2193 -Af R R R R
2197
2194
2198 This command schedules the files to be removed at the next commit.
2195 This command schedules the files to be removed at the next commit.
2199 To undo a remove before that, see hg revert.
2196 To undo a remove before that, see hg revert.
2200 """
2197 """
2201
2198
2202 after, force = opts.get('after'), opts.get('force')
2199 after, force = opts.get('after'), opts.get('force')
2203 if not pats and not after:
2200 if not pats and not after:
2204 raise util.Abort(_('no files specified'))
2201 raise util.Abort(_('no files specified'))
2205
2202
2206 m = cmdutil.match(repo, pats, opts)
2203 m = cmdutil.match(repo, pats, opts)
2207 mardu = map(dict.fromkeys, repo.status(match=m))[:5]
2204 mardu = map(dict.fromkeys, repo.status(match=m))[:5]
2208 modified, added, removed, deleted, unknown = mardu
2205 modified, added, removed, deleted, unknown = mardu
2209
2206
2210 remove, forget = [], []
2207 remove, forget = [], []
2211 for abs in repo.walk(m):
2208 for abs in repo.walk(m):
2212
2209
2213 reason = None
2210 reason = None
2214 if abs in removed or abs in unknown:
2211 if abs in removed or abs in unknown:
2215 continue
2212 continue
2216
2213
2217 # last column
2214 # last column
2218 elif abs in deleted:
2215 elif abs in deleted:
2219 remove.append(abs)
2216 remove.append(abs)
2220
2217
2221 # rest of the third row
2218 # rest of the third row
2222 elif after and not force:
2219 elif after and not force:
2223 reason = _('still exists (use -f to force removal)')
2220 reason = _('still exists (use -f to force removal)')
2224
2221
2225 # rest of the first column
2222 # rest of the first column
2226 elif abs in added:
2223 elif abs in added:
2227 if not force:
2224 if not force:
2228 reason = _('has been marked for add (use -f to force removal)')
2225 reason = _('has been marked for add (use -f to force removal)')
2229 else:
2226 else:
2230 forget.append(abs)
2227 forget.append(abs)
2231
2228
2232 # rest of the third column
2229 # rest of the third column
2233 elif abs in modified:
2230 elif abs in modified:
2234 if not force:
2231 if not force:
2235 reason = _('is modified (use -f to force removal)')
2232 reason = _('is modified (use -f to force removal)')
2236 else:
2233 else:
2237 remove.append(abs)
2234 remove.append(abs)
2238
2235
2239 # rest of the second column
2236 # rest of the second column
2240 elif not reason:
2237 elif not reason:
2241 remove.append(abs)
2238 remove.append(abs)
2242
2239
2243 if reason:
2240 if reason:
2244 ui.warn(_('not removing %s: file %s\n') % (m.rel(abs), reason))
2241 ui.warn(_('not removing %s: file %s\n') % (m.rel(abs), reason))
2245 elif ui.verbose or not m.exact(abs):
2242 elif ui.verbose or not m.exact(abs):
2246 ui.status(_('removing %s\n') % m.rel(abs))
2243 ui.status(_('removing %s\n') % m.rel(abs))
2247
2244
2248 repo.forget(forget)
2245 repo.forget(forget)
2249 repo.remove(remove, unlink=not after)
2246 repo.remove(remove, unlink=not after)
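# --- illustrative sketch (not part of commands.py) ----------------------
# The remove help text above gives a table of actions per file state
# (Added, Clean, Modified, Missing) and option combination (none, -f, -A,
# -Af), where W = warn, R = remove from branch and D = delete from disk.
# A literal, standalone encoding of that table, for illustration only:
REMOVE_TABLE_SKETCH = {
    #          A     C     M     !
    'none': ('W',  'RD', 'W',  'R'),
    '-f':   ('R',  'RD', 'RD', 'R'),
    '-A':   ('W',  'W',  'W',  'R'),
    '-Af':  ('R',  'R',  'R',  'R'),
}
STATE_INDEX_SKETCH = {'added': 0, 'clean': 1, 'modified': 2, 'missing': 3}

def remove_action_sketch(opts, state):
    """Look up the documented action for an option set and file state."""
    return REMOVE_TABLE_SKETCH[opts][STATE_INDEX_SKETCH[state]]

# example:
#   remove_action_sketch('-Af', 'modified') -> 'R'
#   remove_action_sketch('none', 'clean')   -> 'RD'
# -------------------------------------------------------------------------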
2250
2247
2251 def rename(ui, repo, *pats, **opts):
2248 def rename(ui, repo, *pats, **opts):
2252 """rename files; equivalent of copy + remove
2249 """rename files; equivalent of copy + remove
2253
2250
2254 Mark dest as copies of sources; mark sources for deletion. If
2251 Mark dest as copies of sources; mark sources for deletion. If
2255 dest is a directory, copies are put in that directory. If dest is
2252 dest is a directory, copies are put in that directory. If dest is
2256 a file, there can only be one source.
2253 a file, there can only be one source.
2257
2254
2258 By default, this command copies the contents of files as they
2255 By default, this command copies the contents of files as they
2259 stand in the working directory. If invoked with --after, the
2256 stand in the working directory. If invoked with --after, the
2260 operation is recorded, but no copying is performed.
2257 operation is recorded, but no copying is performed.
2261
2258
2262 This command takes effect in the next commit. To undo a rename
2259 This command takes effect in the next commit. To undo a rename
2263 before that, see hg revert.
2260 before that, see hg revert.
2264 """
2261 """
2265 wlock = repo.wlock(False)
2262 wlock = repo.wlock(False)
2266 try:
2263 try:
2267 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2264 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2268 finally:
2265 finally:
2269 del wlock
2266 del wlock
2270
2267
2271 def resolve(ui, repo, *pats, **opts):
2268 def resolve(ui, repo, *pats, **opts):
2272 """resolve file merges from a branch merge or update
2269 """resolve file merges from a branch merge or update
2273
2270
2274 This command will attempt to resolve unresolved merges from the
2271 This command will attempt to resolve unresolved merges from the
2275 last update or merge command. This will use the local file
2272 last update or merge command. This will use the local file
2276 revision preserved at the last update or merge to cleanly retry
2273 revision preserved at the last update or merge to cleanly retry
2277 the file merge attempt. With no file or options specified, this
2274 the file merge attempt. With no file or options specified, this
2278 command will attempt to resolve all unresolved files.
2275 command will attempt to resolve all unresolved files.
2279
2276
2280 The codes used to show the status of files are:
2277 The codes used to show the status of files are:
2281 U = unresolved
2278 U = unresolved
2282 R = resolved
2279 R = resolved
2283 """
2280 """
2284
2281
2285 if len([x for x in opts if opts[x]]) > 1:
2282 if len([x for x in opts if opts[x]]) > 1:
2286 raise util.Abort(_("too many options specified"))
2283 raise util.Abort(_("too many options specified"))
2287
2284
2288 ms = merge_.mergestate(repo)
2285 ms = merge_.mergestate(repo)
2289 m = cmdutil.match(repo, pats, opts)
2286 m = cmdutil.match(repo, pats, opts)
2290
2287
2291 for f in ms:
2288 for f in ms:
2292 if m(f):
2289 if m(f):
2293 if opts.get("list"):
2290 if opts.get("list"):
2294 ui.write("%s %s\n" % (ms[f].upper(), f))
2291 ui.write("%s %s\n" % (ms[f].upper(), f))
2295 elif opts.get("mark"):
2292 elif opts.get("mark"):
2296 ms.mark(f, "r")
2293 ms.mark(f, "r")
2297 elif opts.get("unmark"):
2294 elif opts.get("unmark"):
2298 ms.mark(f, "u")
2295 ms.mark(f, "u")
2299 else:
2296 else:
2300 wctx = repo.changectx(None)
2297 wctx = repo[None]
2301 mctx = wctx.parents()[-1]
2298 mctx = wctx.parents()[-1]
2302 ms.resolve(f, wctx, mctx)
2299 ms.resolve(f, wctx, mctx)
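# --- illustrative sketch (not part of commands.py) ----------------------
# resolve --list prints one line per merged file using the codes described
# above (U = unresolved, R = resolved).  A toy merge state -- a plain dict
# standing in for merge_.mergestate -- reproduces the output shape:
def list_resolve_sketch(mergestate):
    """Print 'U path' / 'R path' lines from a {path: 'u'|'r'} mapping."""
    for path in sorted(mergestate):
        print("%s %s" % (mergestate[path].upper(), path))

# example: list_resolve_sketch({'a.txt': 'u', 'b.txt': 'r'}) prints
#   U a.txt
#   R b.txt
# -------------------------------------------------------------------------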
2303
2300
2304 def revert(ui, repo, *pats, **opts):
2301 def revert(ui, repo, *pats, **opts):
2305 """restore individual files or dirs to an earlier state
2302 """restore individual files or dirs to an earlier state
2306
2303
2307 (use update -r to check out earlier revisions; revert does not
2304 (use update -r to check out earlier revisions; revert does not
2308 change the working dir parents)
2305 change the working dir parents)
2309
2306
2310 With no revision specified, revert the named files or directories
2307 With no revision specified, revert the named files or directories
2311 to the contents they had in the parent of the working directory.
2308 to the contents they had in the parent of the working directory.
2312 This restores the contents of the affected files to an unmodified
2309 This restores the contents of the affected files to an unmodified
2313 state and unschedules adds, removes, copies, and renames. If the
2310 state and unschedules adds, removes, copies, and renames. If the
2314 working directory has two parents, you must explicitly specify the
2311 working directory has two parents, you must explicitly specify the
2315 revision to revert to.
2312 revision to revert to.
2316
2313
2317 Using the -r option, revert the given files or directories to their
2314 Using the -r option, revert the given files or directories to their
2318 contents as of a specific revision. This can be helpful to "roll
2315 contents as of a specific revision. This can be helpful to "roll
2319 back" some or all of an earlier change.
2316 back" some or all of an earlier change.
2320 See 'hg help dates' for a list of formats valid for -d/--date.
2317 See 'hg help dates' for a list of formats valid for -d/--date.
2321
2318
2322 Revert modifies the working directory. It does not commit any
2319 Revert modifies the working directory. It does not commit any
2323 changes, or change the parent of the working directory. If you
2320 changes, or change the parent of the working directory. If you
2324 revert to a revision other than the parent of the working
2321 revert to a revision other than the parent of the working
2325 directory, the reverted files will thus appear modified
2322 directory, the reverted files will thus appear modified
2326 afterwards.
2323 afterwards.
2327
2324
2328 If a file has been deleted, it is restored. If the executable
2325 If a file has been deleted, it is restored. If the executable
2329 mode of a file was changed, it is reset.
2326 mode of a file was changed, it is reset.
2330
2327
2331 If names are given, all files matching the names are reverted.
2328 If names are given, all files matching the names are reverted.
2332 If no arguments are given, no files are reverted.
2329 If no arguments are given, no files are reverted.
2333
2330
2334 Modified files are saved with a .orig suffix before reverting.
2331 Modified files are saved with a .orig suffix before reverting.
2335 To disable these backups, use --no-backup.
2332 To disable these backups, use --no-backup.
2336 """
2333 """
2337
2334
2338 if opts["date"]:
2335 if opts["date"]:
2339 if opts["rev"]:
2336 if opts["rev"]:
2340 raise util.Abort(_("you can't specify a revision and a date"))
2337 raise util.Abort(_("you can't specify a revision and a date"))
2341 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2338 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2342
2339
2343 if not pats and not opts['all']:
2340 if not pats and not opts['all']:
2344 raise util.Abort(_('no files or directories specified; '
2341 raise util.Abort(_('no files or directories specified; '
2345 'use --all to revert the whole repo'))
2342 'use --all to revert the whole repo'))
2346
2343
2347 parent, p2 = repo.dirstate.parents()
2344 parent, p2 = repo.dirstate.parents()
2348 if not opts['rev'] and p2 != nullid:
2345 if not opts['rev'] and p2 != nullid:
2349 raise util.Abort(_('uncommitted merge - please provide a '
2346 raise util.Abort(_('uncommitted merge - please provide a '
2350 'specific revision'))
2347 'specific revision'))
2351 ctx = repo.changectx(opts['rev'])
2348 ctx = repo[opts['rev']]
2352 node = ctx.node()
2349 node = ctx.node()
2353 mf = ctx.manifest()
2350 mf = ctx.manifest()
2354 if node == parent:
2351 if node == parent:
2355 pmf = mf
2352 pmf = mf
2356 else:
2353 else:
2357 pmf = None
2354 pmf = None
2358
2355
2359 # need all matching names in dirstate and manifest of target rev,
2356 # need all matching names in dirstate and manifest of target rev,
2360 # so have to walk both. do not print errors if files exist in one
2357 # so have to walk both. do not print errors if files exist in one
2361 # but not other.
2358 # but not other.
2362
2359
2363 names = {}
2360 names = {}
2364
2361
2365 wlock = repo.wlock()
2362 wlock = repo.wlock()
2366 try:
2363 try:
2367 # walk dirstate.
2364 # walk dirstate.
2368 files = []
2365 files = []
2369
2366
2370 m = cmdutil.match(repo, pats, opts)
2367 m = cmdutil.match(repo, pats, opts)
2371 m.bad = lambda x,y: False
2368 m.bad = lambda x,y: False
2372 for abs in repo.walk(m):
2369 for abs in repo.walk(m):
2373 names[abs] = m.rel(abs), m.exact(abs)
2370 names[abs] = m.rel(abs), m.exact(abs)
2374
2371
2375 # walk target manifest.
2372 # walk target manifest.
2376
2373
2377 def badfn(path, msg):
2374 def badfn(path, msg):
2378 if path in names:
2375 if path in names:
2379 return False
2376 return False
2380 path_ = path + '/'
2377 path_ = path + '/'
2381 for f in names:
2378 for f in names:
2382 if f.startswith(path_):
2379 if f.startswith(path_):
2383 return False
2380 return False
2384 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2381 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2385 return False
2382 return False
2386
2383
2387 m = cmdutil.match(repo, pats, opts)
2384 m = cmdutil.match(repo, pats, opts)
2388 m.bad = badfn
2385 m.bad = badfn
2389 for abs in repo.walk(m, node=node):
2386 for abs in repo.walk(m, node=node):
2390 if abs not in names:
2387 if abs not in names:
2391 names[abs] = m.rel(abs), m.exact(abs)
2388 names[abs] = m.rel(abs), m.exact(abs)
2392
2389
2393 m = cmdutil.matchfiles(repo, names)
2390 m = cmdutil.matchfiles(repo, names)
2394 changes = repo.status(match=m)[:4]
2391 changes = repo.status(match=m)[:4]
2395 modified, added, removed, deleted = map(dict.fromkeys, changes)
2392 modified, added, removed, deleted = map(dict.fromkeys, changes)
2396
2393
2397 # if f is a rename, also revert the source
2394 # if f is a rename, also revert the source
2398 cwd = repo.getcwd()
2395 cwd = repo.getcwd()
2399 for f in added:
2396 for f in added:
2400 src = repo.dirstate.copied(f)
2397 src = repo.dirstate.copied(f)
2401 if src and src not in names and repo.dirstate[src] == 'r':
2398 if src and src not in names and repo.dirstate[src] == 'r':
2402 removed[src] = None
2399 removed[src] = None
2403 names[src] = (repo.pathto(src, cwd), True)
2400 names[src] = (repo.pathto(src, cwd), True)
2404
2401
2405 def removeforget(abs):
2402 def removeforget(abs):
2406 if repo.dirstate[abs] == 'a':
2403 if repo.dirstate[abs] == 'a':
2407 return _('forgetting %s\n')
2404 return _('forgetting %s\n')
2408 return _('removing %s\n')
2405 return _('removing %s\n')
2409
2406
2410 revert = ([], _('reverting %s\n'))
2407 revert = ([], _('reverting %s\n'))
2411 add = ([], _('adding %s\n'))
2408 add = ([], _('adding %s\n'))
2412 remove = ([], removeforget)
2409 remove = ([], removeforget)
2413 undelete = ([], _('undeleting %s\n'))
2410 undelete = ([], _('undeleting %s\n'))
2414
2411
2415 disptable = (
2412 disptable = (
2416 # dispatch table:
2413 # dispatch table:
2417 # file state
2414 # file state
2418 # action if in target manifest
2415 # action if in target manifest
2419 # action if not in target manifest
2416 # action if not in target manifest
2420 # make backup if in target manifest
2417 # make backup if in target manifest
2421 # make backup if not in target manifest
2418 # make backup if not in target manifest
2422 (modified, revert, remove, True, True),
2419 (modified, revert, remove, True, True),
2423 (added, revert, remove, True, False),
2420 (added, revert, remove, True, False),
2424 (removed, undelete, None, False, False),
2421 (removed, undelete, None, False, False),
2425 (deleted, revert, remove, False, False),
2422 (deleted, revert, remove, False, False),
2426 )
2423 )
2427
2424
2428 entries = names.items()
2425 entries = names.items()
2429 entries.sort()
2426 entries.sort()
2430
2427
2431 for abs, (rel, exact) in entries:
2428 for abs, (rel, exact) in entries:
2432 mfentry = mf.get(abs)
2429 mfentry = mf.get(abs)
2433 target = repo.wjoin(abs)
2430 target = repo.wjoin(abs)
2434 def handle(xlist, dobackup):
2431 def handle(xlist, dobackup):
2435 xlist[0].append(abs)
2432 xlist[0].append(abs)
2436 if dobackup and not opts['no_backup'] and util.lexists(target):
2433 if dobackup and not opts['no_backup'] and util.lexists(target):
2437 bakname = "%s.orig" % rel
2434 bakname = "%s.orig" % rel
2438 ui.note(_('saving current version of %s as %s\n') %
2435 ui.note(_('saving current version of %s as %s\n') %
2439 (rel, bakname))
2436 (rel, bakname))
2440 if not opts.get('dry_run'):
2437 if not opts.get('dry_run'):
2441 util.copyfile(target, bakname)
2438 util.copyfile(target, bakname)
2442 if ui.verbose or not exact:
2439 if ui.verbose or not exact:
2443 msg = xlist[1]
2440 msg = xlist[1]
2444 if not isinstance(msg, basestring):
2441 if not isinstance(msg, basestring):
2445 msg = msg(abs)
2442 msg = msg(abs)
2446 ui.status(msg % rel)
2443 ui.status(msg % rel)
2447 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2444 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2448 if abs not in table: continue
2445 if abs not in table: continue
2449 # file has changed in dirstate
2446 # file has changed in dirstate
2450 if mfentry:
2447 if mfentry:
2451 handle(hitlist, backuphit)
2448 handle(hitlist, backuphit)
2452 elif misslist is not None:
2449 elif misslist is not None:
2453 handle(misslist, backupmiss)
2450 handle(misslist, backupmiss)
2454 break
2451 break
2455 else:
2452 else:
2456 if abs not in repo.dirstate:
2453 if abs not in repo.dirstate:
2457 if mfentry:
2454 if mfentry:
2458 handle(add, True)
2455 handle(add, True)
2459 elif exact:
2456 elif exact:
2460 ui.warn(_('file not managed: %s\n') % rel)
2457 ui.warn(_('file not managed: %s\n') % rel)
2461 continue
2458 continue
2462 # file has not changed in dirstate
2459 # file has not changed in dirstate
2463 if node == parent:
2460 if node == parent:
2464 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2461 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2465 continue
2462 continue
2466 if pmf is None:
2463 if pmf is None:
2467 # only need parent manifest in this unlikely case,
2464 # only need parent manifest in this unlikely case,
2468 # so do not read by default
2465 # so do not read by default
2469 pmf = repo.changectx(parent).manifest()
2466 pmf = repo[parent].manifest()
2470 if abs in pmf:
2467 if abs in pmf:
2471 if mfentry:
2468 if mfentry:
2472 # if version of file is same in parent and target
2469 # if version of file is same in parent and target
2473 # manifests, do nothing
2470 # manifests, do nothing
2474 if (pmf[abs] != mfentry or
2471 if (pmf[abs] != mfentry or
2475 pmf.flags(abs) != mf.flags(abs)):
2472 pmf.flags(abs) != mf.flags(abs)):
2476 handle(revert, False)
2473 handle(revert, False)
2477 else:
2474 else:
2478 handle(remove, False)
2475 handle(remove, False)
2479
2476
2480 if not opts.get('dry_run'):
2477 if not opts.get('dry_run'):
2481 def checkout(f):
2478 def checkout(f):
2482 fc = ctx[f]
2479 fc = ctx[f]
2483 repo.wwrite(f, fc.data(), fc.flags())
2480 repo.wwrite(f, fc.data(), fc.flags())
2484
2481
2485 audit_path = util.path_auditor(repo.root)
2482 audit_path = util.path_auditor(repo.root)
2486 for f in remove[0]:
2483 for f in remove[0]:
2487 if repo.dirstate[f] == 'a':
2484 if repo.dirstate[f] == 'a':
2488 repo.dirstate.forget(f)
2485 repo.dirstate.forget(f)
2489 continue
2486 continue
2490 audit_path(f)
2487 audit_path(f)
2491 try:
2488 try:
2492 util.unlink(repo.wjoin(f))
2489 util.unlink(repo.wjoin(f))
2493 except OSError:
2490 except OSError:
2494 pass
2491 pass
2495 repo.dirstate.remove(f)
2492 repo.dirstate.remove(f)
2496
2493
2497 normal = None
2494 normal = None
2498 if node == parent:
2495 if node == parent:
2499 # We're reverting to our parent. If possible, we'd like status
2496 # We're reverting to our parent. If possible, we'd like status
2500 # to report the file as clean. We have to use normallookup for
2497 # to report the file as clean. We have to use normallookup for
2501 # merges to avoid losing information about merged/dirty files.
2498 # merges to avoid losing information about merged/dirty files.
2502 if p2 != nullid:
2499 if p2 != nullid:
2503 normal = repo.dirstate.normallookup
2500 normal = repo.dirstate.normallookup
2504 else:
2501 else:
2505 normal = repo.dirstate.normal
2502 normal = repo.dirstate.normal
2506 for f in revert[0]:
2503 for f in revert[0]:
2507 checkout(f)
2504 checkout(f)
2508 if normal:
2505 if normal:
2509 normal(f)
2506 normal(f)
2510
2507
2511 for f in add[0]:
2508 for f in add[0]:
2512 checkout(f)
2509 checkout(f)
2513 repo.dirstate.add(f)
2510 repo.dirstate.add(f)
2514
2511
2515 normal = repo.dirstate.normallookup
2512 normal = repo.dirstate.normallookup
2516 if node == parent and p2 == nullid:
2513 if node == parent and p2 == nullid:
2517 normal = repo.dirstate.normal
2514 normal = repo.dirstate.normal
2518 for f in undelete[0]:
2515 for f in undelete[0]:
2519 checkout(f)
2516 checkout(f)
2520 normal(f)
2517 normal(f)
2521
2518
2522 finally:
2519 finally:
2523 del wlock
2520 del wlock
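# --- illustrative sketch (not part of commands.py) ----------------------
# The dispatch table inside revert() maps a file's status and its presence
# in the target manifest to an action (revert, remove/forget, undelete).
# A simplified standalone restatement of those rules, ignoring the backup
# flags and the untouched-file cases:
def revert_action_sketch(state, in_target_manifest):
    """state is one of 'modified', 'added', 'removed', 'deleted'."""
    if state in ('modified', 'deleted'):
        return 'revert' if in_target_manifest else 'remove'
    if state == 'added':
        return 'revert' if in_target_manifest else 'forget'
    if state == 'removed':
        return 'undelete' if in_target_manifest else 'no-op'
    return 'unhandled'

# example:
#   revert_action_sketch('added', False)  -> 'forget'
#   revert_action_sketch('deleted', True) -> 'revert'
# -------------------------------------------------------------------------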
2524
2521
2525 def rollback(ui, repo):
2522 def rollback(ui, repo):
2526 """roll back the last transaction
2523 """roll back the last transaction
2527
2524
2528 This command should be used with care. There is only one level of
2525 This command should be used with care. There is only one level of
2529 rollback, and there is no way to undo a rollback. It will also
2526 rollback, and there is no way to undo a rollback. It will also
2530 restore the dirstate at the time of the last transaction, losing
2527 restore the dirstate at the time of the last transaction, losing
2531 any dirstate changes since that time.
2528 any dirstate changes since that time.
2532
2529
2533 Transactions are used to encapsulate the effects of all commands
2530 Transactions are used to encapsulate the effects of all commands
2534 that create new changesets or propagate existing changesets into a
2531 that create new changesets or propagate existing changesets into a
2535 repository. For example, the following commands are transactional,
2532 repository. For example, the following commands are transactional,
2536 and their effects can be rolled back:
2533 and their effects can be rolled back:
2537
2534
2538 commit
2535 commit
2539 import
2536 import
2540 pull
2537 pull
2541 push (with this repository as destination)
2538 push (with this repository as destination)
2542 unbundle
2539 unbundle
2543
2540
2544 This command is not intended for use on public repositories. Once
2541 This command is not intended for use on public repositories. Once
2545 changes are visible for pull by other users, rolling a transaction
2542 changes are visible for pull by other users, rolling a transaction
2546 back locally is ineffective (someone else may already have pulled
2543 back locally is ineffective (someone else may already have pulled
2547 the changes). Furthermore, a race is possible with readers of the
2544 the changes). Furthermore, a race is possible with readers of the
2548 repository; for example an in-progress pull from the repository
2545 repository; for example an in-progress pull from the repository
2549 may fail if a rollback is performed.
2546 may fail if a rollback is performed.
2550 """
2547 """
2551 repo.rollback()
2548 repo.rollback()
2552
2549
2553 def root(ui, repo):
2550 def root(ui, repo):
2554 """print the root (top) of the current working dir
2551 """print the root (top) of the current working dir
2555
2552
2556 Print the root directory of the current repository.
2553 Print the root directory of the current repository.
2557 """
2554 """
2558 ui.write(repo.root + "\n")
2555 ui.write(repo.root + "\n")
2559
2556
2560 def serve(ui, repo, **opts):
2557 def serve(ui, repo, **opts):
2561 """export the repository via HTTP
2558 """export the repository via HTTP
2562
2559
2563 Start a local HTTP repository browser and pull server.
2560 Start a local HTTP repository browser and pull server.
2564
2561
2565 By default, the server logs accesses to stdout and errors to
2562 By default, the server logs accesses to stdout and errors to
2566 stderr. Use the "-A" and "-E" options to log to files.
2563 stderr. Use the "-A" and "-E" options to log to files.
2567 """
2564 """
2568
2565
2569 if opts["stdio"]:
2566 if opts["stdio"]:
2570 if repo is None:
2567 if repo is None:
2571 raise RepoError(_("There is no Mercurial repository here"
2568 raise RepoError(_("There is no Mercurial repository here"
2572 " (.hg not found)"))
2569 " (.hg not found)"))
2573 s = sshserver.sshserver(ui, repo)
2570 s = sshserver.sshserver(ui, repo)
2574 s.serve_forever()
2571 s.serve_forever()
2575
2572
2576 parentui = ui.parentui or ui
2573 parentui = ui.parentui or ui
2577 optlist = ("name templates style address port prefix ipv6"
2574 optlist = ("name templates style address port prefix ipv6"
2578 " accesslog errorlog webdir_conf certificate")
2575 " accesslog errorlog webdir_conf certificate")
2579 for o in optlist.split():
2576 for o in optlist.split():
2580 if opts[o]:
2577 if opts[o]:
2581 parentui.setconfig("web", o, str(opts[o]))
2578 parentui.setconfig("web", o, str(opts[o]))
2582 if (repo is not None) and (repo.ui != parentui):
2579 if (repo is not None) and (repo.ui != parentui):
2583 repo.ui.setconfig("web", o, str(opts[o]))
2580 repo.ui.setconfig("web", o, str(opts[o]))
2584
2581
2585 if repo is None and not ui.config("web", "webdir_conf"):
2582 if repo is None and not ui.config("web", "webdir_conf"):
2586 raise RepoError(_("There is no Mercurial repository here"
2583 raise RepoError(_("There is no Mercurial repository here"
2587 " (.hg not found)"))
2584 " (.hg not found)"))
2588
2585
2589 class service:
2586 class service:
2590 def init(self):
2587 def init(self):
2591 util.set_signal_handler()
2588 util.set_signal_handler()
2592 self.httpd = hgweb.server.create_server(parentui, repo)
2589 self.httpd = hgweb.server.create_server(parentui, repo)
2593
2590
2594 if not ui.verbose: return
2591 if not ui.verbose: return
2595
2592
2596 if self.httpd.prefix:
2593 if self.httpd.prefix:
2597 prefix = self.httpd.prefix.strip('/') + '/'
2594 prefix = self.httpd.prefix.strip('/') + '/'
2598 else:
2595 else:
2599 prefix = ''
2596 prefix = ''
2600
2597
2601 port = ':%d' % self.httpd.port
2598 port = ':%d' % self.httpd.port
2602 if port == ':80':
2599 if port == ':80':
2603 port = ''
2600 port = ''
2604
2601
2605 bindaddr = self.httpd.addr
2602 bindaddr = self.httpd.addr
2606 if bindaddr == '0.0.0.0':
2603 if bindaddr == '0.0.0.0':
2607 bindaddr = '*'
2604 bindaddr = '*'
2608 elif ':' in bindaddr: # IPv6
2605 elif ':' in bindaddr: # IPv6
2609 bindaddr = '[%s]' % bindaddr
2606 bindaddr = '[%s]' % bindaddr
2610
2607
2611 fqaddr = self.httpd.fqaddr
2608 fqaddr = self.httpd.fqaddr
2612 if ':' in fqaddr:
2609 if ':' in fqaddr:
2613 fqaddr = '[%s]' % fqaddr
2610 fqaddr = '[%s]' % fqaddr
2614 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2611 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2615 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2612 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2616
2613
2617 def run(self):
2614 def run(self):
2618 self.httpd.serve_forever()
2615 self.httpd.serve_forever()
2619
2616
2620 service = service()
2617 service = service()
2621
2618
2622 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2619 cmdutil.service(opts, initfn=service.init, runfn=service.run)
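# --- illustrative sketch (not part of commands.py) ----------------------
# The service class above assembles the "listening at http://..." message:
# it strips a default :80 port, brackets IPv6 addresses and shows '*' for
# a wildcard bind address.  The same formatting as a hypothetical helper,
# detached from the real hgweb server object:
def listen_url_sketch(fqaddr, port, prefix, bindaddr):
    prefix = prefix.strip('/') + '/' if prefix else ''
    portstr = '' if port == 80 else ':%d' % port
    if bindaddr == '0.0.0.0':
        bindaddr = '*'
    elif ':' in bindaddr:        # IPv6
        bindaddr = '[%s]' % bindaddr
    if ':' in fqaddr:
        fqaddr = '[%s]' % fqaddr
    return 'listening at http://%s%s/%s (bound to %s:%d)' % (
        fqaddr, portstr, prefix, bindaddr, port)

# example:
#   listen_url_sketch('example.com', 8000, '', '0.0.0.0')
#   -> 'listening at http://example.com:8000/ (bound to *:8000)'
# -------------------------------------------------------------------------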
2623
2620
2624 def status(ui, repo, *pats, **opts):
2621 def status(ui, repo, *pats, **opts):
2625 """show changed files in the working directory
2622 """show changed files in the working directory
2626
2623
2627 Show status of files in the repository. If names are given, only
2624 Show status of files in the repository. If names are given, only
2627 files that match are shown. Files that are clean or ignored, or the
2624 files that match are shown. Files that are clean or ignored, or the
2628 source of a copy/move operation, are not listed unless -c (clean),
2625 source of a copy/move operation, are not listed unless -c (clean),
2630 -i (ignored), -C (copies) or -A is given. Unless options described
2627 -i (ignored), -C (copies) or -A is given. Unless options described
2631 with "show only ..." are given, the options -mardu are used.
2628 with "show only ..." are given, the options -mardu are used.
2632
2629
2633 Option -q/--quiet hides untracked (unknown and ignored) files
2630 Option -q/--quiet hides untracked (unknown and ignored) files
2634 unless explicitly requested with -u/--unknown or -i/--ignored.
2631 unless explicitly requested with -u/--unknown or -i/--ignored.
2635
2632
2636 NOTE: status may appear to disagree with diff if permissions have
2633 NOTE: status may appear to disagree with diff if permissions have
2637 changed or a merge has occurred. The standard diff format does not
2634 changed or a merge has occurred. The standard diff format does not
2638 report permission changes and diff only reports changes relative
2635 report permission changes and diff only reports changes relative
2639 to one merge parent.
2636 to one merge parent.
2640
2637
2641 If one revision is given, it is used as the base revision.
2638 If one revision is given, it is used as the base revision.
2642 If two revisions are given, the difference between them is shown.
2639 If two revisions are given, the difference between them is shown.
2643
2640
2644 The codes used to show the status of files are:
2641 The codes used to show the status of files are:
2645 M = modified
2642 M = modified
2646 A = added
2643 A = added
2647 R = removed
2644 R = removed
2648 C = clean
2645 C = clean
2649 ! = deleted, but still tracked
2646 ! = deleted, but still tracked
2650 ? = not tracked
2647 ? = not tracked
2651 I = ignored
2648 I = ignored
2652 = the previous added file was copied from here
2649 = the previous added file was copied from here
2653 """
2650 """
2654
2651
2655 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2652 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2656 cwd = (pats and repo.getcwd()) or ''
2653 cwd = (pats and repo.getcwd()) or ''
2657 end = opts['print0'] and '\0' or '\n'
2654 end = opts['print0'] and '\0' or '\n'
2658 copy = {}
2655 copy = {}
2659 states = 'modified added removed deleted unknown ignored clean'.split()
2656 states = 'modified added removed deleted unknown ignored clean'.split()
2660 show = [k for k in states if opts[k]]
2657 show = [k for k in states if opts[k]]
2661 if opts['all']:
2658 if opts['all']:
2662 show += ui.quiet and (states[:4] + ['clean']) or states
2659 show += ui.quiet and (states[:4] + ['clean']) or states
2663 if not show:
2660 if not show:
2664 show = ui.quiet and states[:4] or states[:5]
2661 show = ui.quiet and states[:4] or states[:5]
2665
2662
2666 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2663 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2667 'ignored' in show, 'clean' in show, 'unknown' in show)
2664 'ignored' in show, 'clean' in show, 'unknown' in show)
2668 changestates = zip(states, 'MAR!?IC', stat)
2665 changestates = zip(states, 'MAR!?IC', stat)
2669
2666
2670 if (opts['all'] or opts['copies']) and not opts['no_status']:
2667 if (opts['all'] or opts['copies']) and not opts['no_status']:
2671 ctxn = repo.changectx(nullid)
2668 ctxn = repo[nullid]
2672 ctx1 = repo.changectx(node1)
2669 ctx1 = repo[node1]
2673 ctx2 = repo.changectx(node2)
2670 ctx2 = repo[node2]
2674 added = stat[1]
2671 added = stat[1]
2675 if node2 is None:
2672 if node2 is None:
2676 added = stat[0] + stat[1] # merged?
2673 added = stat[0] + stat[1] # merged?
2677
2674
2678 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
2675 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
2679 if k in added:
2676 if k in added:
2680 copy[k] = v
2677 copy[k] = v
2681 elif v in added:
2678 elif v in added:
2682 copy[v] = k
2679 copy[v] = k
2683
2680
2684 for state, char, files in changestates:
2681 for state, char, files in changestates:
2685 if state in show:
2682 if state in show:
2686 format = "%s %%s%s" % (char, end)
2683 format = "%s %%s%s" % (char, end)
2687 if opts['no_status']:
2684 if opts['no_status']:
2688 format = "%%s%s" % end
2685 format = "%%s%s" % end
2689
2686
2690 for f in files:
2687 for f in files:
2691 ui.write(format % repo.pathto(f, cwd))
2688 ui.write(format % repo.pathto(f, cwd))
2692 if f in copy:
2689 if f in copy:
2693 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2690 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
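# --- illustrative sketch (not part of commands.py) ----------------------
# status builds its per-file format string from the state code and the
# line terminator: "M path\n" normally, just "path\0" with --print0 and
# --no-status.  The same string construction in isolation, as a
# hypothetical helper:
def status_line_sketch(char, path, no_status=False, print0=False):
    end = '\0' if print0 else '\n'
    if no_status:
        return ("%%s%s" % end) % path
    return ("%s %%s%s" % (char, end)) % path

# example:
#   status_line_sketch('M', 'foo.c')                               -> 'M foo.c\n'
#   status_line_sketch('M', 'foo.c', no_status=True, print0=True)  -> 'foo.c\x00'
# -------------------------------------------------------------------------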
2694
2691
2695 def tag(ui, repo, name1, *names, **opts):
2692 def tag(ui, repo, name1, *names, **opts):
2696 """add one or more tags for the current or given revision
2693 """add one or more tags for the current or given revision
2697
2694
2698 Name a particular revision using <name>.
2695 Name a particular revision using <name>.
2699
2696
2700 Tags are used to name particular revisions of the repository and are
2697 Tags are used to name particular revisions of the repository and are
2701 very useful to compare different revisions, to go back to significant
2698 very useful to compare different revisions, to go back to significant
2702 earlier versions, or to mark branch points as releases.
2699 earlier versions, or to mark branch points as releases.
2703
2700
2704 If no revision is given, the parent of the working directory is used,
2701 If no revision is given, the parent of the working directory is used,
2705 or tip if no revision is checked out.
2702 or tip if no revision is checked out.
2706
2703
2707 To facilitate version control, distribution, and merging of tags,
2704 To facilitate version control, distribution, and merging of tags,
2708 they are stored as a file named ".hgtags" which is managed
2705 they are stored as a file named ".hgtags" which is managed
2709 similarly to other project files and can be hand-edited if
2706 similarly to other project files and can be hand-edited if
2710 necessary. The file '.hg/localtags' is used for local tags (not
2707 necessary. The file '.hg/localtags' is used for local tags (not
2711 shared among repositories).
2708 shared among repositories).
2712
2709
2713 See 'hg help dates' for a list of formats valid for -d/--date.
2710 See 'hg help dates' for a list of formats valid for -d/--date.
2714 """
2711 """
2715
2712
2716 rev_ = "."
2713 rev_ = "."
2717 names = (name1,) + names
2714 names = (name1,) + names
2718 if len(names) != len(dict.fromkeys(names)):
2715 if len(names) != len(dict.fromkeys(names)):
2719 raise util.Abort(_('tag names must be unique'))
2716 raise util.Abort(_('tag names must be unique'))
2720 for n in names:
2717 for n in names:
2721 if n in ['tip', '.', 'null']:
2718 if n in ['tip', '.', 'null']:
2722 raise util.Abort(_('the name \'%s\' is reserved') % n)
2719 raise util.Abort(_('the name \'%s\' is reserved') % n)
2723 if opts['rev'] and opts['remove']:
2720 if opts['rev'] and opts['remove']:
2724 raise util.Abort(_("--rev and --remove are incompatible"))
2721 raise util.Abort(_("--rev and --remove are incompatible"))
2725 if opts['rev']:
2722 if opts['rev']:
2726 rev_ = opts['rev']
2723 rev_ = opts['rev']
2727 message = opts['message']
2724 message = opts['message']
2728 if opts['remove']:
2725 if opts['remove']:
2729 expectedtype = opts['local'] and 'local' or 'global'
2726 expectedtype = opts['local'] and 'local' or 'global'
2730 for n in names:
2727 for n in names:
2731 if not repo.tagtype(n):
2728 if not repo.tagtype(n):
2732 raise util.Abort(_('tag \'%s\' does not exist') % n)
2729 raise util.Abort(_('tag \'%s\' does not exist') % n)
2733 if repo.tagtype(n) != expectedtype:
2730 if repo.tagtype(n) != expectedtype:
2734 raise util.Abort(_('tag \'%s\' is not a %s tag') %
2731 raise util.Abort(_('tag \'%s\' is not a %s tag') %
2735 (n, expectedtype))
2732 (n, expectedtype))
2736 rev_ = nullid
2733 rev_ = nullid
2737 if not message:
2734 if not message:
2738 message = _('Removed tag %s') % ', '.join(names)
2735 message = _('Removed tag %s') % ', '.join(names)
2739 elif not opts['force']:
2736 elif not opts['force']:
2740 for n in names:
2737 for n in names:
2741 if n in repo.tags():
2738 if n in repo.tags():
2742 raise util.Abort(_('tag \'%s\' already exists '
2739 raise util.Abort(_('tag \'%s\' already exists '
2743 '(use -f to force)') % n)
2740 '(use -f to force)') % n)
2744 if not rev_ and repo.dirstate.parents()[1] != nullid:
2741 if not rev_ and repo.dirstate.parents()[1] != nullid:
2745 raise util.Abort(_('uncommitted merge - please provide a '
2742 raise util.Abort(_('uncommitted merge - please provide a '
2746 'specific revision'))
2743 'specific revision'))
2747 r = repo.changectx(rev_).node()
2744 r = repo[rev_].node()
2748
2745
2749 if not message:
2746 if not message:
2750 message = (_('Added tag %s for changeset %s') %
2747 message = (_('Added tag %s for changeset %s') %
2751 (', '.join(names), short(r)))
2748 (', '.join(names), short(r)))
2752
2749
2753 date = opts.get('date')
2750 date = opts.get('date')
2754 if date:
2751 if date:
2755 date = util.parsedate(date)
2752 date = util.parsedate(date)
2756
2753
2757 repo.tag(names, r, message, opts['local'], opts['user'], date)
2754 repo.tag(names, r, message, opts['local'], opts['user'], date)
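# --- illustrative sketch (not part of commands.py) ----------------------
# tag() rejects duplicate names and the reserved names 'tip', '.' and
# 'null' before doing any work.  The same two checks as a standalone
# validator, purely for illustration:
def validate_tag_names_sketch(names):
    if len(names) != len(set(names)):
        raise ValueError('tag names must be unique')
    for n in names:
        if n in ('tip', '.', 'null'):
            raise ValueError("the name '%s' is reserved" % n)

# example:
#   validate_tag_names_sketch(['v1.0', 'stable'])  -> returns None (ok)
#   validate_tag_names_sketch(['tip'])             -> raises ValueError
# -------------------------------------------------------------------------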
2758
2755
2759 def tags(ui, repo):
2756 def tags(ui, repo):
2760 """list repository tags
2757 """list repository tags
2761
2758
2762 List the repository tags.
2759 List the repository tags.
2763
2760
2764 This lists both regular and local tags. When the -v/--verbose switch
2761 This lists both regular and local tags. When the -v/--verbose switch
2765 is used, a third column "local" is printed for local tags.
2762 is used, a third column "local" is printed for local tags.
2766 """
2763 """
2767
2764
2768 l = repo.tagslist()
2765 l = repo.tagslist()
2769 l.reverse()
2766 l.reverse()
2770 hexfunc = ui.debugflag and hex or short
2767 hexfunc = ui.debugflag and hex or short
2771 tagtype = ""
2768 tagtype = ""
2772
2769
2773 for t, n in l:
2770 for t, n in l:
2774 if ui.quiet:
2771 if ui.quiet:
2775 ui.write("%s\n" % t)
2772 ui.write("%s\n" % t)
2776 continue
2773 continue
2777
2774
2778 try:
2775 try:
2779 hn = hexfunc(n)
2776 hn = hexfunc(n)
2780 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2777 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2781 except revlog.LookupError:
2778 except revlog.LookupError:
2782 r = " ?:%s" % hn
2779 r = " ?:%s" % hn
2783 else:
2780 else:
2784 spaces = " " * (30 - util.locallen(t))
2781 spaces = " " * (30 - util.locallen(t))
2785 if ui.verbose:
2782 if ui.verbose:
2786 if repo.tagtype(t) == 'local':
2783 if repo.tagtype(t) == 'local':
2787 tagtype = " local"
2784 tagtype = " local"
2788 else:
2785 else:
2789 tagtype = ""
2786 tagtype = ""
2790 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2787 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2791
2788
2792 def tip(ui, repo, **opts):
2789 def tip(ui, repo, **opts):
2793 """show the tip revision
2790 """show the tip revision
2794
2791
2795 The tip revision (usually just called the tip) is the most
2792 The tip revision (usually just called the tip) is the most
2796 recently added changeset in the repository, the most recently
2793 recently added changeset in the repository, the most recently
2797 changed head.
2794 changed head.
2798
2795
2799 If you have just made a commit, that commit will be the tip. If
2796 If you have just made a commit, that commit will be the tip. If
2800 you have just pulled changes from another repository, the tip of
2797 you have just pulled changes from another repository, the tip of
2801 that repository becomes the current tip. The "tip" tag is special
2798 that repository becomes the current tip. The "tip" tag is special
2802 and cannot be renamed or assigned to a different changeset.
2799 and cannot be renamed or assigned to a different changeset.
2803 """
2800 """
2804 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2801 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2805
2802
2806 def unbundle(ui, repo, fname1, *fnames, **opts):
2803 def unbundle(ui, repo, fname1, *fnames, **opts):
2807 """apply one or more changegroup files
2804 """apply one or more changegroup files
2808
2805
2809 Apply one or more compressed changegroup files generated by the
2806 Apply one or more compressed changegroup files generated by the
2810 bundle command.
2807 bundle command.
2811 """
2808 """
2812 fnames = (fname1,) + fnames
2809 fnames = (fname1,) + fnames
2813
2810
2814 lock = None
2811 lock = None
2815 try:
2812 try:
2816 lock = repo.lock()
2813 lock = repo.lock()
2817 for fname in fnames:
2814 for fname in fnames:
2818 if os.path.exists(fname):
2815 if os.path.exists(fname):
2819 f = open(fname, "rb")
2816 f = open(fname, "rb")
2820 else:
2817 else:
2821 f = urllib.urlopen(fname)
2818 f = urllib.urlopen(fname)
2822 gen = changegroup.readbundle(f, fname)
2819 gen = changegroup.readbundle(f, fname)
2823 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2820 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2824 finally:
2821 finally:
2825 del lock
2822 del lock
2826
2823
2827 return postincoming(ui, repo, modheads, opts['update'], None)
2824 return postincoming(ui, repo, modheads, opts['update'], None)
2828
2825
2829 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2826 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2830 """update working directory
2827 """update working directory
2831
2828
2832 Update the repository's working directory to the specified revision,
2829 Update the repository's working directory to the specified revision,
2833 or the tip of the current branch if none is specified.
2830 or the tip of the current branch if none is specified.
2834
2831
2835 If the requested revision is a descendant of the working
2832 If the requested revision is a descendant of the working
2836 directory, any outstanding changes in the working directory will
2833 directory, any outstanding changes in the working directory will
2837 be merged into the result. If it is not directly descended but is
2834 be merged into the result. If it is not directly descended but is
2838 on the same named branch, update aborts with a suggestion to use
2835 on the same named branch, update aborts with a suggestion to use
2839 merge or update -C instead.
2836 merge or update -C instead.
2840
2837
2841 If the requested revision is on a different named branch and the
2838 If the requested revision is on a different named branch and the
2842 working directory is clean, update quietly switches branches.
2839 working directory is clean, update quietly switches branches.
2843
2840
2844 See 'hg help dates' for a list of formats valid for --date.
2841 See 'hg help dates' for a list of formats valid for --date.
2845 """
2842 """
2846 if rev and node:
2843 if rev and node:
2847 raise util.Abort(_("please specify just one revision"))
2844 raise util.Abort(_("please specify just one revision"))
2848
2845
2849 if not rev:
2846 if not rev:
2850 rev = node
2847 rev = node
2851
2848
2852 if date:
2849 if date:
2853 if rev:
2850 if rev:
2854 raise util.Abort(_("you can't specify a revision and a date"))
2851 raise util.Abort(_("you can't specify a revision and a date"))
2855 rev = cmdutil.finddate(ui, repo, date)
2852 rev = cmdutil.finddate(ui, repo, date)
2856
2853
2857 if clean:
2854 if clean:
2858 return hg.clean(repo, rev)
2855 return hg.clean(repo, rev)
2859 else:
2856 else:
2860 return hg.update(repo, rev)
2857 return hg.update(repo, rev)
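# --- illustrative sketch (not part of commands.py) ----------------------
# The update help text above distinguishes three cases: the target is a
# descendant of the working directory (merge outstanding changes in), it
# is on the same named branch but not a descendant (abort with a hint), or
# it is on a different branch with a clean working directory (switch
# quietly).  A toy restatement under those simplified assumptions; the
# behaviour chosen for the remaining dirty cross-branch case is a guess
# here, not taken from the source above.
def update_decision_sketch(is_descendant, same_branch, clean, force=False):
    if force:
        return 'clean update, discarding local changes'
    if is_descendant:
        return 'update, merging outstanding changes'
    if same_branch:
        return "abort: suggest 'hg merge' or 'hg update -C'"
    if clean:
        return 'switch branches quietly'
    return 'abort: uncommitted changes'   # assumed behaviour

# example: update_decision_sketch(False, True, True)
#   -> "abort: suggest 'hg merge' or 'hg update -C'"
# -------------------------------------------------------------------------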
2861
2858
2862 def verify(ui, repo):
2859 def verify(ui, repo):
2863 """verify the integrity of the repository
2860 """verify the integrity of the repository
2864
2861
2865 Verify the integrity of the current repository.
2862 Verify the integrity of the current repository.
2866
2863
2867 This will perform an extensive check of the repository's
2864 This will perform an extensive check of the repository's
2868 integrity, validating the hashes and checksums of each entry in
2865 integrity, validating the hashes and checksums of each entry in
2869 the changelog, manifest, and tracked files, as well as the
2866 the changelog, manifest, and tracked files, as well as the
2870 integrity of their crosslinks and indices.
2867 integrity of their crosslinks and indices.
2871 """
2868 """
2872 return hg.verify(repo)
2869 return hg.verify(repo)
2873
2870
2874 def version_(ui):
2871 def version_(ui):
2875 """output version and copyright information"""
2872 """output version and copyright information"""
2876 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2873 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2877 % version.get_version())
2874 % version.get_version())
2878 ui.status(_(
2875 ui.status(_(
2879 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2876 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2880 "This is free software; see the source for copying conditions. "
2877 "This is free software; see the source for copying conditions. "
2881 "There is NO\nwarranty; "
2878 "There is NO\nwarranty; "
2882 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2879 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2883 ))
2880 ))
2884
2881
2885 # Command options and aliases are listed here, alphabetically
2882 # Command options and aliases are listed here, alphabetically
2886
2883
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]

templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _("don't include dates in diff headers"))
]

diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '', _('number of lines of context to show'))
]

table = {
    "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
         ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate|blame":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('l', 'line-number', None,
           _('show line number at the first appearance'))
         ] + walkopts,
         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts + commitopts2,
         _('hg backout [OPTION]... [-r] REV')),
    "bisect":
        (bisect,
         [('r', 'reset', False, _('reset bisect state')),
          ('g', 'good', False, _('mark changeset good')),
          ('b', 'bad', False, _('mark changeset bad')),
          ('s', 'skip', False, _('skip testing changeset')),
          ('U', 'noupdate', False, _('do not update to target'))],
         _("hg bisect [-gbsr] [REV]")),
    "branch":
        (branch,
         [('f', 'force', None,
           _('set branch name even if it shadows an existing branch'))],
         _('hg branch [-f] [NAME]')),
    "branches":
        (branches,
         [('a', 'active', False,
           _('show only branches that have unmerged heads'))],
         _('hg branches [-a]')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset up to which you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
          ('a', 'all', None, _('bundle all changesets in the repository')),
          ('t', 'type', 'bzip2', _('bundle compression type to use')),
         ] + remoteopts,
         _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('', 'decode', None, _('apply any matching decode filter')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None,
           _('the clone will only contain a repository (no working copy)')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
         ] + walkopts + commitopts + commitopts2,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [],
                      _('hg debugancestor [INDEX] REV1 REV2')),
    "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('hg debugcomplete [-o] CMD')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('hg debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
    "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
    "debugindex": (debugindex, [], _('hg debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
    "debuginstall": (debuginstall, [], _('hg debuginstall')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('F', 'files', '', _('file list'))
         ] + commitopts + commitopts2,
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('hg debugrebuildstate [-r REV] [REV]')),
    "debugrename":
        (debugrename,
         [('r', 'rev', '', _('revision to debug'))],
         _('hg debugrename [-r REV] FILE')),
    "debugsetparents":
        (debugsetparents,
         [],
         _('hg debugsetparents REV1 [REV2]')),
    "debugstate":
        (debugstate,
         [('', 'nodates', None, _('do not display the saved mtime'))],
         _('hg debugstate [OPTS]')),
    "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision'))
         ] + diffopts + diffopts2 + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('', 'switch-parent', None, _('diff against the second parent'))
         ] + diffopts,
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('r', 'rev', '', _('show only heads which are descendants of rev')),
         ] + templateopts,
         _('hg heads [-r REV] [REV]...')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id":
        (identify,
         [('r', 'rev', '', _('identify the specified rev')),
          ('n', 'num', None, _('show local revision number')),
          ('i', 'id', None, _('show global revision id')),
          ('b', 'branch', None, _('show branch')),
          ('t', 'tags', None, _('show tags'))],
         _('hg identify [-nibt] [-r REV] [SOURCE]')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes')),
          ('', 'no-commit', None, _("don't commit, just update the working directory")),
          ('', 'exact', None,
           _('apply patch to the nodes from which it was generated')),
          ('', 'import-branch', None,
           _('Use any branch information in patch (implied by --exact)'))] +
         commitopts + commitopts2,
         _('hg import [OPTION]... PATCH...')),
    "incoming|in":
        (incoming,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + logopts + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('m', 'only-merges', None, _('show only merges')),
          ('b', 'only-branch', [],
           _('show only changesets within the given named branch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
         ] + logopts + walkopts,
         _('hg log [OPTION]... [FILE]')),
    "manifest":
        (manifest,
         [('r', 'rev', '', _('revision to display'))],
         _('hg manifest [-r REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes')),
          ('r', 'rev', '', _('revision to merge')),
         ],
         _('hg merge [-f] [[-r] REV]')),
    "outgoing|out":
        (outgoing,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
         ] + logopts + remoteopts,
         _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
         ] + templateopts,
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
         ] + remoteopts,
         _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record delete for missing files')),
          ('f', 'force', None,
           _('remove (and delete) file even if added or modified')),
         ] + walkopts,
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg rename [OPTION]... SOURCE... DEST')),
    "resolve":
        (resolve,
         [('l', 'list', None, _('list state of files needing merge')),
          ('m', 'mark', None, _('mark files as resolved')),
          ('u', 'unmark', None, _('unmark files as resolved'))],
         ('hg resolve [OPTION] [FILES...]')),
    "revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('hg revert [OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to listen on (default: 8000)')),
          ('a', 'address', '', _('address to listen on (default: all interfaces)')),
          ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
          ('', 'certificate', '', _('SSL certificate file'))],
         _('hg serve [OPTION]...')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('hg showconfig [-u] [NAME]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision')),
         ] + walkopts,
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('f', 'force', None, _('replace existing tag')),
          ('l', 'local', None, _('make the tag local')),
          ('r', 'rev', '', _('revision to tag')),
          ('', 'remove', None, _('remove a tag')),
          # -l/--local is already there, commitopts cannot be used
          ('m', 'message', '', _('use <text> as commit message')),
         ] + commitopts2,
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('p', 'patch', None, _('show patch')),
         ] + templateopts,
         _('hg tip [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update to new tip if changesets were unbundled'))],
         _('hg unbundle [-u] FILE...')),
    "^update|up|checkout|co":
        (update,
         [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision'))],
         _('hg update [-C] [-d DATE] [[-r] REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (version_, [], _('hg version')),
}

norepo = ("clone init version help debugcomplete debugdata"
          " debugindex debugindexdot debugdate debuginstall debugfsinfo")
optionalrepo = ("identify paths serve showconfig debugancestor")
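
A note on the structure above: each entry in table maps a command name (a leading "^" marks it for the short help list, "|" separates aliases) to a (function, option list, synopsis) tuple, and every option is a (short flag, long flag, default, help text) tuple. The sketch below is illustrative only and not part of this changeset; describe_command is a hypothetical helper showing how an entry can be unpacked:

def describe_command(key):
    # key exactly as written in the table, e.g. "^add" or "copy|cp"
    func, options, synopsis = table[key]
    lines = [synopsis]
    for short, long_, default, helptext in options:
        # long-only options use an empty short flag
        flag = short and ("-%s/--%s" % (short, long_)) or ("--%s" % long_)
        lines.append("  %s (default %r): %s" % (flag, default, helptext))
    return lines
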
@@ -1,751 +1,751 b''
# context.py - changeset and file context objects for mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from node import nullid, nullrev, short
from i18n import _
import ancestor, bdiff, revlog, util, os, errno

class changectx(object):
    """A changecontext object makes access to data related to a particular
    changeset convenient."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""
        if changeid == '':
            changeid = '.'
        self._repo = repo
        self._node = self._repo.lookup(changeid)
        self._rev = self._repo.changelog.rev(self._node)

    def __str__(self):
        return short(self.node())

    def __repr__(self):
        return "<changectx %s>" % str(self)

    def __hash__(self):
        try:
            return hash(self._rev)
        except AttributeError:
            return id(self)

    def __eq__(self, other):
        try:
            return self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __nonzero__(self):
        return self._rev != nullrev

    def __getattr__(self, name):
        if name == '_changeset':
            self._changeset = self._repo.changelog.read(self.node())
            return self._changeset
        elif name == '_manifest':
            self._manifest = self._repo.manifest.read(self._changeset[0])
            return self._manifest
        elif name == '_manifestdelta':
            md = self._repo.manifest.readdelta(self._changeset[0])
            self._manifestdelta = md
            return self._manifestdelta
        elif name == '_parents':
            p = self._repo.changelog.parents(self._node)
            if p[1] == nullid:
                p = p[:-1]
            self._parents = [changectx(self._repo, x) for x in p]
            return self._parents
        else:
            raise AttributeError, name

    def __contains__(self, key):
        return key in self._manifest

    def __getitem__(self, key):
        return self.filectx(key)

    def __iter__(self):
        a = self._manifest.keys()
        a.sort()
        for f in a:
            yield f

    def changeset(self): return self._changeset
    def manifest(self): return self._manifest

    def rev(self): return self._rev
    def node(self): return self._node
    def user(self): return self._changeset[1]
    def date(self): return self._changeset[2]
    def files(self): return self._changeset[3]
    def description(self): return self._changeset[4]
    def branch(self): return self._changeset[5].get("branch")
    def extra(self): return self._changeset[5]
    def tags(self): return self._repo.nodetags(self._node)

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def children(self):
        """return contexts for each child changeset"""
        c = self._repo.changelog.children(self._node)
        return [changectx(self._repo, x) for x in c]

    def _fileinfo(self, path):
        if '_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise revlog.LookupError(self._node, path,
                                         _('not found in manifest'))
        if '_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return self._manifestdelta[path], self._manifestdelta.flags(path)
        node, flag = self._repo.manifest.find(self._changeset[0], path)
        if not node:
            raise revlog.LookupError(self._node, path,
                                     _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        try:
            return self._fileinfo(path)[1]
        except revlog.LookupError:
            return ''

    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            fileid = self.filenode(path)
        return filectx(self._repo, path, fileid=fileid,
                       changectx=self, filelog=filelog)

    def filectxs(self):
        """generate a file context for each file in this changeset's
        manifest"""
        mf = self.manifest()
        m = mf.keys()
        m.sort()
        for f in m:
            yield self.filectx(f, fileid=mf[f])

    def ancestor(self, c2):
        """
        return the ancestor context of self and c2
        """
        n = self._repo.changelog.ancestor(self._node, c2._node)
        return changectx(self._repo, n)

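A short usage sketch (illustrative only, not part of this changeset): assuming repo stands for an already-opened repository object and using 'tip' purely as an example changeid, a changectx gives convenient read access to the changeset, and indexing or iterating it yields file contexts:

ctx = changectx(repo, 'tip')          # 'tip' is just an example changeid
print ctx.rev(), str(ctx), ctx.branch()
print ctx.user(), ctx.date()
for f in ctx:                         # manifest filenames in sorted order
    fctx = ctx[f]                     # equivalent to ctx.filectx(f)
    print "%s: %d bytes" % (fctx, fctx.size())
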
150 class filectx(object):
150 class filectx(object):
151 """A filecontext object makes access to data related to a particular
151 """A filecontext object makes access to data related to a particular
152 filerevision convenient."""
152 filerevision convenient."""
153 def __init__(self, repo, path, changeid=None, fileid=None,
153 def __init__(self, repo, path, changeid=None, fileid=None,
154 filelog=None, changectx=None):
154 filelog=None, changectx=None):
155 """changeid can be a changeset revision, node, or tag.
155 """changeid can be a changeset revision, node, or tag.
156 fileid can be a file revision or node."""
156 fileid can be a file revision or node."""
157 self._repo = repo
157 self._repo = repo
158 self._path = path
158 self._path = path
159
159
160 assert (changeid is not None
160 assert (changeid is not None
161 or fileid is not None
161 or fileid is not None
162 or changectx is not None)
162 or changectx is not None)
163
163
164 if filelog:
164 if filelog:
165 self._filelog = filelog
165 self._filelog = filelog
166
166
167 if changeid is not None:
167 if changeid is not None:
168 self._changeid = changeid
168 self._changeid = changeid
169 if changectx is not None:
169 if changectx is not None:
170 self._changectx = changectx
170 self._changectx = changectx
171 if fileid is not None:
171 if fileid is not None:
172 self._fileid = fileid
172 self._fileid = fileid
173
173
174 def __getattr__(self, name):
174 def __getattr__(self, name):
175 if name == '_changectx':
175 if name == '_changectx':
176 self._changectx = changectx(self._repo, self._changeid)
176 self._changectx = changectx(self._repo, self._changeid)
177 return self._changectx
177 return self._changectx
178 elif name == '_filelog':
178 elif name == '_filelog':
179 self._filelog = self._repo.file(self._path)
179 self._filelog = self._repo.file(self._path)
180 return self._filelog
180 return self._filelog
181 elif name == '_changeid':
181 elif name == '_changeid':
182 if '_changectx' in self.__dict__:
182 if '_changectx' in self.__dict__:
183 self._changeid = self._changectx.rev()
183 self._changeid = self._changectx.rev()
184 else:
184 else:
185 self._changeid = self._filelog.linkrev(self._filenode)
185 self._changeid = self._filelog.linkrev(self._filenode)
186 return self._changeid
186 return self._changeid
187 elif name == '_filenode':
187 elif name == '_filenode':
188 if '_fileid' in self.__dict__:
188 if '_fileid' in self.__dict__:
189 self._filenode = self._filelog.lookup(self._fileid)
189 self._filenode = self._filelog.lookup(self._fileid)
190 else:
190 else:
191 self._filenode = self._changectx.filenode(self._path)
191 self._filenode = self._changectx.filenode(self._path)
192 return self._filenode
192 return self._filenode
193 elif name == '_filerev':
193 elif name == '_filerev':
194 self._filerev = self._filelog.rev(self._filenode)
194 self._filerev = self._filelog.rev(self._filenode)
195 return self._filerev
195 return self._filerev
196 elif name == '_repopath':
196 elif name == '_repopath':
197 self._repopath = self._path
197 self._repopath = self._path
198 return self._repopath
198 return self._repopath
199 else:
199 else:
200 raise AttributeError, name
200 raise AttributeError, name
201
201
202 def __nonzero__(self):
202 def __nonzero__(self):
203 try:
203 try:
204 n = self._filenode
204 n = self._filenode
205 return True
205 return True
206 except revlog.LookupError:
206 except revlog.LookupError:
207 # file is missing
207 # file is missing
208 return False
208 return False
209
209
210 def __str__(self):
210 def __str__(self):
211 return "%s@%s" % (self.path(), short(self.node()))
211 return "%s@%s" % (self.path(), short(self.node()))
212
212
213 def __repr__(self):
213 def __repr__(self):
214 return "<filectx %s>" % str(self)
214 return "<filectx %s>" % str(self)
215
215
216 def __hash__(self):
216 def __hash__(self):
217 try:
217 try:
218 return hash((self._path, self._fileid))
218 return hash((self._path, self._fileid))
219 except AttributeError:
219 except AttributeError:
220 return id(self)
220 return id(self)
221
221
222 def __eq__(self, other):
222 def __eq__(self, other):
223 try:
223 try:
224 return (self._path == other._path
224 return (self._path == other._path
225 and self._fileid == other._fileid)
225 and self._fileid == other._fileid)
226 except AttributeError:
226 except AttributeError:
227 return False
227 return False
228
228
229 def __ne__(self, other):
229 def __ne__(self, other):
230 return not (self == other)
230 return not (self == other)
231
231
232 def filectx(self, fileid):
232 def filectx(self, fileid):
233 '''opens an arbitrary revision of the file without
233 '''opens an arbitrary revision of the file without
234 opening a new filelog'''
234 opening a new filelog'''
235 return filectx(self._repo, self._path, fileid=fileid,
235 return filectx(self._repo, self._path, fileid=fileid,
236 filelog=self._filelog)
236 filelog=self._filelog)
237
237
238 def filerev(self): return self._filerev
238 def filerev(self): return self._filerev
239 def filenode(self): return self._filenode
239 def filenode(self): return self._filenode
240 def flags(self): return self._changectx.flags(self._path)
240 def flags(self): return self._changectx.flags(self._path)
241 def filelog(self): return self._filelog
241 def filelog(self): return self._filelog
242
242
243 def rev(self):
243 def rev(self):
244 if '_changectx' in self.__dict__:
244 if '_changectx' in self.__dict__:
245 return self._changectx.rev()
245 return self._changectx.rev()
246 if '_changeid' in self.__dict__:
246 if '_changeid' in self.__dict__:
247 return self._changectx.rev()
247 return self._changectx.rev()
248 return self._filelog.linkrev(self._filenode)
248 return self._filelog.linkrev(self._filenode)
249
249
250 def linkrev(self): return self._filelog.linkrev(self._filenode)
250 def linkrev(self): return self._filelog.linkrev(self._filenode)
251 def node(self): return self._changectx.node()
251 def node(self): return self._changectx.node()
252 def user(self): return self._changectx.user()
252 def user(self): return self._changectx.user()
253 def date(self): return self._changectx.date()
253 def date(self): return self._changectx.date()
254 def files(self): return self._changectx.files()
254 def files(self): return self._changectx.files()
255 def description(self): return self._changectx.description()
255 def description(self): return self._changectx.description()
256 def branch(self): return self._changectx.branch()
256 def branch(self): return self._changectx.branch()
257 def manifest(self): return self._changectx.manifest()
257 def manifest(self): return self._changectx.manifest()
258 def changectx(self): return self._changectx
258 def changectx(self): return self._changectx
259
259
260 def data(self): return self._filelog.read(self._filenode)
260 def data(self): return self._filelog.read(self._filenode)
261 def path(self): return self._path
261 def path(self): return self._path
262 def size(self): return self._filelog.size(self._filerev)
262 def size(self): return self._filelog.size(self._filerev)
263
263
264 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
264 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
265
265
266 def renamed(self):
266 def renamed(self):
267 """check if file was actually renamed in this changeset revision
267 """check if file was actually renamed in this changeset revision
268
268
269 If rename logged in file revision, we report copy for changeset only
269 If rename logged in file revision, we report copy for changeset only
270 if file revisions linkrev points back to the changeset in question
270 if file revisions linkrev points back to the changeset in question
271 or both changeset parents contain different file revisions.
271 or both changeset parents contain different file revisions.
272 """
272 """
273
273
274 renamed = self._filelog.renamed(self._filenode)
274 renamed = self._filelog.renamed(self._filenode)
275 if not renamed:
275 if not renamed:
276 return renamed
276 return renamed
277
277
278 if self.rev() == self.linkrev():
278 if self.rev() == self.linkrev():
279 return renamed
279 return renamed
280
280
281 name = self.path()
281 name = self.path()
282 fnode = self._filenode
282 fnode = self._filenode
283 for p in self._changectx.parents():
283 for p in self._changectx.parents():
284 try:
284 try:
285 if fnode == p.filenode(name):
285 if fnode == p.filenode(name):
286 return None
286 return None
287 except revlog.LookupError:
287 except revlog.LookupError:
288 pass
288 pass
289 return renamed
289 return renamed
290
290
291 def parents(self):
291 def parents(self):
292 p = self._path
292 p = self._path
293 fl = self._filelog
293 fl = self._filelog
294 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
294 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
295
295
296 r = self._filelog.renamed(self._filenode)
296 r = self._filelog.renamed(self._filenode)
297 if r:
297 if r:
298 pl[0] = (r[0], r[1], None)
298 pl[0] = (r[0], r[1], None)
299
299
300 return [filectx(self._repo, p, fileid=n, filelog=l)
300 return [filectx(self._repo, p, fileid=n, filelog=l)
301 for p,n,l in pl if n != nullid]
301 for p,n,l in pl if n != nullid]
302
302
303 def children(self):
303 def children(self):
304 # hard for renames
304 # hard for renames
305 c = self._filelog.children(self._filenode)
305 c = self._filelog.children(self._filenode)
306 return [filectx(self._repo, self._path, fileid=x,
306 return [filectx(self._repo, self._path, fileid=x,
307 filelog=self._filelog) for x in c]
307 filelog=self._filelog) for x in c]
308
308
309 def annotate(self, follow=False, linenumber=None):
309 def annotate(self, follow=False, linenumber=None):
310 '''returns a list of tuples of (ctx, line) for each line
310 '''returns a list of tuples of (ctx, line) for each line
311 in the file, where ctx is the filectx of the node where
311 in the file, where ctx is the filectx of the node where
312 that line was last changed.
312 that line was last changed.
313 This returns tuples of ((ctx, linenumber), line) for each line,
313 This returns tuples of ((ctx, linenumber), line) for each line,
314 if "linenumber" parameter is NOT "None".
314 if "linenumber" parameter is NOT "None".
315 In such tuples, linenumber means one at the first appearance
315 In such tuples, linenumber means one at the first appearance
316 in the managed file.
316 in the managed file.
317 To reduce annotation cost,
317 To reduce annotation cost,
318 this returns fixed value(False is used) as linenumber,
318 this returns fixed value(False is used) as linenumber,
319 if "linenumber" parameter is "False".'''
319 if "linenumber" parameter is "False".'''
320
320
321 def decorate_compat(text, rev):
321 def decorate_compat(text, rev):
322 return ([rev] * len(text.splitlines()), text)
322 return ([rev] * len(text.splitlines()), text)
323
323
324 def without_linenumber(text, rev):
324 def without_linenumber(text, rev):
325 return ([(rev, False)] * len(text.splitlines()), text)
325 return ([(rev, False)] * len(text.splitlines()), text)
326
326
327 def with_linenumber(text, rev):
327 def with_linenumber(text, rev):
328 size = len(text.splitlines())
328 size = len(text.splitlines())
329 return ([(rev, i) for i in xrange(1, size + 1)], text)
329 return ([(rev, i) for i in xrange(1, size + 1)], text)
330
330
331 decorate = (((linenumber is None) and decorate_compat) or
331 decorate = (((linenumber is None) and decorate_compat) or
332 (linenumber and with_linenumber) or
332 (linenumber and with_linenumber) or
333 without_linenumber)
333 without_linenumber)
334
334
335 def pair(parent, child):
335 def pair(parent, child):
336 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
336 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
337 child[0][b1:b2] = parent[0][a1:a2]
337 child[0][b1:b2] = parent[0][a1:a2]
338 return child
338 return child
339
339
340 getlog = util.cachefunc(lambda x: self._repo.file(x))
340 getlog = util.cachefunc(lambda x: self._repo.file(x))
341 def getctx(path, fileid):
341 def getctx(path, fileid):
342 log = path == self._path and self._filelog or getlog(path)
342 log = path == self._path and self._filelog or getlog(path)
343 return filectx(self._repo, path, fileid=fileid, filelog=log)
343 return filectx(self._repo, path, fileid=fileid, filelog=log)
344 getctx = util.cachefunc(getctx)
344 getctx = util.cachefunc(getctx)
345
345
346 def parents(f):
346 def parents(f):
347 # we want to reuse filectx objects as much as possible
347 # we want to reuse filectx objects as much as possible
348 p = f._path
348 p = f._path
349 if f._filerev is None: # working dir
349 if f._filerev is None: # working dir
350 pl = [(n.path(), n.filerev()) for n in f.parents()]
350 pl = [(n.path(), n.filerev()) for n in f.parents()]
351 else:
351 else:
352 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
352 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
353
353
354 if follow:
354 if follow:
355 r = f.renamed()
355 r = f.renamed()
356 if r:
356 if r:
357 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
357 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
358
358
359 return [getctx(p, n) for p, n in pl if n != nullrev]
359 return [getctx(p, n) for p, n in pl if n != nullrev]
360
360
361 # use linkrev to find the first changeset where self appeared
361 # use linkrev to find the first changeset where self appeared
362 if self.rev() != self.linkrev():
362 if self.rev() != self.linkrev():
363 base = self.filectx(self.filerev())
363 base = self.filectx(self.filerev())
364 else:
364 else:
365 base = self
365 base = self
366
366
367 # find all ancestors
367 # find all ancestors
368 needed = {base: 1}
368 needed = {base: 1}
369 visit = [base]
369 visit = [base]
370 files = [base._path]
370 files = [base._path]
371 while visit:
371 while visit:
372 f = visit.pop(0)
372 f = visit.pop(0)
373 for p in parents(f):
373 for p in parents(f):
374 if p not in needed:
374 if p not in needed:
375 needed[p] = 1
375 needed[p] = 1
376 visit.append(p)
376 visit.append(p)
377 if p._path not in files:
377 if p._path not in files:
378 files.append(p._path)
378 files.append(p._path)
379 else:
379 else:
380 # count how many times we'll use this
380 # count how many times we'll use this
381 needed[p] += 1
381 needed[p] += 1
382
382
383 # sort by revision (per file) which is a topological order
383 # sort by revision (per file) which is a topological order
384 visit = []
384 visit = []
385 for f in files:
385 for f in files:
386 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
386 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
387 visit.extend(fn)
387 visit.extend(fn)
388 visit.sort()
388 visit.sort()
389 hist = {}
389 hist = {}
390
390
391 for r, f in visit:
391 for r, f in visit:
392 curr = decorate(f.data(), f)
392 curr = decorate(f.data(), f)
393 for p in parents(f):
393 for p in parents(f):
394 if p != nullid:
394 if p != nullid:
395 curr = pair(hist[p], curr)
395 curr = pair(hist[p], curr)
396 # trim the history of unneeded revs
396 # trim the history of unneeded revs
397 needed[p] -= 1
397 needed[p] -= 1
398 if not needed[p]:
398 if not needed[p]:
399 del hist[p]
399 del hist[p]
400 hist[f] = curr
400 hist[f] = curr
401
401
402 return zip(hist[f][0], hist[f][1].splitlines(1))
402 return zip(hist[f][0], hist[f][1].splitlines(1))
403
403
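
As a quick illustration of the annotate() contract documented above, here is a minimal sketch; the repository path and the file name 'foo.py' are hypothetical, and it uses the repo[changeid] lookup this changeset introduces.

from mercurial import ui, hg

repo = hg.repository(ui.ui(), '/path/to/repo')     # hypothetical repo path
fctx = repo['tip'].filectx('foo.py')               # hypothetical tracked file

# default form: (ctx, line) pairs
for actx, line in fctx.annotate(follow=True):
    print "%6d: %s" % (actx.rev(), line),

# linenumber=True form: ((ctx, linenumber), line) pairs
for (actx, lineno), line in fctx.annotate(follow=True, linenumber=True):
    print "%6d:%-5s %s" % (actx.rev(), lineno, line),
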
404 def ancestor(self, fc2):
404 def ancestor(self, fc2):
405 """
405 """
406 find the common ancestor file context, if any, of self, and fc2
406 find the common ancestor file context, if any, of self, and fc2
407 """
407 """
408
408
409 acache = {}
409 acache = {}
410
410
411 # prime the ancestor cache for the working directory
411 # prime the ancestor cache for the working directory
412 for c in (self, fc2):
412 for c in (self, fc2):
413 if c._filerev == None:
413 if c._filerev == None:
414 pl = [(n.path(), n.filenode()) for n in c.parents()]
414 pl = [(n.path(), n.filenode()) for n in c.parents()]
415 acache[(c._path, None)] = pl
415 acache[(c._path, None)] = pl
416
416
417 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
417 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
418 def parents(vertex):
418 def parents(vertex):
419 if vertex in acache:
419 if vertex in acache:
420 return acache[vertex]
420 return acache[vertex]
421 f, n = vertex
421 f, n = vertex
422 if f not in flcache:
422 if f not in flcache:
423 flcache[f] = self._repo.file(f)
423 flcache[f] = self._repo.file(f)
424 fl = flcache[f]
424 fl = flcache[f]
425 pl = [(f, p) for p in fl.parents(n) if p != nullid]
425 pl = [(f, p) for p in fl.parents(n) if p != nullid]
426 re = fl.renamed(n)
426 re = fl.renamed(n)
427 if re:
427 if re:
428 pl.append(re)
428 pl.append(re)
429 acache[vertex] = pl
429 acache[vertex] = pl
430 return pl
430 return pl
431
431
432 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
432 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
433 v = ancestor.ancestor(a, b, parents)
433 v = ancestor.ancestor(a, b, parents)
434 if v:
434 if v:
435 f, n = v
435 f, n = v
436 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
436 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
437
437
438 return None
438 return None
439
439
440 class workingctx(changectx):
440 class workingctx(changectx):
441 """A workingctx object makes access to data related to
441 """A workingctx object makes access to data related to
442 the current working directory convenient.
442 the current working directory convenient.
443 parents - a pair of parent nodeids, or None to use the dirstate.
443 parents - a pair of parent nodeids, or None to use the dirstate.
444 date - any valid date string or (unixtime, offset), or None.
444 date - any valid date string or (unixtime, offset), or None.
445 user - username string, or None.
445 user - username string, or None.
446 extra - a dictionary of extra values, or None.
446 extra - a dictionary of extra values, or None.
447 changes - a list of file lists as returned by localrepo.status()
447 changes - a list of file lists as returned by localrepo.status()
448 or None to use the repository status.
448 or None to use the repository status.
449 """
449 """
450 def __init__(self, repo, parents=None, text="", user=None, date=None,
450 def __init__(self, repo, parents=None, text="", user=None, date=None,
451 extra=None, changes=None):
451 extra=None, changes=None):
452 self._repo = repo
452 self._repo = repo
453 self._rev = None
453 self._rev = None
454 self._node = None
454 self._node = None
455 self._text = text
455 self._text = text
456 if date:
456 if date:
457 self._date = util.parsedate(date)
457 self._date = util.parsedate(date)
458 else:
458 else:
459 self._date = util.makedate()
459 self._date = util.makedate()
460 if user:
460 if user:
461 self._user = user
461 self._user = user
462 else:
462 else:
463 self._user = self._repo.ui.username()
463 self._user = self._repo.ui.username()
464 if parents:
464 if parents:
465 p1, p2 = parents
465 p1, p2 = parents
466 - self._parents = [self._repo.changectx(p) for p in (p1, p2)]
466 + self._parents = [changectx(self._repo, p) for p in (p1, p2)]
467 if changes:
467 if changes:
468 self._status = list(changes)
468 self._status = list(changes)
469
469
470 self._extra = {}
470 self._extra = {}
471 if extra:
471 if extra:
472 self._extra = extra.copy()
472 self._extra = extra.copy()
473 if 'branch' not in self._extra:
473 if 'branch' not in self._extra:
474 branch = self._repo.dirstate.branch()
474 branch = self._repo.dirstate.branch()
475 try:
475 try:
476 branch = branch.decode('UTF-8').encode('UTF-8')
476 branch = branch.decode('UTF-8').encode('UTF-8')
477 except UnicodeDecodeError:
477 except UnicodeDecodeError:
478 raise util.Abort(_('branch name not in UTF-8!'))
478 raise util.Abort(_('branch name not in UTF-8!'))
479 self._extra['branch'] = branch
479 self._extra['branch'] = branch
480 if self._extra['branch'] == '':
480 if self._extra['branch'] == '':
481 self._extra['branch'] = 'default'
481 self._extra['branch'] = 'default'
482
482
483 def __str__(self):
483 def __str__(self):
484 return str(self._parents[0]) + "+"
484 return str(self._parents[0]) + "+"
485
485
486 def __nonzero__(self):
486 def __nonzero__(self):
487 return True
487 return True
488
488
489 def __getattr__(self, name):
489 def __getattr__(self, name):
490 if name == '_status':
490 if name == '_status':
491 self._status = self._repo.status()
491 self._status = self._repo.status()
492 return self._status
492 return self._status
493 if name == '_manifest':
493 if name == '_manifest':
494 self._buildmanifest()
494 self._buildmanifest()
495 return self._manifest
495 return self._manifest
496 elif name == '_parents':
496 elif name == '_parents':
497 p = self._repo.dirstate.parents()
497 p = self._repo.dirstate.parents()
498 if p[1] == nullid:
498 if p[1] == nullid:
499 p = p[:-1]
499 p = p[:-1]
500 self._parents = [changectx(self._repo, x) for x in p]
500 self._parents = [changectx(self._repo, x) for x in p]
501 return self._parents
501 return self._parents
502 else:
502 else:
503 raise AttributeError, name
503 raise AttributeError, name
504
504
505 def _buildmanifest(self):
505 def _buildmanifest(self):
506 """generate a manifest corresponding to the working directory"""
506 """generate a manifest corresponding to the working directory"""
507
507
508 man = self._parents[0].manifest().copy()
508 man = self._parents[0].manifest().copy()
509 copied = self._repo.dirstate.copies()
509 copied = self._repo.dirstate.copies()
510 cf = lambda x: man.flags(copied.get(x, x))
510 cf = lambda x: man.flags(copied.get(x, x))
511 ff = self._repo.dirstate.flagfunc(cf)
511 ff = self._repo.dirstate.flagfunc(cf)
512 modified, added, removed, deleted, unknown = self._status[:5]
512 modified, added, removed, deleted, unknown = self._status[:5]
513 for i, l in (("a", added), ("m", modified), ("u", unknown)):
513 for i, l in (("a", added), ("m", modified), ("u", unknown)):
514 for f in l:
514 for f in l:
515 man[f] = man.get(copied.get(f, f), nullid) + i
515 man[f] = man.get(copied.get(f, f), nullid) + i
516 try:
516 try:
517 man.set(f, ff(f))
517 man.set(f, ff(f))
518 except OSError:
518 except OSError:
519 pass
519 pass
520
520
521 for f in deleted + removed:
521 for f in deleted + removed:
522 if f in man:
522 if f in man:
523 del man[f]
523 del man[f]
524
524
525 self._manifest = man
525 self._manifest = man
526
526
527 def manifest(self): return self._manifest
527 def manifest(self): return self._manifest
528
528
529 def user(self): return self._user
529 def user(self): return self._user
530 def date(self): return self._date
530 def date(self): return self._date
531 def description(self): return self._text
531 def description(self): return self._text
532 def files(self):
532 def files(self):
533 f = self.modified() + self.added() + self.removed()
533 f = self.modified() + self.added() + self.removed()
534 f.sort()
534 f.sort()
535 return f
535 return f
536
536
537 def modified(self): return self._status[0]
537 def modified(self): return self._status[0]
538 def added(self): return self._status[1]
538 def added(self): return self._status[1]
539 def removed(self): return self._status[2]
539 def removed(self): return self._status[2]
540 def deleted(self): return self._status[3]
540 def deleted(self): return self._status[3]
541 def unknown(self): return self._status[4]
541 def unknown(self): return self._status[4]
542 def clean(self): return self._status[5]
542 def clean(self): return self._status[5]
543 def branch(self): return self._extra['branch']
543 def branch(self): return self._extra['branch']
544 def extra(self): return self._extra
544 def extra(self): return self._extra
545
545
546 def tags(self):
546 def tags(self):
547 t = []
547 t = []
548 [t.extend(p.tags()) for p in self.parents()]
548 [t.extend(p.tags()) for p in self.parents()]
549 return t
549 return t
550
550
551 def children(self):
551 def children(self):
552 return []
552 return []
553
553
554 def flags(self, path):
554 def flags(self, path):
555 if '_manifest' in self.__dict__:
555 if '_manifest' in self.__dict__:
556 try:
556 try:
557 return self._manifest.flags(path)
557 return self._manifest.flags(path)
558 except KeyError:
558 except KeyError:
559 return ''
559 return ''
560
560
561 pnode = self._parents[0].changeset()[0]
561 pnode = self._parents[0].changeset()[0]
562 orig = self._repo.dirstate.copies().get(path, path)
562 orig = self._repo.dirstate.copies().get(path, path)
563 node, flag = self._repo.manifest.find(pnode, orig)
563 node, flag = self._repo.manifest.find(pnode, orig)
564 try:
564 try:
565 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
565 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
566 return ff(path)
566 return ff(path)
567 except OSError:
567 except OSError:
568 pass
568 pass
569
569
570 if not node or path in self.deleted() or path in self.removed():
570 if not node or path in self.deleted() or path in self.removed():
571 return ''
571 return ''
572 return flag
572 return flag
573
573
574 def filectx(self, path, filelog=None):
574 def filectx(self, path, filelog=None):
575 """get a file context from the working directory"""
575 """get a file context from the working directory"""
576 return workingfilectx(self._repo, path, workingctx=self,
576 return workingfilectx(self._repo, path, workingctx=self,
577 filelog=filelog)
577 filelog=filelog)
578
578
579 def ancestor(self, c2):
579 def ancestor(self, c2):
580 """return the ancestor context of self and c2"""
580 """return the ancestor context of self and c2"""
581 return self._parents[0].ancestor(c2) # punt on two parents for now
581 return self._parents[0].ancestor(c2) # punt on two parents for now
582
582
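
A hedged sketch of the workingctx accessors described in the class docstring above; the repository path is hypothetical and the printed values depend on the local working copy.

from mercurial import ui, hg, context

repo = hg.repository(ui.ui(), '/path/to/repo')   # hypothetical path
wctx = context.workingctx(repo)                  # parents/status read lazily

print wctx                                       # "<p1 short hash>+"
print wctx.branch()                              # dirstate branch, 'default' if unset
print wctx.modified(), wctx.added(), wctx.removed()
print [str(p) for p in wctx.parents()]           # one or two parent changectxs
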
583 class workingfilectx(filectx):
583 class workingfilectx(filectx):
584 """A workingfilectx object makes access to data related to a particular
584 """A workingfilectx object makes access to data related to a particular
585 file in the working directory convenient."""
585 file in the working directory convenient."""
586 def __init__(self, repo, path, filelog=None, workingctx=None):
586 def __init__(self, repo, path, filelog=None, workingctx=None):
587 """changeid can be a changeset revision, node, or tag.
587 """changeid can be a changeset revision, node, or tag.
588 fileid can be a file revision or node."""
588 fileid can be a file revision or node."""
589 self._repo = repo
589 self._repo = repo
590 self._path = path
590 self._path = path
591 self._changeid = None
591 self._changeid = None
592 self._filerev = self._filenode = None
592 self._filerev = self._filenode = None
593
593
594 if filelog:
594 if filelog:
595 self._filelog = filelog
595 self._filelog = filelog
596 if workingctx:
596 if workingctx:
597 self._changectx = workingctx
597 self._changectx = workingctx
598
598
599 def __getattr__(self, name):
599 def __getattr__(self, name):
600 if name == '_changectx':
600 if name == '_changectx':
601 self._changectx = workingctx(self._repo)
601 self._changectx = workingctx(self._repo)
602 return self._changectx
602 return self._changectx
603 elif name == '_repopath':
603 elif name == '_repopath':
604 self._repopath = (self._repo.dirstate.copied(self._path)
604 self._repopath = (self._repo.dirstate.copied(self._path)
605 or self._path)
605 or self._path)
606 return self._repopath
606 return self._repopath
607 elif name == '_filelog':
607 elif name == '_filelog':
608 self._filelog = self._repo.file(self._repopath)
608 self._filelog = self._repo.file(self._repopath)
609 return self._filelog
609 return self._filelog
610 else:
610 else:
611 raise AttributeError, name
611 raise AttributeError, name
612
612
613 def __nonzero__(self):
613 def __nonzero__(self):
614 return True
614 return True
615
615
616 def __str__(self):
616 def __str__(self):
617 return "%s@%s" % (self.path(), self._changectx)
617 return "%s@%s" % (self.path(), self._changectx)
618
618
619 def filectx(self, fileid):
619 def filectx(self, fileid):
620 '''opens an arbitrary revision of the file without
620 '''opens an arbitrary revision of the file without
621 opening a new filelog'''
621 opening a new filelog'''
622 return filectx(self._repo, self._repopath, fileid=fileid,
622 return filectx(self._repo, self._repopath, fileid=fileid,
623 filelog=self._filelog)
623 filelog=self._filelog)
624
624
625 def rev(self):
625 def rev(self):
626 if '_changectx' in self.__dict__:
626 if '_changectx' in self.__dict__:
627 return self._changectx.rev()
627 return self._changectx.rev()
628 return self._filelog.linkrev(self._filenode)
628 return self._filelog.linkrev(self._filenode)
629
629
630 def data(self): return self._repo.wread(self._path)
630 def data(self): return self._repo.wread(self._path)
631 def renamed(self):
631 def renamed(self):
632 rp = self._repopath
632 rp = self._repopath
633 if rp == self._path:
633 if rp == self._path:
634 return None
634 return None
635 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
635 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
636
636
637 def parents(self):
637 def parents(self):
638 '''return parent filectxs, following copies if necessary'''
638 '''return parent filectxs, following copies if necessary'''
639 p = self._path
639 p = self._path
640 rp = self._repopath
640 rp = self._repopath
641 pcl = self._changectx._parents
641 pcl = self._changectx._parents
642 fl = self._filelog
642 fl = self._filelog
643 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
643 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
644 if len(pcl) > 1:
644 if len(pcl) > 1:
645 if rp != p:
645 if rp != p:
646 fl = None
646 fl = None
647 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
647 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
648
648
649 return [filectx(self._repo, p, fileid=n, filelog=l)
649 return [filectx(self._repo, p, fileid=n, filelog=l)
650 for p,n,l in pl if n != nullid]
650 for p,n,l in pl if n != nullid]
651
651
652 def children(self):
652 def children(self):
653 return []
653 return []
654
654
655 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
655 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
656 def date(self):
656 def date(self):
657 t, tz = self._changectx.date()
657 t, tz = self._changectx.date()
658 try:
658 try:
659 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
659 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
660 except OSError, err:
660 except OSError, err:
661 if err.errno != errno.ENOENT: raise
661 if err.errno != errno.ENOENT: raise
662 return (t, tz)
662 return (t, tz)
663
663
664 def cmp(self, text): return self._repo.wread(self._path) == text
664 def cmp(self, text): return self._repo.wread(self._path) == text
665
665
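
Similarly, a small sketch of the workingfilectx accessors above; 'hello.txt' and the repository path are hypothetical.

from mercurial import ui, hg, context

repo = hg.repository(ui.ui(), '/path/to/repo')            # hypothetical path
wfctx = context.workingctx(repo).filectx('hello.txt')     # hypothetical file

data = wfctx.data()               # working-directory contents via repo.wread
print wfctx.date()                # (file mtime, tz offset from the changectx)
print wfctx.cmp(data)             # True: the working copy equals the given text
print [str(p) for p in wfctx.parents()]   # parent filectxs, following copies
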
666 class memctx(object):
666 class memctx(object):
667 """A memctx is a subset of changectx supposed to be built on memory
667 """A memctx is a subset of changectx supposed to be built on memory
668 and passed to commit functions.
668 and passed to commit functions.
669
669
670 NOTE: this interface and the related memfilectx are experimental and
670 NOTE: this interface and the related memfilectx are experimental and
671 may change without notice.
671 may change without notice.
672
672
673 parents - a pair of parent nodeids.
673 parents - a pair of parent nodeids.
674 filectxfn - a callable taking (repo, memctx, path) arguments and
674 filectxfn - a callable taking (repo, memctx, path) arguments and
675 returning a memfilectx object.
675 returning a memfilectx object.
676 date - any valid date string or (unixtime, offset), or None.
676 date - any valid date string or (unixtime, offset), or None.
677 user - username string, or None.
677 user - username string, or None.
678 extra - a dictionary of extra values, or None.
678 extra - a dictionary of extra values, or None.
679 """
679 """
680 def __init__(self, repo, parents, text, files, filectxfn, user=None,
680 def __init__(self, repo, parents, text, files, filectxfn, user=None,
681 date=None, extra=None):
681 date=None, extra=None):
682 self._repo = repo
682 self._repo = repo
683 self._rev = None
683 self._rev = None
684 self._node = None
684 self._node = None
685 self._text = text
685 self._text = text
686 self._date = date and util.parsedate(date) or util.makedate()
686 self._date = date and util.parsedate(date) or util.makedate()
687 self._user = user or self._repo.ui.username()
687 self._user = user or self._repo.ui.username()
688 parents = [(p or nullid) for p in parents]
688 parents = [(p or nullid) for p in parents]
689 p1, p2 = parents
689 p1, p2 = parents
690 - self._parents = [self._repo.changectx(p) for p in (p1, p2)]
690 + self._parents = [changectx(self._repo, p) for p in (p1, p2)]
691 files = list(files)
691 files = list(files)
692 files.sort()
692 files.sort()
693 self._status = [files, [], [], [], []]
693 self._status = [files, [], [], [], []]
694 self._filectxfn = filectxfn
694 self._filectxfn = filectxfn
695
695
696 self._extra = extra and extra.copy() or {}
696 self._extra = extra and extra.copy() or {}
697 if 'branch' not in self._extra:
697 if 'branch' not in self._extra:
698 self._extra['branch'] = 'default'
698 self._extra['branch'] = 'default'
699 elif self._extra.get('branch') == '':
699 elif self._extra.get('branch') == '':
700 self._extra['branch'] = 'default'
700 self._extra['branch'] = 'default'
701
701
702 def __str__(self):
702 def __str__(self):
703 return str(self._parents[0]) + "+"
703 return str(self._parents[0]) + "+"
704
704
705 def __nonzero__(self):
705 def __nonzero__(self):
706 return True
706 return True
707
707
708 def user(self): return self._user
708 def user(self): return self._user
709 def date(self): return self._date
709 def date(self): return self._date
710 def description(self): return self._text
710 def description(self): return self._text
711 def files(self): return self.modified()
711 def files(self): return self.modified()
712 def modified(self): return self._status[0]
712 def modified(self): return self._status[0]
713 def added(self): return self._status[1]
713 def added(self): return self._status[1]
714 def removed(self): return self._status[2]
714 def removed(self): return self._status[2]
715 def deleted(self): return self._status[3]
715 def deleted(self): return self._status[3]
716 def unknown(self): return self._status[4]
716 def unknown(self): return self._status[4]
717 def clean(self): return self._status[5]
717 def clean(self): return self._status[5]
718 def branch(self): return self._extra['branch']
718 def branch(self): return self._extra['branch']
719 def extra(self): return self._extra
719 def extra(self): return self._extra
720 def flags(self, f): return self[f].flags()
720 def flags(self, f): return self[f].flags()
721
721
722 def parents(self):
722 def parents(self):
723 """return contexts for each parent changeset"""
723 """return contexts for each parent changeset"""
724 return self._parents
724 return self._parents
725
725
726 def filectx(self, path, filelog=None):
726 def filectx(self, path, filelog=None):
727 """get a file context from the working directory"""
727 """get a file context from the working directory"""
728 return self._filectxfn(self._repo, self, path)
728 return self._filectxfn(self._repo, self, path)
729
729
730 class memfilectx(object):
730 class memfilectx(object):
731 """A memfilectx is a subset of filectx supposed to be built by client
731 """A memfilectx is a subset of filectx supposed to be built by client
732 code and passed to commit functions.
732 code and passed to commit functions.
733 """
733 """
734 def __init__(self, path, data, islink, isexec, copied):
734 def __init__(self, path, data, islink, isexec, copied):
735 """copied is the source file path, or None."""
735 """copied is the source file path, or None."""
736 self._path = path
736 self._path = path
737 self._data = data
737 self._data = data
738 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
738 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
739 self._copied = None
739 self._copied = None
740 if copied:
740 if copied:
741 self._copied = (copied, nullid)
741 self._copied = (copied, nullid)
742
742
743 def __nonzero__(self): return True
743 def __nonzero__(self): return True
744 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
744 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
745 def path(self): return self._path
745 def path(self): return self._path
746 def data(self): return self._data
746 def data(self): return self._data
747 def flags(self): return self._flags
747 def flags(self): return self._flags
748 def isexec(self): return 'x' in self._flags
748 def isexec(self): return 'x' in self._flags
749 def islink(self): return 'l' in self._flags
749 def islink(self): return 'l' in self._flags
750 def renamed(self): return self._copied
750 def renamed(self): return self._copied
751
751
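
Since memctx and memfilectx are only described by their docstrings here, a hedged construction sketch may help; the repository path, the file name, and the commit entry point are assumptions, and the interface is marked experimental above.

from mercurial import ui, hg, context

repo = hg.repository(ui.ui(), '/path/to/repo')          # hypothetical path

def filectxfn(repo, mctx, path):
    # called once per entry in the files list; returns the file's content
    return context.memfilectx(path, 'new contents\n',
                              islink=False, isexec=False, copied=None)

mctx = context.memctx(repo, (repo['tip'].node(), None),
                      'commit built entirely in memory',
                      ['hello.txt'],                     # hypothetical file list
                      filectxfn,
                      user='someone <someone@example.com>')

# mctx is then handed to a commit function; the exact entry point
# (e.g. repo.commitctx(mctx)) is an assumption and may differ here.
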
@@ -1,74 +1,74 @@
1 # Revision graph generator for Mercurial
1 # Revision graph generator for Mercurial
2 #
2 #
3 # Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
3 # Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
4 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
5 #
5 #
6 # This software may be used and distributed according to the terms of
6 # This software may be used and distributed according to the terms of
7 # the GNU General Public License, incorporated herein by reference.
7 # the GNU General Public License, incorporated herein by reference.
8
8
9 from node import nullrev, short
9 from node import nullrev, short
10 import ui, hg, util, templatefilters
10 import ui, hg, util, templatefilters
11
11
12 def graph(repo, start_rev, stop_rev):
12 def graph(repo, start_rev, stop_rev):
13 """incremental revision grapher
13 """incremental revision grapher
14
14
15 This generator function walks through the revision history from
15 This generator function walks through the revision history from
16 revision start_rev to revision stop_rev (which must be less than
16 revision start_rev to revision stop_rev (which must be less than
17 or equal to start_rev) and for each revision emits tuples with the
17 or equal to start_rev) and for each revision emits tuples with the
18 following elements:
18 following elements:
19
19
20 - Current node
20 - Current node
21 - Column and color for the current node
21 - Column and color for the current node
22 - Edges; a list of (col, next_col, color) indicating the edges between
22 - Edges; a list of (col, next_col, color) indicating the edges between
23 the current node and its parents.
23 the current node and its parents.
24 - First line of the changeset description
24 - First line of the changeset description
25 - The changeset author
25 - The changeset author
26 - The changeset date/time
26 - The changeset date/time
27 """
27 """
28
28
29 assert start_rev >= stop_rev
29 assert start_rev >= stop_rev
30 curr_rev = start_rev
30 curr_rev = start_rev
31 revs = []
31 revs = []
32 cl = repo.changelog
32 cl = repo.changelog
33 colors = {}
33 colors = {}
34 new_color = 1
34 new_color = 1
35
35
36 while curr_rev >= stop_rev:
36 while curr_rev >= stop_rev:
37 node = cl.node(curr_rev)
37 node = cl.node(curr_rev)
38
38
39 # Compute revs and next_revs
39 # Compute revs and next_revs
40 if curr_rev not in revs:
40 if curr_rev not in revs:
41 revs.append(curr_rev) # new head
41 revs.append(curr_rev) # new head
42 colors[curr_rev] = new_color
42 colors[curr_rev] = new_color
43 new_color += 1
43 new_color += 1
44
44
45 idx = revs.index(curr_rev)
45 idx = revs.index(curr_rev)
46 color = colors.pop(curr_rev)
46 color = colors.pop(curr_rev)
47 next = revs[:]
47 next = revs[:]
48
48
49 # Add parents to next_revs
49 # Add parents to next_revs
50 parents = [x for x in cl.parentrevs(curr_rev) if x != nullrev]
50 parents = [x for x in cl.parentrevs(curr_rev) if x != nullrev]
51 addparents = [p for p in parents if p not in next]
51 addparents = [p for p in parents if p not in next]
52 next[idx:idx + 1] = addparents
52 next[idx:idx + 1] = addparents
53
53
54 # Set colors for the parents
54 # Set colors for the parents
55 for i, p in enumerate(addparents):
55 for i, p in enumerate(addparents):
56 if not i:
56 if not i:
57 colors[p] = color
57 colors[p] = color
58 else:
58 else:
59 colors[p] = new_color
59 colors[p] = new_color
60 new_color += 1
60 new_color += 1
61
61
62 # Add edges to the graph
62 # Add edges to the graph
63 edges = []
63 edges = []
64 for col, r in enumerate(revs):
64 for col, r in enumerate(revs):
65 if r in next:
65 if r in next:
66 edges.append((col, next.index(r), colors[r]))
66 edges.append((col, next.index(r), colors[r]))
67 elif r == curr_rev:
67 elif r == curr_rev:
68 for p in parents:
68 for p in parents:
69 edges.append((col, next.index(p), colors[p]))
69 edges.append((col, next.index(p), colors[p]))
70
70
71 # Yield and move on
71 # Yield and move on
72 - yield (repo.changectx(curr_rev), (idx, color), edges)
72 + yield (repo[curr_rev], (idx, color), edges)
73 revs = next
73 revs = next
74 curr_rev -= 1
74 curr_rev -= 1
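
A minimal consumer of this generator, following the (ctx, (column, color), edges) tuples actually yielded above; the repository path and the 20-revision window are hypothetical.

from mercurial import ui, hg, graphmod

repo = hg.repository(ui.ui(), '/path/to/repo')       # hypothetical path
tip = repo.changelog.count() - 1                      # newest revision number

for ctx, (col, color), edges in graphmod.graph(repo, tip, max(0, tip - 20)):
    print "%s col=%d color=%d edges=%r" % (ctx, col, color, edges)
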
@@ -1,379 +1,379 @@
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, mimetypes
9 import os, mimetypes
10 from mercurial.node import hex, nullid
10 from mercurial.node import hex, nullid
11 from mercurial.repo import RepoError
11 from mercurial.repo import RepoError
12 from mercurial import mdiff, ui, hg, util, patch, hook
12 from mercurial import mdiff, ui, hg, util, patch, hook
13 from mercurial import revlog, templater, templatefilters, changegroup
13 from mercurial import revlog, templater, templatefilters, changegroup
14 from common import get_mtime, style_map, paritygen, countgen, ErrorResponse
14 from common import get_mtime, style_map, paritygen, countgen, ErrorResponse
15 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
15 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
16 from request import wsgirequest
16 from request import wsgirequest
17 import webcommands, protocol, webutil
17 import webcommands, protocol, webutil
18
18
19 shortcuts = {
19 shortcuts = {
20 'cl': [('cmd', ['changelog']), ('rev', None)],
20 'cl': [('cmd', ['changelog']), ('rev', None)],
21 'sl': [('cmd', ['shortlog']), ('rev', None)],
21 'sl': [('cmd', ['shortlog']), ('rev', None)],
22 'cs': [('cmd', ['changeset']), ('node', None)],
22 'cs': [('cmd', ['changeset']), ('node', None)],
23 'f': [('cmd', ['file']), ('filenode', None)],
23 'f': [('cmd', ['file']), ('filenode', None)],
24 'fl': [('cmd', ['filelog']), ('filenode', None)],
24 'fl': [('cmd', ['filelog']), ('filenode', None)],
25 'fd': [('cmd', ['filediff']), ('node', None)],
25 'fd': [('cmd', ['filediff']), ('node', None)],
26 'fa': [('cmd', ['annotate']), ('filenode', None)],
26 'fa': [('cmd', ['annotate']), ('filenode', None)],
27 'mf': [('cmd', ['manifest']), ('manifest', None)],
27 'mf': [('cmd', ['manifest']), ('manifest', None)],
28 'ca': [('cmd', ['archive']), ('node', None)],
28 'ca': [('cmd', ['archive']), ('node', None)],
29 'tags': [('cmd', ['tags'])],
29 'tags': [('cmd', ['tags'])],
30 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
30 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
31 'static': [('cmd', ['static']), ('file', None)]
31 'static': [('cmd', ['static']), ('file', None)]
32 }
32 }
33
33
34 class hgweb(object):
34 class hgweb(object):
35 def __init__(self, repo, name=None):
35 def __init__(self, repo, name=None):
36 if isinstance(repo, str):
36 if isinstance(repo, str):
37 parentui = ui.ui(report_untrusted=False, interactive=False)
37 parentui = ui.ui(report_untrusted=False, interactive=False)
38 self.repo = hg.repository(parentui, repo)
38 self.repo = hg.repository(parentui, repo)
39 else:
39 else:
40 self.repo = repo
40 self.repo = repo
41
41
42 hook.redirect(True)
42 hook.redirect(True)
43 self.mtime = -1
43 self.mtime = -1
44 self.reponame = name
44 self.reponame = name
45 self.archives = 'zip', 'gz', 'bz2'
45 self.archives = 'zip', 'gz', 'bz2'
46 self.stripecount = 1
46 self.stripecount = 1
47 self._capabilities = None
47 self._capabilities = None
48 # a repo owner may set web.templates in .hg/hgrc to get any file
48 # a repo owner may set web.templates in .hg/hgrc to get any file
49 # readable by the user running the CGI script
49 # readable by the user running the CGI script
50 self.templatepath = self.config("web", "templates",
50 self.templatepath = self.config("web", "templates",
51 templater.templatepath(),
51 templater.templatepath(),
52 untrusted=False)
52 untrusted=False)
53
53
54 # The CGI scripts are often run by a user different from the repo owner.
54 # The CGI scripts are often run by a user different from the repo owner.
55 # Trust the settings from the .hg/hgrc files by default.
55 # Trust the settings from the .hg/hgrc files by default.
56 def config(self, section, name, default=None, untrusted=True):
56 def config(self, section, name, default=None, untrusted=True):
57 return self.repo.ui.config(section, name, default,
57 return self.repo.ui.config(section, name, default,
58 untrusted=untrusted)
58 untrusted=untrusted)
59
59
60 def configbool(self, section, name, default=False, untrusted=True):
60 def configbool(self, section, name, default=False, untrusted=True):
61 return self.repo.ui.configbool(section, name, default,
61 return self.repo.ui.configbool(section, name, default,
62 untrusted=untrusted)
62 untrusted=untrusted)
63
63
64 def configlist(self, section, name, default=None, untrusted=True):
64 def configlist(self, section, name, default=None, untrusted=True):
65 return self.repo.ui.configlist(section, name, default,
65 return self.repo.ui.configlist(section, name, default,
66 untrusted=untrusted)
66 untrusted=untrusted)
67
67
68 def refresh(self):
68 def refresh(self):
69 mtime = get_mtime(self.repo.root)
69 mtime = get_mtime(self.repo.root)
70 if mtime != self.mtime:
70 if mtime != self.mtime:
71 self.mtime = mtime
71 self.mtime = mtime
72 self.repo = hg.repository(self.repo.ui, self.repo.root)
72 self.repo = hg.repository(self.repo.ui, self.repo.root)
73 self.maxchanges = int(self.config("web", "maxchanges", 10))
73 self.maxchanges = int(self.config("web", "maxchanges", 10))
74 self.stripecount = int(self.config("web", "stripes", 1))
74 self.stripecount = int(self.config("web", "stripes", 1))
75 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
75 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
76 self.maxfiles = int(self.config("web", "maxfiles", 10))
76 self.maxfiles = int(self.config("web", "maxfiles", 10))
77 self.allowpull = self.configbool("web", "allowpull", True)
77 self.allowpull = self.configbool("web", "allowpull", True)
78 self.encoding = self.config("web", "encoding", util._encoding)
78 self.encoding = self.config("web", "encoding", util._encoding)
79 self._capabilities = None
79 self._capabilities = None
80
80
81 def capabilities(self):
81 def capabilities(self):
82 if self._capabilities is not None:
82 if self._capabilities is not None:
83 return self._capabilities
83 return self._capabilities
84 caps = ['lookup', 'changegroupsubset']
84 caps = ['lookup', 'changegroupsubset']
85 if self.configbool('server', 'uncompressed'):
85 if self.configbool('server', 'uncompressed'):
86 caps.append('stream=%d' % self.repo.changelog.version)
86 caps.append('stream=%d' % self.repo.changelog.version)
87 if changegroup.bundlepriority:
87 if changegroup.bundlepriority:
88 caps.append('unbundle=%s' % ','.join(changegroup.bundlepriority))
88 caps.append('unbundle=%s' % ','.join(changegroup.bundlepriority))
89 self._capabilities = caps
89 self._capabilities = caps
90 return caps
90 return caps
91
91
92 def run(self):
92 def run(self):
93 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
93 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
94 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
94 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
95 import mercurial.hgweb.wsgicgi as wsgicgi
95 import mercurial.hgweb.wsgicgi as wsgicgi
96 wsgicgi.launch(self)
96 wsgicgi.launch(self)
97
97
98 def __call__(self, env, respond):
98 def __call__(self, env, respond):
99 req = wsgirequest(env, respond)
99 req = wsgirequest(env, respond)
100 self.run_wsgi(req)
100 self.run_wsgi(req)
101 return req
101 return req
102
102
103 def run_wsgi(self, req):
103 def run_wsgi(self, req):
104
104
105 self.refresh()
105 self.refresh()
106
106
107 # expand form shortcuts
107 # expand form shortcuts
108
108
109 for k in shortcuts.iterkeys():
109 for k in shortcuts.iterkeys():
110 if k in req.form:
110 if k in req.form:
111 for name, value in shortcuts[k]:
111 for name, value in shortcuts[k]:
112 if value is None:
112 if value is None:
113 value = req.form[k]
113 value = req.form[k]
114 req.form[name] = value
114 req.form[name] = value
115 del req.form[k]
115 del req.form[k]
116
116
117 # work with CGI variables to create coherent structure
117 # work with CGI variables to create coherent structure
118 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
118 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
119
119
120 req.url = req.env['SCRIPT_NAME']
120 req.url = req.env['SCRIPT_NAME']
121 if not req.url.endswith('/'):
121 if not req.url.endswith('/'):
122 req.url += '/'
122 req.url += '/'
123 if 'REPO_NAME' in req.env:
123 if 'REPO_NAME' in req.env:
124 req.url += req.env['REPO_NAME'] + '/'
124 req.url += req.env['REPO_NAME'] + '/'
125
125
126 if 'PATH_INFO' in req.env:
126 if 'PATH_INFO' in req.env:
127 parts = req.env['PATH_INFO'].strip('/').split('/')
127 parts = req.env['PATH_INFO'].strip('/').split('/')
128 repo_parts = req.env.get('REPO_NAME', '').split('/')
128 repo_parts = req.env.get('REPO_NAME', '').split('/')
129 if parts[:len(repo_parts)] == repo_parts:
129 if parts[:len(repo_parts)] == repo_parts:
130 parts = parts[len(repo_parts):]
130 parts = parts[len(repo_parts):]
131 query = '/'.join(parts)
131 query = '/'.join(parts)
132 else:
132 else:
133 query = req.env['QUERY_STRING'].split('&', 1)[0]
133 query = req.env['QUERY_STRING'].split('&', 1)[0]
134 query = query.split(';', 1)[0]
134 query = query.split(';', 1)[0]
135
135
136 # translate user-visible url structure to internal structure
136 # translate user-visible url structure to internal structure
137
137
138 args = query.split('/', 2)
138 args = query.split('/', 2)
139 if 'cmd' not in req.form and args and args[0]:
139 if 'cmd' not in req.form and args and args[0]:
140
140
141 cmd = args.pop(0)
141 cmd = args.pop(0)
142 style = cmd.rfind('-')
142 style = cmd.rfind('-')
143 if style != -1:
143 if style != -1:
144 req.form['style'] = [cmd[:style]]
144 req.form['style'] = [cmd[:style]]
145 cmd = cmd[style+1:]
145 cmd = cmd[style+1:]
146
146
147 # avoid accepting e.g. style parameter as command
147 # avoid accepting e.g. style parameter as command
148 if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
148 if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
149 req.form['cmd'] = [cmd]
149 req.form['cmd'] = [cmd]
150
150
151 if args and args[0]:
151 if args and args[0]:
152 node = args.pop(0)
152 node = args.pop(0)
153 req.form['node'] = [node]
153 req.form['node'] = [node]
154 if args:
154 if args:
155 req.form['file'] = args
155 req.form['file'] = args
156
156
157 if cmd == 'static':
157 if cmd == 'static':
158 req.form['file'] = req.form['node']
158 req.form['file'] = req.form['node']
159 elif cmd == 'archive':
159 elif cmd == 'archive':
160 fn = req.form['node'][0]
160 fn = req.form['node'][0]
161 for type_, spec in self.archive_specs.iteritems():
161 for type_, spec in self.archive_specs.iteritems():
162 ext = spec[2]
162 ext = spec[2]
163 if fn.endswith(ext):
163 if fn.endswith(ext):
164 req.form['node'] = [fn[:-len(ext)]]
164 req.form['node'] = [fn[:-len(ext)]]
165 req.form['type'] = [type_]
165 req.form['type'] = [type_]
166
166
167 # process this if it's a protocol request
167 # process this if it's a protocol request
168
168
169 cmd = req.form.get('cmd', [''])[0]
169 cmd = req.form.get('cmd', [''])[0]
170 if cmd in protocol.__all__:
170 if cmd in protocol.__all__:
171 method = getattr(protocol, cmd)
171 method = getattr(protocol, cmd)
172 method(self, req)
172 method(self, req)
173 return
173 return
174
174
175 # process the web interface request
175 # process the web interface request
176
176
177 try:
177 try:
178
178
179 tmpl = self.templater(req)
179 tmpl = self.templater(req)
180 ctype = tmpl('mimetype', encoding=self.encoding)
180 ctype = tmpl('mimetype', encoding=self.encoding)
181 ctype = templater.stringify(ctype)
181 ctype = templater.stringify(ctype)
182
182
183 if cmd == '':
183 if cmd == '':
184 req.form['cmd'] = [tmpl.cache['default']]
184 req.form['cmd'] = [tmpl.cache['default']]
185 cmd = req.form['cmd'][0]
185 cmd = req.form['cmd'][0]
186
186
187 if cmd not in webcommands.__all__:
187 if cmd not in webcommands.__all__:
188 msg = 'no such method: %s' % cmd
188 msg = 'no such method: %s' % cmd
189 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
189 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
190 elif cmd == 'file' and 'raw' in req.form.get('style', []):
190 elif cmd == 'file' and 'raw' in req.form.get('style', []):
191 self.ctype = ctype
191 self.ctype = ctype
192 content = webcommands.rawfile(self, req, tmpl)
192 content = webcommands.rawfile(self, req, tmpl)
193 else:
193 else:
194 content = getattr(webcommands, cmd)(self, req, tmpl)
194 content = getattr(webcommands, cmd)(self, req, tmpl)
195 req.respond(HTTP_OK, ctype)
195 req.respond(HTTP_OK, ctype)
196
196
197 req.write(content)
197 req.write(content)
198 del tmpl
198 del tmpl
199
199
200 except revlog.LookupError, err:
200 except revlog.LookupError, err:
201 req.respond(HTTP_NOT_FOUND, ctype)
201 req.respond(HTTP_NOT_FOUND, ctype)
202 msg = str(err)
202 msg = str(err)
203 if 'manifest' not in msg:
203 if 'manifest' not in msg:
204 msg = 'revision not found: %s' % err.name
204 msg = 'revision not found: %s' % err.name
205 req.write(tmpl('error', error=msg))
205 req.write(tmpl('error', error=msg))
206 except (RepoError, revlog.RevlogError), inst:
206 except (RepoError, revlog.RevlogError), inst:
207 req.respond(HTTP_SERVER_ERROR, ctype)
207 req.respond(HTTP_SERVER_ERROR, ctype)
208 req.write(tmpl('error', error=str(inst)))
208 req.write(tmpl('error', error=str(inst)))
209 except ErrorResponse, inst:
209 except ErrorResponse, inst:
210 req.respond(inst.code, ctype)
210 req.respond(inst.code, ctype)
211 req.write(tmpl('error', error=inst.message))
211 req.write(tmpl('error', error=inst.message))
212
212
213 def templater(self, req):
213 def templater(self, req):
214
214
215 # determine scheme, port and server name
215 # determine scheme, port and server name
216 # this is needed to create absolute urls
216 # this is needed to create absolute urls
217
217
218 proto = req.env.get('wsgi.url_scheme')
218 proto = req.env.get('wsgi.url_scheme')
219 if proto == 'https':
219 if proto == 'https':
220 proto = 'https'
220 proto = 'https'
221 default_port = "443"
221 default_port = "443"
222 else:
222 else:
223 proto = 'http'
223 proto = 'http'
224 default_port = "80"
224 default_port = "80"
225
225
226 port = req.env["SERVER_PORT"]
226 port = req.env["SERVER_PORT"]
227 port = port != default_port and (":" + port) or ""
227 port = port != default_port and (":" + port) or ""
228 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
228 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
229 staticurl = self.config("web", "staticurl") or req.url + 'static/'
229 staticurl = self.config("web", "staticurl") or req.url + 'static/'
230 if not staticurl.endswith('/'):
230 if not staticurl.endswith('/'):
231 staticurl += '/'
231 staticurl += '/'
232
232
233 # some functions for the templater
233 # some functions for the templater
234
234
235 def header(**map):
235 def header(**map):
236 yield tmpl('header', encoding=self.encoding, **map)
236 yield tmpl('header', encoding=self.encoding, **map)
237
237
238 def footer(**map):
238 def footer(**map):
239 yield tmpl("footer", **map)
239 yield tmpl("footer", **map)
240
240
241 def motd(**map):
241 def motd(**map):
242 yield self.config("web", "motd", "")
242 yield self.config("web", "motd", "")
243
243
244 def sessionvars(**map):
244 def sessionvars(**map):
245 fields = []
245 fields = []
246 if 'style' in req.form:
246 if 'style' in req.form:
247 style = req.form['style'][0]
247 style = req.form['style'][0]
248 if style != self.config('web', 'style', ''):
248 if style != self.config('web', 'style', ''):
249 fields.append(('style', style))
249 fields.append(('style', style))
250
250
251 separator = req.url[-1] == '?' and ';' or '?'
251 separator = req.url[-1] == '?' and ';' or '?'
252 for name, value in fields:
252 for name, value in fields:
253 yield dict(name=name, value=value, separator=separator)
253 yield dict(name=name, value=value, separator=separator)
254 separator = ';'
254 separator = ';'
255
255
256 # figure out which style to use
256 # figure out which style to use
257
257
258 style = self.config("web", "style", "")
258 style = self.config("web", "style", "")
259 if 'style' in req.form:
259 if 'style' in req.form:
260 style = req.form['style'][0]
260 style = req.form['style'][0]
261 mapfile = style_map(self.templatepath, style)
261 mapfile = style_map(self.templatepath, style)
262
262
263 if not self.reponame:
263 if not self.reponame:
264 self.reponame = (self.config("web", "name")
264 self.reponame = (self.config("web", "name")
265 or req.env.get('REPO_NAME')
265 or req.env.get('REPO_NAME')
266 or req.url.strip('/') or self.repo.root)
266 or req.url.strip('/') or self.repo.root)
267
267
268 # create the templater
268 # create the templater
269
269
270 tmpl = templater.templater(mapfile, templatefilters.filters,
270 tmpl = templater.templater(mapfile, templatefilters.filters,
271 defaults={"url": req.url,
271 defaults={"url": req.url,
272 "staticurl": staticurl,
272 "staticurl": staticurl,
273 "urlbase": urlbase,
273 "urlbase": urlbase,
274 "repo": self.reponame,
274 "repo": self.reponame,
275 "header": header,
275 "header": header,
276 "footer": footer,
276 "footer": footer,
277 "motd": motd,
277 "motd": motd,
278 "sessionvars": sessionvars
278 "sessionvars": sessionvars
279 })
279 })
280 return tmpl
280 return tmpl
281
281
282 def archivelist(self, nodeid):
282 def archivelist(self, nodeid):
283 allowed = self.configlist("web", "allow_archive")
283 allowed = self.configlist("web", "allow_archive")
284 for i, spec in self.archive_specs.iteritems():
284 for i, spec in self.archive_specs.iteritems():
285 if i in allowed or self.configbool("web", "allow" + i):
285 if i in allowed or self.configbool("web", "allow" + i):
286 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
286 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
287
287
288 def listfilediffs(self, tmpl, files, changeset):
288 def listfilediffs(self, tmpl, files, changeset):
289 for f in files[:self.maxfiles]:
289 for f in files[:self.maxfiles]:
290 yield tmpl("filedifflink", node=hex(changeset), file=f)
290 yield tmpl("filedifflink", node=hex(changeset), file=f)
291 if len(files) > self.maxfiles:
291 if len(files) > self.maxfiles:
292 yield tmpl("fileellipses")
292 yield tmpl("fileellipses")
293
293
294 def diff(self, tmpl, node1, node2, files):
294 def diff(self, tmpl, node1, node2, files):
295 def filterfiles(filters, files):
295 def filterfiles(filters, files):
296 l = [x for x in files if x in filters]
296 l = [x for x in files if x in filters]
297
297
298 for t in filters:
298 for t in filters:
299 if t and t[-1] != os.sep:
299 if t and t[-1] != os.sep:
300 t += os.sep
300 t += os.sep
301 l += [x for x in files if x.startswith(t)]
301 l += [x for x in files if x.startswith(t)]
302 return l
302 return l
303
303
304 parity = paritygen(self.stripecount)
304 parity = paritygen(self.stripecount)
305 def diffblock(diff, f, fn):
305 def diffblock(diff, f, fn):
306 yield tmpl("diffblock",
306 yield tmpl("diffblock",
307 lines=prettyprintlines(diff),
307 lines=prettyprintlines(diff),
308 parity=parity.next(),
308 parity=parity.next(),
309 file=f,
309 file=f,
310 filenode=hex(fn or nullid))
310 filenode=hex(fn or nullid))
311
311
312 blockcount = countgen()
312 blockcount = countgen()
313 def prettyprintlines(diff):
313 def prettyprintlines(diff):
314 blockno = blockcount.next()
314 blockno = blockcount.next()
315 for lineno, l in enumerate(diff.splitlines(1)):
315 for lineno, l in enumerate(diff.splitlines(1)):
316 if blockno == 0:
316 if blockno == 0:
317 lineno = lineno + 1
317 lineno = lineno + 1
318 else:
318 else:
319 lineno = "%d.%d" % (blockno, lineno + 1)
319 lineno = "%d.%d" % (blockno, lineno + 1)
320 if l.startswith('+'):
320 if l.startswith('+'):
321 ltype = "difflineplus"
321 ltype = "difflineplus"
322 elif l.startswith('-'):
322 elif l.startswith('-'):
323 ltype = "difflineminus"
323 ltype = "difflineminus"
324 elif l.startswith('@'):
324 elif l.startswith('@'):
325 ltype = "difflineat"
325 ltype = "difflineat"
326 else:
326 else:
327 ltype = "diffline"
327 ltype = "diffline"
328 yield tmpl(ltype,
328 yield tmpl(ltype,
329 line=l,
329 line=l,
330 lineid="l%s" % lineno,
330 lineid="l%s" % lineno,
331 linenumber="% 8s" % lineno)
331 linenumber="% 8s" % lineno)
332
332
333 r = self.repo
333 r = self.repo
334 - c1 = r.changectx(node1)
334 + c1 = r[node1]
335 - c2 = r.changectx(node2)
335 + c2 = r[node2]
336 date1 = util.datestr(c1.date())
336 date1 = util.datestr(c1.date())
337 date2 = util.datestr(c2.date())
337 date2 = util.datestr(c2.date())
338
338
339 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
339 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
340 if files:
340 if files:
341 modified, added, removed = map(lambda x: filterfiles(files, x),
341 modified, added, removed = map(lambda x: filterfiles(files, x),
342 (modified, added, removed))
342 (modified, added, removed))
343
343
344 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
344 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
345 for f in modified:
345 for f in modified:
346 to = c1.filectx(f).data()
346 to = c1.filectx(f).data()
347 tn = c2.filectx(f).data()
347 tn = c2.filectx(f).data()
348 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
348 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
349 opts=diffopts), f, tn)
349 opts=diffopts), f, tn)
350 for f in added:
350 for f in added:
351 to = None
351 to = None
352 tn = c2.filectx(f).data()
352 tn = c2.filectx(f).data()
353 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
353 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
354 opts=diffopts), f, tn)
354 opts=diffopts), f, tn)
355 for f in removed:
355 for f in removed:
356 to = c1.filectx(f).data()
356 to = c1.filectx(f).data()
357 tn = None
357 tn = None
358 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
358 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
359 opts=diffopts), f, tn)
359 opts=diffopts), f, tn)
360
360
361 archive_specs = {
361 archive_specs = {
362 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
362 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
363 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
363 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
364 'zip': ('application/zip', 'zip', '.zip', None),
364 'zip': ('application/zip', 'zip', '.zip', None),
365 }
365 }
366
366
367 def check_perm(self, req, op, default):
367 def check_perm(self, req, op, default):
368 '''check permission for operation based on user auth.
368 '''check permission for operation based on user auth.
369 return true if op allowed, else false.
369 return true if op allowed, else false.
370 default is policy to use if no config given.'''
370 default is policy to use if no config given.'''
371
371
372 user = req.env.get('REMOTE_USER')
372 user = req.env.get('REMOTE_USER')
373
373
374 deny = self.configlist('web', 'deny_' + op)
374 deny = self.configlist('web', 'deny_' + op)
375 if deny and (not user or deny == ['*'] or user in deny):
375 if deny and (not user or deny == ['*'] or user in deny):
376 return False
376 return False
377
377
378 allow = self.configlist('web', 'allow_' + op)
378 allow = self.configlist('web', 'allow_' + op)
379 return (allow and (allow == ['*'] or user in allow)) or default
379 return (allow and (allow == ['*'] or user in allow)) or default
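
The hgweb class above is usable both as a CGI handler (run) and as a WSGI application (__call__). A hedged, hgweb.cgi-style launcher follows; the repository path and name are hypothetical.

#!/usr/bin/env python
from mercurial.hgweb import hgweb_mod, wsgicgi

# read/push permissions come from web.allow_<op> / web.deny_<op> in the
# repository hgrc and are enforced by check_perm() against REMOTE_USER
application = hgweb_mod.hgweb('/path/to/repo', name='myrepo')  # hypothetical
wsgicgi.launch(application)
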
@@ -1,613 +1,612 @@
1 #
1 #
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, mimetypes, re, cgi
8 import os, mimetypes, re, cgi
9 import webutil
9 import webutil
10 from mercurial import revlog, archival, templatefilters
10 from mercurial import revlog, archival, templatefilters
11 from mercurial.node import short, hex, nullid
11 from mercurial.node import short, hex, nullid
12 from mercurial.util import binary, datestr
12 from mercurial.util import binary, datestr
13 from mercurial.repo import RepoError
13 from mercurial.repo import RepoError
14 from common import paritygen, staticfile, get_contact, ErrorResponse
14 from common import paritygen, staticfile, get_contact, ErrorResponse
15 from common import HTTP_OK, HTTP_NOT_FOUND
15 from common import HTTP_OK, HTTP_NOT_FOUND
16 from mercurial import graphmod
16 from mercurial import graphmod
17
17
18 # __all__ is populated with the allowed commands. Be sure to add to it if
18 # __all__ is populated with the allowed commands. Be sure to add to it if
19 # you're adding a new command, or the new command won't work.
19 # you're adding a new command, or the new command won't work.
20
20
21 __all__ = [
21 __all__ = [
22 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
22 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
23 'manifest', 'tags', 'summary', 'filediff', 'diff', 'annotate', 'filelog',
23 'manifest', 'tags', 'summary', 'filediff', 'diff', 'annotate', 'filelog',
24 'archive', 'static', 'graph',
24 'archive', 'static', 'graph',
25 ]
25 ]
26
26
27 def log(web, req, tmpl):
27 def log(web, req, tmpl):
28 if 'file' in req.form and req.form['file'][0]:
28 if 'file' in req.form and req.form['file'][0]:
29 return filelog(web, req, tmpl)
29 return filelog(web, req, tmpl)
30 else:
30 else:
31 return changelog(web, req, tmpl)
31 return changelog(web, req, tmpl)
32
32
33 def rawfile(web, req, tmpl):
33 def rawfile(web, req, tmpl):
34 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
34 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
35 if not path:
35 if not path:
36 content = manifest(web, req, tmpl)
36 content = manifest(web, req, tmpl)
37 req.respond(HTTP_OK, web.ctype)
37 req.respond(HTTP_OK, web.ctype)
38 return content
38 return content
39
39
40 try:
40 try:
41 fctx = webutil.filectx(web.repo, req)
41 fctx = webutil.filectx(web.repo, req)
42 except revlog.LookupError, inst:
42 except revlog.LookupError, inst:
43 try:
43 try:
44 content = manifest(web, req, tmpl)
44 content = manifest(web, req, tmpl)
45 req.respond(HTTP_OK, web.ctype)
45 req.respond(HTTP_OK, web.ctype)
46 return content
46 return content
47 except ErrorResponse:
47 except ErrorResponse:
48 raise inst
48 raise inst
49
49
50 path = fctx.path()
50 path = fctx.path()
51 text = fctx.data()
51 text = fctx.data()
52 mt = mimetypes.guess_type(path)[0]
52 mt = mimetypes.guess_type(path)[0]
53 if mt is None or binary(text):
53 if mt is None or binary(text):
54 mt = mt or 'application/octet-stream'
54 mt = mt or 'application/octet-stream'
55
55
56 req.respond(HTTP_OK, mt, path, len(text))
56 req.respond(HTTP_OK, mt, path, len(text))
57 return [text]
57 return [text]
58
58
59 def _filerevision(web, tmpl, fctx):
59 def _filerevision(web, tmpl, fctx):
60 f = fctx.path()
60 f = fctx.path()
61 text = fctx.data()
61 text = fctx.data()
62 fl = fctx.filelog()
62 fl = fctx.filelog()
63 n = fctx.filenode()
63 n = fctx.filenode()
64 parity = paritygen(web.stripecount)
64 parity = paritygen(web.stripecount)
65
65
66 if binary(text):
66 if binary(text):
67 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
67 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
68 text = '(binary:%s)' % mt
68 text = '(binary:%s)' % mt
69
69
70 def lines():
70 def lines():
71 for lineno, t in enumerate(text.splitlines(1)):
71 for lineno, t in enumerate(text.splitlines(1)):
72 yield {"line": t,
72 yield {"line": t,
73 "lineid": "l%d" % (lineno + 1),
73 "lineid": "l%d" % (lineno + 1),
74 "linenumber": "% 6d" % (lineno + 1),
74 "linenumber": "% 6d" % (lineno + 1),
75 "parity": parity.next()}
75 "parity": parity.next()}
76
76
77 return tmpl("filerevision",
77 return tmpl("filerevision",
78 file=f,
78 file=f,
79 path=webutil.up(f),
79 path=webutil.up(f),
80 text=lines(),
80 text=lines(),
81 rev=fctx.rev(),
81 rev=fctx.rev(),
82 node=hex(fctx.node()),
82 node=hex(fctx.node()),
83 author=fctx.user(),
83 author=fctx.user(),
84 date=fctx.date(),
84 date=fctx.date(),
85 desc=fctx.description(),
85 desc=fctx.description(),
86 branch=webutil.nodebranchnodefault(fctx),
86 branch=webutil.nodebranchnodefault(fctx),
87 parent=webutil.siblings(fctx.parents()),
87 parent=webutil.siblings(fctx.parents()),
88 child=webutil.siblings(fctx.children()),
88 child=webutil.siblings(fctx.children()),
89 rename=webutil.renamelink(fctx),
89 rename=webutil.renamelink(fctx),
90 permissions=fctx.manifest().flags(f))
90 permissions=fctx.manifest().flags(f))
91
91
92 def file(web, req, tmpl):
92 def file(web, req, tmpl):
93 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
93 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
94 if path:
94 if path:
95 try:
95 try:
96 return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
96 return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
97 except revlog.LookupError, inst:
97 except revlog.LookupError, inst:
98 pass
98 pass
99
99
100 try:
100 try:
101 return manifest(web, req, tmpl)
101 return manifest(web, req, tmpl)
102 except ErrorResponse:
102 except ErrorResponse:
103 raise inst
103 raise inst
104
104
105 def _search(web, tmpl, query):
105 def _search(web, tmpl, query):
106
106
107 def changelist(**map):
107 def changelist(**map):
108 cl = web.repo.changelog
108 cl = web.repo.changelog
109 count = 0
109 count = 0
110 qw = query.lower().split()
110 qw = query.lower().split()
111
111
112 def revgen():
112 def revgen():
113 for i in xrange(cl.count() - 1, 0, -100):
113 for i in xrange(cl.count() - 1, 0, -100):
114 l = []
114 l = []
115 for j in xrange(max(0, i - 100), i + 1):
115 for j in xrange(max(0, i - 100), i + 1):
116 ctx = web.repo.changectx(j)
116 ctx = web.repo[j]
117 l.append(ctx)
117 l.append(ctx)
118 l.reverse()
118 l.reverse()
119 for e in l:
119 for e in l:
120 yield e
120 yield e
121
121
122 for ctx in revgen():
122 for ctx in revgen():
123 miss = 0
123 miss = 0
124 for q in qw:
124 for q in qw:
125 if not (q in ctx.user().lower() or
125 if not (q in ctx.user().lower() or
126 q in ctx.description().lower() or
126 q in ctx.description().lower() or
127 q in " ".join(ctx.files()).lower()):
127 q in " ".join(ctx.files()).lower()):
128 miss = 1
128 miss = 1
129 break
129 break
130 if miss:
130 if miss:
131 continue
131 continue
132
132
133 count += 1
133 count += 1
134 n = ctx.node()
134 n = ctx.node()
135 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
135 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
136
136
137 yield tmpl('searchentry',
137 yield tmpl('searchentry',
138 parity=parity.next(),
138 parity=parity.next(),
139 author=ctx.user(),
139 author=ctx.user(),
140 parent=webutil.siblings(ctx.parents()),
140 parent=webutil.siblings(ctx.parents()),
141 child=webutil.siblings(ctx.children()),
141 child=webutil.siblings(ctx.children()),
142 changelogtag=showtags,
142 changelogtag=showtags,
143 desc=ctx.description(),
143 desc=ctx.description(),
144 date=ctx.date(),
144 date=ctx.date(),
145 files=web.listfilediffs(tmpl, ctx.files(), n),
145 files=web.listfilediffs(tmpl, ctx.files(), n),
146 rev=ctx.rev(),
146 rev=ctx.rev(),
147 node=hex(n),
147 node=hex(n),
148 tags=webutil.nodetagsdict(web.repo, n),
148 tags=webutil.nodetagsdict(web.repo, n),
149 inbranch=webutil.nodeinbranch(web.repo, ctx),
149 inbranch=webutil.nodeinbranch(web.repo, ctx),
150 branches=webutil.nodebranchdict(web.repo, ctx))
150 branches=webutil.nodebranchdict(web.repo, ctx))
151
151
152 if count >= web.maxchanges:
152 if count >= web.maxchanges:
153 break
153 break
154
154
155 cl = web.repo.changelog
155 cl = web.repo.changelog
156 parity = paritygen(web.stripecount)
156 parity = paritygen(web.stripecount)
157
157
158 return tmpl('search',
158 return tmpl('search',
159 query=query,
159 query=query,
160 node=hex(cl.tip()),
160 node=hex(cl.tip()),
161 entries=changelist,
161 entries=changelist,
162 archives=web.archivelist("tip"))
162 archives=web.archivelist("tip"))
163
163
164 def changelog(web, req, tmpl, shortlog = False):
164 def changelog(web, req, tmpl, shortlog = False):
165 if 'node' in req.form:
165 if 'node' in req.form:
166 ctx = webutil.changectx(web.repo, req)
166 ctx = webutil.changectx(web.repo, req)
167 else:
167 else:
168 if 'rev' in req.form:
168 if 'rev' in req.form:
169 hi = req.form['rev'][0]
169 hi = req.form['rev'][0]
170 else:
170 else:
171 hi = web.repo.changelog.count() - 1
171 hi = web.repo.changelog.count() - 1
172 try:
172 try:
173 ctx = web.repo.changectx(hi)
173 ctx = web.repo[hi]
174 except RepoError:
174 except RepoError:
175 return _search(web, tmpl, hi) # XXX redirect to 404 page?
175 return _search(web, tmpl, hi) # XXX redirect to 404 page?
176
176
177 def changelist(limit=0, **map):
177 def changelist(limit=0, **map):
178 cl = web.repo.changelog
178 cl = web.repo.changelog
179 l = [] # build a list in forward order for efficiency
179 l = [] # build a list in forward order for efficiency
180 for i in xrange(start, end):
180 for i in xrange(start, end):
181 ctx = web.repo.changectx(i)
181 ctx = web.repo[i]
182 n = ctx.node()
182 n = ctx.node()
183 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
183 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
184
184
185 l.insert(0, {"parity": parity.next(),
185 l.insert(0, {"parity": parity.next(),
186 "author": ctx.user(),
186 "author": ctx.user(),
187 "parent": webutil.siblings(ctx.parents(), i - 1),
187 "parent": webutil.siblings(ctx.parents(), i - 1),
188 "child": webutil.siblings(ctx.children(), i + 1),
188 "child": webutil.siblings(ctx.children(), i + 1),
189 "changelogtag": showtags,
189 "changelogtag": showtags,
190 "desc": ctx.description(),
190 "desc": ctx.description(),
191 "date": ctx.date(),
191 "date": ctx.date(),
192 "files": web.listfilediffs(tmpl, ctx.files(), n),
192 "files": web.listfilediffs(tmpl, ctx.files(), n),
193 "rev": i,
193 "rev": i,
194 "node": hex(n),
194 "node": hex(n),
195 "tags": webutil.nodetagsdict(web.repo, n),
195 "tags": webutil.nodetagsdict(web.repo, n),
196 "inbranch": webutil.nodeinbranch(web.repo, ctx),
196 "inbranch": webutil.nodeinbranch(web.repo, ctx),
197 "branches": webutil.nodebranchdict(web.repo, ctx)
197 "branches": webutil.nodebranchdict(web.repo, ctx)
198 })
198 })
199
199
200 if limit > 0:
200 if limit > 0:
201 l = l[:limit]
201 l = l[:limit]
202
202
203 for e in l:
203 for e in l:
204 yield e
204 yield e
205
205
206 maxchanges = shortlog and web.maxshortchanges or web.maxchanges
206 maxchanges = shortlog and web.maxshortchanges or web.maxchanges
207 cl = web.repo.changelog
207 cl = web.repo.changelog
208 count = cl.count()
208 count = cl.count()
209 pos = ctx.rev()
209 pos = ctx.rev()
210 start = max(0, pos - maxchanges + 1)
210 start = max(0, pos - maxchanges + 1)
211 end = min(count, start + maxchanges)
211 end = min(count, start + maxchanges)
212 pos = end - 1
212 pos = end - 1
213 parity = paritygen(web.stripecount, offset=start-end)
213 parity = paritygen(web.stripecount, offset=start-end)
214
214
215 changenav = webutil.revnavgen(pos, maxchanges, count, web.repo.changectx)
215 changenav = webutil.revnavgen(pos, maxchanges, count, web.repo.changectx)
216
216
217 return tmpl(shortlog and 'shortlog' or 'changelog',
217 return tmpl(shortlog and 'shortlog' or 'changelog',
218 changenav=changenav,
218 changenav=changenav,
219 node=hex(ctx.node()),
219 node=hex(ctx.node()),
220 rev=pos, changesets=count,
220 rev=pos, changesets=count,
221 entries=lambda **x: changelist(limit=0,**x),
221 entries=lambda **x: changelist(limit=0,**x),
222 latestentry=lambda **x: changelist(limit=1,**x),
222 latestentry=lambda **x: changelist(limit=1,**x),
223 archives=web.archivelist("tip"))
223 archives=web.archivelist("tip"))
224
224
225 def shortlog(web, req, tmpl):
225 def shortlog(web, req, tmpl):
226 return changelog(web, req, tmpl, shortlog = True)
226 return changelog(web, req, tmpl, shortlog = True)
227
227
228 def changeset(web, req, tmpl):
228 def changeset(web, req, tmpl):
229 ctx = webutil.changectx(web.repo, req)
229 ctx = webutil.changectx(web.repo, req)
230 n = ctx.node()
230 n = ctx.node()
231 showtags = webutil.showtag(web.repo, tmpl, 'changesettag', n)
231 showtags = webutil.showtag(web.repo, tmpl, 'changesettag', n)
232 parents = ctx.parents()
232 parents = ctx.parents()
233 p1 = parents[0].node()
233 p1 = parents[0].node()
234
234
235 files = []
235 files = []
236 parity = paritygen(web.stripecount)
236 parity = paritygen(web.stripecount)
237 for f in ctx.files():
237 for f in ctx.files():
238 files.append(tmpl("filenodelink",
238 files.append(tmpl("filenodelink",
239 node=hex(n), file=f,
239 node=hex(n), file=f,
240 parity=parity.next()))
240 parity=parity.next()))
241
241
242 diffs = web.diff(tmpl, p1, n, None)
242 diffs = web.diff(tmpl, p1, n, None)
243 return tmpl('changeset',
243 return tmpl('changeset',
244 diff=diffs,
244 diff=diffs,
245 rev=ctx.rev(),
245 rev=ctx.rev(),
246 node=hex(n),
246 node=hex(n),
247 parent=webutil.siblings(parents),
247 parent=webutil.siblings(parents),
248 child=webutil.siblings(ctx.children()),
248 child=webutil.siblings(ctx.children()),
249 changesettag=showtags,
249 changesettag=showtags,
250 author=ctx.user(),
250 author=ctx.user(),
251 desc=ctx.description(),
251 desc=ctx.description(),
252 date=ctx.date(),
252 date=ctx.date(),
253 files=files,
253 files=files,
254 archives=web.archivelist(hex(n)),
254 archives=web.archivelist(hex(n)),
255 tags=webutil.nodetagsdict(web.repo, n),
255 tags=webutil.nodetagsdict(web.repo, n),
256 branch=webutil.nodebranchnodefault(ctx),
256 branch=webutil.nodebranchnodefault(ctx),
257 inbranch=webutil.nodeinbranch(web.repo, ctx),
257 inbranch=webutil.nodeinbranch(web.repo, ctx),
258 branches=webutil.nodebranchdict(web.repo, ctx))
258 branches=webutil.nodebranchdict(web.repo, ctx))
259
259
260 rev = changeset
260 rev = changeset
261
261
262 def manifest(web, req, tmpl):
262 def manifest(web, req, tmpl):
263 ctx = webutil.changectx(web.repo, req)
263 ctx = webutil.changectx(web.repo, req)
264 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
264 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
265 mf = ctx.manifest()
265 mf = ctx.manifest()
266 node = ctx.node()
266 node = ctx.node()
267
267
268 files = {}
268 files = {}
269 parity = paritygen(web.stripecount)
269 parity = paritygen(web.stripecount)
270
270
271 if path and path[-1] != "/":
271 if path and path[-1] != "/":
272 path += "/"
272 path += "/"
273 l = len(path)
273 l = len(path)
274 abspath = "/" + path
274 abspath = "/" + path
275
275
276 for f, n in mf.items():
276 for f, n in mf.items():
277 if f[:l] != path:
277 if f[:l] != path:
278 continue
278 continue
279 remain = f[l:]
279 remain = f[l:]
280 if "/" in remain:
280 if "/" in remain:
281 short = remain[:remain.index("/") + 1] # bleah
281 short = remain[:remain.index("/") + 1] # bleah
282 files[short] = (f, None)
282 files[short] = (f, None)
283 else:
283 else:
284 short = os.path.basename(remain)
284 short = os.path.basename(remain)
285 files[short] = (f, n)
285 files[short] = (f, n)
286
286
287 if not files:
287 if not files:
288 raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
288 raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
289
289
290 def filelist(**map):
290 def filelist(**map):
291 fl = files.keys()
291 fl = files.keys()
292 fl.sort()
292 fl.sort()
293 for f in fl:
293 for f in fl:
294 full, fnode = files[f]
294 full, fnode = files[f]
295 if not fnode:
295 if not fnode:
296 continue
296 continue
297
297
298 fctx = ctx.filectx(full)
298 fctx = ctx.filectx(full)
299 yield {"file": full,
299 yield {"file": full,
300 "parity": parity.next(),
300 "parity": parity.next(),
301 "basename": f,
301 "basename": f,
302 "date": fctx.changectx().date(),
302 "date": fctx.date(),
303 "size": fctx.size(),
303 "size": fctx.size(),
304 "permissions": mf.flags(full)}
304 "permissions": mf.flags(full)}
305
305
306 def dirlist(**map):
306 def dirlist(**map):
307 fl = files.keys()
307 fl = files.keys()
308 fl.sort()
308 fl.sort()
309 for f in fl:
309 for f in fl:
310 full, fnode = files[f]
310 full, fnode = files[f]
311 if fnode:
311 if fnode:
312 continue
312 continue
313
313
314 yield {"parity": parity.next(),
314 yield {"parity": parity.next(),
315 "path": "%s%s" % (abspath, f),
315 "path": "%s%s" % (abspath, f),
316 "basename": f[:-1]}
316 "basename": f[:-1]}
317
317
318 return tmpl("manifest",
318 return tmpl("manifest",
319 rev=ctx.rev(),
319 rev=ctx.rev(),
320 node=hex(node),
320 node=hex(node),
321 path=abspath,
321 path=abspath,
322 up=webutil.up(abspath),
322 up=webutil.up(abspath),
323 upparity=parity.next(),
323 upparity=parity.next(),
324 fentries=filelist,
324 fentries=filelist,
325 dentries=dirlist,
325 dentries=dirlist,
326 archives=web.archivelist(hex(node)),
326 archives=web.archivelist(hex(node)),
327 tags=webutil.nodetagsdict(web.repo, node),
327 tags=webutil.nodetagsdict(web.repo, node),
328 inbranch=webutil.nodeinbranch(web.repo, ctx),
328 inbranch=webutil.nodeinbranch(web.repo, ctx),
329 branches=webutil.nodebranchdict(web.repo, ctx))
329 branches=webutil.nodebranchdict(web.repo, ctx))
330
330
331 def tags(web, req, tmpl):
331 def tags(web, req, tmpl):
332 i = web.repo.tagslist()
332 i = web.repo.tagslist()
333 i.reverse()
333 i.reverse()
334 parity = paritygen(web.stripecount)
334 parity = paritygen(web.stripecount)
335
335
336 def entries(notip=False,limit=0, **map):
336 def entries(notip=False,limit=0, **map):
337 count = 0
337 count = 0
338 for k, n in i:
338 for k, n in i:
339 if notip and k == "tip":
339 if notip and k == "tip":
340 continue
340 continue
341 if limit > 0 and count >= limit:
341 if limit > 0 and count >= limit:
342 continue
342 continue
343 count = count + 1
343 count = count + 1
344 yield {"parity": parity.next(),
344 yield {"parity": parity.next(),
345 "tag": k,
345 "tag": k,
346 "date": web.repo.changectx(n).date(),
346 "date": web.repo[n].date(),
347 "node": hex(n)}
347 "node": hex(n)}
348
348
349 return tmpl("tags",
349 return tmpl("tags",
350 node=hex(web.repo.changelog.tip()),
350 node=hex(web.repo.changelog.tip()),
351 entries=lambda **x: entries(False,0, **x),
351 entries=lambda **x: entries(False,0, **x),
352 entriesnotip=lambda **x: entries(True,0, **x),
352 entriesnotip=lambda **x: entries(True,0, **x),
353 latestentry=lambda **x: entries(True,1, **x))
353 latestentry=lambda **x: entries(True,1, **x))
354
354
355 def summary(web, req, tmpl):
355 def summary(web, req, tmpl):
356 i = web.repo.tagslist()
356 i = web.repo.tagslist()
357 i.reverse()
357 i.reverse()
358
358
359 def tagentries(**map):
359 def tagentries(**map):
360 parity = paritygen(web.stripecount)
360 parity = paritygen(web.stripecount)
361 count = 0
361 count = 0
362 for k, n in i:
362 for k, n in i:
363 if k == "tip": # skip tip
363 if k == "tip": # skip tip
364 continue
364 continue
365
365
366 count += 1
366 count += 1
367 if count > 10: # limit to 10 tags
367 if count > 10: # limit to 10 tags
368 break
368 break
369
369
370 yield tmpl("tagentry",
370 yield tmpl("tagentry",
371 parity=parity.next(),
371 parity=parity.next(),
372 tag=k,
372 tag=k,
373 node=hex(n),
373 node=hex(n),
374 date=web.repo.changectx(n).date())
374 date=web.repo[n].date())
375
375
376 def branches(**map):
376 def branches(**map):
377 parity = paritygen(web.stripecount)
377 parity = paritygen(web.stripecount)
378
378
379 b = web.repo.branchtags()
379 b = web.repo.branchtags()
380 l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.items()]
380 l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.items()]
381 l.sort()
381 l.sort()
382
382
383 for r,n,t in l:
383 for r,n,t in l:
384 ctx = web.repo.changectx(n)
385 yield {'parity': parity.next(),
384 yield {'parity': parity.next(),
386 'branch': t,
385 'branch': t,
387 'node': hex(n),
386 'node': hex(n),
388 'date': ctx.date()}
387 'date': web.repo[n].date()}
389
388
390 def changelist(**map):
389 def changelist(**map):
391 parity = paritygen(web.stripecount, offset=start-end)
390 parity = paritygen(web.stripecount, offset=start-end)
392 l = [] # build a list in forward order for efficiency
391 l = [] # build a list in forward order for efficiency
393 for i in xrange(start, end):
392 for i in xrange(start, end):
394 ctx = web.repo.changectx(i)
393 ctx = web.repo[i]
395 n = ctx.node()
394 n = ctx.node()
396 hn = hex(n)
395 hn = hex(n)
397
396
398 l.insert(0, tmpl(
397 l.insert(0, tmpl(
399 'shortlogentry',
398 'shortlogentry',
400 parity=parity.next(),
399 parity=parity.next(),
401 author=ctx.user(),
400 author=ctx.user(),
402 desc=ctx.description(),
401 desc=ctx.description(),
403 date=ctx.date(),
402 date=ctx.date(),
404 rev=i,
403 rev=i,
405 node=hn,
404 node=hn,
406 tags=webutil.nodetagsdict(web.repo, n),
405 tags=webutil.nodetagsdict(web.repo, n),
407 inbranch=webutil.nodeinbranch(web.repo, ctx),
406 inbranch=webutil.nodeinbranch(web.repo, ctx),
408 branches=webutil.nodebranchdict(web.repo, ctx)))
407 branches=webutil.nodebranchdict(web.repo, ctx)))
409
408
410 yield l
409 yield l
411
410
412 cl = web.repo.changelog
411 cl = web.repo.changelog
413 count = cl.count()
412 count = cl.count()
414 start = max(0, count - web.maxchanges)
413 start = max(0, count - web.maxchanges)
415 end = min(count, start + web.maxchanges)
414 end = min(count, start + web.maxchanges)
416
415
417 return tmpl("summary",
416 return tmpl("summary",
418 desc=web.config("web", "description", "unknown"),
417 desc=web.config("web", "description", "unknown"),
419 owner=get_contact(web.config) or "unknown",
418 owner=get_contact(web.config) or "unknown",
420 lastchange=cl.read(cl.tip())[2],
419 lastchange=cl.read(cl.tip())[2],
421 tags=tagentries,
420 tags=tagentries,
422 branches=branches,
421 branches=branches,
423 shortlog=changelist,
422 shortlog=changelist,
424 node=hex(cl.tip()),
423 node=hex(cl.tip()),
425 archives=web.archivelist("tip"))
424 archives=web.archivelist("tip"))
426
425
427 def filediff(web, req, tmpl):
426 def filediff(web, req, tmpl):
428 fctx = webutil.filectx(web.repo, req)
427 fctx = webutil.filectx(web.repo, req)
429 n = fctx.node()
428 n = fctx.node()
430 path = fctx.path()
429 path = fctx.path()
431 parents = fctx.parents()
430 parents = fctx.parents()
432 p1 = parents and parents[0].node() or nullid
431 p1 = parents and parents[0].node() or nullid
433
432
434 diffs = web.diff(tmpl, p1, n, [path])
433 diffs = web.diff(tmpl, p1, n, [path])
435 return tmpl("filediff",
434 return tmpl("filediff",
436 file=path,
435 file=path,
437 node=hex(n),
436 node=hex(n),
438 rev=fctx.rev(),
437 rev=fctx.rev(),
439 date=fctx.date(),
438 date=fctx.date(),
440 desc=fctx.description(),
439 desc=fctx.description(),
441 author=fctx.user(),
440 author=fctx.user(),
442 rename=webutil.renamelink(fctx),
441 rename=webutil.renamelink(fctx),
443 branch=webutil.nodebranchnodefault(fctx),
442 branch=webutil.nodebranchnodefault(fctx),
444 parent=webutil.siblings(parents),
443 parent=webutil.siblings(parents),
445 child=webutil.siblings(fctx.children()),
444 child=webutil.siblings(fctx.children()),
446 diff=diffs)
445 diff=diffs)
447
446
448 diff = filediff
447 diff = filediff
449
448
450 def annotate(web, req, tmpl):
449 def annotate(web, req, tmpl):
451 fctx = webutil.filectx(web.repo, req)
450 fctx = webutil.filectx(web.repo, req)
452 f = fctx.path()
451 f = fctx.path()
453 n = fctx.filenode()
452 n = fctx.filenode()
454 fl = fctx.filelog()
453 fl = fctx.filelog()
455 parity = paritygen(web.stripecount)
454 parity = paritygen(web.stripecount)
456
455
457 def annotate(**map):
456 def annotate(**map):
458 last = None
457 last = None
459 if binary(fctx.data()):
458 if binary(fctx.data()):
460 mt = (mimetypes.guess_type(fctx.path())[0]
459 mt = (mimetypes.guess_type(fctx.path())[0]
461 or 'application/octet-stream')
460 or 'application/octet-stream')
462 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
461 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
463 '(binary:%s)' % mt)])
462 '(binary:%s)' % mt)])
464 else:
463 else:
465 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
464 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
466 for lineno, ((f, targetline), l) in lines:
465 for lineno, ((f, targetline), l) in lines:
467 fnode = f.filenode()
466 fnode = f.filenode()
468
467
469 if last != fnode:
468 if last != fnode:
470 last = fnode
469 last = fnode
471
470
472 yield {"parity": parity.next(),
471 yield {"parity": parity.next(),
473 "node": hex(f.node()),
472 "node": hex(f.node()),
474 "rev": f.rev(),
473 "rev": f.rev(),
475 "author": f.user(),
474 "author": f.user(),
476 "desc": f.description(),
475 "desc": f.description(),
477 "file": f.path(),
476 "file": f.path(),
478 "targetline": targetline,
477 "targetline": targetline,
479 "line": l,
478 "line": l,
480 "lineid": "l%d" % (lineno + 1),
479 "lineid": "l%d" % (lineno + 1),
481 "linenumber": "% 6d" % (lineno + 1)}
480 "linenumber": "% 6d" % (lineno + 1)}
482
481
483 return tmpl("fileannotate",
482 return tmpl("fileannotate",
484 file=f,
483 file=f,
485 annotate=annotate,
484 annotate=annotate,
486 path=webutil.up(f),
485 path=webutil.up(f),
487 rev=fctx.rev(),
486 rev=fctx.rev(),
488 node=hex(fctx.node()),
487 node=hex(fctx.node()),
489 author=fctx.user(),
488 author=fctx.user(),
490 date=fctx.date(),
489 date=fctx.date(),
491 desc=fctx.description(),
490 desc=fctx.description(),
492 rename=webutil.renamelink(fctx),
491 rename=webutil.renamelink(fctx),
493 branch=webutil.nodebranchnodefault(fctx),
492 branch=webutil.nodebranchnodefault(fctx),
494 parent=webutil.siblings(fctx.parents()),
493 parent=webutil.siblings(fctx.parents()),
495 child=webutil.siblings(fctx.children()),
494 child=webutil.siblings(fctx.children()),
496 permissions=fctx.manifest().flags(f))
495 permissions=fctx.manifest().flags(f))
497
496
498 def filelog(web, req, tmpl):
497 def filelog(web, req, tmpl):
499 fctx = webutil.filectx(web.repo, req)
498 fctx = webutil.filectx(web.repo, req)
500 f = fctx.path()
499 f = fctx.path()
501 fl = fctx.filelog()
500 fl = fctx.filelog()
502 count = fl.count()
501 count = fl.count()
503 pagelen = web.maxshortchanges
502 pagelen = web.maxshortchanges
504 pos = fctx.filerev()
503 pos = fctx.filerev()
505 start = max(0, pos - pagelen + 1)
504 start = max(0, pos - pagelen + 1)
506 end = min(count, start + pagelen)
505 end = min(count, start + pagelen)
507 pos = end - 1
506 pos = end - 1
508 parity = paritygen(web.stripecount, offset=start-end)
507 parity = paritygen(web.stripecount, offset=start-end)
509
508
510 def entries(limit=0, **map):
509 def entries(limit=0, **map):
511 l = []
510 l = []
512
511
513 for i in xrange(start, end):
512 for i in xrange(start, end):
514 ctx = fctx.filectx(i)
513 ctx = fctx.filectx(i)
515 n = fl.node(i)
514 n = fl.node(i)
516
515
517 l.insert(0, {"parity": parity.next(),
516 l.insert(0, {"parity": parity.next(),
518 "filerev": i,
517 "filerev": i,
519 "file": f,
518 "file": f,
520 "node": hex(ctx.node()),
519 "node": hex(ctx.node()),
521 "author": ctx.user(),
520 "author": ctx.user(),
522 "date": ctx.date(),
521 "date": ctx.date(),
523 "rename": webutil.renamelink(fctx),
522 "rename": webutil.renamelink(fctx),
524 "parent": webutil.siblings(fctx.parents()),
523 "parent": webutil.siblings(fctx.parents()),
525 "child": webutil.siblings(fctx.children()),
524 "child": webutil.siblings(fctx.children()),
526 "desc": ctx.description()})
525 "desc": ctx.description()})
527
526
528 if limit > 0:
527 if limit > 0:
529 l = l[:limit]
528 l = l[:limit]
530
529
531 for e in l:
530 for e in l:
532 yield e
531 yield e
533
532
534 nodefunc = lambda x: fctx.filectx(fileid=x)
533 nodefunc = lambda x: fctx.filectx(fileid=x)
535 nav = webutil.revnavgen(pos, pagelen, count, nodefunc)
534 nav = webutil.revnavgen(pos, pagelen, count, nodefunc)
536 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
535 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
537 entries=lambda **x: entries(limit=0, **x),
536 entries=lambda **x: entries(limit=0, **x),
538 latestentry=lambda **x: entries(limit=1, **x))
537 latestentry=lambda **x: entries(limit=1, **x))
539
538
540
539
541 def archive(web, req, tmpl):
540 def archive(web, req, tmpl):
542 type_ = req.form.get('type', [None])[0]
541 type_ = req.form.get('type', [None])[0]
543 allowed = web.configlist("web", "allow_archive")
542 allowed = web.configlist("web", "allow_archive")
544 key = req.form['node'][0]
543 key = req.form['node'][0]
545
544
546 if not (type_ in web.archives and (type_ in allowed or
545 if not (type_ in web.archives and (type_ in allowed or
547 web.configbool("web", "allow" + type_, False))):
546 web.configbool("web", "allow" + type_, False))):
548 msg = 'Unsupported archive type: %s' % type_
547 msg = 'Unsupported archive type: %s' % type_
549 raise ErrorResponse(HTTP_NOT_FOUND, msg)
548 raise ErrorResponse(HTTP_NOT_FOUND, msg)
550
549
551 reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
550 reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
552 cnode = web.repo.lookup(key)
551 cnode = web.repo.lookup(key)
553 arch_version = key
552 arch_version = key
554 if cnode == key or key == 'tip':
553 if cnode == key or key == 'tip':
555 arch_version = short(cnode)
554 arch_version = short(cnode)
556 name = "%s-%s" % (reponame, arch_version)
555 name = "%s-%s" % (reponame, arch_version)
557 mimetype, artype, extension, encoding = web.archive_specs[type_]
556 mimetype, artype, extension, encoding = web.archive_specs[type_]
558 headers = [
557 headers = [
559 ('Content-Type', mimetype),
558 ('Content-Type', mimetype),
560 ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
559 ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
561 ]
560 ]
562 if encoding:
561 if encoding:
563 headers.append(('Content-Encoding', encoding))
562 headers.append(('Content-Encoding', encoding))
564 req.header(headers)
563 req.header(headers)
565 req.respond(HTTP_OK)
564 req.respond(HTTP_OK)
566 archival.archive(web.repo, req, cnode, artype, prefix=name)
565 archival.archive(web.repo, req, cnode, artype, prefix=name)
567 return []
566 return []
568
567
569
568
570 def static(web, req, tmpl):
569 def static(web, req, tmpl):
571 fname = req.form['file'][0]
570 fname = req.form['file'][0]
572 # a repo owner may set web.static in .hg/hgrc to get any file
571 # a repo owner may set web.static in .hg/hgrc to get any file
573 # readable by the user running the CGI script
572 # readable by the user running the CGI script
574 static = web.config("web", "static",
573 static = web.config("web", "static",
575 os.path.join(web.templatepath, "static"),
574 os.path.join(web.templatepath, "static"),
576 untrusted=False)
575 untrusted=False)
577 return [staticfile(static, fname, req)]
576 return [staticfile(static, fname, req)]
578
577
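# The web.static knob read above lives in the repository's .hg/hgrc; the
# path below is an invented example, not a shipped default:
#
#   [web]
#   static = /srv/hg/custom-static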
579 def graph(web, req, tmpl):
578 def graph(web, req, tmpl):
580 rev = webutil.changectx(web.repo, req).rev()
579 rev = webutil.changectx(web.repo, req).rev()
581 bg_height = 39
580 bg_height = 39
582
581
583 max_rev = web.repo.changelog.count() - 1
582 max_rev = web.repo.changelog.count() - 1
584 revcount = min(max_rev, int(req.form.get('revcount', [25])[0]))
583 revcount = min(max_rev, int(req.form.get('revcount', [25])[0]))
585 revnode = web.repo.changelog.node(rev)
584 revnode = web.repo.changelog.node(rev)
586 revnode_hex = hex(revnode)
585 revnode_hex = hex(revnode)
587 uprev = min(max_rev, rev + revcount)
586 uprev = min(max_rev, rev + revcount)
588 downrev = max(0, rev - revcount)
587 downrev = max(0, rev - revcount)
589 lessrev = max(0, rev - revcount / 2)
588 lessrev = max(0, rev - revcount / 2)
590
589
591 maxchanges = web.maxshortchanges or web.maxchanges
590 maxchanges = web.maxshortchanges or web.maxchanges
592 count = web.repo.changelog.count()
591 count = web.repo.changelog.count()
593 changenav = webutil.revnavgen(rev, maxchanges, count, web.repo.changectx)
592 changenav = webutil.revnavgen(rev, maxchanges, count, web.repo.changectx)
594
593
595 tree = list(graphmod.graph(web.repo, rev, rev - revcount))
594 tree = list(graphmod.graph(web.repo, rev, rev - revcount))
596 canvasheight = (len(tree) + 1) * bg_height - 27;
595 canvasheight = (len(tree) + 1) * bg_height - 27;
597
596
598 data = []
597 data = []
599 for i, (ctx, vtx, edges) in enumerate(tree):
598 for i, (ctx, vtx, edges) in enumerate(tree):
600 node = short(ctx.node())
599 node = short(ctx.node())
601 age = templatefilters.age(ctx.date())
600 age = templatefilters.age(ctx.date())
602 desc = templatefilters.firstline(ctx.description())
601 desc = templatefilters.firstline(ctx.description())
603 desc = cgi.escape(desc)
602 desc = cgi.escape(desc)
604 user = cgi.escape(templatefilters.person(ctx.user()))
603 user = cgi.escape(templatefilters.person(ctx.user()))
605 branch = ctx.branch()
604 branch = ctx.branch()
606 branch = branch, web.repo.branchtags().get(branch) == ctx.node()
605 branch = branch, web.repo.branchtags().get(branch) == ctx.node()
607 data.append((node, vtx, edges, desc, user, age, branch, ctx.tags()))
606 data.append((node, vtx, edges, desc, user, age, branch, ctx.tags()))
608
607
609 return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
608 return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
610 lessrev=lessrev, revcountmore=revcount and 2 * revcount or 1,
609 lessrev=lessrev, revcountmore=revcount and 2 * revcount or 1,
611 revcountless=revcount / 2, downrev=downrev,
610 revcountless=revcount / 2, downrev=downrev,
612 canvasheight=canvasheight, bg_height=bg_height,
611 canvasheight=canvasheight, bg_height=bg_height,
613 jsdata=data, node=revnode_hex, changenav=changenav)
612 jsdata=data, node=revnode_hex, changenav=changenav)
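The hunks above replace each web.repo.changectx(...) call in webcommands.py with the subscript form this changeset introduces. A minimal sketch of the equivalence, assuming repo is a localrepository and rev is anything changectx already accepted (a revision number, hex node, or tag name):

    ctx = repo.changectx(rev)   # old spelling, removed throughout this diff
    ctx = repo[rev]             # new spelling, added in its place
    wctx = repo[None]           # working-directory context, handled by the new __getitem__ further down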
@@ -1,143 +1,141 b''
1 # hgweb/webutil.py - utility library for the web interface.
1 # hgweb/webutil.py - utility library for the web interface.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os
9 import os
10 from mercurial.node import hex, nullid
10 from mercurial.node import hex, nullid
11 from mercurial.repo import RepoError
11 from mercurial.repo import RepoError
12 from mercurial import util
12 from mercurial import util
13
13
14 def up(p):
14 def up(p):
15 if p[0] != "/":
15 if p[0] != "/":
16 p = "/" + p
16 p = "/" + p
17 if p[-1] == "/":
17 if p[-1] == "/":
18 p = p[:-1]
18 p = p[:-1]
19 up = os.path.dirname(p)
19 up = os.path.dirname(p)
20 if up == "/":
20 if up == "/":
21 return "/"
21 return "/"
22 return up + "/"
22 return up + "/"
23
23
24 def revnavgen(pos, pagelen, limit, nodefunc):
24 def revnavgen(pos, pagelen, limit, nodefunc):
25 def seq(factor, limit=None):
25 def seq(factor, limit=None):
26 if limit:
26 if limit:
27 yield limit
27 yield limit
28 if limit >= 20 and limit <= 40:
28 if limit >= 20 and limit <= 40:
29 yield 50
29 yield 50
30 else:
30 else:
31 yield 1 * factor
31 yield 1 * factor
32 yield 3 * factor
32 yield 3 * factor
33 for f in seq(factor * 10):
33 for f in seq(factor * 10):
34 yield f
34 yield f
35
35
36 def nav(**map):
36 def nav(**map):
37 l = []
37 l = []
38 last = 0
38 last = 0
39 for f in seq(1, pagelen):
39 for f in seq(1, pagelen):
40 if f < pagelen or f <= last:
40 if f < pagelen or f <= last:
41 continue
41 continue
42 if f > limit:
42 if f > limit:
43 break
43 break
44 last = f
44 last = f
45 if pos + f < limit:
45 if pos + f < limit:
46 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
46 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
47 if pos - f >= 0:
47 if pos - f >= 0:
48 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
48 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
49
49
50 try:
50 try:
51 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
51 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
52
52
53 for label, node in l:
53 for label, node in l:
54 yield {"label": label, "node": node}
54 yield {"label": label, "node": node}
55
55
56 yield {"label": "tip", "node": "tip"}
56 yield {"label": "tip", "node": "tip"}
57 except RepoError:
57 except RepoError:
58 pass
58 pass
59
59
60 return nav
60 return nav
61
61
62 def siblings(siblings=[], hiderev=None, **args):
62 def siblings(siblings=[], hiderev=None, **args):
63 siblings = [s for s in siblings if s.node() != nullid]
63 siblings = [s for s in siblings if s.node() != nullid]
64 if len(siblings) == 1 and siblings[0].rev() == hiderev:
64 if len(siblings) == 1 and siblings[0].rev() == hiderev:
65 return
65 return
66 for s in siblings:
66 for s in siblings:
67 d = {'node': hex(s.node()), 'rev': s.rev()}
67 d = {'node': hex(s.node()), 'rev': s.rev()}
68 if hasattr(s, 'path'):
68 if hasattr(s, 'path'):
69 d['file'] = s.path()
69 d['file'] = s.path()
70 d.update(args)
70 d.update(args)
71 yield d
71 yield d
72
72
73 def renamelink(fctx):
73 def renamelink(fctx):
74 r = fctx.renamed()
74 r = fctx.renamed()
75 if r:
75 if r:
76 return [dict(file=r[0], node=hex(r[1]))]
76 return [dict(file=r[0], node=hex(r[1]))]
77 return []
77 return []
78
78
79 def nodetagsdict(repo, node):
79 def nodetagsdict(repo, node):
80 return [{"name": i} for i in repo.nodetags(node)]
80 return [{"name": i} for i in repo.nodetags(node)]
81
81
82 def nodebranchdict(repo, ctx):
82 def nodebranchdict(repo, ctx):
83 branches = []
83 branches = []
84 branch = ctx.branch()
84 branch = ctx.branch()
85 # If this is an empty repo, ctx.node() == nullid,
85 # If this is an empty repo, ctx.node() == nullid,
86 # ctx.branch() == 'default', but branchtags() is
86 # ctx.branch() == 'default', but branchtags() is
87 # an empty dict. Using dict.get avoids a traceback.
87 # an empty dict. Using dict.get avoids a traceback.
88 if repo.branchtags().get(branch) == ctx.node():
88 if repo.branchtags().get(branch) == ctx.node():
89 branches.append({"name": branch})
89 branches.append({"name": branch})
90 return branches
90 return branches
91
91
92 def nodeinbranch(repo, ctx):
92 def nodeinbranch(repo, ctx):
93 branches = []
93 branches = []
94 branch = ctx.branch()
94 branch = ctx.branch()
95 if branch != 'default' and repo.branchtags().get(branch) != ctx.node():
95 if branch != 'default' and repo.branchtags().get(branch) != ctx.node():
96 branches.append({"name": branch})
96 branches.append({"name": branch})
97 return branches
97 return branches
98
98
99 def nodebranchnodefault(ctx):
99 def nodebranchnodefault(ctx):
100 branches = []
100 branches = []
101 branch = ctx.branch()
101 branch = ctx.branch()
102 if branch != 'default':
102 if branch != 'default':
103 branches.append({"name": branch})
103 branches.append({"name": branch})
104 return branches
104 return branches
105
105
106 def showtag(repo, tmpl, t1, node=nullid, **args):
106 def showtag(repo, tmpl, t1, node=nullid, **args):
107 for t in repo.nodetags(node):
107 for t in repo.nodetags(node):
108 yield tmpl(t1, tag=t, **args)
108 yield tmpl(t1, tag=t, **args)
109
109
110 def cleanpath(repo, path):
110 def cleanpath(repo, path):
111 path = path.lstrip('/')
111 path = path.lstrip('/')
112 return util.canonpath(repo.root, '', path)
112 return util.canonpath(repo.root, '', path)
113
113
114 def changectx(repo, req):
114 def changectx(repo, req):
115 if 'node' in req.form:
115 if 'node' in req.form:
116 changeid = req.form['node'][0]
116 changeid = req.form['node'][0]
117 elif 'manifest' in req.form:
117 elif 'manifest' in req.form:
118 changeid = req.form['manifest'][0]
118 changeid = req.form['manifest'][0]
119 else:
119 else:
120 changeid = repo.changelog.count() - 1
120 changeid = repo.changelog.count() - 1
121
121
122 try:
122 try:
123 ctx = repo.changectx(changeid)
123 ctx = repo[changeid]
124 except RepoError:
124 except RepoError:
125 man = repo.manifest
125 man = repo.manifest
126 mn = man.lookup(changeid)
126 ctx = repo[man.linkrev(man.lookup(changeid))]
127 ctx = repo.changectx(man.linkrev(mn))
128
127
129 return ctx
128 return ctx
130
129
131 def filectx(repo, req):
130 def filectx(repo, req):
132 path = cleanpath(repo, req.form['file'][0])
131 path = cleanpath(repo, req.form['file'][0])
133 if 'node' in req.form:
132 if 'node' in req.form:
134 changeid = req.form['node'][0]
133 changeid = req.form['node'][0]
135 else:
134 else:
136 changeid = req.form['filenode'][0]
135 changeid = req.form['filenode'][0]
137 try:
136 try:
138 ctx = repo.changectx(changeid)
137 fctx = repo[changeid][path]
139 fctx = ctx.filectx(path)
140 except RepoError:
138 except RepoError:
141 fctx = repo.filectx(path, fileid=changeid)
139 fctx = repo.filectx(path, fileid=changeid)
142
140
143 return fctx
141 return fctx
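webutil.changectx and webutil.filectx above lean on the same subscript protocol: indexing the repository yields a changectx, and indexing that changectx with a path yields a filectx, so the old two-step lookup collapses into one chained expression. A sketch using the names from the request-handling code (changeid and path come from req.form):

    # before: two explicit calls
    ctx = repo.changectx(changeid)
    fctx = ctx.filectx(path)

    # after: one chained lookup
    fctx = repo[changeid][path]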
@@ -1,2123 +1,2127 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import lock, transaction, stat, errno, ui
12 import lock, transaction, stat, errno, ui
13 import os, revlog, time, util, extensions, hook, inspect
13 import os, revlog, time, util, extensions, hook, inspect
14 import match as match_
14 import match as match_
15
15
16 class localrepository(repo.repository):
16 class localrepository(repo.repository):
17 capabilities = util.set(('lookup', 'changegroupsubset'))
17 capabilities = util.set(('lookup', 'changegroupsubset'))
18 supported = ('revlogv1', 'store')
18 supported = ('revlogv1', 'store')
19
19
20 def __init__(self, parentui, path=None, create=0):
20 def __init__(self, parentui, path=None, create=0):
21 repo.repository.__init__(self)
21 repo.repository.__init__(self)
22 self.root = os.path.realpath(path)
22 self.root = os.path.realpath(path)
23 self.path = os.path.join(self.root, ".hg")
23 self.path = os.path.join(self.root, ".hg")
24 self.origroot = path
24 self.origroot = path
25 self.opener = util.opener(self.path)
25 self.opener = util.opener(self.path)
26 self.wopener = util.opener(self.root)
26 self.wopener = util.opener(self.root)
27
27
28 if not os.path.isdir(self.path):
28 if not os.path.isdir(self.path):
29 if create:
29 if create:
30 if not os.path.exists(path):
30 if not os.path.exists(path):
31 os.mkdir(path)
31 os.mkdir(path)
32 os.mkdir(self.path)
32 os.mkdir(self.path)
33 requirements = ["revlogv1"]
33 requirements = ["revlogv1"]
34 if parentui.configbool('format', 'usestore', True):
34 if parentui.configbool('format', 'usestore', True):
35 os.mkdir(os.path.join(self.path, "store"))
35 os.mkdir(os.path.join(self.path, "store"))
36 requirements.append("store")
36 requirements.append("store")
37 # create an invalid changelog
37 # create an invalid changelog
38 self.opener("00changelog.i", "a").write(
38 self.opener("00changelog.i", "a").write(
39 '\0\0\0\2' # represents revlogv2
39 '\0\0\0\2' # represents revlogv2
40 ' dummy changelog to prevent using the old repo layout'
40 ' dummy changelog to prevent using the old repo layout'
41 )
41 )
42 reqfile = self.opener("requires", "w")
42 reqfile = self.opener("requires", "w")
43 for r in requirements:
43 for r in requirements:
44 reqfile.write("%s\n" % r)
44 reqfile.write("%s\n" % r)
45 reqfile.close()
45 reqfile.close()
46 else:
46 else:
47 raise repo.RepoError(_("repository %s not found") % path)
47 raise repo.RepoError(_("repository %s not found") % path)
48 elif create:
48 elif create:
49 raise repo.RepoError(_("repository %s already exists") % path)
49 raise repo.RepoError(_("repository %s already exists") % path)
50 else:
50 else:
51 # find requirements
51 # find requirements
52 try:
52 try:
53 requirements = self.opener("requires").read().splitlines()
53 requirements = self.opener("requires").read().splitlines()
54 except IOError, inst:
54 except IOError, inst:
55 if inst.errno != errno.ENOENT:
55 if inst.errno != errno.ENOENT:
56 raise
56 raise
57 requirements = []
57 requirements = []
58 # check them
58 # check them
59 for r in requirements:
59 for r in requirements:
60 if r not in self.supported:
60 if r not in self.supported:
61 raise repo.RepoError(_("requirement '%s' not supported") % r)
61 raise repo.RepoError(_("requirement '%s' not supported") % r)
62
62
63 # setup store
63 # setup store
64 if "store" in requirements:
64 if "store" in requirements:
65 self.encodefn = util.encodefilename
65 self.encodefn = util.encodefilename
66 self.decodefn = util.decodefilename
66 self.decodefn = util.decodefilename
67 self.spath = os.path.join(self.path, "store")
67 self.spath = os.path.join(self.path, "store")
68 else:
68 else:
69 self.encodefn = lambda x: x
69 self.encodefn = lambda x: x
70 self.decodefn = lambda x: x
70 self.decodefn = lambda x: x
71 self.spath = self.path
71 self.spath = self.path
72
72
73 try:
73 try:
74 # files in .hg/ will be created using this mode
74 # files in .hg/ will be created using this mode
75 mode = os.stat(self.spath).st_mode
75 mode = os.stat(self.spath).st_mode
76 # avoid some useless chmods
76 # avoid some useless chmods
77 if (0777 & ~util._umask) == (0777 & mode):
77 if (0777 & ~util._umask) == (0777 & mode):
78 mode = None
78 mode = None
79 except OSError:
79 except OSError:
80 mode = None
80 mode = None
81
81
82 self._createmode = mode
82 self._createmode = mode
83 self.opener.createmode = mode
83 self.opener.createmode = mode
84 sopener = util.opener(self.spath)
84 sopener = util.opener(self.spath)
85 sopener.createmode = mode
85 sopener.createmode = mode
86 self.sopener = util.encodedopener(sopener, self.encodefn)
86 self.sopener = util.encodedopener(sopener, self.encodefn)
87
87
88 self.ui = ui.ui(parentui=parentui)
88 self.ui = ui.ui(parentui=parentui)
89 try:
89 try:
90 self.ui.readconfig(self.join("hgrc"), self.root)
90 self.ui.readconfig(self.join("hgrc"), self.root)
91 extensions.loadall(self.ui)
91 extensions.loadall(self.ui)
92 except IOError:
92 except IOError:
93 pass
93 pass
94
94
95 self.tagscache = None
95 self.tagscache = None
96 self._tagstypecache = None
96 self._tagstypecache = None
97 self.branchcache = None
97 self.branchcache = None
98 self._ubranchcache = None # UTF-8 version of branchcache
98 self._ubranchcache = None # UTF-8 version of branchcache
99 self._branchcachetip = None
99 self._branchcachetip = None
100 self.nodetagscache = None
100 self.nodetagscache = None
101 self.filterpats = {}
101 self.filterpats = {}
102 self._datafilters = {}
102 self._datafilters = {}
103 self._transref = self._lockref = self._wlockref = None
103 self._transref = self._lockref = self._wlockref = None
104
104
105 def __getattr__(self, name):
105 def __getattr__(self, name):
106 if name == 'changelog':
106 if name == 'changelog':
107 self.changelog = changelog.changelog(self.sopener)
107 self.changelog = changelog.changelog(self.sopener)
108 self.sopener.defversion = self.changelog.version
108 self.sopener.defversion = self.changelog.version
109 return self.changelog
109 return self.changelog
110 if name == 'manifest':
110 if name == 'manifest':
111 self.changelog
111 self.changelog
112 self.manifest = manifest.manifest(self.sopener)
112 self.manifest = manifest.manifest(self.sopener)
113 return self.manifest
113 return self.manifest
114 if name == 'dirstate':
114 if name == 'dirstate':
115 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
115 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
116 return self.dirstate
116 return self.dirstate
117 else:
117 else:
118 raise AttributeError, name
118 raise AttributeError, name
119
119
120 def __getitem__(self, changeid):
121 if changeid == None:
122 return context.workingctx(self)
123 return context.changectx(self, changeid)
124
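# The method above is the core of this changeset: subscripting a
# localrepository now returns a changectx, or a workingctx when the key is
# None, which is what lets the hgweb code earlier in this diff write
# repo[changeid] instead of repo.changectx(changeid). Illustrative lookups
# only; the keys are whatever changectx already understood:
#
#   repo[0]         # changectx for revision 0
#   repo['tip']     # changectx for the tip
#   repo[None]      # workingctx for the working directory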
120 def url(self):
125 def url(self):
121 return 'file:' + self.root
126 return 'file:' + self.root
122
127
123 def hook(self, name, throw=False, **args):
128 def hook(self, name, throw=False, **args):
124 return hook.hook(self.ui, self, name, throw, **args)
129 return hook.hook(self.ui, self, name, throw, **args)
125
130
126 tag_disallowed = ':\r\n'
131 tag_disallowed = ':\r\n'
127
132
128 def _tag(self, names, node, message, local, user, date, parent=None,
133 def _tag(self, names, node, message, local, user, date, parent=None,
129 extra={}):
134 extra={}):
130 use_dirstate = parent is None
135 use_dirstate = parent is None
131
136
132 if isinstance(names, str):
137 if isinstance(names, str):
133 allchars = names
138 allchars = names
134 names = (names,)
139 names = (names,)
135 else:
140 else:
136 allchars = ''.join(names)
141 allchars = ''.join(names)
137 for c in self.tag_disallowed:
142 for c in self.tag_disallowed:
138 if c in allchars:
143 if c in allchars:
139 raise util.Abort(_('%r cannot be used in a tag name') % c)
144 raise util.Abort(_('%r cannot be used in a tag name') % c)
140
145
141 for name in names:
146 for name in names:
142 self.hook('pretag', throw=True, node=hex(node), tag=name,
147 self.hook('pretag', throw=True, node=hex(node), tag=name,
143 local=local)
148 local=local)
144
149
145 def writetags(fp, names, munge, prevtags):
150 def writetags(fp, names, munge, prevtags):
146 fp.seek(0, 2)
151 fp.seek(0, 2)
147 if prevtags and prevtags[-1] != '\n':
152 if prevtags and prevtags[-1] != '\n':
148 fp.write('\n')
153 fp.write('\n')
149 for name in names:
154 for name in names:
150 m = munge and munge(name) or name
155 m = munge and munge(name) or name
151 if self._tagstypecache and name in self._tagstypecache:
156 if self._tagstypecache and name in self._tagstypecache:
152 old = self.tagscache.get(name, nullid)
157 old = self.tagscache.get(name, nullid)
153 fp.write('%s %s\n' % (hex(old), m))
158 fp.write('%s %s\n' % (hex(old), m))
154 fp.write('%s %s\n' % (hex(node), m))
159 fp.write('%s %s\n' % (hex(node), m))
155 fp.close()
160 fp.close()
156
161
157 prevtags = ''
162 prevtags = ''
158 if local:
163 if local:
159 try:
164 try:
160 fp = self.opener('localtags', 'r+')
165 fp = self.opener('localtags', 'r+')
161 except IOError, err:
166 except IOError, err:
162 fp = self.opener('localtags', 'a')
167 fp = self.opener('localtags', 'a')
163 else:
168 else:
164 prevtags = fp.read()
169 prevtags = fp.read()
165
170
166 # local tags are stored in the current charset
171 # local tags are stored in the current charset
167 writetags(fp, names, None, prevtags)
172 writetags(fp, names, None, prevtags)
168 for name in names:
173 for name in names:
169 self.hook('tag', node=hex(node), tag=name, local=local)
174 self.hook('tag', node=hex(node), tag=name, local=local)
170 return
175 return
171
176
172 if use_dirstate:
177 if use_dirstate:
173 try:
178 try:
174 fp = self.wfile('.hgtags', 'rb+')
179 fp = self.wfile('.hgtags', 'rb+')
175 except IOError, err:
180 except IOError, err:
176 fp = self.wfile('.hgtags', 'ab')
181 fp = self.wfile('.hgtags', 'ab')
177 else:
182 else:
178 prevtags = fp.read()
183 prevtags = fp.read()
179 else:
184 else:
180 try:
185 try:
181 prevtags = self.filectx('.hgtags', parent).data()
186 prevtags = self.filectx('.hgtags', parent).data()
182 except revlog.LookupError:
187 except revlog.LookupError:
183 pass
188 pass
184 fp = self.wfile('.hgtags', 'wb')
189 fp = self.wfile('.hgtags', 'wb')
185 if prevtags:
190 if prevtags:
186 fp.write(prevtags)
191 fp.write(prevtags)
187
192
188 # committed tags are stored in UTF-8
193 # committed tags are stored in UTF-8
189 writetags(fp, names, util.fromlocal, prevtags)
194 writetags(fp, names, util.fromlocal, prevtags)
190
195
191 if use_dirstate and '.hgtags' not in self.dirstate:
196 if use_dirstate and '.hgtags' not in self.dirstate:
192 self.add(['.hgtags'])
197 self.add(['.hgtags'])
193
198
194 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
199 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
195 extra=extra)
200 extra=extra)
196
201
197 for name in names:
202 for name in names:
198 self.hook('tag', node=hex(node), tag=name, local=local)
203 self.hook('tag', node=hex(node), tag=name, local=local)
199
204
200 return tagnode
205 return tagnode
201
206
202 def tag(self, names, node, message, local, user, date):
207 def tag(self, names, node, message, local, user, date):
203 '''tag a revision with one or more symbolic names.
208 '''tag a revision with one or more symbolic names.
204
209
205 names is a list of strings or, when adding a single tag, names may be a
210 names is a list of strings or, when adding a single tag, names may be a
206 string.
211 string.
207
212
208 if local is True, the tags are stored in a per-repository file.
213 if local is True, the tags are stored in a per-repository file.
209 otherwise, they are stored in the .hgtags file, and a new
214 otherwise, they are stored in the .hgtags file, and a new
210 changeset is committed with the change.
215 changeset is committed with the change.
211
216
212 keyword arguments:
217 keyword arguments:
213
218
214 local: whether to store tags in non-version-controlled file
219 local: whether to store tags in non-version-controlled file
215 (default False)
220 (default False)
216
221
217 message: commit message to use if committing
222 message: commit message to use if committing
218
223
219 user: name of user to use if committing
224 user: name of user to use if committing
220
225
221 date: date tuple to use if committing'''
226 date: date tuple to use if committing'''
222
227
223 for x in self.status()[:5]:
228 for x in self.status()[:5]:
224 if '.hgtags' in x:
229 if '.hgtags' in x:
225 raise util.Abort(_('working copy of .hgtags is changed '
230 raise util.Abort(_('working copy of .hgtags is changed '
226 '(please commit .hgtags manually)'))
231 '(please commit .hgtags manually)'))
227
232
228 self._tag(names, node, message, local, user, date)
233 self._tag(names, node, message, local, user, date)
229
234
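# A hypothetical call matching the docstring above; the tag name, message and
# user are invented, and date=None leaves the timestamp to Mercurial's
# default handling:
#
#   repo.tag('v1.0', repo['tip'].node(), 'Added tag v1.0', local=False,
#            user='committer <c@example.com>', date=None)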
230 def tags(self):
235 def tags(self):
231 '''return a mapping of tag to node'''
236 '''return a mapping of tag to node'''
232 if self.tagscache:
237 if self.tagscache:
233 return self.tagscache
238 return self.tagscache
234
239
235 globaltags = {}
240 globaltags = {}
236 tagtypes = {}
241 tagtypes = {}
237
242
238 def readtags(lines, fn, tagtype):
243 def readtags(lines, fn, tagtype):
239 filetags = {}
244 filetags = {}
240 count = 0
245 count = 0
241
246
242 def warn(msg):
247 def warn(msg):
243 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
248 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
244
249
245 for l in lines:
250 for l in lines:
246 count += 1
251 count += 1
247 if not l:
252 if not l:
248 continue
253 continue
249 s = l.split(" ", 1)
254 s = l.split(" ", 1)
250 if len(s) != 2:
255 if len(s) != 2:
251 warn(_("cannot parse entry"))
256 warn(_("cannot parse entry"))
252 continue
257 continue
253 node, key = s
258 node, key = s
254 key = util.tolocal(key.strip()) # stored in UTF-8
259 key = util.tolocal(key.strip()) # stored in UTF-8
255 try:
260 try:
256 bin_n = bin(node)
261 bin_n = bin(node)
257 except TypeError:
262 except TypeError:
258 warn(_("node '%s' is not well formed") % node)
263 warn(_("node '%s' is not well formed") % node)
259 continue
264 continue
260 if bin_n not in self.changelog.nodemap:
265 if bin_n not in self.changelog.nodemap:
261 warn(_("tag '%s' refers to unknown node") % key)
266 warn(_("tag '%s' refers to unknown node") % key)
262 continue
267 continue
263
268
264 h = []
269 h = []
265 if key in filetags:
270 if key in filetags:
266 n, h = filetags[key]
271 n, h = filetags[key]
267 h.append(n)
272 h.append(n)
268 filetags[key] = (bin_n, h)
273 filetags[key] = (bin_n, h)
269
274
270 for k, nh in filetags.items():
275 for k, nh in filetags.items():
271 if k not in globaltags:
276 if k not in globaltags:
272 globaltags[k] = nh
277 globaltags[k] = nh
273 tagtypes[k] = tagtype
278 tagtypes[k] = tagtype
274 continue
279 continue
275
280
276 # we prefer the global tag if:
281 # we prefer the global tag if:
277 # it supersedes us OR
282 # it supersedes us OR
278 # mutual supersedes and it has a higher rank
283 # mutual supersedes and it has a higher rank
279 # otherwise we win because we're tip-most
284 # otherwise we win because we're tip-most
280 an, ah = nh
285 an, ah = nh
281 bn, bh = globaltags[k]
286 bn, bh = globaltags[k]
282 if (bn != an and an in bh and
287 if (bn != an and an in bh and
283 (bn not in ah or len(bh) > len(ah))):
288 (bn not in ah or len(bh) > len(ah))):
284 an = bn
289 an = bn
285 ah.extend([n for n in bh if n not in ah])
290 ah.extend([n for n in bh if n not in ah])
286 globaltags[k] = an, ah
291 globaltags[k] = an, ah
287 tagtypes[k] = tagtype
292 tagtypes[k] = tagtype
288
293
289 # read the tags file from each head, ending with the tip
294 # read the tags file from each head, ending with the tip
290 f = None
295 f = None
291 for rev, node, fnode in self._hgtagsnodes():
296 for rev, node, fnode in self._hgtagsnodes():
292 f = (f and f.filectx(fnode) or
297 f = (f and f.filectx(fnode) or
293 self.filectx('.hgtags', fileid=fnode))
298 self.filectx('.hgtags', fileid=fnode))
294 readtags(f.data().splitlines(), f, "global")
299 readtags(f.data().splitlines(), f, "global")
295
300
296 try:
301 try:
297 data = util.fromlocal(self.opener("localtags").read())
302 data = util.fromlocal(self.opener("localtags").read())
298 # localtags are stored in the local character set
303 # localtags are stored in the local character set
299 # while the internal tag table is stored in UTF-8
304 # while the internal tag table is stored in UTF-8
300 readtags(data.splitlines(), "localtags", "local")
305 readtags(data.splitlines(), "localtags", "local")
301 except IOError:
306 except IOError:
302 pass
307 pass
303
308
304 self.tagscache = {}
309 self.tagscache = {}
305 self._tagstypecache = {}
310 self._tagstypecache = {}
306 for k,nh in globaltags.items():
311 for k,nh in globaltags.items():
307 n = nh[0]
312 n = nh[0]
308 if n != nullid:
313 if n != nullid:
309 self.tagscache[k] = n
314 self.tagscache[k] = n
310 self._tagstypecache[k] = tagtypes[k]
315 self._tagstypecache[k] = tagtypes[k]
311 self.tagscache['tip'] = self.changelog.tip()
316 self.tagscache['tip'] = self.changelog.tip()
312 return self.tagscache
317 return self.tagscache
313
318
314 def tagtype(self, tagname):
319 def tagtype(self, tagname):
315 '''
320 '''
316 return the type of the given tag. result can be:
321 return the type of the given tag. result can be:
317
322
318 'local' : a local tag
323 'local' : a local tag
319 'global' : a global tag
324 'global' : a global tag
320 None : tag does not exist
325 None : tag does not exist
321 '''
326 '''
322
327
323 self.tags()
328 self.tags()
324
329
325 return self._tagstypecache.get(tagname)
330 return self._tagstypecache.get(tagname)
326
331
327 def _hgtagsnodes(self):
332 def _hgtagsnodes(self):
328 heads = self.heads()
333 heads = self.heads()
329 heads.reverse()
334 heads.reverse()
330 last = {}
335 last = {}
331 ret = []
336 ret = []
332 for node in heads:
337 for node in heads:
333 c = self.changectx(node)
338 c = self[node]
334 rev = c.rev()
339 rev = c.rev()
335 try:
340 try:
336 fnode = c.filenode('.hgtags')
341 fnode = c.filenode('.hgtags')
337 except revlog.LookupError:
342 except revlog.LookupError:
338 continue
343 continue
339 ret.append((rev, node, fnode))
344 ret.append((rev, node, fnode))
340 if fnode in last:
345 if fnode in last:
341 ret[last[fnode]] = None
346 ret[last[fnode]] = None
342 last[fnode] = len(ret) - 1
347 last[fnode] = len(ret) - 1
343 return [item for item in ret if item]
348 return [item for item in ret if item]
344
349
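The None-marking trick in _hgtagsnodes above keeps only the last occurrence of each .hgtags filenode while preserving the walk order. A self-contained illustration of just that idiom (not part of the changeset):

def last_occurrence(items, key):
    last = {}        # key -> index of its most recent occurrence in ret
    ret = []
    for item in items:
        ret.append(item)
        k = key(item)
        if k in last:
            ret[last[k]] = None   # blank out the earlier duplicate
        last[k] = len(ret) - 1
    return [item for item in ret if item]

heads = [(1, 'n1', 'fA'), (3, 'n3', 'fA'), (5, 'n5', 'fB')]
print(last_occurrence(heads, key=lambda t: t[2]))
# [(3, 'n3', 'fA'), (5, 'n5', 'fB')]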
345 def tagslist(self):
350 def tagslist(self):
346 '''return a list of tags ordered by revision'''
351 '''return a list of tags ordered by revision'''
347 l = []
352 l = []
348 for t, n in self.tags().items():
353 for t, n in self.tags().items():
349 try:
354 try:
350 r = self.changelog.rev(n)
355 r = self.changelog.rev(n)
351 except:
356 except:
352 r = -2 # sort to the beginning of the list if unknown
357 r = -2 # sort to the beginning of the list if unknown
353 l.append((r, t, n))
358 l.append((r, t, n))
354 l.sort()
359 l.sort()
355 return [(t, n) for r, t, n in l]
360 return [(t, n) for r, t, n in l]
356
361
357 def nodetags(self, node):
362 def nodetags(self, node):
358 '''return the tags associated with a node'''
363 '''return the tags associated with a node'''
359 if not self.nodetagscache:
364 if not self.nodetagscache:
360 self.nodetagscache = {}
365 self.nodetagscache = {}
361 for t, n in self.tags().items():
366 for t, n in self.tags().items():
362 self.nodetagscache.setdefault(n, []).append(t)
367 self.nodetagscache.setdefault(n, []).append(t)
363 return self.nodetagscache.get(node, [])
368 return self.nodetagscache.get(node, [])
364
369
365 def _branchtags(self, partial, lrev):
370 def _branchtags(self, partial, lrev):
366 tiprev = self.changelog.count() - 1
371 tiprev = self.changelog.count() - 1
367 if lrev != tiprev:
372 if lrev != tiprev:
368 self._updatebranchcache(partial, lrev+1, tiprev+1)
373 self._updatebranchcache(partial, lrev+1, tiprev+1)
369 self._writebranchcache(partial, self.changelog.tip(), tiprev)
374 self._writebranchcache(partial, self.changelog.tip(), tiprev)
370
375
371 return partial
376 return partial
372
377
373 def branchtags(self):
378 def branchtags(self):
374 tip = self.changelog.tip()
379 tip = self.changelog.tip()
375 if self.branchcache is not None and self._branchcachetip == tip:
380 if self.branchcache is not None and self._branchcachetip == tip:
376 return self.branchcache
381 return self.branchcache
377
382
378 oldtip = self._branchcachetip
383 oldtip = self._branchcachetip
379 self._branchcachetip = tip
384 self._branchcachetip = tip
380 if self.branchcache is None:
385 if self.branchcache is None:
381 self.branchcache = {} # avoid recursion in changectx
386 self.branchcache = {} # avoid recursion in changectx
382 else:
387 else:
383 self.branchcache.clear() # keep using the same dict
388 self.branchcache.clear() # keep using the same dict
384 if oldtip is None or oldtip not in self.changelog.nodemap:
389 if oldtip is None or oldtip not in self.changelog.nodemap:
385 partial, last, lrev = self._readbranchcache()
390 partial, last, lrev = self._readbranchcache()
386 else:
391 else:
387 lrev = self.changelog.rev(oldtip)
392 lrev = self.changelog.rev(oldtip)
388 partial = self._ubranchcache
393 partial = self._ubranchcache
389
394
390 self._branchtags(partial, lrev)
395 self._branchtags(partial, lrev)
391
396
392 # the branch cache is stored on disk as UTF-8, but in the local
397 # the branch cache is stored on disk as UTF-8, but in the local
393 # charset internally
398 # charset internally
394 for k, v in partial.items():
399 for k, v in partial.items():
395 self.branchcache[util.tolocal(k)] = v
400 self.branchcache[util.tolocal(k)] = v
396 self._ubranchcache = partial
401 self._ubranchcache = partial
397 return self.branchcache
402 return self.branchcache
398
403
399 def _readbranchcache(self):
404 def _readbranchcache(self):
400 partial = {}
405 partial = {}
401 try:
406 try:
402 f = self.opener("branch.cache")
407 f = self.opener("branch.cache")
403 lines = f.read().split('\n')
408 lines = f.read().split('\n')
404 f.close()
409 f.close()
405 except (IOError, OSError):
410 except (IOError, OSError):
406 return {}, nullid, nullrev
411 return {}, nullid, nullrev
407
412
408 try:
413 try:
409 last, lrev = lines.pop(0).split(" ", 1)
414 last, lrev = lines.pop(0).split(" ", 1)
410 last, lrev = bin(last), int(lrev)
415 last, lrev = bin(last), int(lrev)
411 if not (lrev < self.changelog.count() and
416 if not (lrev < self.changelog.count() and
412 self.changelog.node(lrev) == last): # sanity check
417 self.changelog.node(lrev) == last): # sanity check
413 # invalidate the cache
418 # invalidate the cache
414 raise ValueError('invalidating branch cache (tip differs)')
419 raise ValueError('invalidating branch cache (tip differs)')
415 for l in lines:
420 for l in lines:
416 if not l: continue
421 if not l: continue
417 node, label = l.split(" ", 1)
422 node, label = l.split(" ", 1)
418 partial[label.strip()] = bin(node)
423 partial[label.strip()] = bin(node)
419 except (KeyboardInterrupt, util.SignalInterrupt):
424 except (KeyboardInterrupt, util.SignalInterrupt):
420 raise
425 raise
421 except Exception, inst:
426 except Exception, inst:
422 if self.ui.debugflag:
427 if self.ui.debugflag:
423 self.ui.warn(str(inst), '\n')
428 self.ui.warn(str(inst), '\n')
424 partial, last, lrev = {}, nullid, nullrev
429 partial, last, lrev = {}, nullid, nullrev
425 return partial, last, lrev
430 return partial, last, lrev
426
431
427 def _writebranchcache(self, branches, tip, tiprev):
432 def _writebranchcache(self, branches, tip, tiprev):
428 try:
433 try:
429 f = self.opener("branch.cache", "w", atomictemp=True)
434 f = self.opener("branch.cache", "w", atomictemp=True)
430 f.write("%s %s\n" % (hex(tip), tiprev))
435 f.write("%s %s\n" % (hex(tip), tiprev))
431 for label, node in branches.iteritems():
436 for label, node in branches.iteritems():
432 f.write("%s %s\n" % (hex(node), label))
437 f.write("%s %s\n" % (hex(node), label))
433 f.rename()
438 f.rename()
434 except (IOError, OSError):
439 except (IOError, OSError):
435 pass
440 pass
436
441
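The two methods above fix the on-disk layout of branch.cache: a header line holding the tip hash and tip revision, then one "node label" line per branch. A minimal standalone round-trip of that text format, using hex strings directly instead of binary nodes (illustration only):

def write_branch_cache(tip, tiprev, branches):
    lines = ["%s %s" % (tip, tiprev)]
    for label, node in sorted(branches.items()):
        lines.append("%s %s" % (node, label))
    return "\n".join(lines) + "\n"

def read_branch_cache(data):
    lines = data.split("\n")
    last, lrev = lines.pop(0).split(" ", 1)   # header: tip hash and revision
    partial = {}
    for l in lines:
        if not l:
            continue
        node, label = l.split(" ", 1)
        partial[label.strip()] = node
    return partial, last, int(lrev)

text = write_branch_cache("beef" * 10, 42, {"default": "cafe" * 10})
print(read_branch_cache(text))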
437 def _updatebranchcache(self, partial, start, end):
442 def _updatebranchcache(self, partial, start, end):
438 for r in xrange(start, end):
443 for r in xrange(start, end):
439 c = self.changectx(r)
444 c = self[r]
440 b = c.branch()
445 b = c.branch()
441 partial[b] = c.node()
446 partial[b] = c.node()
442
447
443 def lookup(self, key):
448 def lookup(self, key):
444 if key == '.':
449 if key == '.':
445 return self.dirstate.parents()[0]
450 return self.dirstate.parents()[0]
446 elif key == 'null':
451 elif key == 'null':
447 return nullid
452 return nullid
448 n = self.changelog._match(key)
453 n = self.changelog._match(key)
449 if n:
454 if n:
450 return n
455 return n
451 if key in self.tags():
456 if key in self.tags():
452 return self.tags()[key]
457 return self.tags()[key]
453 if key in self.branchtags():
458 if key in self.branchtags():
454 return self.branchtags()[key]
459 return self.branchtags()[key]
455 n = self.changelog._partialmatch(key)
460 n = self.changelog._partialmatch(key)
456 if n:
461 if n:
457 return n
462 return n
458 try:
463 try:
459 if len(key) == 20:
464 if len(key) == 20:
460 key = hex(key)
465 key = hex(key)
461 except:
466 except:
462 pass
467 pass
463 raise repo.RepoError(_("unknown revision '%s'") % key)
468 raise repo.RepoError(_("unknown revision '%s'") % key)
464
469
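lookup() above resolves a key by falling through a fixed sequence of namespaces: the special names '.' and 'null', an exact changelog match, tags, branch names, and finally a partial hash match. A toy version of that fall-through shape, with purely illustrative resolver names rather than Mercurial's API:

def resolve(key, resolvers):
    # try each resolver in order and return the first hit
    for name, fn in resolvers:
        n = fn(key)
        if n is not None:
            return name, n
    raise KeyError("unknown revision %r" % key)

tags = {"tip": "abc123"}
branches = {"default": "def456"}
resolvers = [
    ("special", lambda k: "WORKING-PARENT" if k == "." else None),
    ("tag",     tags.get),
    ("branch",  branches.get),
]
print(resolve("default", resolvers))   # ('branch', 'def456')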
465 def local(self):
470 def local(self):
466 return True
471 return True
467
472
468 def join(self, f):
473 def join(self, f):
469 return os.path.join(self.path, f)
474 return os.path.join(self.path, f)
470
475
471 def sjoin(self, f):
476 def sjoin(self, f):
472 f = self.encodefn(f)
477 f = self.encodefn(f)
473 return os.path.join(self.spath, f)
478 return os.path.join(self.spath, f)
474
479
475 def wjoin(self, f):
480 def wjoin(self, f):
476 return os.path.join(self.root, f)
481 return os.path.join(self.root, f)
477
482
478 def rjoin(self, f):
483 def rjoin(self, f):
479 return os.path.join(self.root, util.pconvert(f))
484 return os.path.join(self.root, util.pconvert(f))
480
485
481 def file(self, f):
486 def file(self, f):
482 if f[0] == '/':
487 if f[0] == '/':
483 f = f[1:]
488 f = f[1:]
484 return filelog.filelog(self.sopener, f)
489 return filelog.filelog(self.sopener, f)
485
490
486 def changectx(self, changeid):
491 def changectx(self, changeid):
487 if changeid == None:
492 return self[changeid]
488 return context.workingctx(self)
489 return context.changectx(self, changeid)
490
493
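The rewritten changectx() above delegates everything to self[changeid]. The __getitem__ it relies on is outside this hunk, so the following is only a guess at the dispatch it performs, modelled on the deleted lines (None selects the working-directory context); the class and return values are stand-ins:

class RepoSketch(object):
    def __getitem__(self, changeid):
        # stand-ins for context.workingctx / context.changectx
        if changeid is None:
            return "workingctx(repo)"
        return "changectx(repo, %r)" % (changeid,)

r = RepoSketch()
print(r[None])      # workingctx(repo)
print(r["tip"])     # changectx(repo, 'tip')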
491 def parents(self, changeid=None):
494 def parents(self, changeid=None):
492 '''get list of changectxs for parents of changeid'''
495 '''get list of changectxs for parents of changeid'''
493 return self.changectx(changeid).parents()
496 return self[changeid].parents()
494
497
495 def filectx(self, path, changeid=None, fileid=None):
498 def filectx(self, path, changeid=None, fileid=None):
496 """changeid can be a changeset revision, node, or tag.
499 """changeid can be a changeset revision, node, or tag.
497 fileid can be a file revision or node."""
500 fileid can be a file revision or node."""
498 return context.filectx(self, path, changeid, fileid)
501 return context.filectx(self, path, changeid, fileid)
499
502
500 def getcwd(self):
503 def getcwd(self):
501 return self.dirstate.getcwd()
504 return self.dirstate.getcwd()
502
505
503 def pathto(self, f, cwd=None):
506 def pathto(self, f, cwd=None):
504 return self.dirstate.pathto(f, cwd)
507 return self.dirstate.pathto(f, cwd)
505
508
506 def wfile(self, f, mode='r'):
509 def wfile(self, f, mode='r'):
507 return self.wopener(f, mode)
510 return self.wopener(f, mode)
508
511
509 def _link(self, f):
512 def _link(self, f):
510 return os.path.islink(self.wjoin(f))
513 return os.path.islink(self.wjoin(f))
511
514
512 def _filter(self, filter, filename, data):
515 def _filter(self, filter, filename, data):
513 if filter not in self.filterpats:
516 if filter not in self.filterpats:
514 l = []
517 l = []
515 for pat, cmd in self.ui.configitems(filter):
518 for pat, cmd in self.ui.configitems(filter):
516 mf = util.matcher(self.root, "", [pat], [], [])[1]
519 mf = util.matcher(self.root, "", [pat], [], [])[1]
517 fn = None
520 fn = None
518 params = cmd
521 params = cmd
519 for name, filterfn in self._datafilters.iteritems():
522 for name, filterfn in self._datafilters.iteritems():
520 if cmd.startswith(name):
523 if cmd.startswith(name):
521 fn = filterfn
524 fn = filterfn
522 params = cmd[len(name):].lstrip()
525 params = cmd[len(name):].lstrip()
523 break
526 break
524 if not fn:
527 if not fn:
525 fn = lambda s, c, **kwargs: util.filter(s, c)
528 fn = lambda s, c, **kwargs: util.filter(s, c)
526 # Wrap old filters not supporting keyword arguments
529 # Wrap old filters not supporting keyword arguments
527 if not inspect.getargspec(fn)[2]:
530 if not inspect.getargspec(fn)[2]:
528 oldfn = fn
531 oldfn = fn
529 fn = lambda s, c, **kwargs: oldfn(s, c)
532 fn = lambda s, c, **kwargs: oldfn(s, c)
530 l.append((mf, fn, params))
533 l.append((mf, fn, params))
531 self.filterpats[filter] = l
534 self.filterpats[filter] = l
532
535
533 for mf, fn, cmd in self.filterpats[filter]:
536 for mf, fn, cmd in self.filterpats[filter]:
534 if mf(filename):
537 if mf(filename):
535 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
538 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
536 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
539 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
537 break
540 break
538
541
539 return data
542 return data
540
543
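_filter() above has to support both new-style filter functions that accept keyword arguments and old-style ones that only take (data, cmd); it inspects the function and wraps the old style. A standalone version of that compatibility shim, written with inspect.getfullargspec (the modern replacement for the getargspec call in the code; illustration only):

import inspect

def accepts_kwargs(fn):
    # the original checks getargspec(fn)[2], i.e. the **kwargs slot
    return inspect.getfullargspec(fn).varkw is not None

def wrap_filter(fn):
    if accepts_kwargs(fn):
        return fn
    return lambda data, cmd, **kwargs: fn(data, cmd)

def old_style(data, cmd):
    return data.upper()

def new_style(data, cmd, **kwargs):
    return "%s (filename=%s)" % (data, kwargs.get("filename"))

for f in (old_style, new_style):
    wrapped = wrap_filter(f)
    print(wrapped("hello", "cmd", filename="a.txt"))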
541 def adddatafilter(self, name, filter):
544 def adddatafilter(self, name, filter):
542 self._datafilters[name] = filter
545 self._datafilters[name] = filter
543
546
544 def wread(self, filename):
547 def wread(self, filename):
545 if self._link(filename):
548 if self._link(filename):
546 data = os.readlink(self.wjoin(filename))
549 data = os.readlink(self.wjoin(filename))
547 else:
550 else:
548 data = self.wopener(filename, 'r').read()
551 data = self.wopener(filename, 'r').read()
549 return self._filter("encode", filename, data)
552 return self._filter("encode", filename, data)
550
553
551 def wwrite(self, filename, data, flags):
554 def wwrite(self, filename, data, flags):
552 data = self._filter("decode", filename, data)
555 data = self._filter("decode", filename, data)
553 try:
556 try:
554 os.unlink(self.wjoin(filename))
557 os.unlink(self.wjoin(filename))
555 except OSError:
558 except OSError:
556 pass
559 pass
557 self.wopener(filename, 'w').write(data)
560 self.wopener(filename, 'w').write(data)
558 util.set_flags(self.wjoin(filename), flags)
561 util.set_flags(self.wjoin(filename), flags)
559
562
560 def wwritedata(self, filename, data):
563 def wwritedata(self, filename, data):
561 return self._filter("decode", filename, data)
564 return self._filter("decode", filename, data)
562
565
563 def transaction(self):
566 def transaction(self):
564 if self._transref and self._transref():
567 if self._transref and self._transref():
565 return self._transref().nest()
568 return self._transref().nest()
566
569
567 # abort here if the journal already exists
570 # abort here if the journal already exists
568 if os.path.exists(self.sjoin("journal")):
571 if os.path.exists(self.sjoin("journal")):
569 raise repo.RepoError(_("journal already exists - run hg recover"))
572 raise repo.RepoError(_("journal already exists - run hg recover"))
570
573
571 # save dirstate for rollback
574 # save dirstate for rollback
572 try:
575 try:
573 ds = self.opener("dirstate").read()
576 ds = self.opener("dirstate").read()
574 except IOError:
577 except IOError:
575 ds = ""
578 ds = ""
576 self.opener("journal.dirstate", "w").write(ds)
579 self.opener("journal.dirstate", "w").write(ds)
577 self.opener("journal.branch", "w").write(self.dirstate.branch())
580 self.opener("journal.branch", "w").write(self.dirstate.branch())
578
581
579 renames = [(self.sjoin("journal"), self.sjoin("undo")),
582 renames = [(self.sjoin("journal"), self.sjoin("undo")),
580 (self.join("journal.dirstate"), self.join("undo.dirstate")),
583 (self.join("journal.dirstate"), self.join("undo.dirstate")),
581 (self.join("journal.branch"), self.join("undo.branch"))]
584 (self.join("journal.branch"), self.join("undo.branch"))]
582 tr = transaction.transaction(self.ui.warn, self.sopener,
585 tr = transaction.transaction(self.ui.warn, self.sopener,
583 self.sjoin("journal"),
586 self.sjoin("journal"),
584 aftertrans(renames),
587 aftertrans(renames),
585 self._createmode)
588 self._createmode)
586 self._transref = weakref.ref(tr)
589 self._transref = weakref.ref(tr)
587 return tr
590 return tr
588
591
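transaction() above snapshots the dirstate and branch under journal.* names and hands a list of (source, destination) renames to aftertrans, which is defined elsewhere in this file; the effect is that a successfully closed transaction leaves undo.* files behind for rollback(). A toy version of that rename-on-close callback, an assumption about aftertrans's shape rather than its actual code:

import os, tempfile

def aftertrans_sketch(renames):
    # return a callback that performs the queued renames once the
    # transaction has closed successfully
    def after():
        for src, dst in renames:
            os.rename(src, dst)
    return after

tmp = tempfile.mkdtemp()
journal = os.path.join(tmp, "journal.dirstate")
undo = os.path.join(tmp, "undo.dirstate")
with open(journal, "w") as f:
    f.write("saved dirstate\n")
on_close = aftertrans_sketch([(journal, undo)])
on_close()                    # would be run by the transaction on close
print(os.path.exists(undo))   # True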
589 def recover(self):
592 def recover(self):
590 l = self.lock()
593 l = self.lock()
591 try:
594 try:
592 if os.path.exists(self.sjoin("journal")):
595 if os.path.exists(self.sjoin("journal")):
593 self.ui.status(_("rolling back interrupted transaction\n"))
596 self.ui.status(_("rolling back interrupted transaction\n"))
594 transaction.rollback(self.sopener, self.sjoin("journal"))
597 transaction.rollback(self.sopener, self.sjoin("journal"))
595 self.invalidate()
598 self.invalidate()
596 return True
599 return True
597 else:
600 else:
598 self.ui.warn(_("no interrupted transaction available\n"))
601 self.ui.warn(_("no interrupted transaction available\n"))
599 return False
602 return False
600 finally:
603 finally:
601 del l
604 del l
602
605
603 def rollback(self):
606 def rollback(self):
604 wlock = lock = None
607 wlock = lock = None
605 try:
608 try:
606 wlock = self.wlock()
609 wlock = self.wlock()
607 lock = self.lock()
610 lock = self.lock()
608 if os.path.exists(self.sjoin("undo")):
611 if os.path.exists(self.sjoin("undo")):
609 self.ui.status(_("rolling back last transaction\n"))
612 self.ui.status(_("rolling back last transaction\n"))
610 transaction.rollback(self.sopener, self.sjoin("undo"))
613 transaction.rollback(self.sopener, self.sjoin("undo"))
611 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
614 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
612 try:
615 try:
613 branch = self.opener("undo.branch").read()
616 branch = self.opener("undo.branch").read()
614 self.dirstate.setbranch(branch)
617 self.dirstate.setbranch(branch)
615 except IOError:
618 except IOError:
616 self.ui.warn(_("Named branch could not be reset, "
619 self.ui.warn(_("Named branch could not be reset, "
617 "current branch still is: %s\n")
620 "current branch still is: %s\n")
618 % util.tolocal(self.dirstate.branch()))
621 % util.tolocal(self.dirstate.branch()))
619 self.invalidate()
622 self.invalidate()
620 self.dirstate.invalidate()
623 self.dirstate.invalidate()
621 else:
624 else:
622 self.ui.warn(_("no rollback information available\n"))
625 self.ui.warn(_("no rollback information available\n"))
623 finally:
626 finally:
624 del lock, wlock
627 del lock, wlock
625
628
626 def invalidate(self):
629 def invalidate(self):
627 for a in "changelog manifest".split():
630 for a in "changelog manifest".split():
628 if a in self.__dict__:
631 if a in self.__dict__:
629 delattr(self, a)
632 delattr(self, a)
630 self.tagscache = None
633 self.tagscache = None
631 self._tagstypecache = None
634 self._tagstypecache = None
632 self.nodetagscache = None
635 self.nodetagscache = None
633 self.branchcache = None
636 self.branchcache = None
634 self._ubranchcache = None
637 self._ubranchcache = None
635 self._branchcachetip = None
638 self._branchcachetip = None
636
639
637 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
640 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
638 try:
641 try:
639 l = lock.lock(lockname, 0, releasefn, desc=desc)
642 l = lock.lock(lockname, 0, releasefn, desc=desc)
640 except lock.LockHeld, inst:
643 except lock.LockHeld, inst:
641 if not wait:
644 if not wait:
642 raise
645 raise
643 self.ui.warn(_("waiting for lock on %s held by %r\n") %
646 self.ui.warn(_("waiting for lock on %s held by %r\n") %
644 (desc, inst.locker))
647 (desc, inst.locker))
645 # default to 600 seconds timeout
648 # default to 600 seconds timeout
646 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
649 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
647 releasefn, desc=desc)
650 releasefn, desc=desc)
648 if acquirefn:
651 if acquirefn:
649 acquirefn()
652 acquirefn()
650 return l
653 return l
651
654
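_lock() above first tries a non-blocking acquire and, only if the lock is held and waiting is allowed, warns the user and retries with the configured timeout (600 seconds by default). The same acquire-then-wait shape with generic stand-ins; LockHeld and the acquire function below are illustrative, not Mercurial's lock API:

class LockHeld(Exception):
    def __init__(self, locker):
        self.locker = locker

def acquire_with_retry(try_acquire, wait, timeout, warn):
    try:
        return try_acquire(timeout=0)          # non-blocking first attempt
    except LockHeld as inst:
        if not wait:
            raise
        warn("waiting for lock held by %r\n" % inst.locker)
        return try_acquire(timeout=timeout)    # then wait up to the timeout

attempts = []
def fake_acquire(timeout):
    attempts.append(timeout)
    if timeout == 0:
        raise LockHeld("user@host:1234")
    return "lock-object"

print(acquire_with_retry(fake_acquire, wait=True, timeout=600, warn=print))
print(attempts)   # [0, 600]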
652 def lock(self, wait=True):
655 def lock(self, wait=True):
653 if self._lockref and self._lockref():
656 if self._lockref and self._lockref():
654 return self._lockref()
657 return self._lockref()
655
658
656 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
659 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
657 _('repository %s') % self.origroot)
660 _('repository %s') % self.origroot)
658 self._lockref = weakref.ref(l)
661 self._lockref = weakref.ref(l)
659 return l
662 return l
660
663
661 def wlock(self, wait=True):
664 def wlock(self, wait=True):
662 if self._wlockref and self._wlockref():
665 if self._wlockref and self._wlockref():
663 return self._wlockref()
666 return self._wlockref()
664
667
665 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
668 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
666 self.dirstate.invalidate, _('working directory of %s') %
669 self.dirstate.invalidate, _('working directory of %s') %
667 self.origroot)
670 self.origroot)
668 self._wlockref = weakref.ref(l)
671 self._wlockref = weakref.ref(l)
669 return l
672 return l
670
673
671 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
674 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
672 """
675 """
673 commit an individual file as part of a larger transaction
676 commit an individual file as part of a larger transaction
674 """
677 """
675
678
676 fn = fctx.path()
679 fn = fctx.path()
677 t = fctx.data()
680 t = fctx.data()
678 fl = self.file(fn)
681 fl = self.file(fn)
679 fp1 = manifest1.get(fn, nullid)
682 fp1 = manifest1.get(fn, nullid)
680 fp2 = manifest2.get(fn, nullid)
683 fp2 = manifest2.get(fn, nullid)
681
684
682 meta = {}
685 meta = {}
683 cp = fctx.renamed()
686 cp = fctx.renamed()
684 if cp and cp[0] != fn:
687 if cp and cp[0] != fn:
685 cp = cp[0]
688 cp = cp[0]
686 # Mark the new revision of this file as a copy of another
689 # Mark the new revision of this file as a copy of another
687 # file. This copy data will effectively act as a parent
690 # file. This copy data will effectively act as a parent
688 # of this new revision. If this is a merge, the first
691 # of this new revision. If this is a merge, the first
689 # parent will be the nullid (meaning "look up the copy data")
692 # parent will be the nullid (meaning "look up the copy data")
690 # and the second one will be the other parent. For example:
693 # and the second one will be the other parent. For example:
691 #
694 #
692 # 0 --- 1 --- 3 rev1 changes file foo
695 # 0 --- 1 --- 3 rev1 changes file foo
693 # \ / rev2 renames foo to bar and changes it
696 # \ / rev2 renames foo to bar and changes it
694 # \- 2 -/ rev3 should have bar with all changes and
697 # \- 2 -/ rev3 should have bar with all changes and
695 # should record that bar descends from
698 # should record that bar descends from
696 # bar in rev2 and foo in rev1
699 # bar in rev2 and foo in rev1
697 #
700 #
698 # this allows this merge to succeed:
701 # this allows this merge to succeed:
699 #
702 #
700 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
703 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
701 # \ / merging rev3 and rev4 should use bar@rev2
704 # \ / merging rev3 and rev4 should use bar@rev2
702 # \- 2 --- 4 as the merge base
705 # \- 2 --- 4 as the merge base
703 #
706 #
704 meta["copy"] = cp
707 meta["copy"] = cp
705 if not manifest2: # not a branch merge
708 if not manifest2: # not a branch merge
706 meta["copyrev"] = hex(manifest1[cp])
709 meta["copyrev"] = hex(manifest1[cp])
707 fp2 = nullid
710 fp2 = nullid
708 elif fp2 != nullid: # copied on remote side
711 elif fp2 != nullid: # copied on remote side
709 meta["copyrev"] = hex(manifest1[cp])
712 meta["copyrev"] = hex(manifest1[cp])
710 elif fp1 != nullid: # copied on local side, reversed
713 elif fp1 != nullid: # copied on local side, reversed
711 meta["copyrev"] = hex(manifest2[cp])
714 meta["copyrev"] = hex(manifest2[cp])
712 fp2 = fp1
715 fp2 = fp1
713 elif cp in manifest2: # directory rename on local side
716 elif cp in manifest2: # directory rename on local side
714 meta["copyrev"] = hex(manifest2[cp])
717 meta["copyrev"] = hex(manifest2[cp])
715 else: # directory rename on remote side
718 else: # directory rename on remote side
716 meta["copyrev"] = hex(manifest1[cp])
719 meta["copyrev"] = hex(manifest1[cp])
717 self.ui.debug(_(" %s: copy %s:%s\n") %
720 self.ui.debug(_(" %s: copy %s:%s\n") %
718 (fn, cp, meta["copyrev"]))
721 (fn, cp, meta["copyrev"]))
719 fp1 = nullid
722 fp1 = nullid
720 elif fp2 != nullid:
723 elif fp2 != nullid:
721 # is one parent an ancestor of the other?
724 # is one parent an ancestor of the other?
722 fpa = fl.ancestor(fp1, fp2)
725 fpa = fl.ancestor(fp1, fp2)
723 if fpa == fp1:
726 if fpa == fp1:
724 fp1, fp2 = fp2, nullid
727 fp1, fp2 = fp2, nullid
725 elif fpa == fp2:
728 elif fpa == fp2:
726 fp2 = nullid
729 fp2 = nullid
727
730
728 # is the file unmodified from the parent? report existing entry
731 # is the file unmodified from the parent? report existing entry
729 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
732 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
730 return fp1
733 return fp1
731
734
732 changelist.append(fn)
735 changelist.append(fn)
733 return fl.add(t, meta, tr, linkrev, fp1, fp2)
736 return fl.add(t, meta, tr, linkrev, fp1, fp2)
734
737
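For the common non-merge case, filecommit() above records a rename by storing the copy source and the source file's revision in the new filelog entry's metadata. A toy version of just that branch of the logic, with the manifest holding hex node strings directly for simplicity (illustration only):

def copy_meta_nonmerge(fn, copy_source, manifest1):
    """Return the filelog metadata for a renamed/copied file (non-merge case)."""
    meta = {}
    if copy_source and copy_source != fn:
        # record where the content came from; this effectively acts as an
        # extra parent of the new file revision
        meta["copy"] = copy_source
        meta["copyrev"] = manifest1[copy_source]  # hex node of the source file
    return meta

manifest1 = {"foo": "a3f1" * 10}
print(copy_meta_nonmerge("bar", "foo", manifest1))
# {'copy': 'foo', 'copyrev': 'a3f1a3f1a3f1a3f1a3f1a3f1a3f1a3f1a3f1a3f1'}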
735 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
738 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
736 if p1 is None:
739 if p1 is None:
737 p1, p2 = self.dirstate.parents()
740 p1, p2 = self.dirstate.parents()
738 return self.commit(files=files, text=text, user=user, date=date,
741 return self.commit(files=files, text=text, user=user, date=date,
739 p1=p1, p2=p2, extra=extra, empty_ok=True)
742 p1=p1, p2=p2, extra=extra, empty_ok=True)
740
743
741 def commit(self, files=None, text="", user=None, date=None,
744 def commit(self, files=None, text="", user=None, date=None,
742 match=None, force=False, force_editor=False,
745 match=None, force=False, force_editor=False,
743 p1=None, p2=None, extra={}, empty_ok=False):
746 p1=None, p2=None, extra={}, empty_ok=False):
744 wlock = lock = None
747 wlock = lock = None
745 if files:
748 if files:
746 files = util.unique(files)
749 files = util.unique(files)
747 try:
750 try:
748 wlock = self.wlock()
751 wlock = self.wlock()
749 lock = self.lock()
752 lock = self.lock()
750 use_dirstate = (p1 is None) # not rawcommit
753 use_dirstate = (p1 is None) # not rawcommit
751
754
752 if use_dirstate:
755 if use_dirstate:
753 p1, p2 = self.dirstate.parents()
756 p1, p2 = self.dirstate.parents()
754 update_dirstate = True
757 update_dirstate = True
755
758
756 if (not force and p2 != nullid and
759 if (not force and p2 != nullid and
757 (match and (match.files() or match.anypats()))):
760 (match and (match.files() or match.anypats()))):
758 raise util.Abort(_('cannot partially commit a merge '
761 raise util.Abort(_('cannot partially commit a merge '
759 '(do not specify files or patterns)'))
762 '(do not specify files or patterns)'))
760
763
761 if files:
764 if files:
762 modified, removed = [], []
765 modified, removed = [], []
763 for f in files:
766 for f in files:
764 s = self.dirstate[f]
767 s = self.dirstate[f]
765 if s in 'nma':
768 if s in 'nma':
766 modified.append(f)
769 modified.append(f)
767 elif s == 'r':
770 elif s == 'r':
768 removed.append(f)
771 removed.append(f)
769 else:
772 else:
770 self.ui.warn(_("%s not tracked!\n") % f)
773 self.ui.warn(_("%s not tracked!\n") % f)
771 changes = [modified, [], removed, [], []]
774 changes = [modified, [], removed, [], []]
772 else:
775 else:
773 changes = self.status(match=match)
776 changes = self.status(match=match)
774 else:
777 else:
775 p1, p2 = p1, p2 or nullid
778 p1, p2 = p1, p2 or nullid
776 update_dirstate = (self.dirstate.parents()[0] == p1)
779 update_dirstate = (self.dirstate.parents()[0] == p1)
777 changes = [files, [], [], [], []]
780 changes = [files, [], [], [], []]
778
781
779 wctx = context.workingctx(self, (p1, p2), text, user, date,
782 wctx = context.workingctx(self, (p1, p2), text, user, date,
780 extra, changes)
783 extra, changes)
781 return self._commitctx(wctx, force, force_editor, empty_ok,
784 return self._commitctx(wctx, force, force_editor, empty_ok,
782 use_dirstate, update_dirstate)
785 use_dirstate, update_dirstate)
783 finally:
786 finally:
784 del lock, wlock
787 del lock, wlock
785
788
786 def commitctx(self, ctx):
789 def commitctx(self, ctx):
787 wlock = lock = None
790 wlock = lock = None
788 try:
791 try:
789 wlock = self.wlock()
792 wlock = self.wlock()
790 lock = self.lock()
793 lock = self.lock()
791 return self._commitctx(ctx, force=True, force_editor=False,
794 return self._commitctx(ctx, force=True, force_editor=False,
792 empty_ok=True, use_dirstate=False,
795 empty_ok=True, use_dirstate=False,
793 update_dirstate=False)
796 update_dirstate=False)
794 finally:
797 finally:
795 del lock, wlock
798 del lock, wlock
796
799
797 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
800 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
798 use_dirstate=True, update_dirstate=True):
801 use_dirstate=True, update_dirstate=True):
799 tr = None
802 tr = None
800 valid = 0 # don't save the dirstate if this isn't set
803 valid = 0 # don't save the dirstate if this isn't set
801 try:
804 try:
802 commit = wctx.modified() + wctx.added()
805 commit = wctx.modified() + wctx.added()
803 remove = wctx.removed()
806 remove = wctx.removed()
804 extra = wctx.extra().copy()
807 extra = wctx.extra().copy()
805 branchname = extra['branch']
808 branchname = extra['branch']
806 user = wctx.user()
809 user = wctx.user()
807 text = wctx.description()
810 text = wctx.description()
808
811
809 p1, p2 = [p.node() for p in wctx.parents()]
812 p1, p2 = [p.node() for p in wctx.parents()]
810 c1 = self.changelog.read(p1)
813 c1 = self.changelog.read(p1)
811 c2 = self.changelog.read(p2)
814 c2 = self.changelog.read(p2)
812 m1 = self.manifest.read(c1[0]).copy()
815 m1 = self.manifest.read(c1[0]).copy()
813 m2 = self.manifest.read(c2[0])
816 m2 = self.manifest.read(c2[0])
814
817
815 if use_dirstate:
818 if use_dirstate:
816 oldname = c1[5].get("branch") # stored in UTF-8
819 oldname = c1[5].get("branch") # stored in UTF-8
817 if (not commit and not remove and not force and p2 == nullid
820 if (not commit and not remove and not force and p2 == nullid
818 and branchname == oldname):
821 and branchname == oldname):
819 self.ui.status(_("nothing changed\n"))
822 self.ui.status(_("nothing changed\n"))
820 return None
823 return None
821
824
822 xp1 = hex(p1)
825 xp1 = hex(p1)
823 if p2 == nullid: xp2 = ''
826 if p2 == nullid: xp2 = ''
824 else: xp2 = hex(p2)
827 else: xp2 = hex(p2)
825
828
826 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
829 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
827
830
828 tr = self.transaction()
831 tr = self.transaction()
829 trp = weakref.proxy(tr)
832 trp = weakref.proxy(tr)
830
833
831 # check in files
834 # check in files
832 new = {}
835 new = {}
833 changed = []
836 changed = []
834 linkrev = self.changelog.count()
837 linkrev = self.changelog.count()
835 commit.sort()
838 commit.sort()
836 for f in commit:
839 for f in commit:
837 self.ui.note(f + "\n")
840 self.ui.note(f + "\n")
838 try:
841 try:
839 fctx = wctx.filectx(f)
842 fctx = wctx.filectx(f)
840 newflags = fctx.flags()
843 newflags = fctx.flags()
841 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
844 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
842 if ((not changed or changed[-1] != f) and
845 if ((not changed or changed[-1] != f) and
843 m2.get(f) != new[f]):
846 m2.get(f) != new[f]):
844 # mention the file in the changelog if some
847 # mention the file in the changelog if some
845 # flag changed, even if there was no content
848 # flag changed, even if there was no content
846 # change.
849 # change.
847 if m1.flags(f) != newflags:
850 if m1.flags(f) != newflags:
848 changed.append(f)
851 changed.append(f)
849 m1.set(f, newflags)
852 m1.set(f, newflags)
850 if use_dirstate:
853 if use_dirstate:
851 self.dirstate.normal(f)
854 self.dirstate.normal(f)
852
855
853 except (OSError, IOError):
856 except (OSError, IOError):
854 if use_dirstate:
857 if use_dirstate:
855 self.ui.warn(_("trouble committing %s!\n") % f)
858 self.ui.warn(_("trouble committing %s!\n") % f)
856 raise
859 raise
857 else:
860 else:
858 remove.append(f)
861 remove.append(f)
859
862
860 # update manifest
863 # update manifest
861 m1.update(new)
864 m1.update(new)
862 remove.sort()
865 remove.sort()
863 removed = []
866 removed = []
864
867
865 for f in remove:
868 for f in remove:
866 if f in m1:
869 if f in m1:
867 del m1[f]
870 del m1[f]
868 removed.append(f)
871 removed.append(f)
869 elif f in m2:
872 elif f in m2:
870 removed.append(f)
873 removed.append(f)
871 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
874 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
872 (new, removed))
875 (new, removed))
873
876
874 # add changeset
877 # add changeset
875 if (not empty_ok and not text) or force_editor:
878 if (not empty_ok and not text) or force_editor:
876 edittext = []
879 edittext = []
877 if text:
880 if text:
878 edittext.append(text)
881 edittext.append(text)
879 edittext.append("")
882 edittext.append("")
880 edittext.append(_("HG: Enter commit message."
883 edittext.append(_("HG: Enter commit message."
881 " Lines beginning with 'HG:' are removed."))
884 " Lines beginning with 'HG:' are removed."))
882 edittext.append("HG: --")
885 edittext.append("HG: --")
883 edittext.append("HG: user: %s" % user)
886 edittext.append("HG: user: %s" % user)
884 if p2 != nullid:
887 if p2 != nullid:
885 edittext.append("HG: branch merge")
888 edittext.append("HG: branch merge")
886 if branchname:
889 if branchname:
887 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
890 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
888 edittext.extend(["HG: changed %s" % f for f in changed])
891 edittext.extend(["HG: changed %s" % f for f in changed])
889 edittext.extend(["HG: removed %s" % f for f in removed])
892 edittext.extend(["HG: removed %s" % f for f in removed])
890 if not changed and not remove:
893 if not changed and not remove:
891 edittext.append("HG: no files changed")
894 edittext.append("HG: no files changed")
892 edittext.append("")
895 edittext.append("")
893 # run editor in the repository root
896 # run editor in the repository root
894 olddir = os.getcwd()
897 olddir = os.getcwd()
895 os.chdir(self.root)
898 os.chdir(self.root)
896 text = self.ui.edit("\n".join(edittext), user)
899 text = self.ui.edit("\n".join(edittext), user)
897 os.chdir(olddir)
900 os.chdir(olddir)
898
901
899 lines = [line.rstrip() for line in text.rstrip().splitlines()]
902 lines = [line.rstrip() for line in text.rstrip().splitlines()]
900 while lines and not lines[0]:
903 while lines and not lines[0]:
901 del lines[0]
904 del lines[0]
902 if not lines and use_dirstate:
905 if not lines and use_dirstate:
903 raise util.Abort(_("empty commit message"))
906 raise util.Abort(_("empty commit message"))
904 text = '\n'.join(lines)
907 text = '\n'.join(lines)
905
908
906 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
909 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
907 user, wctx.date(), extra)
910 user, wctx.date(), extra)
908 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
911 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
909 parent2=xp2)
912 parent2=xp2)
910 tr.close()
913 tr.close()
911
914
912 if self.branchcache:
915 if self.branchcache:
913 self.branchtags()
916 self.branchtags()
914
917
915 if use_dirstate or update_dirstate:
918 if use_dirstate or update_dirstate:
916 self.dirstate.setparents(n)
919 self.dirstate.setparents(n)
917 if use_dirstate:
920 if use_dirstate:
918 for f in removed:
921 for f in removed:
919 self.dirstate.forget(f)
922 self.dirstate.forget(f)
920 valid = 1 # our dirstate updates are complete
923 valid = 1 # our dirstate updates are complete
921
924
922 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
925 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
923 return n
926 return n
924 finally:
927 finally:
925 if not valid: # don't save our updated dirstate
928 if not valid: # don't save our updated dirstate
926 self.dirstate.invalidate()
929 self.dirstate.invalidate()
927 del tr
930 del tr
928
931
929 def walk(self, match, node=None):
932 def walk(self, match, node=None):
930 '''
933 '''
931 walk recursively through the directory tree or a given
934 walk recursively through the directory tree or a given
932 changeset, finding all files matched by the match
935 changeset, finding all files matched by the match
933 function
936 function
934 '''
937 '''
935
938
936 if node:
939 if node:
937 fdict = dict.fromkeys(match.files())
940 fdict = dict.fromkeys(match.files())
938 # for dirstate.walk, files=['.'] means "walk the whole tree".
941 # for dirstate.walk, files=['.'] means "walk the whole tree".
939 # follow that here, too
942 # follow that here, too
940 fdict.pop('.', None)
943 fdict.pop('.', None)
941 mdict = self.manifest.read(self.changelog.read(node)[0])
944 mdict = self.manifest.read(self.changelog.read(node)[0])
942 mfiles = mdict.keys()
945 mfiles = mdict.keys()
943 mfiles.sort()
946 mfiles.sort()
944 for fn in mfiles:
947 for fn in mfiles:
945 for ffn in fdict:
948 for ffn in fdict:
946 # match if the file is the exact name or a directory
949 # match if the file is the exact name or a directory
947 if ffn == fn or fn.startswith("%s/" % ffn):
950 if ffn == fn or fn.startswith("%s/" % ffn):
948 del fdict[ffn]
951 del fdict[ffn]
949 break
952 break
950 if match(fn):
953 if match(fn):
951 yield fn
954 yield fn
952 ffiles = fdict.keys()
955 ffiles = fdict.keys()
953 ffiles.sort()
956 ffiles.sort()
954 for fn in ffiles:
957 for fn in ffiles:
955 if match.bad(fn, 'No such file in rev ' + short(node)) \
958 if match.bad(fn, 'No such file in rev ' + short(node)) \
956 and match(fn):
959 and match(fn):
957 yield fn
960 yield fn
958 else:
961 else:
959 for fn in self.dirstate.walk(match):
962 for fn in self.dirstate.walk(match):
960 yield fn
963 yield fn
961
964
962 def status(self, node1=None, node2=None, match=None,
965 def status(self, node1=None, node2=None, match=None,
963 list_ignored=False, list_clean=False, list_unknown=True):
966 list_ignored=False, list_clean=False, list_unknown=True):
964 """return status of files between two nodes or node and working directory
967 """return status of files between two nodes or node and working directory
965
968
966 If node1 is None, use the first dirstate parent instead.
969 If node1 is None, use the first dirstate parent instead.
967 If node2 is None, compare node1 with working directory.
970 If node2 is None, compare node1 with working directory.
968 """
971 """
969
972
970 def fcmp(fn, getnode):
973 def fcmp(fn, getnode):
971 t1 = self.wread(fn)
974 t1 = self.wread(fn)
972 return self.file(fn).cmp(getnode(fn), t1)
975 return self.file(fn).cmp(getnode(fn), t1)
973
976
974 def mfmatches(node):
977 def mfmatches(node):
975 change = self.changelog.read(node)
978 change = self.changelog.read(node)
976 mf = self.manifest.read(change[0]).copy()
979 mf = self.manifest.read(change[0]).copy()
977 for fn in mf.keys():
980 for fn in mf.keys():
978 if not match(fn):
981 if not match(fn):
979 del mf[fn]
982 del mf[fn]
980 return mf
983 return mf
981
984
982 if not match:
985 if not match:
983 match = match_.always(self.root, self.getcwd())
986 match = match_.always(self.root, self.getcwd())
984
987
985 modified, added, removed, deleted, unknown = [], [], [], [], []
988 modified, added, removed, deleted, unknown = [], [], [], [], []
986 ignored, clean = [], []
989 ignored, clean = [], []
987
990
988 compareworking = False
991 compareworking = False
989 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
992 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
990 compareworking = True
993 compareworking = True
991
994
992 if not compareworking:
995 if not compareworking:
993 # read the manifest from node1 before the manifest from node2,
996 # read the manifest from node1 before the manifest from node2,
994 # so that we'll hit the manifest cache if we're going through
997 # so that we'll hit the manifest cache if we're going through
995 # all the revisions in parent->child order.
998 # all the revisions in parent->child order.
996 mf1 = mfmatches(node1)
999 mf1 = mfmatches(node1)
997
1000
998 # are we comparing the working directory?
1001 # are we comparing the working directory?
999 if not node2:
1002 if not node2:
1000 (lookup, modified, added, removed, deleted, unknown,
1003 (lookup, modified, added, removed, deleted, unknown,
1001 ignored, clean) = self.dirstate.status(match, list_ignored,
1004 ignored, clean) = self.dirstate.status(match, list_ignored,
1002 list_clean, list_unknown)
1005 list_clean, list_unknown)
1003 # are we comparing working dir against its parent?
1006 # are we comparing working dir against its parent?
1004 if compareworking:
1007 if compareworking:
1005 if lookup:
1008 if lookup:
1006 fixup = []
1009 fixup = []
1007 # do a full compare of any files that might have changed
1010 # do a full compare of any files that might have changed
1008 ctx = self.changectx('')
1011 ctx = self['.']
1009 ff = self.dirstate.flagfunc(ctx.flags)
1012 ff = self.dirstate.flagfunc(ctx.flags)
1010 for f in lookup:
1013 for f in lookup:
1011 if (f not in ctx or ff(f) != ctx.flags(f)
1014 if (f not in ctx or ff(f) != ctx.flags(f)
1012 or ctx[f].cmp(self.wread(f))):
1015 or ctx[f].cmp(self.wread(f))):
1013 modified.append(f)
1016 modified.append(f)
1014 else:
1017 else:
1015 fixup.append(f)
1018 fixup.append(f)
1016 if list_clean:
1019 if list_clean:
1017 clean.append(f)
1020 clean.append(f)
1018
1021
1019 # update dirstate for files that are actually clean
1022 # update dirstate for files that are actually clean
1020 if fixup:
1023 if fixup:
1021 wlock = None
1024 wlock = None
1022 try:
1025 try:
1023 try:
1026 try:
1024 wlock = self.wlock(False)
1027 wlock = self.wlock(False)
1025 except lock.LockException:
1028 except lock.LockException:
1026 pass
1029 pass
1027 if wlock:
1030 if wlock:
1028 for f in fixup:
1031 for f in fixup:
1029 self.dirstate.normal(f)
1032 self.dirstate.normal(f)
1030 finally:
1033 finally:
1031 del wlock
1034 del wlock
1032 else:
1035 else:
1033 # we are comparing working dir against non-parent
1036 # we are comparing working dir against non-parent
1034 # generate a pseudo-manifest for the working dir
1037 # generate a pseudo-manifest for the working dir
1035 # XXX: create it in dirstate.py ?
1038 # XXX: create it in dirstate.py ?
1036 mf2 = mfmatches(self.dirstate.parents()[0])
1039 mf2 = mfmatches(self.dirstate.parents()[0])
1037 ff = self.dirstate.flagfunc(mf2.flags)
1040 ff = self.dirstate.flagfunc(mf2.flags)
1038 for f in lookup + modified + added:
1041 for f in lookup + modified + added:
1039 mf2[f] = ""
1042 mf2[f] = ""
1040 mf2.set(f, ff(f))
1043 mf2.set(f, ff(f))
1041 for f in removed:
1044 for f in removed:
1042 if f in mf2:
1045 if f in mf2:
1043 del mf2[f]
1046 del mf2[f]
1044
1047
1045 else:
1048 else:
1046 # we are comparing two revisions
1049 # we are comparing two revisions
1047 mf2 = mfmatches(node2)
1050 mf2 = mfmatches(node2)
1048
1051
1049 if not compareworking:
1052 if not compareworking:
1050 # flush lists from dirstate before comparing manifests
1053 # flush lists from dirstate before comparing manifests
1051 modified, added, clean = [], [], []
1054 modified, added, clean = [], [], []
1052
1055
1053 # make sure to sort the files so we talk to the disk in a
1056 # make sure to sort the files so we talk to the disk in a
1054 # reasonable order
1057 # reasonable order
1055 mf2keys = mf2.keys()
1058 mf2keys = mf2.keys()
1056 mf2keys.sort()
1059 mf2keys.sort()
1057 getnode = lambda fn: mf1.get(fn, nullid)
1060 getnode = lambda fn: mf1.get(fn, nullid)
1058 for fn in mf2keys:
1061 for fn in mf2keys:
1059 if fn in mf1:
1062 if fn in mf1:
1060 if (mf1.flags(fn) != mf2.flags(fn) or
1063 if (mf1.flags(fn) != mf2.flags(fn) or
1061 (mf1[fn] != mf2[fn] and
1064 (mf1[fn] != mf2[fn] and
1062 (mf2[fn] != "" or fcmp(fn, getnode)))):
1065 (mf2[fn] != "" or fcmp(fn, getnode)))):
1063 modified.append(fn)
1066 modified.append(fn)
1064 elif list_clean:
1067 elif list_clean:
1065 clean.append(fn)
1068 clean.append(fn)
1066 del mf1[fn]
1069 del mf1[fn]
1067 else:
1070 else:
1068 added.append(fn)
1071 added.append(fn)
1069
1072
1070 removed = mf1.keys()
1073 removed = mf1.keys()
1071
1074
1072 # sort and return results:
1075 # sort and return results:
1073 for l in modified, added, removed, deleted, unknown, ignored, clean:
1076 for l in modified, added, removed, deleted, unknown, ignored, clean:
1074 l.sort()
1077 l.sort()
1075 return (modified, added, removed, deleted, unknown, ignored, clean)
1078 return (modified, added, removed, deleted, unknown, ignored, clean)
1076
1079
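Callers unpack the seven sorted lists returned by status() positionally. A hypothetical caller, shown only to make the tuple shape concrete:

def summarize(status_tuple):
    modified, added, removed, deleted, unknown, ignored, clean = status_tuple
    for label, files in (("M", modified), ("A", added), ("R", removed),
                         ("!", deleted), ("?", unknown)):
        for f in files:
            print("%s %s" % (label, f))

summarize((["a.txt"], ["b.txt"], [], [], ["junk.tmp"], [], []))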
1077 def add(self, list):
1080 def add(self, list):
1078 wlock = self.wlock()
1081 wlock = self.wlock()
1079 try:
1082 try:
1080 rejected = []
1083 rejected = []
1081 for f in list:
1084 for f in list:
1082 p = self.wjoin(f)
1085 p = self.wjoin(f)
1083 try:
1086 try:
1084 st = os.lstat(p)
1087 st = os.lstat(p)
1085 except:
1088 except:
1086 self.ui.warn(_("%s does not exist!\n") % f)
1089 self.ui.warn(_("%s does not exist!\n") % f)
1087 rejected.append(f)
1090 rejected.append(f)
1088 continue
1091 continue
1089 if st.st_size > 10000000:
1092 if st.st_size > 10000000:
1090 self.ui.warn(_("%s: files over 10MB may cause memory and"
1093 self.ui.warn(_("%s: files over 10MB may cause memory and"
1091 " performance problems\n"
1094 " performance problems\n"
1092 "(use 'hg revert %s' to unadd the file)\n")
1095 "(use 'hg revert %s' to unadd the file)\n")
1093 % (f, f))
1096 % (f, f))
1094 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1097 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1095 self.ui.warn(_("%s not added: only files and symlinks "
1098 self.ui.warn(_("%s not added: only files and symlinks "
1096 "supported currently\n") % f)
1099 "supported currently\n") % f)
1097 rejected.append(p)
1100 rejected.append(p)
1098 elif self.dirstate[f] in 'amn':
1101 elif self.dirstate[f] in 'amn':
1099 self.ui.warn(_("%s already tracked!\n") % f)
1102 self.ui.warn(_("%s already tracked!\n") % f)
1100 elif self.dirstate[f] == 'r':
1103 elif self.dirstate[f] == 'r':
1101 self.dirstate.normallookup(f)
1104 self.dirstate.normallookup(f)
1102 else:
1105 else:
1103 self.dirstate.add(f)
1106 self.dirstate.add(f)
1104 return rejected
1107 return rejected
1105 finally:
1108 finally:
1106 del wlock
1109 del wlock
1107
1110
1108 def forget(self, list):
1111 def forget(self, list):
1109 wlock = self.wlock()
1112 wlock = self.wlock()
1110 try:
1113 try:
1111 for f in list:
1114 for f in list:
1112 if self.dirstate[f] != 'a':
1115 if self.dirstate[f] != 'a':
1113 self.ui.warn(_("%s not added!\n") % f)
1116 self.ui.warn(_("%s not added!\n") % f)
1114 else:
1117 else:
1115 self.dirstate.forget(f)
1118 self.dirstate.forget(f)
1116 finally:
1119 finally:
1117 del wlock
1120 del wlock
1118
1121
1119 def remove(self, list, unlink=False):
1122 def remove(self, list, unlink=False):
1120 wlock = None
1123 wlock = None
1121 try:
1124 try:
1122 if unlink:
1125 if unlink:
1123 for f in list:
1126 for f in list:
1124 try:
1127 try:
1125 util.unlink(self.wjoin(f))
1128 util.unlink(self.wjoin(f))
1126 except OSError, inst:
1129 except OSError, inst:
1127 if inst.errno != errno.ENOENT:
1130 if inst.errno != errno.ENOENT:
1128 raise
1131 raise
1129 wlock = self.wlock()
1132 wlock = self.wlock()
1130 for f in list:
1133 for f in list:
1131 if unlink and os.path.exists(self.wjoin(f)):
1134 if unlink and os.path.exists(self.wjoin(f)):
1132 self.ui.warn(_("%s still exists!\n") % f)
1135 self.ui.warn(_("%s still exists!\n") % f)
1133 elif self.dirstate[f] == 'a':
1136 elif self.dirstate[f] == 'a':
1134 self.dirstate.forget(f)
1137 self.dirstate.forget(f)
1135 elif f not in self.dirstate:
1138 elif f not in self.dirstate:
1136 self.ui.warn(_("%s not tracked!\n") % f)
1139 self.ui.warn(_("%s not tracked!\n") % f)
1137 else:
1140 else:
1138 self.dirstate.remove(f)
1141 self.dirstate.remove(f)
1139 finally:
1142 finally:
1140 del wlock
1143 del wlock
1141
1144
1142 def undelete(self, list):
1145 def undelete(self, list):
1143 wlock = None
1146 wlock = None
1144 try:
1147 try:
1145 manifests = [self.manifest.read(self.changelog.read(p)[0])
1148 manifests = [self.manifest.read(self.changelog.read(p)[0])
1146 for p in self.dirstate.parents() if p != nullid]
1149 for p in self.dirstate.parents() if p != nullid]
1147 wlock = self.wlock()
1150 wlock = self.wlock()
1148 for f in list:
1151 for f in list:
1149 if self.dirstate[f] != 'r':
1152 if self.dirstate[f] != 'r':
1150 self.ui.warn("%s not removed!\n" % f)
1153 self.ui.warn("%s not removed!\n" % f)
1151 else:
1154 else:
1152 m = f in manifests[0] and manifests[0] or manifests[1]
1155 m = f in manifests[0] and manifests[0] or manifests[1]
1153 t = self.file(f).read(m[f])
1156 t = self.file(f).read(m[f])
1154 self.wwrite(f, t, m.flags(f))
1157 self.wwrite(f, t, m.flags(f))
1155 self.dirstate.normal(f)
1158 self.dirstate.normal(f)
1156 finally:
1159 finally:
1157 del wlock
1160 del wlock
1158
1161
1159 def copy(self, source, dest):
1162 def copy(self, source, dest):
1160 wlock = None
1163 wlock = None
1161 try:
1164 try:
1162 p = self.wjoin(dest)
1165 p = self.wjoin(dest)
1163 if not (os.path.exists(p) or os.path.islink(p)):
1166 if not (os.path.exists(p) or os.path.islink(p)):
1164 self.ui.warn(_("%s does not exist!\n") % dest)
1167 self.ui.warn(_("%s does not exist!\n") % dest)
1165 elif not (os.path.isfile(p) or os.path.islink(p)):
1168 elif not (os.path.isfile(p) or os.path.islink(p)):
1166 self.ui.warn(_("copy failed: %s is not a file or a "
1169 self.ui.warn(_("copy failed: %s is not a file or a "
1167 "symbolic link\n") % dest)
1170 "symbolic link\n") % dest)
1168 else:
1171 else:
1169 wlock = self.wlock()
1172 wlock = self.wlock()
1170 if dest not in self.dirstate:
1173 if dest not in self.dirstate:
1171 self.dirstate.add(dest)
1174 self.dirstate.add(dest)
1172 self.dirstate.copy(source, dest)
1175 self.dirstate.copy(source, dest)
1173 finally:
1176 finally:
1174 del wlock
1177 del wlock
1175
1178
1176 def heads(self, start=None):
1179 def heads(self, start=None):
1177 heads = self.changelog.heads(start)
1180 heads = self.changelog.heads(start)
1178 # sort the output in rev descending order
1181 # sort the output in rev descending order
1179 heads = [(-self.changelog.rev(h), h) for h in heads]
1182 heads = [(-self.changelog.rev(h), h) for h in heads]
1180 heads.sort()
1183 heads.sort()
1181 return [n for (r, n) in heads]
1184 return [n for (r, n) in heads]
1182
1185
1183 def branchheads(self, branch=None, start=None):
1186 def branchheads(self, branch=None, start=None):
1184 branch = branch is None and self.changectx(None).branch() or branch
1187 if branch is None:
1188 branch = self[None].branch()
1185 branches = self.branchtags()
1189 branches = self.branchtags()
1186 if branch not in branches:
1190 if branch not in branches:
1187 return []
1191 return []
1188 # The basic algorithm is this:
1192 # The basic algorithm is this:
1189 #
1193 #
1190 # Start from the branch tip since there are no later revisions that can
1194 # Start from the branch tip since there are no later revisions that can
1191 # possibly be in this branch, and the tip is a guaranteed head.
1195 # possibly be in this branch, and the tip is a guaranteed head.
1192 #
1196 #
1193 # Remember the tip's parents as the first ancestors, since these by
1197 # Remember the tip's parents as the first ancestors, since these by
1194 # definition are not heads.
1198 # definition are not heads.
1195 #
1199 #
1196 # Step backwards from the branch tip through all the revisions. We are
1200 # Step backwards from the branch tip through all the revisions. We are
1197 # guaranteed by the rules of Mercurial that we will now be visiting the
1201 # guaranteed by the rules of Mercurial that we will now be visiting the
1198 # nodes in reverse topological order (children before parents).
1202 # nodes in reverse topological order (children before parents).
1199 #
1203 #
1200 # If a revision is one of the ancestors of a head then we can toss it
1204 # If a revision is one of the ancestors of a head then we can toss it
1201 # out of the ancestors set (we've already found it and won't be
1205 # out of the ancestors set (we've already found it and won't be
1202 # visiting it again) and put its parents in the ancestors set.
1206 # visiting it again) and put its parents in the ancestors set.
1203 #
1207 #
1204 # Otherwise, if a revision is in the branch it's another head, since it
1208 # Otherwise, if a revision is in the branch it's another head, since it
1205 # wasn't in the ancestor list of an existing head. So add it to the
1209 # wasn't in the ancestor list of an existing head. So add it to the
1206 # head list, and add its parents to the ancestor list.
1210 # head list, and add its parents to the ancestor list.
1207 #
1211 #
1208 # If it is not in the branch ignore it.
1212 # If it is not in the branch ignore it.
1209 #
1213 #
1210 # Once we have a list of heads, use nodesbetween to filter out all the
1214 # Once we have a list of heads, use nodesbetween to filter out all the
1211 # heads that cannot be reached from startrev. There may be a more
1215 # heads that cannot be reached from startrev. There may be a more
1212 # efficient way to do this as part of the previous algorithm.
1216 # efficient way to do this as part of the previous algorithm.
1213
1217
1214 set = util.set
1218 set = util.set
1215 heads = [self.changelog.rev(branches[branch])]
1219 heads = [self.changelog.rev(branches[branch])]
1216 # Don't care if ancestors contains nullrev or not.
1220 # Don't care if ancestors contains nullrev or not.
1217 ancestors = set(self.changelog.parentrevs(heads[0]))
1221 ancestors = set(self.changelog.parentrevs(heads[0]))
1218 for rev in xrange(heads[0] - 1, nullrev, -1):
1222 for rev in xrange(heads[0] - 1, nullrev, -1):
1219 if rev in ancestors:
1223 if rev in ancestors:
1220 ancestors.update(self.changelog.parentrevs(rev))
1224 ancestors.update(self.changelog.parentrevs(rev))
1221 ancestors.remove(rev)
1225 ancestors.remove(rev)
1222 elif self.changectx(rev).branch() == branch:
1226 elif self[rev].branch() == branch:
1223 heads.append(rev)
1227 heads.append(rev)
1224 ancestors.update(self.changelog.parentrevs(rev))
1228 ancestors.update(self.changelog.parentrevs(rev))
1225 heads = [self.changelog.node(rev) for rev in heads]
1229 heads = [self.changelog.node(rev) for rev in heads]
1226 if start is not None:
1230 if start is not None:
1227 heads = self.changelog.nodesbetween([start], heads)[2]
1231 heads = self.changelog.nodesbetween([start], heads)[2]
1228 return heads
1232 return heads
1229
1233
1230 def branches(self, nodes):
1234 def branches(self, nodes):
1231 if not nodes:
1235 if not nodes:
1232 nodes = [self.changelog.tip()]
1236 nodes = [self.changelog.tip()]
1233 b = []
1237 b = []
1234 for n in nodes:
1238 for n in nodes:
1235 t = n
1239 t = n
1236 while 1:
1240 while 1:
1237 p = self.changelog.parents(n)
1241 p = self.changelog.parents(n)
1238 if p[1] != nullid or p[0] == nullid:
1242 if p[1] != nullid or p[0] == nullid:
1239 b.append((t, n, p[0], p[1]))
1243 b.append((t, n, p[0], p[1]))
1240 break
1244 break
1241 n = p[0]
1245 n = p[0]
1242 return b
1246 return b
1243
1247
1244 def between(self, pairs):
1248 def between(self, pairs):
1245 r = []
1249 r = []
1246
1250
1247 for top, bottom in pairs:
1251 for top, bottom in pairs:
1248 n, l, i = top, [], 0
1252 n, l, i = top, [], 0
1249 f = 1
1253 f = 1
1250
1254
1251 while n != bottom:
1255 while n != bottom:
1252 p = self.changelog.parents(n)[0]
1256 p = self.changelog.parents(n)[0]
1253 if i == f:
1257 if i == f:
1254 l.append(n)
1258 l.append(n)
1255 f = f * 2
1259 f = f * 2
1256 n = p
1260 n = p
1257 i += 1
1261 i += 1
1258
1262
1259 r.append(l)
1263 r.append(l)
1260
1264
1261 return r
1265 return r
1262
1266
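between() above walks the first-parent chain from each top node down to its bottom node and samples the nodes at exponentially growing distances (1, 2, 4, 8, ...). The same sampling on a toy linear history (illustration only):

def sample_between(top, bottom, parent):
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        p = parent(n)
        if i == f:          # record nodes at distances 1, 2, 4, 8, ... from top
            l.append(n)
            f *= 2
        n = p
        i += 1
    return l

# linear history 10 -> 9 -> ... -> 0, sampling between top=10 and bottom=0
print(sample_between(10, 0, lambda n: n - 1))   # [9, 8, 6, 2]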
1263 def findincoming(self, remote, base=None, heads=None, force=False):
1267 def findincoming(self, remote, base=None, heads=None, force=False):
1264 """Return list of roots of the subsets of missing nodes from remote
1268 """Return list of roots of the subsets of missing nodes from remote
1265
1269
1266 If base dict is specified, assume that these nodes and their parents
1270 If base dict is specified, assume that these nodes and their parents
1267 exist on the remote side and that no child of a node of base exists
1271 exist on the remote side and that no child of a node of base exists
1268 in both remote and self.
1272 in both remote and self.
1269 Furthermore, base will be updated to include the nodes that exist
1273 Furthermore, base will be updated to include the nodes that exist
1270 in both self and remote but whose children do not exist in both.
1274 in both self and remote but whose children do not exist in both.
1271 If a list of heads is specified, return only nodes which are heads
1275 If a list of heads is specified, return only nodes which are heads
1272 or ancestors of these heads.
1276 or ancestors of these heads.
1273
1277
1274 All the ancestors of base are in self and in remote.
1278 All the ancestors of base are in self and in remote.
1275 All the descendants of the list returned are missing in self.
1279 All the descendants of the list returned are missing in self.
1276 (and so we know that the rest of the nodes are missing in remote, see
1280 (and so we know that the rest of the nodes are missing in remote, see
1277 outgoing)
1281 outgoing)
1278 """
1282 """
1279 m = self.changelog.nodemap
1283 m = self.changelog.nodemap
1280 search = []
1284 search = []
1281 fetch = {}
1285 fetch = {}
1282 seen = {}
1286 seen = {}
1283 seenbranch = {}
1287 seenbranch = {}
1284 if base is None:
1288 if base is None:
1285 base = {}
1289 base = {}
1286
1290
1287 if not heads:
1291 if not heads:
1288 heads = remote.heads()
1292 heads = remote.heads()
1289
1293
1290 if self.changelog.tip() == nullid:
1294 if self.changelog.tip() == nullid:
1291 base[nullid] = 1
1295 base[nullid] = 1
1292 if heads != [nullid]:
1296 if heads != [nullid]:
1293 return [nullid]
1297 return [nullid]
1294 return []
1298 return []
1295
1299
1296 # assume we're closer to the tip than the root
1300 # assume we're closer to the tip than the root
1297 # and start by examining the heads
1301 # and start by examining the heads
1298 self.ui.status(_("searching for changes\n"))
1302 self.ui.status(_("searching for changes\n"))
1299
1303
1300 unknown = []
1304 unknown = []
1301 for h in heads:
1305 for h in heads:
1302 if h not in m:
1306 if h not in m:
1303 unknown.append(h)
1307 unknown.append(h)
1304 else:
1308 else:
1305 base[h] = 1
1309 base[h] = 1
1306
1310
1307 if not unknown:
1311 if not unknown:
1308 return []
1312 return []
1309
1313
1310 req = dict.fromkeys(unknown)
1314 req = dict.fromkeys(unknown)
1311 reqcnt = 0
1315 reqcnt = 0
1312
1316
1313 # search through remote branches
1317 # search through remote branches
1314 # a 'branch' here is a linear segment of history, with four parts:
1318 # a 'branch' here is a linear segment of history, with four parts:
1315 # head, root, first parent, second parent
1319 # head, root, first parent, second parent
1316 # (a branch always has two parents (or none) by definition)
1320 # (a branch always has two parents (or none) by definition)
1317 unknown = remote.branches(unknown)
1321 unknown = remote.branches(unknown)
1318 while unknown:
1322 while unknown:
1319 r = []
1323 r = []
1320 while unknown:
1324 while unknown:
1321 n = unknown.pop(0)
1325 n = unknown.pop(0)
1322 if n[0] in seen:
1326 if n[0] in seen:
1323 continue
1327 continue
1324
1328
1325 self.ui.debug(_("examining %s:%s\n")
1329 self.ui.debug(_("examining %s:%s\n")
1326 % (short(n[0]), short(n[1])))
1330 % (short(n[0]), short(n[1])))
1327 if n[0] == nullid: # found the end of the branch
1331 if n[0] == nullid: # found the end of the branch
1328 pass
1332 pass
1329 elif n in seenbranch:
1333 elif n in seenbranch:
1330 self.ui.debug(_("branch already found\n"))
1334 self.ui.debug(_("branch already found\n"))
1331 continue
1335 continue
1332 elif n[1] and n[1] in m: # do we know the base?
1336 elif n[1] and n[1] in m: # do we know the base?
1333 self.ui.debug(_("found incomplete branch %s:%s\n")
1337 self.ui.debug(_("found incomplete branch %s:%s\n")
1334 % (short(n[0]), short(n[1])))
1338 % (short(n[0]), short(n[1])))
1335 search.append(n) # schedule branch range for scanning
1339 search.append(n) # schedule branch range for scanning
1336 seenbranch[n] = 1
1340 seenbranch[n] = 1
1337 else:
1341 else:
1338 if n[1] not in seen and n[1] not in fetch:
1342 if n[1] not in seen and n[1] not in fetch:
1339 if n[2] in m and n[3] in m:
1343 if n[2] in m and n[3] in m:
1340 self.ui.debug(_("found new changeset %s\n") %
1344 self.ui.debug(_("found new changeset %s\n") %
1341 short(n[1]))
1345 short(n[1]))
1342 fetch[n[1]] = 1 # earliest unknown
1346 fetch[n[1]] = 1 # earliest unknown
1343 for p in n[2:4]:
1347 for p in n[2:4]:
1344 if p in m:
1348 if p in m:
1345 base[p] = 1 # latest known
1349 base[p] = 1 # latest known
1346
1350
1347 for p in n[2:4]:
1351 for p in n[2:4]:
1348 if p not in req and p not in m:
1352 if p not in req and p not in m:
1349 r.append(p)
1353 r.append(p)
1350 req[p] = 1
1354 req[p] = 1
1351 seen[n[0]] = 1
1355 seen[n[0]] = 1
1352
1356
1353 if r:
1357 if r:
1354 reqcnt += 1
1358 reqcnt += 1
1355 self.ui.debug(_("request %d: %s\n") %
1359 self.ui.debug(_("request %d: %s\n") %
1356 (reqcnt, " ".join(map(short, r))))
1360 (reqcnt, " ".join(map(short, r))))
1357 for p in xrange(0, len(r), 10):
1361 for p in xrange(0, len(r), 10):
1358 for b in remote.branches(r[p:p+10]):
1362 for b in remote.branches(r[p:p+10]):
1359 self.ui.debug(_("received %s:%s\n") %
1363 self.ui.debug(_("received %s:%s\n") %
1360 (short(b[0]), short(b[1])))
1364 (short(b[0]), short(b[1])))
1361 unknown.append(b)
1365 unknown.append(b)
1362
1366
1363 # do binary search on the branches we found
1367 # do binary search on the branches we found
1364 while search:
1368 while search:
1365 n = search.pop(0)
1369 n = search.pop(0)
1366 reqcnt += 1
1370 reqcnt += 1
1367 l = remote.between([(n[0], n[1])])[0]
1371 l = remote.between([(n[0], n[1])])[0]
1368 l.append(n[1])
1372 l.append(n[1])
1369 p = n[0]
1373 p = n[0]
1370 f = 1
1374 f = 1
1371 for i in l:
1375 for i in l:
1372 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1376 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1373 if i in m:
1377 if i in m:
1374 if f <= 2:
1378 if f <= 2:
1375 self.ui.debug(_("found new branch changeset %s\n") %
1379 self.ui.debug(_("found new branch changeset %s\n") %
1376 short(p))
1380 short(p))
1377 fetch[p] = 1
1381 fetch[p] = 1
1378 base[i] = 1
1382 base[i] = 1
1379 else:
1383 else:
1380 self.ui.debug(_("narrowed branch search to %s:%s\n")
1384 self.ui.debug(_("narrowed branch search to %s:%s\n")
1381 % (short(p), short(i)))
1385 % (short(p), short(i)))
1382 search.append((p, i))
1386 search.append((p, i))
1383 break
1387 break
1384 p, f = i, f * 2
1388 p, f = i, f * 2
1385
1389
1386 # sanity check our fetch list
1390 # sanity check our fetch list
1387 for f in fetch.keys():
1391 for f in fetch.keys():
1388 if f in m:
1392 if f in m:
1389 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1393 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1390
1394
1391 if base.keys() == [nullid]:
1395 if base.keys() == [nullid]:
1392 if force:
1396 if force:
1393 self.ui.warn(_("warning: repository is unrelated\n"))
1397 self.ui.warn(_("warning: repository is unrelated\n"))
1394 else:
1398 else:
1395 raise util.Abort(_("repository is unrelated"))
1399 raise util.Abort(_("repository is unrelated"))
1396
1400
1397 self.ui.debug(_("found new changesets starting at ") +
1401 self.ui.debug(_("found new changesets starting at ") +
1398 " ".join([short(f) for f in fetch]) + "\n")
1402 " ".join([short(f) for f in fetch]) + "\n")
1399
1403
1400 self.ui.debug(_("%d total queries\n") % reqcnt)
1404 self.ui.debug(_("%d total queries\n") % reqcnt)
1401
1405
1402 return fetch.keys()
1406 return fetch.keys()
1403
1407
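# Illustrative sketch (hypothetical names, not from the source file): how
# findincoming() is typically driven by the methods below.
#
#   base = {}
#   fetch = repo.findincoming(remote, base, remote.heads())
#   # 'fetch' holds roots of changesets missing locally; 'base' has been
#   # filled with nodes known to exist on both sides, which prepush() and
#   # findoutgoing() below pass back in.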
1404 def findoutgoing(self, remote, base=None, heads=None, force=False):
1408 def findoutgoing(self, remote, base=None, heads=None, force=False):
1405 """Return list of nodes that are roots of subsets not in remote
1409 """Return list of nodes that are roots of subsets not in remote
1406
1410
1407 If base dict is specified, assume that these nodes and their parents
1411 If base dict is specified, assume that these nodes and their parents
1408 exist on the remote side.
1412 exist on the remote side.
1409 If a list of heads is specified, return only nodes which are heads
1413 If a list of heads is specified, return only nodes which are heads
1410 or ancestors of these heads, and return a second element which
1414 or ancestors of these heads, and return a second element which
1411 contains all remote heads which get new children.
1415 contains all remote heads which get new children.
1412 """
1416 """
1413 if base is None:
1417 if base is None:
1414 base = {}
1418 base = {}
1415 self.findincoming(remote, base, heads, force=force)
1419 self.findincoming(remote, base, heads, force=force)
1416
1420
1417 self.ui.debug(_("common changesets up to ")
1421 self.ui.debug(_("common changesets up to ")
1418 + " ".join(map(short, base.keys())) + "\n")
1422 + " ".join(map(short, base.keys())) + "\n")
1419
1423
1420 remain = dict.fromkeys(self.changelog.nodemap)
1424 remain = dict.fromkeys(self.changelog.nodemap)
1421
1425
1422 # prune everything remote has from the tree
1426 # prune everything remote has from the tree
1423 del remain[nullid]
1427 del remain[nullid]
1424 remove = base.keys()
1428 remove = base.keys()
1425 while remove:
1429 while remove:
1426 n = remove.pop(0)
1430 n = remove.pop(0)
1427 if n in remain:
1431 if n in remain:
1428 del remain[n]
1432 del remain[n]
1429 for p in self.changelog.parents(n):
1433 for p in self.changelog.parents(n):
1430 remove.append(p)
1434 remove.append(p)
1431
1435
1432 # find every node whose parents have been pruned
1436 # find every node whose parents have been pruned
1433 subset = []
1437 subset = []
1434 # find every remote head that will get new children
1438 # find every remote head that will get new children
1435 updated_heads = {}
1439 updated_heads = {}
1436 for n in remain:
1440 for n in remain:
1437 p1, p2 = self.changelog.parents(n)
1441 p1, p2 = self.changelog.parents(n)
1438 if p1 not in remain and p2 not in remain:
1442 if p1 not in remain and p2 not in remain:
1439 subset.append(n)
1443 subset.append(n)
1440 if heads:
1444 if heads:
1441 if p1 in heads:
1445 if p1 in heads:
1442 updated_heads[p1] = True
1446 updated_heads[p1] = True
1443 if p2 in heads:
1447 if p2 in heads:
1444 updated_heads[p2] = True
1448 updated_heads[p2] = True
1445
1449
1446 # this is the set of all roots we have to push
1450 # this is the set of all roots we have to push
1447 if heads:
1451 if heads:
1448 return subset, updated_heads.keys()
1452 return subset, updated_heads.keys()
1449 else:
1453 else:
1450 return subset
1454 return subset
1451
1455
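# Illustrative sketch (hypothetical values, not from the source file): when a
# list of heads is supplied, findoutgoing() returns a pair rather than a list.
#
#   update, updated_heads = repo.findoutgoing(remote, base, remote.heads())
#   # 'update'        = roots of everything that needs to be pushed
#   # 'updated_heads' = remote heads that would gain new children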
1452 def pull(self, remote, heads=None, force=False):
1456 def pull(self, remote, heads=None, force=False):
1453 lock = self.lock()
1457 lock = self.lock()
1454 try:
1458 try:
1455 fetch = self.findincoming(remote, heads=heads, force=force)
1459 fetch = self.findincoming(remote, heads=heads, force=force)
1456 if fetch == [nullid]:
1460 if fetch == [nullid]:
1457 self.ui.status(_("requesting all changes\n"))
1461 self.ui.status(_("requesting all changes\n"))
1458
1462
1459 if not fetch:
1463 if not fetch:
1460 self.ui.status(_("no changes found\n"))
1464 self.ui.status(_("no changes found\n"))
1461 return 0
1465 return 0
1462
1466
1463 if heads is None:
1467 if heads is None:
1464 cg = remote.changegroup(fetch, 'pull')
1468 cg = remote.changegroup(fetch, 'pull')
1465 else:
1469 else:
1466 if 'changegroupsubset' not in remote.capabilities:
1470 if 'changegroupsubset' not in remote.capabilities:
1467 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1471 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1468 cg = remote.changegroupsubset(fetch, heads, 'pull')
1472 cg = remote.changegroupsubset(fetch, heads, 'pull')
1469 return self.addchangegroup(cg, 'pull', remote.url())
1473 return self.addchangegroup(cg, 'pull', remote.url())
1470 finally:
1474 finally:
1471 del lock
1475 del lock
1472
1476
1473 def push(self, remote, force=False, revs=None):
1477 def push(self, remote, force=False, revs=None):
1474 # there are two ways to push to remote repo:
1478 # there are two ways to push to remote repo:
1475 #
1479 #
1476 # addchangegroup assumes local user can lock remote
1480 # addchangegroup assumes local user can lock remote
1477 # repo (local filesystem, old ssh servers).
1481 # repo (local filesystem, old ssh servers).
1478 #
1482 #
1479 # unbundle assumes local user cannot lock remote repo (new ssh
1483 # unbundle assumes local user cannot lock remote repo (new ssh
1480 # servers, http servers).
1484 # servers, http servers).
1481
1485
1482 if remote.capable('unbundle'):
1486 if remote.capable('unbundle'):
1483 return self.push_unbundle(remote, force, revs)
1487 return self.push_unbundle(remote, force, revs)
1484 return self.push_addchangegroup(remote, force, revs)
1488 return self.push_addchangegroup(remote, force, revs)
1485
1489
1486 def prepush(self, remote, force, revs):
1490 def prepush(self, remote, force, revs):
1487 base = {}
1491 base = {}
1488 remote_heads = remote.heads()
1492 remote_heads = remote.heads()
1489 inc = self.findincoming(remote, base, remote_heads, force=force)
1493 inc = self.findincoming(remote, base, remote_heads, force=force)
1490
1494
1491 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1495 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1492 if revs is not None:
1496 if revs is not None:
1493 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1497 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1494 else:
1498 else:
1495 bases, heads = update, self.changelog.heads()
1499 bases, heads = update, self.changelog.heads()
1496
1500
1497 if not bases:
1501 if not bases:
1498 self.ui.status(_("no changes found\n"))
1502 self.ui.status(_("no changes found\n"))
1499 return None, 1
1503 return None, 1
1500 elif not force:
1504 elif not force:
1501 # check if we're creating new remote heads
1505 # check if we're creating new remote heads
1502 # to be a remote head after push, node must be either
1506 # to be a remote head after push, node must be either
1503 # - unknown locally
1507 # - unknown locally
1504 # - a local outgoing head descended from update
1508 # - a local outgoing head descended from update
1505 # - a remote head that's known locally and not
1509 # - a remote head that's known locally and not
1506 # ancestral to an outgoing head
1510 # ancestral to an outgoing head
1507
1511
1508 warn = 0
1512 warn = 0
1509
1513
1510 if remote_heads == [nullid]:
1514 if remote_heads == [nullid]:
1511 warn = 0
1515 warn = 0
1512 elif not revs and len(heads) > len(remote_heads):
1516 elif not revs and len(heads) > len(remote_heads):
1513 warn = 1
1517 warn = 1
1514 else:
1518 else:
1515 newheads = list(heads)
1519 newheads = list(heads)
1516 for r in remote_heads:
1520 for r in remote_heads:
1517 if r in self.changelog.nodemap:
1521 if r in self.changelog.nodemap:
1518 desc = self.changelog.heads(r, heads)
1522 desc = self.changelog.heads(r, heads)
1519 l = [h for h in heads if h in desc]
1523 l = [h for h in heads if h in desc]
1520 if not l:
1524 if not l:
1521 newheads.append(r)
1525 newheads.append(r)
1522 else:
1526 else:
1523 newheads.append(r)
1527 newheads.append(r)
1524 if len(newheads) > len(remote_heads):
1528 if len(newheads) > len(remote_heads):
1525 warn = 1
1529 warn = 1
1526
1530
1527 if warn:
1531 if warn:
1528 self.ui.warn(_("abort: push creates new remote heads!\n"))
1532 self.ui.warn(_("abort: push creates new remote heads!\n"))
1529 self.ui.status(_("(did you forget to merge?"
1533 self.ui.status(_("(did you forget to merge?"
1530 " use push -f to force)\n"))
1534 " use push -f to force)\n"))
1531 return None, 0
1535 return None, 0
1532 elif inc:
1536 elif inc:
1533 self.ui.warn(_("note: unsynced remote changes!\n"))
1537 self.ui.warn(_("note: unsynced remote changes!\n"))
1534
1538
1535
1539
1536 if revs is None:
1540 if revs is None:
1537 cg = self.changegroup(update, 'push')
1541 cg = self.changegroup(update, 'push')
1538 else:
1542 else:
1539 cg = self.changegroupsubset(update, revs, 'push')
1543 cg = self.changegroupsubset(update, revs, 'push')
1540 return cg, remote_heads
1544 return cg, remote_heads
1541
1545
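# Worked example (hypothetical nodes, not from the source file) of the head
# check in prepush() above:
#
#   remote heads: [A]      local heads: [A', B]   (only A' descends from A)
#   push        -> len(heads) 2 > len(remote_heads) 1 -> warn and abort
#   push -r A'  -> newheads == [A'], no growth         -> allowed
#   push -f     -> check skipped entirely               -> allowed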
1542 def push_addchangegroup(self, remote, force, revs):
1546 def push_addchangegroup(self, remote, force, revs):
1543 lock = remote.lock()
1547 lock = remote.lock()
1544 try:
1548 try:
1545 ret = self.prepush(remote, force, revs)
1549 ret = self.prepush(remote, force, revs)
1546 if ret[0] is not None:
1550 if ret[0] is not None:
1547 cg, remote_heads = ret
1551 cg, remote_heads = ret
1548 return remote.addchangegroup(cg, 'push', self.url())
1552 return remote.addchangegroup(cg, 'push', self.url())
1549 return ret[1]
1553 return ret[1]
1550 finally:
1554 finally:
1551 del lock
1555 del lock
1552
1556
1553 def push_unbundle(self, remote, force, revs):
1557 def push_unbundle(self, remote, force, revs):
1554 # local repo finds heads on server, finds out what revs it
1558 # local repo finds heads on server, finds out what revs it
1555 # must push. once revs transferred, if server finds it has
1559 # must push. once revs transferred, if server finds it has
1556 # different heads (someone else won commit/push race), server
1560 # different heads (someone else won commit/push race), server
1557 # aborts.
1561 # aborts.
1558
1562
1559 ret = self.prepush(remote, force, revs)
1563 ret = self.prepush(remote, force, revs)
1560 if ret[0] is not None:
1564 if ret[0] is not None:
1561 cg, remote_heads = ret
1565 cg, remote_heads = ret
1562 if force: remote_heads = ['force']
1566 if force: remote_heads = ['force']
1563 return remote.unbundle(cg, remote_heads, 'push')
1567 return remote.unbundle(cg, remote_heads, 'push')
1564 return ret[1]
1568 return ret[1]
1565
1569
1566 def changegroupinfo(self, nodes, source):
1570 def changegroupinfo(self, nodes, source):
1567 if self.ui.verbose or source == 'bundle':
1571 if self.ui.verbose or source == 'bundle':
1568 self.ui.status(_("%d changesets found\n") % len(nodes))
1572 self.ui.status(_("%d changesets found\n") % len(nodes))
1569 if self.ui.debugflag:
1573 if self.ui.debugflag:
1570 self.ui.debug(_("List of changesets:\n"))
1574 self.ui.debug(_("List of changesets:\n"))
1571 for node in nodes:
1575 for node in nodes:
1572 self.ui.debug("%s\n" % hex(node))
1576 self.ui.debug("%s\n" % hex(node))
1573
1577
1574 def changegroupsubset(self, bases, heads, source, extranodes=None):
1578 def changegroupsubset(self, bases, heads, source, extranodes=None):
1575 """This function generates a changegroup consisting of all the nodes
1579 """This function generates a changegroup consisting of all the nodes
1576 that are descendants of any of the bases, and ancestors of any of
1580 that are descendants of any of the bases, and ancestors of any of
1577 the heads.
1581 the heads.
1578
1582
1579 It is fairly complex as determining which filenodes and which
1583 It is fairly complex as determining which filenodes and which
1580 manifest nodes need to be included for the changeset to be complete
1584 manifest nodes need to be included for the changeset to be complete
1581 is non-trivial.
1585 is non-trivial.
1582
1586
1583 Another wrinkle is doing the reverse, figuring out which changeset in
1587 Another wrinkle is doing the reverse, figuring out which changeset in
1584 the changegroup a particular filenode or manifestnode belongs to.
1588 the changegroup a particular filenode or manifestnode belongs to.
1585
1589
1586 The caller can specify some nodes that must be included in the
1590 The caller can specify some nodes that must be included in the
1587 changegroup using the extranodes argument. It should be a dict
1591 changegroup using the extranodes argument. It should be a dict
1588 where the keys are the filenames (or 1 for the manifest), and the
1592 where the keys are the filenames (or 1 for the manifest), and the
1589 values are lists of (node, linknode) tuples, where node is a wanted
1593 values are lists of (node, linknode) tuples, where node is a wanted
1590 node and linknode is the changelog node that should be transmitted as
1594 node and linknode is the changelog node that should be transmitted as
1591 the linkrev.
1595 the linkrev.
1592 """
1596 """
1593
1597
1594 self.hook('preoutgoing', throw=True, source=source)
1598 self.hook('preoutgoing', throw=True, source=source)
1595
1599
1596 # Set up some initial variables
1600 # Set up some initial variables
1597 # Make it easy to refer to self.changelog
1601 # Make it easy to refer to self.changelog
1598 cl = self.changelog
1602 cl = self.changelog
1599 # msng is short for missing - compute the list of changesets in this
1603 # msng is short for missing - compute the list of changesets in this
1600 # changegroup.
1604 # changegroup.
1601 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1605 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1602 self.changegroupinfo(msng_cl_lst, source)
1606 self.changegroupinfo(msng_cl_lst, source)
1603 # Some bases may turn out to be superfluous, and some heads may be
1607 # Some bases may turn out to be superfluous, and some heads may be
1604 # too. nodesbetween will return the minimal set of bases and heads
1608 # too. nodesbetween will return the minimal set of bases and heads
1605 # necessary to re-create the changegroup.
1609 # necessary to re-create the changegroup.
1606
1610
1607 # Known heads are the list of heads that it is assumed the recipient
1611 # Known heads are the list of heads that it is assumed the recipient
1608 # of this changegroup will know about.
1612 # of this changegroup will know about.
1609 knownheads = {}
1613 knownheads = {}
1610 # We assume that all parents of bases are known heads.
1614 # We assume that all parents of bases are known heads.
1611 for n in bases:
1615 for n in bases:
1612 for p in cl.parents(n):
1616 for p in cl.parents(n):
1613 if p != nullid:
1617 if p != nullid:
1614 knownheads[p] = 1
1618 knownheads[p] = 1
1615 knownheads = knownheads.keys()
1619 knownheads = knownheads.keys()
1616 if knownheads:
1620 if knownheads:
1617 # Now that we know what heads are known, we can compute which
1621 # Now that we know what heads are known, we can compute which
1618 # changesets are known. The recipient must know about all
1622 # changesets are known. The recipient must know about all
1619 # changesets required to reach the known heads from the null
1623 # changesets required to reach the known heads from the null
1620 # changeset.
1624 # changeset.
1621 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1625 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1622 junk = None
1626 junk = None
1623 # Transform the list into an ersatz set.
1627 # Transform the list into an ersatz set.
1624 has_cl_set = dict.fromkeys(has_cl_set)
1628 has_cl_set = dict.fromkeys(has_cl_set)
1625 else:
1629 else:
1626 # If there were no known heads, the recipient cannot be assumed to
1630 # If there were no known heads, the recipient cannot be assumed to
1627 # know about any changesets.
1631 # know about any changesets.
1628 has_cl_set = {}
1632 has_cl_set = {}
1629
1633
1630 # Make it easy to refer to self.manifest
1634 # Make it easy to refer to self.manifest
1631 mnfst = self.manifest
1635 mnfst = self.manifest
1632 # We don't know which manifests are missing yet
1636 # We don't know which manifests are missing yet
1633 msng_mnfst_set = {}
1637 msng_mnfst_set = {}
1634 # Nor do we know which filenodes are missing.
1638 # Nor do we know which filenodes are missing.
1635 msng_filenode_set = {}
1639 msng_filenode_set = {}
1636
1640
1637 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1641 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1638 junk = None
1642 junk = None
1639
1643
1640 # A changeset always belongs to itself, so the changenode lookup
1644 # A changeset always belongs to itself, so the changenode lookup
1641 # function for a changenode is identity.
1645 # function for a changenode is identity.
1642 def identity(x):
1646 def identity(x):
1643 return x
1647 return x
1644
1648
1645 # A function generating function. Sets up an environment for the
1649 # A function generating function. Sets up an environment for the
1646 # inner function.
1650 # inner function.
1647 def cmp_by_rev_func(revlog):
1651 def cmp_by_rev_func(revlog):
1648 # Compare two nodes by their revision number in the environment's
1652 # Compare two nodes by their revision number in the environment's
1649 # revision history. Since the revision number both represents the
1653 # revision history. Since the revision number both represents the
1650 # most efficient order to read the nodes in, and represents a
1654 # most efficient order to read the nodes in, and represents a
1651 # topological sorting of the nodes, this function is often useful.
1655 # topological sorting of the nodes, this function is often useful.
1652 def cmp_by_rev(a, b):
1656 def cmp_by_rev(a, b):
1653 return cmp(revlog.rev(a), revlog.rev(b))
1657 return cmp(revlog.rev(a), revlog.rev(b))
1654 return cmp_by_rev
1658 return cmp_by_rev
1655
1659
1656 # If we determine that a particular file or manifest node must be a
1660 # If we determine that a particular file or manifest node must be a
1657 # node that the recipient of the changegroup will already have, we can
1661 # node that the recipient of the changegroup will already have, we can
1658 # also assume the recipient will have all the parents. This function
1662 # also assume the recipient will have all the parents. This function
1659 # prunes them from the set of missing nodes.
1663 # prunes them from the set of missing nodes.
1660 def prune_parents(revlog, hasset, msngset):
1664 def prune_parents(revlog, hasset, msngset):
1661 haslst = hasset.keys()
1665 haslst = hasset.keys()
1662 haslst.sort(cmp_by_rev_func(revlog))
1666 haslst.sort(cmp_by_rev_func(revlog))
1663 for node in haslst:
1667 for node in haslst:
1664 parentlst = [p for p in revlog.parents(node) if p != nullid]
1668 parentlst = [p for p in revlog.parents(node) if p != nullid]
1665 while parentlst:
1669 while parentlst:
1666 n = parentlst.pop()
1670 n = parentlst.pop()
1667 if n not in hasset:
1671 if n not in hasset:
1668 hasset[n] = 1
1672 hasset[n] = 1
1669 p = [p for p in revlog.parents(n) if p != nullid]
1673 p = [p for p in revlog.parents(n) if p != nullid]
1670 parentlst.extend(p)
1674 parentlst.extend(p)
1671 for n in hasset:
1675 for n in hasset:
1672 msngset.pop(n, None)
1676 msngset.pop(n, None)
1673
1677
1674 # This is a function generating function used to set up an environment
1678 # This is a function generating function used to set up an environment
1675 # for the inner function to execute in.
1679 # for the inner function to execute in.
1676 def manifest_and_file_collector(changedfileset):
1680 def manifest_and_file_collector(changedfileset):
1677 # This is an information gathering function that gathers
1681 # This is an information gathering function that gathers
1678 # information from each changeset node that goes out as part of
1682 # information from each changeset node that goes out as part of
1679 # the changegroup. The information gathered is a list of which
1683 # the changegroup. The information gathered is a list of which
1680 # manifest nodes are potentially required (the recipient may
1684 # manifest nodes are potentially required (the recipient may
1681 # already have them) and the total list of all files which were
1685 # already have them) and the total list of all files which were
1682 # changed in any changeset in the changegroup.
1686 # changed in any changeset in the changegroup.
1683 #
1687 #
1684 # We also remember the first changenode we saw each manifest
1688 # We also remember the first changenode we saw each manifest
1685 # referenced by, so we can later determine which changenode 'owns'
1689 # referenced by, so we can later determine which changenode 'owns'
1686 # the manifest.
1690 # the manifest.
1687 def collect_manifests_and_files(clnode):
1691 def collect_manifests_and_files(clnode):
1688 c = cl.read(clnode)
1692 c = cl.read(clnode)
1689 for f in c[3]:
1693 for f in c[3]:
1690 # This is to make sure we only have one instance of each
1694 # This is to make sure we only have one instance of each
1691 # filename string for each filename.
1695 # filename string for each filename.
1692 changedfileset.setdefault(f, f)
1696 changedfileset.setdefault(f, f)
1693 msng_mnfst_set.setdefault(c[0], clnode)
1697 msng_mnfst_set.setdefault(c[0], clnode)
1694 return collect_manifests_and_files
1698 return collect_manifests_and_files
1695
1699
1696 # Figure out which manifest nodes (of the ones we think might be part
1700 # Figure out which manifest nodes (of the ones we think might be part
1697 # of the changegroup) the recipient must know about and remove them
1701 # of the changegroup) the recipient must know about and remove them
1698 # from the changegroup.
1702 # from the changegroup.
1699 def prune_manifests():
1703 def prune_manifests():
1700 has_mnfst_set = {}
1704 has_mnfst_set = {}
1701 for n in msng_mnfst_set:
1705 for n in msng_mnfst_set:
1702 # If a 'missing' manifest thinks it belongs to a changenode
1706 # If a 'missing' manifest thinks it belongs to a changenode
1703 # the recipient is assumed to have, obviously the recipient
1707 # the recipient is assumed to have, obviously the recipient
1704 # must have that manifest.
1708 # must have that manifest.
1705 linknode = cl.node(mnfst.linkrev(n))
1709 linknode = cl.node(mnfst.linkrev(n))
1706 if linknode in has_cl_set:
1710 if linknode in has_cl_set:
1707 has_mnfst_set[n] = 1
1711 has_mnfst_set[n] = 1
1708 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1712 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1709
1713
1710 # Use the information collected in collect_manifests_and_files to say
1714 # Use the information collected in collect_manifests_and_files to say
1711 # which changenode any manifestnode belongs to.
1715 # which changenode any manifestnode belongs to.
1712 def lookup_manifest_link(mnfstnode):
1716 def lookup_manifest_link(mnfstnode):
1713 return msng_mnfst_set[mnfstnode]
1717 return msng_mnfst_set[mnfstnode]
1714
1718
1715 # A function generating function that sets up the initial environment
1719 # A function generating function that sets up the initial environment
1716 # for the inner function.
1720 # for the inner function.
1717 def filenode_collector(changedfiles):
1721 def filenode_collector(changedfiles):
1718 next_rev = [0]
1722 next_rev = [0]
1719 # This gathers information from each manifestnode included in the
1723 # This gathers information from each manifestnode included in the
1720 # changegroup about which filenodes the manifest node references
1724 # changegroup about which filenodes the manifest node references
1721 # so we can include those in the changegroup too.
1725 # so we can include those in the changegroup too.
1722 #
1726 #
1723 # It also remembers which changenode each filenode belongs to. It
1727 # It also remembers which changenode each filenode belongs to. It
1724 # does this by assuming that a filenode belongs to the changenode
1728 # does this by assuming that a filenode belongs to the changenode
1725 # the first manifest that references it belongs to.
1729 # the first manifest that references it belongs to.
1726 def collect_msng_filenodes(mnfstnode):
1730 def collect_msng_filenodes(mnfstnode):
1727 r = mnfst.rev(mnfstnode)
1731 r = mnfst.rev(mnfstnode)
1728 if r == next_rev[0]:
1732 if r == next_rev[0]:
1729 # If the last rev we looked at was the one just previous,
1733 # If the last rev we looked at was the one just previous,
1730 # we only need to see a diff.
1734 # we only need to see a diff.
1731 deltamf = mnfst.readdelta(mnfstnode)
1735 deltamf = mnfst.readdelta(mnfstnode)
1732 # For each line in the delta
1736 # For each line in the delta
1733 for f, fnode in deltamf.items():
1737 for f, fnode in deltamf.items():
1734 f = changedfiles.get(f, None)
1738 f = changedfiles.get(f, None)
1735 # And if the file is in the list of files we care
1739 # And if the file is in the list of files we care
1736 # about.
1740 # about.
1737 if f is not None:
1741 if f is not None:
1738 # Get the changenode this manifest belongs to
1742 # Get the changenode this manifest belongs to
1739 clnode = msng_mnfst_set[mnfstnode]
1743 clnode = msng_mnfst_set[mnfstnode]
1740 # Create the set of filenodes for the file if
1744 # Create the set of filenodes for the file if
1741 # there isn't one already.
1745 # there isn't one already.
1742 ndset = msng_filenode_set.setdefault(f, {})
1746 ndset = msng_filenode_set.setdefault(f, {})
1743 # And set the filenode's changelog node to the
1747 # And set the filenode's changelog node to the
1744 # manifest's if it hasn't been set already.
1748 # manifest's if it hasn't been set already.
1745 ndset.setdefault(fnode, clnode)
1749 ndset.setdefault(fnode, clnode)
1746 else:
1750 else:
1747 # Otherwise we need a full manifest.
1751 # Otherwise we need a full manifest.
1748 m = mnfst.read(mnfstnode)
1752 m = mnfst.read(mnfstnode)
1749 # For every file we care about.
1753 # For every file we care about.
1750 for f in changedfiles:
1754 for f in changedfiles:
1751 fnode = m.get(f, None)
1755 fnode = m.get(f, None)
1752 # If it's in the manifest
1756 # If it's in the manifest
1753 if fnode is not None:
1757 if fnode is not None:
1754 # See comments above.
1758 # See comments above.
1755 clnode = msng_mnfst_set[mnfstnode]
1759 clnode = msng_mnfst_set[mnfstnode]
1756 ndset = msng_filenode_set.setdefault(f, {})
1760 ndset = msng_filenode_set.setdefault(f, {})
1757 ndset.setdefault(fnode, clnode)
1761 ndset.setdefault(fnode, clnode)
1758 # Remember the revision we hope to see next.
1762 # Remember the revision we hope to see next.
1759 next_rev[0] = r + 1
1763 next_rev[0] = r + 1
1760 return collect_msng_filenodes
1764 return collect_msng_filenodes
1761
1765
1762 # We have a list of filenodes we think we need for a file; let's remove
1766 # We have a list of filenodes we think we need for a file; let's remove
1763 # all those we know the recipient must have.
1767 # all those we know the recipient must have.
1764 def prune_filenodes(f, filerevlog):
1768 def prune_filenodes(f, filerevlog):
1765 msngset = msng_filenode_set[f]
1769 msngset = msng_filenode_set[f]
1766 hasset = {}
1770 hasset = {}
1767 # If a 'missing' filenode thinks it belongs to a changenode we
1771 # If a 'missing' filenode thinks it belongs to a changenode we
1768 # assume the recipient must have, then the recipient must have
1772 # assume the recipient must have, then the recipient must have
1769 # that filenode.
1773 # that filenode.
1770 for n in msngset:
1774 for n in msngset:
1771 clnode = cl.node(filerevlog.linkrev(n))
1775 clnode = cl.node(filerevlog.linkrev(n))
1772 if clnode in has_cl_set:
1776 if clnode in has_cl_set:
1773 hasset[n] = 1
1777 hasset[n] = 1
1774 prune_parents(filerevlog, hasset, msngset)
1778 prune_parents(filerevlog, hasset, msngset)
1775
1779
1776 # A function generating function that sets up a context for the
1780 # A function generating function that sets up a context for the
1777 # inner function.
1781 # inner function.
1778 def lookup_filenode_link_func(fname):
1782 def lookup_filenode_link_func(fname):
1779 msngset = msng_filenode_set[fname]
1783 msngset = msng_filenode_set[fname]
1780 # Lookup the changenode the filenode belongs to.
1784 # Lookup the changenode the filenode belongs to.
1781 def lookup_filenode_link(fnode):
1785 def lookup_filenode_link(fnode):
1782 return msngset[fnode]
1786 return msngset[fnode]
1783 return lookup_filenode_link
1787 return lookup_filenode_link
1784
1788
1785 # Add the nodes that were explicitly requested.
1789 # Add the nodes that were explicitly requested.
1786 def add_extra_nodes(name, nodes):
1790 def add_extra_nodes(name, nodes):
1787 if not extranodes or name not in extranodes:
1791 if not extranodes or name not in extranodes:
1788 return
1792 return
1789
1793
1790 for node, linknode in extranodes[name]:
1794 for node, linknode in extranodes[name]:
1791 if node not in nodes:
1795 if node not in nodes:
1792 nodes[node] = linknode
1796 nodes[node] = linknode
1793
1797
1794 # Now that we have all these utility functions to help out and
1798 # Now that we have all these utility functions to help out and
1795 # logically divide up the task, generate the group.
1799 # logically divide up the task, generate the group.
1796 def gengroup():
1800 def gengroup():
1797 # The set of changed files starts empty.
1801 # The set of changed files starts empty.
1798 changedfiles = {}
1802 changedfiles = {}
1799 # Create a changenode group generator that will call our functions
1803 # Create a changenode group generator that will call our functions
1800 # back to lookup the owning changenode and collect information.
1804 # back to lookup the owning changenode and collect information.
1801 group = cl.group(msng_cl_lst, identity,
1805 group = cl.group(msng_cl_lst, identity,
1802 manifest_and_file_collector(changedfiles))
1806 manifest_and_file_collector(changedfiles))
1803 for chnk in group:
1807 for chnk in group:
1804 yield chnk
1808 yield chnk
1805
1809
1806 # The list of manifests has been collected by the generator
1810 # The list of manifests has been collected by the generator
1807 # calling our functions back.
1811 # calling our functions back.
1808 prune_manifests()
1812 prune_manifests()
1809 add_extra_nodes(1, msng_mnfst_set)
1813 add_extra_nodes(1, msng_mnfst_set)
1810 msng_mnfst_lst = msng_mnfst_set.keys()
1814 msng_mnfst_lst = msng_mnfst_set.keys()
1811 # Sort the manifestnodes by revision number.
1815 # Sort the manifestnodes by revision number.
1812 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1816 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1813 # Create a generator for the manifestnodes that calls our lookup
1817 # Create a generator for the manifestnodes that calls our lookup
1814 # and data collection functions back.
1818 # and data collection functions back.
1815 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1819 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1816 filenode_collector(changedfiles))
1820 filenode_collector(changedfiles))
1817 for chnk in group:
1821 for chnk in group:
1818 yield chnk
1822 yield chnk
1819
1823
1820 # These are no longer needed, dereference and toss the memory for
1824 # These are no longer needed, dereference and toss the memory for
1821 # them.
1825 # them.
1822 msng_mnfst_lst = None
1826 msng_mnfst_lst = None
1823 msng_mnfst_set.clear()
1827 msng_mnfst_set.clear()
1824
1828
1825 if extranodes:
1829 if extranodes:
1826 for fname in extranodes:
1830 for fname in extranodes:
1827 if isinstance(fname, int):
1831 if isinstance(fname, int):
1828 continue
1832 continue
1829 add_extra_nodes(fname,
1833 add_extra_nodes(fname,
1830 msng_filenode_set.setdefault(fname, {}))
1834 msng_filenode_set.setdefault(fname, {}))
1831 changedfiles[fname] = 1
1835 changedfiles[fname] = 1
1832 changedfiles = changedfiles.keys()
1836 changedfiles = changedfiles.keys()
1833 changedfiles.sort()
1837 changedfiles.sort()
1834 # Go through all our files in order sorted by name.
1838 # Go through all our files in order sorted by name.
1835 for fname in changedfiles:
1839 for fname in changedfiles:
1836 filerevlog = self.file(fname)
1840 filerevlog = self.file(fname)
1837 if filerevlog.count() == 0:
1841 if filerevlog.count() == 0:
1838 raise util.Abort(_("empty or missing revlog for %s") % fname)
1842 raise util.Abort(_("empty or missing revlog for %s") % fname)
1839 # Toss out the filenodes that the recipient isn't really
1843 # Toss out the filenodes that the recipient isn't really
1840 # missing.
1844 # missing.
1841 if fname in msng_filenode_set:
1845 if fname in msng_filenode_set:
1842 prune_filenodes(fname, filerevlog)
1846 prune_filenodes(fname, filerevlog)
1843 msng_filenode_lst = msng_filenode_set[fname].keys()
1847 msng_filenode_lst = msng_filenode_set[fname].keys()
1844 else:
1848 else:
1845 msng_filenode_lst = []
1849 msng_filenode_lst = []
1846 # If any filenodes are left, generate the group for them,
1850 # If any filenodes are left, generate the group for them,
1847 # otherwise don't bother.
1851 # otherwise don't bother.
1848 if len(msng_filenode_lst) > 0:
1852 if len(msng_filenode_lst) > 0:
1849 yield changegroup.chunkheader(len(fname))
1853 yield changegroup.chunkheader(len(fname))
1850 yield fname
1854 yield fname
1851 # Sort the filenodes by their revision #
1855 # Sort the filenodes by their revision #
1852 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1856 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1853 # Create a group generator and only pass in a changenode
1857 # Create a group generator and only pass in a changenode
1854 # lookup function as we need to collect no information
1858 # lookup function as we need to collect no information
1855 # from filenodes.
1859 # from filenodes.
1856 group = filerevlog.group(msng_filenode_lst,
1860 group = filerevlog.group(msng_filenode_lst,
1857 lookup_filenode_link_func(fname))
1861 lookup_filenode_link_func(fname))
1858 for chnk in group:
1862 for chnk in group:
1859 yield chnk
1863 yield chnk
1860 if fname in msng_filenode_set:
1864 if fname in msng_filenode_set:
1861 # Don't need this anymore, toss it to free memory.
1865 # Don't need this anymore, toss it to free memory.
1862 del msng_filenode_set[fname]
1866 del msng_filenode_set[fname]
1863 # Signal that no more groups are left.
1867 # Signal that no more groups are left.
1864 yield changegroup.closechunk()
1868 yield changegroup.closechunk()
1865
1869
1866 if msng_cl_lst:
1870 if msng_cl_lst:
1867 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1871 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1868
1872
1869 return util.chunkbuffer(gengroup())
1873 return util.chunkbuffer(gengroup())
1870
1874
1871 def changegroup(self, basenodes, source):
1875 def changegroup(self, basenodes, source):
1872 """Generate a changegroup of all nodes that we have that a recipient
1876 """Generate a changegroup of all nodes that we have that a recipient
1873 doesn't.
1877 doesn't.
1874
1878
1875 This is much easier than the previous function as we can assume that
1879 This is much easier than the previous function as we can assume that
1876 the recipient has any changenode we aren't sending them."""
1880 the recipient has any changenode we aren't sending them."""
1877
1881
1878 self.hook('preoutgoing', throw=True, source=source)
1882 self.hook('preoutgoing', throw=True, source=source)
1879
1883
1880 cl = self.changelog
1884 cl = self.changelog
1881 nodes = cl.nodesbetween(basenodes, None)[0]
1885 nodes = cl.nodesbetween(basenodes, None)[0]
1882 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1886 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1883 self.changegroupinfo(nodes, source)
1887 self.changegroupinfo(nodes, source)
1884
1888
1885 def identity(x):
1889 def identity(x):
1886 return x
1890 return x
1887
1891
1888 def gennodelst(revlog):
1892 def gennodelst(revlog):
1889 for r in xrange(0, revlog.count()):
1893 for r in xrange(0, revlog.count()):
1890 n = revlog.node(r)
1894 n = revlog.node(r)
1891 if revlog.linkrev(n) in revset:
1895 if revlog.linkrev(n) in revset:
1892 yield n
1896 yield n
1893
1897
1894 def changed_file_collector(changedfileset):
1898 def changed_file_collector(changedfileset):
1895 def collect_changed_files(clnode):
1899 def collect_changed_files(clnode):
1896 c = cl.read(clnode)
1900 c = cl.read(clnode)
1897 for fname in c[3]:
1901 for fname in c[3]:
1898 changedfileset[fname] = 1
1902 changedfileset[fname] = 1
1899 return collect_changed_files
1903 return collect_changed_files
1900
1904
1901 def lookuprevlink_func(revlog):
1905 def lookuprevlink_func(revlog):
1902 def lookuprevlink(n):
1906 def lookuprevlink(n):
1903 return cl.node(revlog.linkrev(n))
1907 return cl.node(revlog.linkrev(n))
1904 return lookuprevlink
1908 return lookuprevlink
1905
1909
1906 def gengroup():
1910 def gengroup():
1907 # construct a list of all changed files
1911 # construct a list of all changed files
1908 changedfiles = {}
1912 changedfiles = {}
1909
1913
1910 for chnk in cl.group(nodes, identity,
1914 for chnk in cl.group(nodes, identity,
1911 changed_file_collector(changedfiles)):
1915 changed_file_collector(changedfiles)):
1912 yield chnk
1916 yield chnk
1913 changedfiles = changedfiles.keys()
1917 changedfiles = changedfiles.keys()
1914 changedfiles.sort()
1918 changedfiles.sort()
1915
1919
1916 mnfst = self.manifest
1920 mnfst = self.manifest
1917 nodeiter = gennodelst(mnfst)
1921 nodeiter = gennodelst(mnfst)
1918 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1922 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1919 yield chnk
1923 yield chnk
1920
1924
1921 for fname in changedfiles:
1925 for fname in changedfiles:
1922 filerevlog = self.file(fname)
1926 filerevlog = self.file(fname)
1923 if filerevlog.count() == 0:
1927 if filerevlog.count() == 0:
1924 raise util.Abort(_("empty or missing revlog for %s") % fname)
1928 raise util.Abort(_("empty or missing revlog for %s") % fname)
1925 nodeiter = gennodelst(filerevlog)
1929 nodeiter = gennodelst(filerevlog)
1926 nodeiter = list(nodeiter)
1930 nodeiter = list(nodeiter)
1927 if nodeiter:
1931 if nodeiter:
1928 yield changegroup.chunkheader(len(fname))
1932 yield changegroup.chunkheader(len(fname))
1929 yield fname
1933 yield fname
1930 lookup = lookuprevlink_func(filerevlog)
1934 lookup = lookuprevlink_func(filerevlog)
1931 for chnk in filerevlog.group(nodeiter, lookup):
1935 for chnk in filerevlog.group(nodeiter, lookup):
1932 yield chnk
1936 yield chnk
1933
1937
1934 yield changegroup.closechunk()
1938 yield changegroup.closechunk()
1935
1939
1936 if nodes:
1940 if nodes:
1937 self.hook('outgoing', node=hex(nodes[0]), source=source)
1941 self.hook('outgoing', node=hex(nodes[0]), source=source)
1938
1942
1939 return util.chunkbuffer(gengroup())
1943 return util.chunkbuffer(gengroup())
1940
1944
1941 def addchangegroup(self, source, srctype, url, emptyok=False):
1945 def addchangegroup(self, source, srctype, url, emptyok=False):
1942 """add changegroup to repo.
1946 """add changegroup to repo.
1943
1947
1944 return values:
1948 return values:
1945 - nothing changed or no source: 0
1949 - nothing changed or no source: 0
1946 - more heads than before: 1+added heads (2..n)
1950 - more heads than before: 1+added heads (2..n)
1947 - fewer heads than before: -1-removed heads (-2..-n)
1951 - fewer heads than before: -1-removed heads (-2..-n)
1948 - number of heads stays the same: 1
1952 - number of heads stays the same: 1
1949 """
1953 """
1950 def csmap(x):
1954 def csmap(x):
1951 self.ui.debug(_("add changeset %s\n") % short(x))
1955 self.ui.debug(_("add changeset %s\n") % short(x))
1952 return cl.count()
1956 return cl.count()
1953
1957
1954 def revmap(x):
1958 def revmap(x):
1955 return cl.rev(x)
1959 return cl.rev(x)
1956
1960
1957 if not source:
1961 if not source:
1958 return 0
1962 return 0
1959
1963
1960 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1964 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1961
1965
1962 changesets = files = revisions = 0
1966 changesets = files = revisions = 0
1963
1967
1964 # write changelog data to temp files so concurrent readers will not see
1968 # write changelog data to temp files so concurrent readers will not see
1965 # inconsistent view
1969 # inconsistent view
1966 cl = self.changelog
1970 cl = self.changelog
1967 cl.delayupdate()
1971 cl.delayupdate()
1968 oldheads = len(cl.heads())
1972 oldheads = len(cl.heads())
1969
1973
1970 tr = self.transaction()
1974 tr = self.transaction()
1971 try:
1975 try:
1972 trp = weakref.proxy(tr)
1976 trp = weakref.proxy(tr)
1973 # pull off the changeset group
1977 # pull off the changeset group
1974 self.ui.status(_("adding changesets\n"))
1978 self.ui.status(_("adding changesets\n"))
1975 cor = cl.count() - 1
1979 cor = cl.count() - 1
1976 chunkiter = changegroup.chunkiter(source)
1980 chunkiter = changegroup.chunkiter(source)
1977 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1981 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1978 raise util.Abort(_("received changelog group is empty"))
1982 raise util.Abort(_("received changelog group is empty"))
1979 cnr = cl.count() - 1
1983 cnr = cl.count() - 1
1980 changesets = cnr - cor
1984 changesets = cnr - cor
1981
1985
1982 # pull off the manifest group
1986 # pull off the manifest group
1983 self.ui.status(_("adding manifests\n"))
1987 self.ui.status(_("adding manifests\n"))
1984 chunkiter = changegroup.chunkiter(source)
1988 chunkiter = changegroup.chunkiter(source)
1985 # no need to check for empty manifest group here:
1989 # no need to check for empty manifest group here:
1986 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1990 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1987 # no new manifest will be created and the manifest group will
1991 # no new manifest will be created and the manifest group will
1988 # be empty during the pull
1992 # be empty during the pull
1989 self.manifest.addgroup(chunkiter, revmap, trp)
1993 self.manifest.addgroup(chunkiter, revmap, trp)
1990
1994
1991 # process the files
1995 # process the files
1992 self.ui.status(_("adding file changes\n"))
1996 self.ui.status(_("adding file changes\n"))
1993 while 1:
1997 while 1:
1994 f = changegroup.getchunk(source)
1998 f = changegroup.getchunk(source)
1995 if not f:
1999 if not f:
1996 break
2000 break
1997 self.ui.debug(_("adding %s revisions\n") % f)
2001 self.ui.debug(_("adding %s revisions\n") % f)
1998 fl = self.file(f)
2002 fl = self.file(f)
1999 o = fl.count()
2003 o = fl.count()
2000 chunkiter = changegroup.chunkiter(source)
2004 chunkiter = changegroup.chunkiter(source)
2001 if fl.addgroup(chunkiter, revmap, trp) is None:
2005 if fl.addgroup(chunkiter, revmap, trp) is None:
2002 raise util.Abort(_("received file revlog group is empty"))
2006 raise util.Abort(_("received file revlog group is empty"))
2003 revisions += fl.count() - o
2007 revisions += fl.count() - o
2004 files += 1
2008 files += 1
2005
2009
2006 # make changelog see real files again
2010 # make changelog see real files again
2007 cl.finalize(trp)
2011 cl.finalize(trp)
2008
2012
2009 newheads = len(self.changelog.heads())
2013 newheads = len(self.changelog.heads())
2010 heads = ""
2014 heads = ""
2011 if oldheads and newheads != oldheads:
2015 if oldheads and newheads != oldheads:
2012 heads = _(" (%+d heads)") % (newheads - oldheads)
2016 heads = _(" (%+d heads)") % (newheads - oldheads)
2013
2017
2014 self.ui.status(_("added %d changesets"
2018 self.ui.status(_("added %d changesets"
2015 " with %d changes to %d files%s\n")
2019 " with %d changes to %d files%s\n")
2016 % (changesets, revisions, files, heads))
2020 % (changesets, revisions, files, heads))
2017
2021
2018 if changesets > 0:
2022 if changesets > 0:
2019 self.hook('pretxnchangegroup', throw=True,
2023 self.hook('pretxnchangegroup', throw=True,
2020 node=hex(self.changelog.node(cor+1)), source=srctype,
2024 node=hex(self.changelog.node(cor+1)), source=srctype,
2021 url=url)
2025 url=url)
2022
2026
2023 tr.close()
2027 tr.close()
2024 finally:
2028 finally:
2025 del tr
2029 del tr
2026
2030
2027 if changesets > 0:
2031 if changesets > 0:
2028 # forcefully update the on-disk branch cache
2032 # forcefully update the on-disk branch cache
2029 self.ui.debug(_("updating the branch cache\n"))
2033 self.ui.debug(_("updating the branch cache\n"))
2030 self.branchtags()
2034 self.branchtags()
2031 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2035 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2032 source=srctype, url=url)
2036 source=srctype, url=url)
2033
2037
2034 for i in xrange(cor + 1, cnr + 1):
2038 for i in xrange(cor + 1, cnr + 1):
2035 self.hook("incoming", node=hex(self.changelog.node(i)),
2039 self.hook("incoming", node=hex(self.changelog.node(i)),
2036 source=srctype, url=url)
2040 source=srctype, url=url)
2037
2041
2038 # never return 0 here:
2042 # never return 0 here:
2039 if newheads < oldheads:
2043 if newheads < oldheads:
2040 return newheads - oldheads - 1
2044 return newheads - oldheads - 1
2041 else:
2045 else:
2042 return newheads - oldheads + 1
2046 return newheads - oldheads + 1
2043
2047
2044
2048
2045 def stream_in(self, remote):
2049 def stream_in(self, remote):
2046 fp = remote.stream_out()
2050 fp = remote.stream_out()
2047 l = fp.readline()
2051 l = fp.readline()
2048 try:
2052 try:
2049 resp = int(l)
2053 resp = int(l)
2050 except ValueError:
2054 except ValueError:
2051 raise util.UnexpectedOutput(
2055 raise util.UnexpectedOutput(
2052 _('Unexpected response from remote server:'), l)
2056 _('Unexpected response from remote server:'), l)
2053 if resp == 1:
2057 if resp == 1:
2054 raise util.Abort(_('operation forbidden by server'))
2058 raise util.Abort(_('operation forbidden by server'))
2055 elif resp == 2:
2059 elif resp == 2:
2056 raise util.Abort(_('locking the remote repository failed'))
2060 raise util.Abort(_('locking the remote repository failed'))
2057 elif resp != 0:
2061 elif resp != 0:
2058 raise util.Abort(_('the server sent an unknown error code'))
2062 raise util.Abort(_('the server sent an unknown error code'))
2059 self.ui.status(_('streaming all changes\n'))
2063 self.ui.status(_('streaming all changes\n'))
2060 l = fp.readline()
2064 l = fp.readline()
2061 try:
2065 try:
2062 total_files, total_bytes = map(int, l.split(' ', 1))
2066 total_files, total_bytes = map(int, l.split(' ', 1))
2063 except (ValueError, TypeError):
2067 except (ValueError, TypeError):
2064 raise util.UnexpectedOutput(
2068 raise util.UnexpectedOutput(
2065 _('Unexpected response from remote server:'), l)
2069 _('Unexpected response from remote server:'), l)
2066 self.ui.status(_('%d files to transfer, %s of data\n') %
2070 self.ui.status(_('%d files to transfer, %s of data\n') %
2067 (total_files, util.bytecount(total_bytes)))
2071 (total_files, util.bytecount(total_bytes)))
2068 start = time.time()
2072 start = time.time()
2069 for i in xrange(total_files):
2073 for i in xrange(total_files):
2070 # XXX doesn't support '\n' or '\r' in filenames
2074 # XXX doesn't support '\n' or '\r' in filenames
2071 l = fp.readline()
2075 l = fp.readline()
2072 try:
2076 try:
2073 name, size = l.split('\0', 1)
2077 name, size = l.split('\0', 1)
2074 size = int(size)
2078 size = int(size)
2075 except (ValueError, TypeError):
2079 except (ValueError, TypeError):
2076 raise util.UnexpectedOutput(
2080 raise util.UnexpectedOutput(
2077 _('Unexpected response from remote server:'), l)
2081 _('Unexpected response from remote server:'), l)
2078 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2082 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2079 ofp = self.sopener(name, 'w')
2083 ofp = self.sopener(name, 'w')
2080 for chunk in util.filechunkiter(fp, limit=size):
2084 for chunk in util.filechunkiter(fp, limit=size):
2081 ofp.write(chunk)
2085 ofp.write(chunk)
2082 ofp.close()
2086 ofp.close()
2083 elapsed = time.time() - start
2087 elapsed = time.time() - start
2084 if elapsed <= 0:
2088 if elapsed <= 0:
2085 elapsed = 0.001
2089 elapsed = 0.001
2086 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2090 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2087 (util.bytecount(total_bytes), elapsed,
2091 (util.bytecount(total_bytes), elapsed,
2088 util.bytecount(total_bytes / elapsed)))
2092 util.bytecount(total_bytes / elapsed)))
2089 self.invalidate()
2093 self.invalidate()
2090 return len(self.heads()) + 1
2094 return len(self.heads()) + 1
2091
2095
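# Summary sketch (not from the source file) of the stream format parsed by
# stream_in() above, as the server is assumed to send it:
#
#   <status>\n                        0 = ok, 1 = forbidden, 2 = lock failed
#   <total_files> <total_bytes>\n
#   then, per file:
#   <store path>\0<size>\n            followed by exactly <size> bytes of data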
2092 def clone(self, remote, heads=[], stream=False):
2096 def clone(self, remote, heads=[], stream=False):
2093 '''clone remote repository.
2097 '''clone remote repository.
2094
2098
2095 keyword arguments:
2099 keyword arguments:
2096 heads: list of revs to clone (forces use of pull)
2100 heads: list of revs to clone (forces use of pull)
2097 stream: use streaming clone if possible'''
2101 stream: use streaming clone if possible'''
2098
2102
2099 # now, all clients that can request uncompressed clones can
2103 # now, all clients that can request uncompressed clones can
2100 # read repo formats supported by all servers that can serve
2104 # read repo formats supported by all servers that can serve
2101 # them.
2105 # them.
2102
2106
2103 # if revlog format changes, client will have to check version
2107 # if revlog format changes, client will have to check version
2104 # and format flags on "stream" capability, and use
2108 # and format flags on "stream" capability, and use
2105 # uncompressed only if compatible.
2109 # uncompressed only if compatible.
2106
2110
2107 if stream and not heads and remote.capable('stream'):
2111 if stream and not heads and remote.capable('stream'):
2108 return self.stream_in(remote)
2112 return self.stream_in(remote)
2109 return self.pull(remote, heads)
2113 return self.pull(remote, heads)
2110
2114
2111 # used to avoid circular references so destructors work
2115 # used to avoid circular references so destructors work
2112 def aftertrans(files):
2116 def aftertrans(files):
2113 renamefiles = [tuple(t) for t in files]
2117 renamefiles = [tuple(t) for t in files]
2114 def a():
2118 def a():
2115 for src, dest in renamefiles:
2119 for src, dest in renamefiles:
2116 util.rename(src, dest)
2120 util.rename(src, dest)
2117 return a
2121 return a
2118
2122
2119 def instance(ui, path, create):
2123 def instance(ui, path, create):
2120 return localrepository(ui, util.drop_scheme('file', path), create)
2124 return localrepository(ui, util.drop_scheme('file', path), create)
2121
2125
2122 def islocal(path):
2126 def islocal(path):
2123 return True
2127 return True
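# Usage sketch (hypothetical path, not from the source file) for the
# module-level helpers above; 'ui' and 'hg' are assumed to be the usual
# mercurial modules of this era.
#
#   from mercurial import ui, hg
#   repo = hg.repository(ui.ui(), '/tmp/example-repo')  # dispatches to instance()
#   assert islocal('/tmp/example-repo')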