Expand import * to allow Pyflakes to find problems
Joel Rosdahl
r6211:f89fd07f default
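
This changeset replaces wildcard imports such as "from mercurial.node import *" with explicit names (bin, hex, nullid, short, as needed by each module), so that Pyflakes can verify statically that every name used is actually defined. As a rough illustration only (not part of the changeset, and assuming a Mercurial installation is importable), the difference looks like this:

    # With a wildcard import, Pyflakes has no list of the names the module
    # provides, so a misspelled call like shortt(node) is only caught at
    # runtime:
    #   from mercurial.node import *
    #
    # With explicit imports, Pyflakes can flag undefined or unused names:
    from mercurial.node import bin, short

    def describe(hexnode):
        # bin() converts a 40-char hex changeset id to its binary form;
        # short() abbreviates the binary id for display
        return short(bin(hexnode))

The diff below applies the same substitution in acl.py, bugzilla.py, convert/hg.py, and extdiff.py without changing any behavior.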

Note: the changeset is large, so the diff below is truncated.

@@ -1,124 +1,124 b''
1 # acl.py - changeset access control for mercurial
1 # acl.py - changeset access control for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # this hook allows to allow or deny access to parts of a repo when
8 # this hook allows to allow or deny access to parts of a repo when
9 # taking incoming changesets.
9 # taking incoming changesets.
10 #
10 #
11 # authorization is against local user name on system where hook is
11 # authorization is against local user name on system where hook is
12 # run, not committer of original changeset (since that is easy to
12 # run, not committer of original changeset (since that is easy to
13 # spoof).
13 # spoof).
14 #
14 #
15 # acl hook is best to use if you use hgsh to set up restricted shells
15 # acl hook is best to use if you use hgsh to set up restricted shells
16 # for authenticated users to only push to / pull from. not safe if
16 # for authenticated users to only push to / pull from. not safe if
17 # user has interactive shell access, because they can disable hook.
17 # user has interactive shell access, because they can disable hook.
18 # also not safe if remote users share one local account, because then
18 # also not safe if remote users share one local account, because then
19 # no way to tell remote users apart.
19 # no way to tell remote users apart.
20 #
20 #
21 # to use, configure acl extension in hgrc like this:
21 # to use, configure acl extension in hgrc like this:
22 #
22 #
23 # [extensions]
23 # [extensions]
24 # hgext.acl =
24 # hgext.acl =
25 #
25 #
26 # [hooks]
26 # [hooks]
27 # pretxnchangegroup.acl = python:hgext.acl.hook
27 # pretxnchangegroup.acl = python:hgext.acl.hook
28 #
28 #
29 # [acl]
29 # [acl]
30 # sources = serve # check if source of incoming changes in this list
30 # sources = serve # check if source of incoming changes in this list
31 # # ("serve" == ssh or http, "push", "pull", "bundle")
31 # # ("serve" == ssh or http, "push", "pull", "bundle")
32 #
32 #
33 # allow and deny lists have subtree pattern (default syntax is glob)
33 # allow and deny lists have subtree pattern (default syntax is glob)
34 # on left, user names on right. deny list checked before allow list.
34 # on left, user names on right. deny list checked before allow list.
35 #
35 #
36 # [acl.allow]
36 # [acl.allow]
37 # # if acl.allow not present, all users allowed by default
37 # # if acl.allow not present, all users allowed by default
38 # # empty acl.allow = no users allowed
38 # # empty acl.allow = no users allowed
39 # docs/** = doc_writer
39 # docs/** = doc_writer
40 # .hgtags = release_engineer
40 # .hgtags = release_engineer
41 #
41 #
42 # [acl.deny]
42 # [acl.deny]
43 # # if acl.deny not present, no users denied by default
43 # # if acl.deny not present, no users denied by default
44 # # empty acl.deny = all users allowed
44 # # empty acl.deny = all users allowed
45 # glob pattern = user4, user5
45 # glob pattern = user4, user5
46 # ** = user6
46 # ** = user6
47
47
48 from mercurial.i18n import _
48 from mercurial.i18n import _
49 from mercurial.node import *
49 from mercurial.node import bin, short
50 from mercurial import util
50 from mercurial import util
51 import getpass
51 import getpass
52
52
53 class checker(object):
53 class checker(object):
54 '''acl checker.'''
54 '''acl checker.'''
55
55
56 def buildmatch(self, key):
56 def buildmatch(self, key):
57 '''return tuple of (match function, list enabled).'''
57 '''return tuple of (match function, list enabled).'''
58 if not self.ui.has_section(key):
58 if not self.ui.has_section(key):
59 self.ui.debug(_('acl: %s not enabled\n') % key)
59 self.ui.debug(_('acl: %s not enabled\n') % key)
60 return None, False
60 return None, False
61
61
62 thisuser = self.getuser()
62 thisuser = self.getuser()
63 pats = [pat for pat, users in self.ui.configitems(key)
63 pats = [pat for pat, users in self.ui.configitems(key)
64 if thisuser in users.replace(',', ' ').split()]
64 if thisuser in users.replace(',', ' ').split()]
65 self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
65 self.ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
66 (key, len(pats), thisuser))
66 (key, len(pats), thisuser))
67 if pats:
67 if pats:
68 match = util.matcher(self.repo.root, names=pats)[1]
68 match = util.matcher(self.repo.root, names=pats)[1]
69 else:
69 else:
70 match = util.never
70 match = util.never
71 return match, True
71 return match, True
72
72
73 def getuser(self):
73 def getuser(self):
74 '''return name of authenticated user.'''
74 '''return name of authenticated user.'''
75 return self.user
75 return self.user
76
76
77 def __init__(self, ui, repo):
77 def __init__(self, ui, repo):
78 self.ui = ui
78 self.ui = ui
79 self.repo = repo
79 self.repo = repo
80 self.user = getpass.getuser()
80 self.user = getpass.getuser()
81 cfg = self.ui.config('acl', 'config')
81 cfg = self.ui.config('acl', 'config')
82 if cfg:
82 if cfg:
83 self.ui.readsections(cfg, 'acl.allow', 'acl.deny')
83 self.ui.readsections(cfg, 'acl.allow', 'acl.deny')
84 self.allow, self.allowable = self.buildmatch('acl.allow')
84 self.allow, self.allowable = self.buildmatch('acl.allow')
85 self.deny, self.deniable = self.buildmatch('acl.deny')
85 self.deny, self.deniable = self.buildmatch('acl.deny')
86
86
87 def skipsource(self, source):
87 def skipsource(self, source):
88 '''true if incoming changes from this source should be skipped.'''
88 '''true if incoming changes from this source should be skipped.'''
89 ok_sources = self.ui.config('acl', 'sources', 'serve').split()
89 ok_sources = self.ui.config('acl', 'sources', 'serve').split()
90 return source not in ok_sources
90 return source not in ok_sources
91
91
92 def check(self, node):
92 def check(self, node):
93 '''return if access allowed, raise exception if not.'''
93 '''return if access allowed, raise exception if not.'''
94 files = self.repo.changectx(node).files()
94 files = self.repo.changectx(node).files()
95 if self.deniable:
95 if self.deniable:
96 for f in files:
96 for f in files:
97 if self.deny(f):
97 if self.deny(f):
98 self.ui.debug(_('acl: user %s denied on %s\n') %
98 self.ui.debug(_('acl: user %s denied on %s\n') %
99 (self.getuser(), f))
99 (self.getuser(), f))
100 raise util.Abort(_('acl: access denied for changeset %s') %
100 raise util.Abort(_('acl: access denied for changeset %s') %
101 short(node))
101 short(node))
102 if self.allowable:
102 if self.allowable:
103 for f in files:
103 for f in files:
104 if not self.allow(f):
104 if not self.allow(f):
105 self.ui.debug(_('acl: user %s not allowed on %s\n') %
105 self.ui.debug(_('acl: user %s not allowed on %s\n') %
106 (self.getuser(), f))
106 (self.getuser(), f))
107 raise util.Abort(_('acl: access denied for changeset %s') %
107 raise util.Abort(_('acl: access denied for changeset %s') %
108 short(node))
108 short(node))
109 self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
109 self.ui.debug(_('acl: allowing changeset %s\n') % short(node))
110
110
111 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
111 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
112 if hooktype != 'pretxnchangegroup':
112 if hooktype != 'pretxnchangegroup':
113 raise util.Abort(_('config error - hook type "%s" cannot stop '
113 raise util.Abort(_('config error - hook type "%s" cannot stop '
114 'incoming changesets') % hooktype)
114 'incoming changesets') % hooktype)
115
115
116 c = checker(ui, repo)
116 c = checker(ui, repo)
117 if c.skipsource(source):
117 if c.skipsource(source):
118 ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
118 ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
119 return
119 return
120
120
121 start = repo.changelog.rev(bin(node))
121 start = repo.changelog.rev(bin(node))
122 end = repo.changelog.count()
122 end = repo.changelog.count()
123 for rev in xrange(start, end):
123 for rev in xrange(start, end):
124 c.check(repo.changelog.node(rev))
124 c.check(repo.changelog.node(rev))
@@ -1,311 +1,311 b''
1 # bugzilla.py - bugzilla integration for mercurial
1 # bugzilla.py - bugzilla integration for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to update comments of bugzilla bugs when changesets
8 # hook extension to update comments of bugzilla bugs when changesets
9 # that refer to bugs by id are seen. this hook does not change bug
9 # that refer to bugs by id are seen. this hook does not change bug
10 # status, only comments.
10 # status, only comments.
11 #
11 #
12 # to configure, add items to '[bugzilla]' section of hgrc.
12 # to configure, add items to '[bugzilla]' section of hgrc.
13 #
13 #
14 # to use, configure bugzilla extension and enable like this:
14 # to use, configure bugzilla extension and enable like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.bugzilla =
17 # hgext.bugzilla =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # run bugzilla hook on every change pulled or pushed in here
20 # # run bugzilla hook on every change pulled or pushed in here
21 # incoming.bugzilla = python:hgext.bugzilla.hook
21 # incoming.bugzilla = python:hgext.bugzilla.hook
22 #
22 #
23 # config items:
23 # config items:
24 #
24 #
25 # section name is 'bugzilla'.
25 # section name is 'bugzilla'.
26 # [bugzilla]
26 # [bugzilla]
27 #
27 #
28 # REQUIRED:
28 # REQUIRED:
29 # host = bugzilla # mysql server where bugzilla database lives
29 # host = bugzilla # mysql server where bugzilla database lives
30 # password = ** # user's password
30 # password = ** # user's password
31 # version = 2.16 # version of bugzilla installed
31 # version = 2.16 # version of bugzilla installed
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # bzuser = ... # fallback bugzilla user name to record comments with
34 # bzuser = ... # fallback bugzilla user name to record comments with
35 # db = bugs # database to connect to
35 # db = bugs # database to connect to
36 # notify = ... # command to run to get bugzilla to send mail
36 # notify = ... # command to run to get bugzilla to send mail
37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
37 # regexp = ... # regexp to match bug ids (must contain one "()" group)
38 # strip = 0 # number of slashes to strip for url paths
38 # strip = 0 # number of slashes to strip for url paths
39 # style = ... # style file to use when formatting comments
39 # style = ... # style file to use when formatting comments
40 # template = ... # template to use when formatting comments
40 # template = ... # template to use when formatting comments
41 # timeout = 5 # database connection timeout (seconds)
41 # timeout = 5 # database connection timeout (seconds)
42 # user = bugs # user to connect to database as
42 # user = bugs # user to connect to database as
43 # [web]
43 # [web]
44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
44 # baseurl = http://hgserver/... # root of hg web site for browsing commits
45 #
45 #
46 # if hg committer names are not same as bugzilla user names, use
46 # if hg committer names are not same as bugzilla user names, use
47 # "usermap" feature to map from committer email to bugzilla user name.
47 # "usermap" feature to map from committer email to bugzilla user name.
48 # usermap can be in hgrc or separate config file.
48 # usermap can be in hgrc or separate config file.
49 #
49 #
50 # [bugzilla]
50 # [bugzilla]
51 # usermap = filename # cfg file with "committer"="bugzilla user" info
51 # usermap = filename # cfg file with "committer"="bugzilla user" info
52 # [usermap]
52 # [usermap]
53 # committer_email = bugzilla_user_name
53 # committer_email = bugzilla_user_name
54
54
55 from mercurial.i18n import _
55 from mercurial.i18n import _
56 from mercurial.node import *
56 from mercurial.node import short
57 from mercurial import cmdutil, templater, util
57 from mercurial import cmdutil, templater, util
58 import os, re, time
58 import os, re, time
59
59
60 MySQLdb = None
60 MySQLdb = None
61
61
62 def buglist(ids):
62 def buglist(ids):
63 return '(' + ','.join(map(str, ids)) + ')'
63 return '(' + ','.join(map(str, ids)) + ')'
64
64
65 class bugzilla_2_16(object):
65 class bugzilla_2_16(object):
66 '''support for bugzilla version 2.16.'''
66 '''support for bugzilla version 2.16.'''
67
67
68 def __init__(self, ui):
68 def __init__(self, ui):
69 self.ui = ui
69 self.ui = ui
70 host = self.ui.config('bugzilla', 'host', 'localhost')
70 host = self.ui.config('bugzilla', 'host', 'localhost')
71 user = self.ui.config('bugzilla', 'user', 'bugs')
71 user = self.ui.config('bugzilla', 'user', 'bugs')
72 passwd = self.ui.config('bugzilla', 'password')
72 passwd = self.ui.config('bugzilla', 'password')
73 db = self.ui.config('bugzilla', 'db', 'bugs')
73 db = self.ui.config('bugzilla', 'db', 'bugs')
74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
74 timeout = int(self.ui.config('bugzilla', 'timeout', 5))
75 usermap = self.ui.config('bugzilla', 'usermap')
75 usermap = self.ui.config('bugzilla', 'usermap')
76 if usermap:
76 if usermap:
77 self.ui.readsections(usermap, 'usermap')
77 self.ui.readsections(usermap, 'usermap')
78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
78 self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
79 (host, db, user, '*' * len(passwd)))
79 (host, db, user, '*' * len(passwd)))
80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
80 self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
81 db=db, connect_timeout=timeout)
81 db=db, connect_timeout=timeout)
82 self.cursor = self.conn.cursor()
82 self.cursor = self.conn.cursor()
83 self.run('select fieldid from fielddefs where name = "longdesc"')
83 self.run('select fieldid from fielddefs where name = "longdesc"')
84 ids = self.cursor.fetchall()
84 ids = self.cursor.fetchall()
85 if len(ids) != 1:
85 if len(ids) != 1:
86 raise util.Abort(_('unknown database schema'))
86 raise util.Abort(_('unknown database schema'))
87 self.longdesc_id = ids[0][0]
87 self.longdesc_id = ids[0][0]
88 self.user_ids = {}
88 self.user_ids = {}
89
89
90 def run(self, *args, **kwargs):
90 def run(self, *args, **kwargs):
91 '''run a query.'''
91 '''run a query.'''
92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
92 self.ui.note(_('query: %s %s\n') % (args, kwargs))
93 try:
93 try:
94 self.cursor.execute(*args, **kwargs)
94 self.cursor.execute(*args, **kwargs)
95 except MySQLdb.MySQLError, err:
95 except MySQLdb.MySQLError, err:
96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
96 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
97 raise
97 raise
98
98
99 def filter_real_bug_ids(self, ids):
99 def filter_real_bug_ids(self, ids):
100 '''filter not-existing bug ids from list.'''
100 '''filter not-existing bug ids from list.'''
101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
101 self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
102 ids = [c[0] for c in self.cursor.fetchall()]
102 ids = [c[0] for c in self.cursor.fetchall()]
103 ids.sort()
103 ids.sort()
104 return ids
104 return ids
105
105
106 def filter_unknown_bug_ids(self, node, ids):
106 def filter_unknown_bug_ids(self, node, ids):
107 '''filter bug ids from list that already refer to this changeset.'''
107 '''filter bug ids from list that already refer to this changeset.'''
108
108
109 self.run('''select bug_id from longdescs where
109 self.run('''select bug_id from longdescs where
110 bug_id in %s and thetext like "%%%s%%"''' %
110 bug_id in %s and thetext like "%%%s%%"''' %
111 (buglist(ids), short(node)))
111 (buglist(ids), short(node)))
112 unknown = dict.fromkeys(ids)
112 unknown = dict.fromkeys(ids)
113 for (id,) in self.cursor.fetchall():
113 for (id,) in self.cursor.fetchall():
114 self.ui.status(_('bug %d already knows about changeset %s\n') %
114 self.ui.status(_('bug %d already knows about changeset %s\n') %
115 (id, short(node)))
115 (id, short(node)))
116 unknown.pop(id, None)
116 unknown.pop(id, None)
117 ids = unknown.keys()
117 ids = unknown.keys()
118 ids.sort()
118 ids.sort()
119 return ids
119 return ids
120
120
121 def notify(self, ids):
121 def notify(self, ids):
122 '''tell bugzilla to send mail.'''
122 '''tell bugzilla to send mail.'''
123
123
124 self.ui.status(_('telling bugzilla to send mail:\n'))
124 self.ui.status(_('telling bugzilla to send mail:\n'))
125 for id in ids:
125 for id in ids:
126 self.ui.status(_(' bug %s\n') % id)
126 self.ui.status(_(' bug %s\n') % id)
127 cmd = self.ui.config('bugzilla', 'notify',
127 cmd = self.ui.config('bugzilla', 'notify',
128 'cd /var/www/html/bugzilla && '
128 'cd /var/www/html/bugzilla && '
129 './processmail %s nobody@nowhere.com') % id
129 './processmail %s nobody@nowhere.com') % id
130 fp = os.popen('(%s) 2>&1' % cmd)
130 fp = os.popen('(%s) 2>&1' % cmd)
131 out = fp.read()
131 out = fp.read()
132 ret = fp.close()
132 ret = fp.close()
133 if ret:
133 if ret:
134 self.ui.warn(out)
134 self.ui.warn(out)
135 raise util.Abort(_('bugzilla notify command %s') %
135 raise util.Abort(_('bugzilla notify command %s') %
136 util.explain_exit(ret)[0])
136 util.explain_exit(ret)[0])
137 self.ui.status(_('done\n'))
137 self.ui.status(_('done\n'))
138
138
139 def get_user_id(self, user):
139 def get_user_id(self, user):
140 '''look up numeric bugzilla user id.'''
140 '''look up numeric bugzilla user id.'''
141 try:
141 try:
142 return self.user_ids[user]
142 return self.user_ids[user]
143 except KeyError:
143 except KeyError:
144 try:
144 try:
145 userid = int(user)
145 userid = int(user)
146 except ValueError:
146 except ValueError:
147 self.ui.note(_('looking up user %s\n') % user)
147 self.ui.note(_('looking up user %s\n') % user)
148 self.run('''select userid from profiles
148 self.run('''select userid from profiles
149 where login_name like %s''', user)
149 where login_name like %s''', user)
150 all = self.cursor.fetchall()
150 all = self.cursor.fetchall()
151 if len(all) != 1:
151 if len(all) != 1:
152 raise KeyError(user)
152 raise KeyError(user)
153 userid = int(all[0][0])
153 userid = int(all[0][0])
154 self.user_ids[user] = userid
154 self.user_ids[user] = userid
155 return userid
155 return userid
156
156
157 def map_committer(self, user):
157 def map_committer(self, user):
158 '''map name of committer to bugzilla user name.'''
158 '''map name of committer to bugzilla user name.'''
159 for committer, bzuser in self.ui.configitems('usermap'):
159 for committer, bzuser in self.ui.configitems('usermap'):
160 if committer.lower() == user.lower():
160 if committer.lower() == user.lower():
161 return bzuser
161 return bzuser
162 return user
162 return user
163
163
164 def add_comment(self, bugid, text, committer):
164 def add_comment(self, bugid, text, committer):
165 '''add comment to bug. try adding comment as committer of
165 '''add comment to bug. try adding comment as committer of
166 changeset, otherwise as default bugzilla user.'''
166 changeset, otherwise as default bugzilla user.'''
167 user = self.map_committer(committer)
167 user = self.map_committer(committer)
168 try:
168 try:
169 userid = self.get_user_id(user)
169 userid = self.get_user_id(user)
170 except KeyError:
170 except KeyError:
171 try:
171 try:
172 defaultuser = self.ui.config('bugzilla', 'bzuser')
172 defaultuser = self.ui.config('bugzilla', 'bzuser')
173 if not defaultuser:
173 if not defaultuser:
174 raise util.Abort(_('cannot find bugzilla user id for %s') %
174 raise util.Abort(_('cannot find bugzilla user id for %s') %
175 user)
175 user)
176 userid = self.get_user_id(defaultuser)
176 userid = self.get_user_id(defaultuser)
177 except KeyError:
177 except KeyError:
178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
178 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
179 (user, defaultuser))
179 (user, defaultuser))
180 now = time.strftime('%Y-%m-%d %H:%M:%S')
180 now = time.strftime('%Y-%m-%d %H:%M:%S')
181 self.run('''insert into longdescs
181 self.run('''insert into longdescs
182 (bug_id, who, bug_when, thetext)
182 (bug_id, who, bug_when, thetext)
183 values (%s, %s, %s, %s)''',
183 values (%s, %s, %s, %s)''',
184 (bugid, userid, now, text))
184 (bugid, userid, now, text))
185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
185 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
186 values (%s, %s, %s, %s)''',
186 values (%s, %s, %s, %s)''',
187 (bugid, userid, now, self.longdesc_id))
187 (bugid, userid, now, self.longdesc_id))
188
188
189 class bugzilla(object):
189 class bugzilla(object):
190 # supported versions of bugzilla. different versions have
190 # supported versions of bugzilla. different versions have
191 # different schemas.
191 # different schemas.
192 _versions = {
192 _versions = {
193 '2.16': bugzilla_2_16,
193 '2.16': bugzilla_2_16,
194 }
194 }
195
195
196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
196 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
197 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
198
198
199 _bz = None
199 _bz = None
200
200
201 def __init__(self, ui, repo):
201 def __init__(self, ui, repo):
202 self.ui = ui
202 self.ui = ui
203 self.repo = repo
203 self.repo = repo
204
204
205 def bz(self):
205 def bz(self):
206 '''return object that knows how to talk to bugzilla version in
206 '''return object that knows how to talk to bugzilla version in
207 use.'''
207 use.'''
208
208
209 if bugzilla._bz is None:
209 if bugzilla._bz is None:
210 bzversion = self.ui.config('bugzilla', 'version')
210 bzversion = self.ui.config('bugzilla', 'version')
211 try:
211 try:
212 bzclass = bugzilla._versions[bzversion]
212 bzclass = bugzilla._versions[bzversion]
213 except KeyError:
213 except KeyError:
214 raise util.Abort(_('bugzilla version %s not supported') %
214 raise util.Abort(_('bugzilla version %s not supported') %
215 bzversion)
215 bzversion)
216 bugzilla._bz = bzclass(self.ui)
216 bugzilla._bz = bzclass(self.ui)
217 return bugzilla._bz
217 return bugzilla._bz
218
218
219 def __getattr__(self, key):
219 def __getattr__(self, key):
220 return getattr(self.bz(), key)
220 return getattr(self.bz(), key)
221
221
222 _bug_re = None
222 _bug_re = None
223 _split_re = None
223 _split_re = None
224
224
225 def find_bug_ids(self, ctx):
225 def find_bug_ids(self, ctx):
226 '''find valid bug ids that are referred to in changeset
226 '''find valid bug ids that are referred to in changeset
227 comments and that do not already have references to this
227 comments and that do not already have references to this
228 changeset.'''
228 changeset.'''
229
229
230 if bugzilla._bug_re is None:
230 if bugzilla._bug_re is None:
231 bugzilla._bug_re = re.compile(
231 bugzilla._bug_re = re.compile(
232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
232 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
233 re.IGNORECASE)
233 re.IGNORECASE)
234 bugzilla._split_re = re.compile(r'\D+')
234 bugzilla._split_re = re.compile(r'\D+')
235 start = 0
235 start = 0
236 ids = {}
236 ids = {}
237 while True:
237 while True:
238 m = bugzilla._bug_re.search(ctx.description(), start)
238 m = bugzilla._bug_re.search(ctx.description(), start)
239 if not m:
239 if not m:
240 break
240 break
241 start = m.end()
241 start = m.end()
242 for id in bugzilla._split_re.split(m.group(1)):
242 for id in bugzilla._split_re.split(m.group(1)):
243 if not id: continue
243 if not id: continue
244 ids[int(id)] = 1
244 ids[int(id)] = 1
245 ids = ids.keys()
245 ids = ids.keys()
246 if ids:
246 if ids:
247 ids = self.filter_real_bug_ids(ids)
247 ids = self.filter_real_bug_ids(ids)
248 if ids:
248 if ids:
249 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
249 ids = self.filter_unknown_bug_ids(ctx.node(), ids)
250 return ids
250 return ids
251
251
252 def update(self, bugid, ctx):
252 def update(self, bugid, ctx):
253 '''update bugzilla bug with reference to changeset.'''
253 '''update bugzilla bug with reference to changeset.'''
254
254
255 def webroot(root):
255 def webroot(root):
256 '''strip leading prefix of repo root and turn into
256 '''strip leading prefix of repo root and turn into
257 url-safe path.'''
257 url-safe path.'''
258 count = int(self.ui.config('bugzilla', 'strip', 0))
258 count = int(self.ui.config('bugzilla', 'strip', 0))
259 root = util.pconvert(root)
259 root = util.pconvert(root)
260 while count > 0:
260 while count > 0:
261 c = root.find('/')
261 c = root.find('/')
262 if c == -1:
262 if c == -1:
263 break
263 break
264 root = root[c+1:]
264 root = root[c+1:]
265 count -= 1
265 count -= 1
266 return root
266 return root
267
267
268 mapfile = self.ui.config('bugzilla', 'style')
268 mapfile = self.ui.config('bugzilla', 'style')
269 tmpl = self.ui.config('bugzilla', 'template')
269 tmpl = self.ui.config('bugzilla', 'template')
270 t = cmdutil.changeset_templater(self.ui, self.repo,
270 t = cmdutil.changeset_templater(self.ui, self.repo,
271 False, mapfile, False)
271 False, mapfile, False)
272 if not mapfile and not tmpl:
272 if not mapfile and not tmpl:
273 tmpl = _('changeset {node|short} in repo {root} refers '
273 tmpl = _('changeset {node|short} in repo {root} refers '
274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
274 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
275 if tmpl:
275 if tmpl:
276 tmpl = templater.parsestring(tmpl, quoted=False)
276 tmpl = templater.parsestring(tmpl, quoted=False)
277 t.use_template(tmpl)
277 t.use_template(tmpl)
278 self.ui.pushbuffer()
278 self.ui.pushbuffer()
279 t.show(changenode=ctx.node(), changes=ctx.changeset(),
279 t.show(changenode=ctx.node(), changes=ctx.changeset(),
280 bug=str(bugid),
280 bug=str(bugid),
281 hgweb=self.ui.config('web', 'baseurl'),
281 hgweb=self.ui.config('web', 'baseurl'),
282 root=self.repo.root,
282 root=self.repo.root,
283 webroot=webroot(self.repo.root))
283 webroot=webroot(self.repo.root))
284 data = self.ui.popbuffer()
284 data = self.ui.popbuffer()
285 self.add_comment(bugid, data, util.email(ctx.user()))
285 self.add_comment(bugid, data, util.email(ctx.user()))
286
286
287 def hook(ui, repo, hooktype, node=None, **kwargs):
287 def hook(ui, repo, hooktype, node=None, **kwargs):
288 '''add comment to bugzilla for each changeset that refers to a
288 '''add comment to bugzilla for each changeset that refers to a
289 bugzilla bug id. only add a comment once per bug, so same change
289 bugzilla bug id. only add a comment once per bug, so same change
290 seen multiple times does not fill bug with duplicate data.'''
290 seen multiple times does not fill bug with duplicate data.'''
291 try:
291 try:
292 import MySQLdb as mysql
292 import MySQLdb as mysql
293 global MySQLdb
293 global MySQLdb
294 MySQLdb = mysql
294 MySQLdb = mysql
295 except ImportError, err:
295 except ImportError, err:
296 raise util.Abort(_('python mysql support not available: %s') % err)
296 raise util.Abort(_('python mysql support not available: %s') % err)
297
297
298 if node is None:
298 if node is None:
299 raise util.Abort(_('hook type %s does not pass a changeset id') %
299 raise util.Abort(_('hook type %s does not pass a changeset id') %
300 hooktype)
300 hooktype)
301 try:
301 try:
302 bz = bugzilla(ui, repo)
302 bz = bugzilla(ui, repo)
303 ctx = repo.changectx(node)
303 ctx = repo.changectx(node)
304 ids = bz.find_bug_ids(ctx)
304 ids = bz.find_bug_ids(ctx)
305 if ids:
305 if ids:
306 for id in ids:
306 for id in ids:
307 bz.update(id, ctx)
307 bz.update(id, ctx)
308 bz.notify(ids)
308 bz.notify(ids)
309 except MySQLdb.MySQLError, err:
309 except MySQLdb.MySQLError, err:
310 raise util.Abort(_('database error: %s') % err[1])
310 raise util.Abort(_('database error: %s') % err[1])
311
311
@@ -1,301 +1,301 b''
1 # hg backend for convert extension
1 # hg backend for convert extension
2
2
3 # Notes for hg->hg conversion:
3 # Notes for hg->hg conversion:
4 #
4 #
5 # * Old versions of Mercurial didn't trim the whitespace from the ends
5 # * Old versions of Mercurial didn't trim the whitespace from the ends
6 # of commit messages, but new versions do. Changesets created by
6 # of commit messages, but new versions do. Changesets created by
7 # those older versions, then converted, may thus have different
7 # those older versions, then converted, may thus have different
8 # hashes for changesets that are otherwise identical.
8 # hashes for changesets that are otherwise identical.
9 #
9 #
10 # * By default, the source revision is stored in the converted
10 # * By default, the source revision is stored in the converted
11 # revision. This will cause the converted revision to have a
11 # revision. This will cause the converted revision to have a
12 # different identity than the source. To avoid this, use the
12 # different identity than the source. To avoid this, use the
13 # following option: "--config convert.hg.saverev=false"
13 # following option: "--config convert.hg.saverev=false"
14
14
15
15
16 import os, time
16 import os, time
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.node import *
18 from mercurial.node import bin, hex, nullid
19 from mercurial import hg, lock, revlog, util
19 from mercurial import hg, lock, revlog, util
20
20
21 from common import NoRepo, commit, converter_source, converter_sink
21 from common import NoRepo, commit, converter_source, converter_sink
22
22
23 class mercurial_sink(converter_sink):
23 class mercurial_sink(converter_sink):
24 def __init__(self, ui, path):
24 def __init__(self, ui, path):
25 converter_sink.__init__(self, ui, path)
25 converter_sink.__init__(self, ui, path)
26 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
26 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
27 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
27 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
28 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
28 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
29 self.lastbranch = None
29 self.lastbranch = None
30 if os.path.isdir(path) and len(os.listdir(path)) > 0:
30 if os.path.isdir(path) and len(os.listdir(path)) > 0:
31 try:
31 try:
32 self.repo = hg.repository(self.ui, path)
32 self.repo = hg.repository(self.ui, path)
33 if not self.repo.local():
33 if not self.repo.local():
34 raise NoRepo(_('%s is not a local Mercurial repo') % path)
34 raise NoRepo(_('%s is not a local Mercurial repo') % path)
35 except hg.RepoError, err:
35 except hg.RepoError, err:
36 ui.print_exc()
36 ui.print_exc()
37 raise NoRepo(err.args[0])
37 raise NoRepo(err.args[0])
38 else:
38 else:
39 try:
39 try:
40 ui.status(_('initializing destination %s repository\n') % path)
40 ui.status(_('initializing destination %s repository\n') % path)
41 self.repo = hg.repository(self.ui, path, create=True)
41 self.repo = hg.repository(self.ui, path, create=True)
42 if not self.repo.local():
42 if not self.repo.local():
43 raise NoRepo(_('%s is not a local Mercurial repo') % path)
43 raise NoRepo(_('%s is not a local Mercurial repo') % path)
44 self.created.append(path)
44 self.created.append(path)
45 except hg.RepoError, err:
45 except hg.RepoError, err:
46 ui.print_exc()
46 ui.print_exc()
47 raise NoRepo("could not create hg repo %s as sink" % path)
47 raise NoRepo("could not create hg repo %s as sink" % path)
48 self.lock = None
48 self.lock = None
49 self.wlock = None
49 self.wlock = None
50 self.filemapmode = False
50 self.filemapmode = False
51
51
52 def before(self):
52 def before(self):
53 self.ui.debug(_('run hg sink pre-conversion action\n'))
53 self.ui.debug(_('run hg sink pre-conversion action\n'))
54 self.wlock = self.repo.wlock()
54 self.wlock = self.repo.wlock()
55 self.lock = self.repo.lock()
55 self.lock = self.repo.lock()
56 self.repo.dirstate.clear()
56 self.repo.dirstate.clear()
57
57
58 def after(self):
58 def after(self):
59 self.ui.debug(_('run hg sink post-conversion action\n'))
59 self.ui.debug(_('run hg sink post-conversion action\n'))
60 self.repo.dirstate.invalidate()
60 self.repo.dirstate.invalidate()
61 self.lock = None
61 self.lock = None
62 self.wlock = None
62 self.wlock = None
63
63
64 def revmapfile(self):
64 def revmapfile(self):
65 return os.path.join(self.path, ".hg", "shamap")
65 return os.path.join(self.path, ".hg", "shamap")
66
66
67 def authorfile(self):
67 def authorfile(self):
68 return os.path.join(self.path, ".hg", "authormap")
68 return os.path.join(self.path, ".hg", "authormap")
69
69
70 def getheads(self):
70 def getheads(self):
71 h = self.repo.changelog.heads()
71 h = self.repo.changelog.heads()
72 return [ hex(x) for x in h ]
72 return [ hex(x) for x in h ]
73
73
74 def putfile(self, f, e, data):
74 def putfile(self, f, e, data):
75 self.repo.wwrite(f, data, e)
75 self.repo.wwrite(f, data, e)
76 if f not in self.repo.dirstate:
76 if f not in self.repo.dirstate:
77 self.repo.dirstate.normallookup(f)
77 self.repo.dirstate.normallookup(f)
78
78
79 def copyfile(self, source, dest):
79 def copyfile(self, source, dest):
80 self.repo.copy(source, dest)
80 self.repo.copy(source, dest)
81
81
82 def delfile(self, f):
82 def delfile(self, f):
83 try:
83 try:
84 util.unlink(self.repo.wjoin(f))
84 util.unlink(self.repo.wjoin(f))
85 #self.repo.remove([f])
85 #self.repo.remove([f])
86 except OSError:
86 except OSError:
87 pass
87 pass
88
88
89 def setbranch(self, branch, pbranches):
89 def setbranch(self, branch, pbranches):
90 if not self.clonebranches:
90 if not self.clonebranches:
91 return
91 return
92
92
93 setbranch = (branch != self.lastbranch)
93 setbranch = (branch != self.lastbranch)
94 self.lastbranch = branch
94 self.lastbranch = branch
95 if not branch:
95 if not branch:
96 branch = 'default'
96 branch = 'default'
97 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
97 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
98 pbranch = pbranches and pbranches[0][1] or 'default'
98 pbranch = pbranches and pbranches[0][1] or 'default'
99
99
100 branchpath = os.path.join(self.path, branch)
100 branchpath = os.path.join(self.path, branch)
101 if setbranch:
101 if setbranch:
102 self.after()
102 self.after()
103 try:
103 try:
104 self.repo = hg.repository(self.ui, branchpath)
104 self.repo = hg.repository(self.ui, branchpath)
105 except:
105 except:
106 self.repo = hg.repository(self.ui, branchpath, create=True)
106 self.repo = hg.repository(self.ui, branchpath, create=True)
107 self.before()
107 self.before()
108
108
109 # pbranches may bring revisions from other branches (merge parents)
109 # pbranches may bring revisions from other branches (merge parents)
110 # Make sure we have them, or pull them.
110 # Make sure we have them, or pull them.
111 missings = {}
111 missings = {}
112 for b in pbranches:
112 for b in pbranches:
113 try:
113 try:
114 self.repo.lookup(b[0])
114 self.repo.lookup(b[0])
115 except:
115 except:
116 missings.setdefault(b[1], []).append(b[0])
116 missings.setdefault(b[1], []).append(b[0])
117
117
118 if missings:
118 if missings:
119 self.after()
119 self.after()
120 for pbranch, heads in missings.iteritems():
120 for pbranch, heads in missings.iteritems():
121 pbranchpath = os.path.join(self.path, pbranch)
121 pbranchpath = os.path.join(self.path, pbranch)
122 prepo = hg.repository(self.ui, pbranchpath)
122 prepo = hg.repository(self.ui, pbranchpath)
123 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
123 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
124 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
124 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
125 self.before()
125 self.before()
126
126
127 def putcommit(self, files, parents, commit):
127 def putcommit(self, files, parents, commit):
128 seen = {}
128 seen = {}
129 pl = []
129 pl = []
130 for p in parents:
130 for p in parents:
131 if p not in seen:
131 if p not in seen:
132 pl.append(p)
132 pl.append(p)
133 seen[p] = 1
133 seen[p] = 1
134 parents = pl
134 parents = pl
135 nparents = len(parents)
135 nparents = len(parents)
136 if self.filemapmode and nparents == 1:
136 if self.filemapmode and nparents == 1:
137 m1node = self.repo.changelog.read(bin(parents[0]))[0]
137 m1node = self.repo.changelog.read(bin(parents[0]))[0]
138 parent = parents[0]
138 parent = parents[0]
139
139
140 if len(parents) < 2: parents.append("0" * 40)
140 if len(parents) < 2: parents.append("0" * 40)
141 if len(parents) < 2: parents.append("0" * 40)
141 if len(parents) < 2: parents.append("0" * 40)
142 p2 = parents.pop(0)
142 p2 = parents.pop(0)
143
143
144 text = commit.desc
144 text = commit.desc
145 extra = commit.extra.copy()
145 extra = commit.extra.copy()
146 if self.branchnames and commit.branch:
146 if self.branchnames and commit.branch:
147 extra['branch'] = commit.branch
147 extra['branch'] = commit.branch
148 if commit.rev:
148 if commit.rev:
149 extra['convert_revision'] = commit.rev
149 extra['convert_revision'] = commit.rev
150
150
151 while parents:
151 while parents:
152 p1 = p2
152 p1 = p2
153 p2 = parents.pop(0)
153 p2 = parents.pop(0)
154 a = self.repo.rawcommit(files, text, commit.author, commit.date,
154 a = self.repo.rawcommit(files, text, commit.author, commit.date,
155 bin(p1), bin(p2), extra=extra)
155 bin(p1), bin(p2), extra=extra)
156 self.repo.dirstate.clear()
156 self.repo.dirstate.clear()
157 text = "(octopus merge fixup)\n"
157 text = "(octopus merge fixup)\n"
158 p2 = hg.hex(self.repo.changelog.tip())
158 p2 = hg.hex(self.repo.changelog.tip())
159
159
160 if self.filemapmode and nparents == 1:
160 if self.filemapmode and nparents == 1:
161 man = self.repo.manifest
161 man = self.repo.manifest
162 mnode = self.repo.changelog.read(bin(p2))[0]
162 mnode = self.repo.changelog.read(bin(p2))[0]
163 if not man.cmp(m1node, man.revision(mnode)):
163 if not man.cmp(m1node, man.revision(mnode)):
164 self.repo.rollback()
164 self.repo.rollback()
165 self.repo.dirstate.clear()
165 self.repo.dirstate.clear()
166 return parent
166 return parent
167 return p2
167 return p2
168
168
169 def puttags(self, tags):
169 def puttags(self, tags):
170 try:
170 try:
171 old = self.repo.wfile(".hgtags").read()
171 old = self.repo.wfile(".hgtags").read()
172 oldlines = old.splitlines(1)
172 oldlines = old.splitlines(1)
173 oldlines.sort()
173 oldlines.sort()
174 except:
174 except:
175 oldlines = []
175 oldlines = []
176
176
177 k = tags.keys()
177 k = tags.keys()
178 k.sort()
178 k.sort()
179 newlines = []
179 newlines = []
180 for tag in k:
180 for tag in k:
181 newlines.append("%s %s\n" % (tags[tag], tag))
181 newlines.append("%s %s\n" % (tags[tag], tag))
182
182
183 newlines.sort()
183 newlines.sort()
184
184
185 if newlines != oldlines:
185 if newlines != oldlines:
186 self.ui.status("updating tags\n")
186 self.ui.status("updating tags\n")
187 f = self.repo.wfile(".hgtags", "w")
187 f = self.repo.wfile(".hgtags", "w")
188 f.write("".join(newlines))
188 f.write("".join(newlines))
189 f.close()
189 f.close()
190 if not oldlines: self.repo.add([".hgtags"])
190 if not oldlines: self.repo.add([".hgtags"])
191 date = "%s 0" % int(time.mktime(time.gmtime()))
191 date = "%s 0" % int(time.mktime(time.gmtime()))
192 extra = {}
192 extra = {}
193 if self.tagsbranch != 'default':
193 if self.tagsbranch != 'default':
194 extra['branch'] = self.tagsbranch
194 extra['branch'] = self.tagsbranch
195 try:
195 try:
196 tagparent = self.repo.changectx(self.tagsbranch).node()
196 tagparent = self.repo.changectx(self.tagsbranch).node()
197 except hg.RepoError, inst:
197 except hg.RepoError, inst:
198 tagparent = nullid
198 tagparent = nullid
199 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
199 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
200 date, tagparent, nullid, extra=extra)
200 date, tagparent, nullid, extra=extra)
201 return hex(self.repo.changelog.tip())
201 return hex(self.repo.changelog.tip())
202
202
203 def setfilemapmode(self, active):
203 def setfilemapmode(self, active):
204 self.filemapmode = active
204 self.filemapmode = active
205
205
206 class mercurial_source(converter_source):
206 class mercurial_source(converter_source):
207 def __init__(self, ui, path, rev=None):
207 def __init__(self, ui, path, rev=None):
208 converter_source.__init__(self, ui, path, rev)
208 converter_source.__init__(self, ui, path, rev)
209 self.saverev = ui.configbool('convert', 'hg.saverev', True)
209 self.saverev = ui.configbool('convert', 'hg.saverev', True)
210 try:
210 try:
211 self.repo = hg.repository(self.ui, path)
211 self.repo = hg.repository(self.ui, path)
212 # try to provoke an exception if this isn't really a hg
212 # try to provoke an exception if this isn't really a hg
213 # repo, but some other bogus compatible-looking url
213 # repo, but some other bogus compatible-looking url
214 if not self.repo.local():
214 if not self.repo.local():
215 raise hg.RepoError()
215 raise hg.RepoError()
216 except hg.RepoError:
216 except hg.RepoError:
217 ui.print_exc()
217 ui.print_exc()
218 raise NoRepo("%s is not a local Mercurial repo" % path)
218 raise NoRepo("%s is not a local Mercurial repo" % path)
219 self.lastrev = None
219 self.lastrev = None
220 self.lastctx = None
220 self.lastctx = None
221 self._changescache = None
221 self._changescache = None
222 self.convertfp = None
222 self.convertfp = None
223
223
224 def changectx(self, rev):
224 def changectx(self, rev):
225 if self.lastrev != rev:
225 if self.lastrev != rev:
226 self.lastctx = self.repo.changectx(rev)
226 self.lastctx = self.repo.changectx(rev)
227 self.lastrev = rev
227 self.lastrev = rev
228 return self.lastctx
228 return self.lastctx
229
229
230 def getheads(self):
230 def getheads(self):
231 if self.rev:
231 if self.rev:
232 return [hex(self.repo.changectx(self.rev).node())]
232 return [hex(self.repo.changectx(self.rev).node())]
233 else:
233 else:
234 return [hex(node) for node in self.repo.heads()]
234 return [hex(node) for node in self.repo.heads()]
235
235
236 def getfile(self, name, rev):
236 def getfile(self, name, rev):
237 try:
237 try:
238 return self.changectx(rev).filectx(name).data()
238 return self.changectx(rev).filectx(name).data()
239 except revlog.LookupError, err:
239 except revlog.LookupError, err:
240 raise IOError(err)
240 raise IOError(err)
241
241
242 def getmode(self, name, rev):
242 def getmode(self, name, rev):
243 m = self.changectx(rev).manifest()
243 m = self.changectx(rev).manifest()
244 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
244 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
245
245
246 def getchanges(self, rev):
246 def getchanges(self, rev):
247 ctx = self.changectx(rev)
247 ctx = self.changectx(rev)
248 if self._changescache and self._changescache[0] == rev:
248 if self._changescache and self._changescache[0] == rev:
249 m, a, r = self._changescache[1]
249 m, a, r = self._changescache[1]
250 else:
250 else:
251 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
251 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
252 changes = [(name, rev) for name in m + a + r]
252 changes = [(name, rev) for name in m + a + r]
253 changes.sort()
253 changes.sort()
254 return (changes, self.getcopies(ctx, m + a))
254 return (changes, self.getcopies(ctx, m + a))
255
255
256 def getcopies(self, ctx, files):
256 def getcopies(self, ctx, files):
257 copies = {}
257 copies = {}
258 for name in files:
258 for name in files:
259 try:
259 try:
260 copies[name] = ctx.filectx(name).renamed()[0]
260 copies[name] = ctx.filectx(name).renamed()[0]
261 except TypeError:
261 except TypeError:
262 pass
262 pass
263 return copies
263 return copies
264
264
265 def getcommit(self, rev):
265 def getcommit(self, rev):
266 ctx = self.changectx(rev)
266 ctx = self.changectx(rev)
267 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
267 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
268 if self.saverev:
268 if self.saverev:
269 crev = rev
269 crev = rev
270 else:
270 else:
271 crev = None
271 crev = None
272 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
272 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
273 desc=ctx.description(), rev=crev, parents=parents,
273 desc=ctx.description(), rev=crev, parents=parents,
274 branch=ctx.branch(), extra=ctx.extra())
274 branch=ctx.branch(), extra=ctx.extra())
275
275
276 def gettags(self):
276 def gettags(self):
277 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
277 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
278 return dict([(name, hex(node)) for name, node in tags])
278 return dict([(name, hex(node)) for name, node in tags])
279
279
280 def getchangedfiles(self, rev, i):
280 def getchangedfiles(self, rev, i):
281 ctx = self.changectx(rev)
281 ctx = self.changectx(rev)
282 i = i or 0
282 i = i or 0
283 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
283 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
284
284
285 if i == 0:
285 if i == 0:
286 self._changescache = (rev, changes)
286 self._changescache = (rev, changes)
287
287
288 return changes[0] + changes[1] + changes[2]
288 return changes[0] + changes[1] + changes[2]
289
289
290 def converted(self, rev, destrev):
290 def converted(self, rev, destrev):
291 if self.convertfp is None:
291 if self.convertfp is None:
292 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
292 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
293 'a')
293 'a')
294 self.convertfp.write('%s %s\n' % (destrev, rev))
294 self.convertfp.write('%s %s\n' % (destrev, rev))
295 self.convertfp.flush()
295 self.convertfp.flush()
296
296
297 def before(self):
297 def before(self):
298 self.ui.debug(_('run hg source pre-conversion action\n'))
298 self.ui.debug(_('run hg source pre-conversion action\n'))
299
299
300 def after(self):
300 def after(self):
301 self.ui.debug(_('run hg source post-conversion action\n'))
301 self.ui.debug(_('run hg source post-conversion action\n'))
@@ -1,251 +1,251 b''
1 # extdiff.py - external diff program support for mercurial
1 # extdiff.py - external diff program support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''
8 '''
9 The `extdiff' Mercurial extension allows you to use external programs
9 The `extdiff' Mercurial extension allows you to use external programs
10 to compare revisions, or revision with working dir. The external diff
10 to compare revisions, or revision with working dir. The external diff
11 programs are called with a configurable set of options and two
11 programs are called with a configurable set of options and two
12 non-option arguments: paths to directories containing snapshots of
12 non-option arguments: paths to directories containing snapshots of
13 files to compare.
13 files to compare.
14
14
15 To enable this extension:
15 To enable this extension:
16
16
17 [extensions]
17 [extensions]
18 hgext.extdiff =
18 hgext.extdiff =
19
19
20 The `extdiff' extension also allows to configure new diff commands, so
20 The `extdiff' extension also allows to configure new diff commands, so
21 you do not need to type "hg extdiff -p kdiff3" always.
21 you do not need to type "hg extdiff -p kdiff3" always.
22
22
23 [extdiff]
23 [extdiff]
24 # add new command that runs GNU diff(1) in 'context diff' mode
24 # add new command that runs GNU diff(1) in 'context diff' mode
25 cdiff = gdiff -Nprc5
25 cdiff = gdiff -Nprc5
26 ## or the old way:
26 ## or the old way:
27 #cmd.cdiff = gdiff
27 #cmd.cdiff = gdiff
28 #opts.cdiff = -Nprc5
28 #opts.cdiff = -Nprc5
29
29
30 # add new command called vdiff, runs kdiff3
30 # add new command called vdiff, runs kdiff3
31 vdiff = kdiff3
31 vdiff = kdiff3
32
32
33 # add new command called meld, runs meld (no need to name twice)
33 # add new command called meld, runs meld (no need to name twice)
34 meld =
34 meld =
35
35
36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
37 #(see http://www.vim.org/scripts/script.php?script_id=102)
37 #(see http://www.vim.org/scripts/script.php?script_id=102)
38 # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
38 # Non english user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
39 # your .vimrc
39 # your .vimrc
40 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
40 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
41
41
42 You can use -I/-X and list of file or directory names like normal
42 You can use -I/-X and list of file or directory names like normal
43 "hg diff" command. The `extdiff' extension makes snapshots of only
43 "hg diff" command. The `extdiff' extension makes snapshots of only
44 needed files, so running the external diff program will actually be
44 needed files, so running the external diff program will actually be
45 pretty fast (at least faster than having to compare the entire tree).
45 pretty fast (at least faster than having to compare the entire tree).
46 '''
46 '''
47
47
48 from mercurial.i18n import _
48 from mercurial.i18n import _
49 from mercurial.node import *
49 from mercurial.node import short
50 from mercurial import cmdutil, util, commands
50 from mercurial import cmdutil, util, commands
51 import os, shlex, shutil, tempfile
51 import os, shlex, shutil, tempfile
52
52
53 def snapshot_node(ui, repo, files, node, tmproot):
53 def snapshot_node(ui, repo, files, node, tmproot):
54 '''snapshot files as of some revision'''
54 '''snapshot files as of some revision'''
55 mf = repo.changectx(node).manifest()
55 mf = repo.changectx(node).manifest()
56 dirname = os.path.basename(repo.root)
56 dirname = os.path.basename(repo.root)
57 if dirname == "":
57 if dirname == "":
58 dirname = "root"
58 dirname = "root"
59 dirname = '%s.%s' % (dirname, short(node))
59 dirname = '%s.%s' % (dirname, short(node))
60 base = os.path.join(tmproot, dirname)
60 base = os.path.join(tmproot, dirname)
61 os.mkdir(base)
61 os.mkdir(base)
62 ui.note(_('making snapshot of %d files from rev %s\n') %
62 ui.note(_('making snapshot of %d files from rev %s\n') %
63 (len(files), short(node)))
63 (len(files), short(node)))
64 for fn in files:
64 for fn in files:
65 if not fn in mf:
65 if not fn in mf:
66 # skipping new file after a merge ?
66 # skipping new file after a merge ?
67 continue
67 continue
68 wfn = util.pconvert(fn)
68 wfn = util.pconvert(fn)
69 ui.note(' %s\n' % wfn)
69 ui.note(' %s\n' % wfn)
70 dest = os.path.join(base, wfn)
70 dest = os.path.join(base, wfn)
71 destdir = os.path.dirname(dest)
71 destdir = os.path.dirname(dest)
72 if not os.path.isdir(destdir):
72 if not os.path.isdir(destdir):
73 os.makedirs(destdir)
73 os.makedirs(destdir)
74 data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
74 data = repo.wwritedata(wfn, repo.file(wfn).read(mf[wfn]))
75 open(dest, 'wb').write(data)
75 open(dest, 'wb').write(data)
76 return dirname
76 return dirname
77
77
78
78
79 def snapshot_wdir(ui, repo, files, tmproot):
79 def snapshot_wdir(ui, repo, files, tmproot):
80 '''snapshot files from working directory.
80 '''snapshot files from working directory.
81 if not using snapshot, -I/-X does not work and recursive diff
81 if not using snapshot, -I/-X does not work and recursive diff
82 in tools like kdiff3 and meld displays too many files.'''
82 in tools like kdiff3 and meld displays too many files.'''
83 repo_root = repo.root
83 repo_root = repo.root
84
84
85 dirname = os.path.basename(repo_root)
85 dirname = os.path.basename(repo_root)
86 if dirname == "":
86 if dirname == "":
87 dirname = "root"
87 dirname = "root"
88 base = os.path.join(tmproot, dirname)
88 base = os.path.join(tmproot, dirname)
89 os.mkdir(base)
89 os.mkdir(base)
90 ui.note(_('making snapshot of %d files from working dir\n') %
90 ui.note(_('making snapshot of %d files from working dir\n') %
91 (len(files)))
91 (len(files)))
92
92
93 fns_and_mtime = []
93 fns_and_mtime = []
94
94
95 for fn in files:
95 for fn in files:
96 wfn = util.pconvert(fn)
96 wfn = util.pconvert(fn)
97 ui.note(' %s\n' % wfn)
97 ui.note(' %s\n' % wfn)
98 dest = os.path.join(base, wfn)
98 dest = os.path.join(base, wfn)
99 destdir = os.path.dirname(dest)
99 destdir = os.path.dirname(dest)
100 if not os.path.isdir(destdir):
100 if not os.path.isdir(destdir):
101 os.makedirs(destdir)
101 os.makedirs(destdir)
102
102
103 fp = open(dest, 'wb')
103 fp = open(dest, 'wb')
104 for chunk in util.filechunkiter(repo.wopener(wfn)):
104 for chunk in util.filechunkiter(repo.wopener(wfn)):
105 fp.write(chunk)
105 fp.write(chunk)
106 fp.close()
106 fp.close()
107
107
108 fns_and_mtime.append((dest, os.path.join(repo_root, fn),
108 fns_and_mtime.append((dest, os.path.join(repo_root, fn),
109 os.path.getmtime(dest)))
109 os.path.getmtime(dest)))
110
110
111
111
112 return dirname, fns_and_mtime
112 return dirname, fns_and_mtime
113
113
114
114
115 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
115 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
116 '''Do the actuall diff:
116 '''Do the actuall diff:
117
117
118 - copy to a temp structure if diffing 2 internal revisions
118 - copy to a temp structure if diffing 2 internal revisions
119 - copy to a temp structure if diffing working revision with
119 - copy to a temp structure if diffing working revision with
120 another one and more than 1 file is changed
120 another one and more than 1 file is changed
121 - just invoke the diff for a single file in the working dir
121 - just invoke the diff for a single file in the working dir
122 '''
122 '''
123 node1, node2 = cmdutil.revpair(repo, opts['rev'])
123 node1, node2 = cmdutil.revpair(repo, opts['rev'])
124 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
124 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
125 modified, added, removed, deleted, unknown = repo.status(
125 modified, added, removed, deleted, unknown = repo.status(
126 node1, node2, files, match=matchfn)[:5]
126 node1, node2, files, match=matchfn)[:5]
127 if not (modified or added or removed):
127 if not (modified or added or removed):
128 return 0
128 return 0
129
129
130 tmproot = tempfile.mkdtemp(prefix='extdiff.')
130 tmproot = tempfile.mkdtemp(prefix='extdiff.')
131 dir2root = ''
131 dir2root = ''
132 try:
132 try:
133 # Always make a copy of node1
133 # Always make a copy of node1
134 dir1 = snapshot_node(ui, repo, modified + removed, node1, tmproot)
134 dir1 = snapshot_node(ui, repo, modified + removed, node1, tmproot)
135 changes = len(modified) + len(removed) + len(added)
135 changes = len(modified) + len(removed) + len(added)
136
136
137 fns_and_mtime = []
137 fns_and_mtime = []
138
138
139 # If node2 in not the wc or there is >1 change, copy it
139 # If node2 in not the wc or there is >1 change, copy it
140 if node2:
140 if node2:
141 dir2 = snapshot_node(ui, repo, modified + added, node2, tmproot)
141 dir2 = snapshot_node(ui, repo, modified + added, node2, tmproot)
142 elif changes > 1:
142 elif changes > 1:
143 # we only actually need to get the files to copy back to the working
143 # we only actually need to get the files to copy back to the working
144 # dir in this case (because the other cases are: diffing 2 revisions
144 # dir in this case (because the other cases are: diffing 2 revisions
145 # or single file -- in which case the file is already directly passed
145 # or single file -- in which case the file is already directly passed
146 # to the diff tool).
146 # to the diff tool).
147 dir2, fns_and_mtime = snapshot_wdir(ui, repo, modified + added, tmproot)
147 dir2, fns_and_mtime = snapshot_wdir(ui, repo, modified + added, tmproot)
148 else:
148 else:
149 # This lets the diff tool open the changed file directly
149 # This lets the diff tool open the changed file directly
150 dir2 = ''
150 dir2 = ''
151 dir2root = repo.root
151 dir2root = repo.root
152
152
153 # If only one change, diff the files instead of the directories
153 # If only one change, diff the files instead of the directories
154 if changes == 1:
154 if changes == 1:
155 if len(modified):
155 if len(modified):
156 dir1 = os.path.join(dir1, util.localpath(modified[0]))
156 dir1 = os.path.join(dir1, util.localpath(modified[0]))
157 dir2 = os.path.join(dir2root, dir2, util.localpath(modified[0]))
157 dir2 = os.path.join(dir2root, dir2, util.localpath(modified[0]))
158 elif len(removed):
158 elif len(removed):
159 dir1 = os.path.join(dir1, util.localpath(removed[0]))
159 dir1 = os.path.join(dir1, util.localpath(removed[0]))
160 dir2 = os.devnull
160 dir2 = os.devnull
161 else:
161 else:
162 dir1 = os.devnull
162 dir1 = os.devnull
163 dir2 = os.path.join(dir2root, dir2, util.localpath(added[0]))
163 dir2 = os.path.join(dir2root, dir2, util.localpath(added[0]))
164
164
165 cmdline = ('%s %s %s %s' %
165 cmdline = ('%s %s %s %s' %
166 (util.shellquote(diffcmd), ' '.join(diffopts),
166 (util.shellquote(diffcmd), ' '.join(diffopts),
167 util.shellquote(dir1), util.shellquote(dir2)))
167 util.shellquote(dir1), util.shellquote(dir2)))
168 ui.debug('running %r in %s\n' % (cmdline, tmproot))
168 ui.debug('running %r in %s\n' % (cmdline, tmproot))
169 util.system(cmdline, cwd=tmproot)
169 util.system(cmdline, cwd=tmproot)
170
170
171 for copy_fn, working_fn, mtime in fns_and_mtime:
171 for copy_fn, working_fn, mtime in fns_and_mtime:
172 if os.path.getmtime(copy_fn) != mtime:
172 if os.path.getmtime(copy_fn) != mtime:
173 ui.debug('File changed while diffing. '
173 ui.debug('File changed while diffing. '
174 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
174 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
175 util.copyfile(copy_fn, working_fn)
175 util.copyfile(copy_fn, working_fn)
176
176
177 return 1
177 return 1
178 finally:
178 finally:
179 ui.note(_('cleaning up temp directory\n'))
179 ui.note(_('cleaning up temp directory\n'))
180 shutil.rmtree(tmproot)
180 shutil.rmtree(tmproot)
181
181
182 def extdiff(ui, repo, *pats, **opts):
182 def extdiff(ui, repo, *pats, **opts):
183 '''use external program to diff repository (or selected files)
183 '''use external program to diff repository (or selected files)
184
184
185 Show differences between revisions for the specified files, using
185 Show differences between revisions for the specified files, using
186 an external program. The default program used is diff, with
186 an external program. The default program used is diff, with
187 default options "-Npru".
187 default options "-Npru".
188
188
189 To select a different program, use the -p option. The program
189 To select a different program, use the -p option. The program
190 will be passed the names of two directories to compare. To pass
190 will be passed the names of two directories to compare. To pass
191 additional options to the program, use the -o option. These will
191 additional options to the program, use the -o option. These will
192 be passed before the names of the directories to compare.
192 be passed before the names of the directories to compare.
193
193
194 When two revision arguments are given, then changes are
194 When two revision arguments are given, then changes are
195 shown between those revisions. If only one revision is
195 shown between those revisions. If only one revision is
196 specified then that revision is compared to the working
196 specified then that revision is compared to the working
197 directory, and, when no revisions are specified, the
197 directory, and, when no revisions are specified, the
198 working directory files are compared to its parent.'''
198 working directory files are compared to its parent.'''
199 program = opts['program'] or 'diff'
199 program = opts['program'] or 'diff'
200 if opts['program']:
200 if opts['program']:
201 option = opts['option']
201 option = opts['option']
202 else:
202 else:
203 option = opts['option'] or ['-Npru']
203 option = opts['option'] or ['-Npru']
204 return dodiff(ui, repo, program, option, pats, opts)
204 return dodiff(ui, repo, program, option, pats, opts)
205
205
206 cmdtable = {
206 cmdtable = {
207 "extdiff":
207 "extdiff":
208 (extdiff,
208 (extdiff,
209 [('p', 'program', '', _('comparison program to run')),
209 [('p', 'program', '', _('comparison program to run')),
210 ('o', 'option', [], _('pass option to comparison program')),
210 ('o', 'option', [], _('pass option to comparison program')),
211 ('r', 'rev', [], _('revision')),
211 ('r', 'rev', [], _('revision')),
212 ] + commands.walkopts,
212 ] + commands.walkopts,
213 _('hg extdiff [OPT]... [FILE]...')),
213 _('hg extdiff [OPT]... [FILE]...')),
214 }
214 }
215
215
216 def uisetup(ui):
216 def uisetup(ui):
217 for cmd, path in ui.configitems('extdiff'):
217 for cmd, path in ui.configitems('extdiff'):
218 if cmd.startswith('cmd.'):
218 if cmd.startswith('cmd.'):
219 cmd = cmd[4:]
219 cmd = cmd[4:]
220 if not path: path = cmd
220 if not path: path = cmd
221 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
221 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
222 diffopts = diffopts and [diffopts] or []
222 diffopts = diffopts and [diffopts] or []
223 elif cmd.startswith('opts.'):
223 elif cmd.startswith('opts.'):
224 continue
224 continue
225 else:
225 else:
226 # command = path opts
226 # command = path opts
227 if path:
227 if path:
228 diffopts = shlex.split(path)
228 diffopts = shlex.split(path)
229 path = diffopts.pop(0)
229 path = diffopts.pop(0)
230 else:
230 else:
231 path, diffopts = cmd, []
231 path, diffopts = cmd, []
232 def save(cmd, path, diffopts):
232 def save(cmd, path, diffopts):
233 '''use closure to save diff command to use'''
233 '''use closure to save diff command to use'''
234 def mydiff(ui, repo, *pats, **opts):
234 def mydiff(ui, repo, *pats, **opts):
235 return dodiff(ui, repo, path, diffopts, pats, opts)
235 return dodiff(ui, repo, path, diffopts, pats, opts)
236 mydiff.__doc__ = '''use %(path)s to diff repository (or selected files)
236 mydiff.__doc__ = '''use %(path)s to diff repository (or selected files)
237
237
238 Show differences between revisions for the specified
238 Show differences between revisions for the specified
239 files, using the %(path)s program.
239 files, using the %(path)s program.
240
240
241 When two revision arguments are given, then changes are
241 When two revision arguments are given, then changes are
242 shown between those revisions. If only one revision is
242 shown between those revisions. If only one revision is
243 specified then that revision is compared to the working
243 specified then that revision is compared to the working
244 directory, and, when no revisions are specified, the
244 directory, and, when no revisions are specified, the
245 working directory files are compared to its parent.''' % {
245 working directory files are compared to its parent.''' % {
246 'path': util.uirepr(path),
246 'path': util.uirepr(path),
247 }
247 }
248 return mydiff
248 return mydiff
249 cmdtable[cmd] = (save(cmd, path, diffopts),
249 cmdtable[cmd] = (save(cmd, path, diffopts),
250 cmdtable['extdiff'][1][1:],
250 cmdtable['extdiff'][1][1:],
251 _('hg %s [OPTION]... [FILE]...') % cmd)
251 _('hg %s [OPTION]... [FILE]...') % cmd)
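As an illustration of the [extdiff] section that uisetup above parses, a minimal hgrc sketch (the command names vdiff, meld and ndiff and the programs they run are made up; only the cmd./opts. and shorthand forms come from the code):

    [extdiff]
    # "hg vdiff" runs kdiff3 on the two snapshot directories
    cmd.vdiff = kdiff3
    # "hg meld" runs meld with the extra option taken from opts.meld
    cmd.meld = meld
    opts.meld = -a
    # shorthand form: the value is split into program + options
    ndiff = diff -Npru

Each entry is registered as its own command via save(), reusing the extdiff option table without -p/--program.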
@@ -1,123 +1,123 b''
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 from mercurial.node import *
9 from mercurial.node import nullid, short
10 from mercurial import commands, cmdutil, hg, node, util
10 from mercurial import commands, cmdutil, hg, node, util
11
11
12 def fetch(ui, repo, source='default', **opts):
12 def fetch(ui, repo, source='default', **opts):
13 '''Pull changes from a remote repository, merge new changes if needed.
13 '''Pull changes from a remote repository, merge new changes if needed.
14
14
15 This finds all changes from the repository at the specified path
15 This finds all changes from the repository at the specified path
16 or URL and adds them to the local repository.
16 or URL and adds them to the local repository.
17
17
18 If the pulled changes add a new head, the head is automatically
18 If the pulled changes add a new head, the head is automatically
19 merged, and the result of the merge is committed. Otherwise, the
19 merged, and the result of the merge is committed. Otherwise, the
20 working directory is updated to include the new changes.
20 working directory is updated to include the new changes.
21
21
22 When a merge occurs, the newly pulled changes are assumed to be
22 When a merge occurs, the newly pulled changes are assumed to be
23 "authoritative". The head of the new changes is used as the first
23 "authoritative". The head of the new changes is used as the first
24 parent, with local changes as the second. To switch the merge
24 parent, with local changes as the second. To switch the merge
25 order, use --switch-parent.
25 order, use --switch-parent.
26
26
27 See 'hg help dates' for a list of formats valid for -d/--date.
27 See 'hg help dates' for a list of formats valid for -d/--date.
28 '''
28 '''
29
29
30 def postincoming(other, modheads):
30 def postincoming(other, modheads):
31 if modheads == 0:
31 if modheads == 0:
32 return 0
32 return 0
33 if modheads == 1:
33 if modheads == 1:
34 return hg.clean(repo, repo.changelog.tip())
34 return hg.clean(repo, repo.changelog.tip())
35 newheads = repo.heads(parent)
35 newheads = repo.heads(parent)
36 newchildren = [n for n in repo.heads(parent) if n != parent]
36 newchildren = [n for n in repo.heads(parent) if n != parent]
37 newparent = parent
37 newparent = parent
38 if newchildren:
38 if newchildren:
39 newparent = newchildren[0]
39 newparent = newchildren[0]
40 hg.clean(repo, newparent)
40 hg.clean(repo, newparent)
41 newheads = [n for n in repo.heads() if n != newparent]
41 newheads = [n for n in repo.heads() if n != newparent]
42 if len(newheads) > 1:
42 if len(newheads) > 1:
43 ui.status(_('not merging with %d other new heads '
43 ui.status(_('not merging with %d other new heads '
44 '(use "hg heads" and "hg merge" to merge them)\n') %
44 '(use "hg heads" and "hg merge" to merge them)\n') %
45 (len(newheads) - 1))
45 (len(newheads) - 1))
46 return
46 return
47 err = False
47 err = False
48 if newheads:
48 if newheads:
49 # By default, we consider the repository we're pulling
49 # By default, we consider the repository we're pulling
50 # *from* as authoritative, so we merge our changes into
50 # *from* as authoritative, so we merge our changes into
51 # theirs.
51 # theirs.
52 if opts['switch_parent']:
52 if opts['switch_parent']:
53 firstparent, secondparent = newparent, newheads[0]
53 firstparent, secondparent = newparent, newheads[0]
54 else:
54 else:
55 firstparent, secondparent = newheads[0], newparent
55 firstparent, secondparent = newheads[0], newparent
56 ui.status(_('updating to %d:%s\n') %
56 ui.status(_('updating to %d:%s\n') %
57 (repo.changelog.rev(firstparent),
57 (repo.changelog.rev(firstparent),
58 short(firstparent)))
58 short(firstparent)))
59 hg.clean(repo, firstparent)
59 hg.clean(repo, firstparent)
60 ui.status(_('merging with %d:%s\n') %
60 ui.status(_('merging with %d:%s\n') %
61 (repo.changelog.rev(secondparent), short(secondparent)))
61 (repo.changelog.rev(secondparent), short(secondparent)))
62 err = hg.merge(repo, secondparent, remind=False)
62 err = hg.merge(repo, secondparent, remind=False)
63 if not err:
63 if not err:
64 mod, add, rem = repo.status()[:3]
64 mod, add, rem = repo.status()[:3]
65 message = (cmdutil.logmessage(opts) or
65 message = (cmdutil.logmessage(opts) or
66 (_('Automated merge with %s') %
66 (_('Automated merge with %s') %
67 util.removeauth(other.url())))
67 util.removeauth(other.url())))
68 n = repo.commit(mod + add + rem, message,
68 n = repo.commit(mod + add + rem, message,
69 opts['user'], opts['date'],
69 opts['user'], opts['date'],
70 force_editor=opts.get('force_editor'))
70 force_editor=opts.get('force_editor'))
71 ui.status(_('new changeset %d:%s merges remote changes '
71 ui.status(_('new changeset %d:%s merges remote changes '
72 'with local\n') % (repo.changelog.rev(n),
72 'with local\n') % (repo.changelog.rev(n),
73 short(n)))
73 short(n)))
74
74
75 def pull():
75 def pull():
76 cmdutil.setremoteconfig(ui, opts)
76 cmdutil.setremoteconfig(ui, opts)
77
77
78 other = hg.repository(ui, ui.expandpath(source))
78 other = hg.repository(ui, ui.expandpath(source))
79 ui.status(_('pulling from %s\n') %
79 ui.status(_('pulling from %s\n') %
80 util.hidepassword(ui.expandpath(source)))
80 util.hidepassword(ui.expandpath(source)))
81 revs = None
81 revs = None
82 if opts['rev']:
82 if opts['rev']:
83 if not other.local():
83 if not other.local():
84 raise util.Abort(_("fetch -r doesn't work for remote "
84 raise util.Abort(_("fetch -r doesn't work for remote "
85 "repositories yet"))
85 "repositories yet"))
86 else:
86 else:
87 revs = [other.lookup(rev) for rev in opts['rev']]
87 revs = [other.lookup(rev) for rev in opts['rev']]
88 modheads = repo.pull(other, heads=revs)
88 modheads = repo.pull(other, heads=revs)
89 return postincoming(other, modheads)
89 return postincoming(other, modheads)
90
90
91 date = opts.get('date')
91 date = opts.get('date')
92 if date:
92 if date:
93 opts['date'] = util.parsedate(date)
93 opts['date'] = util.parsedate(date)
94
94
95 parent, p2 = repo.dirstate.parents()
95 parent, p2 = repo.dirstate.parents()
96 if parent != repo.changelog.tip():
96 if parent != repo.changelog.tip():
97 raise util.Abort(_('working dir not at tip '
97 raise util.Abort(_('working dir not at tip '
98 '(use "hg update" to check out tip)'))
98 '(use "hg update" to check out tip)'))
99 if p2 != nullid:
99 if p2 != nullid:
100 raise util.Abort(_('outstanding uncommitted merge'))
100 raise util.Abort(_('outstanding uncommitted merge'))
101 wlock = lock = None
101 wlock = lock = None
102 try:
102 try:
103 wlock = repo.wlock()
103 wlock = repo.wlock()
104 lock = repo.lock()
104 lock = repo.lock()
105 mod, add, rem = repo.status()[:3]
105 mod, add, rem = repo.status()[:3]
106 if mod or add or rem:
106 if mod or add or rem:
107 raise util.Abort(_('outstanding uncommitted changes'))
107 raise util.Abort(_('outstanding uncommitted changes'))
108 if len(repo.heads()) > 1:
108 if len(repo.heads()) > 1:
109 raise util.Abort(_('multiple heads in this repository '
109 raise util.Abort(_('multiple heads in this repository '
110 '(use "hg heads" and "hg merge" to merge)'))
110 '(use "hg heads" and "hg merge" to merge)'))
111 return pull()
111 return pull()
112 finally:
112 finally:
113 del lock, wlock
113 del lock, wlock
114
114
115 cmdtable = {
115 cmdtable = {
116 'fetch':
116 'fetch':
117 (fetch,
117 (fetch,
118 [('r', 'rev', [], _('a specific revision you would like to pull')),
118 [('r', 'rev', [], _('a specific revision you would like to pull')),
119 ('f', 'force-editor', None, _('edit commit message')),
119 ('f', 'force-editor', None, _('edit commit message')),
120 ('', 'switch-parent', None, _('switch parents when merging')),
120 ('', 'switch-parent', None, _('switch parents when merging')),
121 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
121 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
122 _('hg fetch [SOURCE]')),
122 _('hg fetch [SOURCE]')),
123 }
123 }
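A hypothetical invocation of the fetch command defined above (the repository URL and commit message are made up; the flags are those listed in cmdtable plus -m from commitopts):

    hg fetch -r tip -m 'Automated merge with upstream' --switch-parent http://example.com/hg/repo

With --switch-parent the local head is used as the first parent of the merge commit; by default the pulled head comes first.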
@@ -1,406 +1,406 b''
1 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
1 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
2 # Published under the GNU GPL
2 # Published under the GNU GPL
3
3
4 '''
4 '''
5 imerge - interactive merge
5 imerge - interactive merge
6 '''
6 '''
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 from mercurial.node import *
9 from mercurial.node import hex, short
10 from mercurial import commands, cmdutil, dispatch, fancyopts
10 from mercurial import commands, cmdutil, dispatch, fancyopts
11 from mercurial import hg, filemerge, util
11 from mercurial import hg, filemerge, util
12 import os, tarfile
12 import os, tarfile
13
13
14 class InvalidStateFileException(Exception): pass
14 class InvalidStateFileException(Exception): pass
15
15
16 class ImergeStateFile(object):
16 class ImergeStateFile(object):
17 def __init__(self, im):
17 def __init__(self, im):
18 self.im = im
18 self.im = im
19
19
20 def save(self, dest):
20 def save(self, dest):
21 tf = tarfile.open(dest, 'w:gz')
21 tf = tarfile.open(dest, 'w:gz')
22
22
23 st = os.path.join(self.im.path, 'status')
23 st = os.path.join(self.im.path, 'status')
24 tf.add(st, os.path.join('.hg', 'imerge', 'status'))
24 tf.add(st, os.path.join('.hg', 'imerge', 'status'))
25
25
26 for f in self.im.resolved:
26 for f in self.im.resolved:
27 (fd, fo) = self.im.conflicts[f]
27 (fd, fo) = self.im.conflicts[f]
28 abssrc = self.im.repo.wjoin(fd)
28 abssrc = self.im.repo.wjoin(fd)
29 tf.add(abssrc, fd)
29 tf.add(abssrc, fd)
30
30
31 tf.close()
31 tf.close()
32
32
33 def load(self, source):
33 def load(self, source):
34 wlock = self.im.repo.wlock()
34 wlock = self.im.repo.wlock()
35 lock = self.im.repo.lock()
35 lock = self.im.repo.lock()
36
36
37 tf = tarfile.open(source, 'r')
37 tf = tarfile.open(source, 'r')
38 contents = tf.getnames()
38 contents = tf.getnames()
39 # tarfile normalizes path separators to '/'
39 # tarfile normalizes path separators to '/'
40 statusfile = '.hg/imerge/status'
40 statusfile = '.hg/imerge/status'
41 if statusfile not in contents:
41 if statusfile not in contents:
42 raise InvalidStateFileException('no status file')
42 raise InvalidStateFileException('no status file')
43
43
44 tf.extract(statusfile, self.im.repo.root)
44 tf.extract(statusfile, self.im.repo.root)
45 p1, p2 = self.im.load()
45 p1, p2 = self.im.load()
46 if self.im.repo.dirstate.parents()[0] != p1.node():
46 if self.im.repo.dirstate.parents()[0] != p1.node():
47 hg.clean(self.im.repo, p1.node())
47 hg.clean(self.im.repo, p1.node())
48 self.im.start(p2.node())
48 self.im.start(p2.node())
49 for tarinfo in tf:
49 for tarinfo in tf:
50 tf.extract(tarinfo, self.im.repo.root)
50 tf.extract(tarinfo, self.im.repo.root)
51 self.im.load()
51 self.im.load()
52
52
53 class Imerge(object):
53 class Imerge(object):
54 def __init__(self, ui, repo):
54 def __init__(self, ui, repo):
55 self.ui = ui
55 self.ui = ui
56 self.repo = repo
56 self.repo = repo
57
57
58 self.path = repo.join('imerge')
58 self.path = repo.join('imerge')
59 self.opener = util.opener(self.path)
59 self.opener = util.opener(self.path)
60
60
61 self.wctx = self.repo.workingctx()
61 self.wctx = self.repo.workingctx()
62 self.conflicts = {}
62 self.conflicts = {}
63 self.resolved = []
63 self.resolved = []
64
64
65 def merging(self):
65 def merging(self):
66 return len(self.wctx.parents()) > 1
66 return len(self.wctx.parents()) > 1
67
67
68 def load(self):
68 def load(self):
69 # status format. \0-delimited file, fields are
69 # status format. \0-delimited file, fields are
70 # p1, p2, conflict count, conflict filenames, resolved filenames
70 # p1, p2, conflict count, conflict filenames, resolved filenames
71 # conflict filenames are tuples of localname, remoteorig, remotenew
71 # conflict filenames are tuples of localname, remoteorig, remotenew
72
72
73 statusfile = self.opener('status')
73 statusfile = self.opener('status')
74
74
75 status = statusfile.read().split('\0')
75 status = statusfile.read().split('\0')
76 if len(status) < 3:
76 if len(status) < 3:
77 raise util.Abort('invalid imerge status file')
77 raise util.Abort('invalid imerge status file')
78
78
79 try:
79 try:
80 parents = [self.repo.changectx(n) for n in status[:2]]
80 parents = [self.repo.changectx(n) for n in status[:2]]
81 except LookupError:
81 except LookupError:
82 raise util.Abort('merge parent %s not in repository' % short(n))
82 raise util.Abort('merge parent %s not in repository' % short(n))
83
83
84 status = status[2:]
84 status = status[2:]
85 conflicts = int(status.pop(0)) * 3
85 conflicts = int(status.pop(0)) * 3
86 self.resolved = status[conflicts:]
86 self.resolved = status[conflicts:]
87 for i in xrange(0, conflicts, 3):
87 for i in xrange(0, conflicts, 3):
88 self.conflicts[status[i]] = (status[i+1], status[i+2])
88 self.conflicts[status[i]] = (status[i+1], status[i+2])
89
89
90 return parents
90 return parents
91
91
92 def save(self):
92 def save(self):
93 lock = self.repo.lock()
93 lock = self.repo.lock()
94
94
95 if not os.path.isdir(self.path):
95 if not os.path.isdir(self.path):
96 os.mkdir(self.path)
96 os.mkdir(self.path)
97 statusfile = self.opener('status', 'wb')
97 statusfile = self.opener('status', 'wb')
98
98
99 out = [hex(n.node()) for n in self.wctx.parents()]
99 out = [hex(n.node()) for n in self.wctx.parents()]
100 out.append(str(len(self.conflicts)))
100 out.append(str(len(self.conflicts)))
101 conflicts = self.conflicts.items()
101 conflicts = self.conflicts.items()
102 conflicts.sort()
102 conflicts.sort()
103 for fw, fd_fo in conflicts:
103 for fw, fd_fo in conflicts:
104 out.append(fw)
104 out.append(fw)
105 out.extend(fd_fo)
105 out.extend(fd_fo)
106 out.extend(self.resolved)
106 out.extend(self.resolved)
107
107
108 statusfile.write('\0'.join(out))
108 statusfile.write('\0'.join(out))
109
109
110 def remaining(self):
110 def remaining(self):
111 return [f for f in self.conflicts if f not in self.resolved]
111 return [f for f in self.conflicts if f not in self.resolved]
112
112
113 def filemerge(self, fn, interactive=True):
113 def filemerge(self, fn, interactive=True):
114 wlock = self.repo.wlock()
114 wlock = self.repo.wlock()
115
115
116 (fd, fo) = self.conflicts[fn]
116 (fd, fo) = self.conflicts[fn]
117 p1, p2 = self.wctx.parents()
117 p1, p2 = self.wctx.parents()
118
118
119 # this could be greatly improved
119 # this could be greatly improved
120 realmerge = os.environ.get('HGMERGE')
120 realmerge = os.environ.get('HGMERGE')
121 if not interactive:
121 if not interactive:
122 os.environ['HGMERGE'] = 'merge'
122 os.environ['HGMERGE'] = 'merge'
123
123
124 # The filemerge ancestor algorithm does not work if self.wctx
124 # The filemerge ancestor algorithm does not work if self.wctx
125 # already has two parents (in normal merge it doesn't yet). But
125 # already has two parents (in normal merge it doesn't yet). But
126 # this is very dirty.
126 # this is very dirty.
127 self.wctx._parents.pop()
127 self.wctx._parents.pop()
128 try:
128 try:
129 # TODO: we should probably revert the file if merge fails
129 # TODO: we should probably revert the file if merge fails
130 return filemerge.filemerge(self.repo, fn, fd, fo, self.wctx, p2)
130 return filemerge.filemerge(self.repo, fn, fd, fo, self.wctx, p2)
131 finally:
131 finally:
132 self.wctx._parents.append(p2)
132 self.wctx._parents.append(p2)
133 if realmerge:
133 if realmerge:
134 os.environ['HGMERGE'] = realmerge
134 os.environ['HGMERGE'] = realmerge
135 elif not interactive:
135 elif not interactive:
136 del os.environ['HGMERGE']
136 del os.environ['HGMERGE']
137
137
138 def start(self, rev=None):
138 def start(self, rev=None):
139 _filemerge = filemerge.filemerge
139 _filemerge = filemerge.filemerge
140 def filemerge_(repo, fw, fd, fo, wctx, mctx):
140 def filemerge_(repo, fw, fd, fo, wctx, mctx):
141 self.conflicts[fw] = (fd, fo)
141 self.conflicts[fw] = (fd, fo)
142
142
143 filemerge.filemerge = filemerge_
143 filemerge.filemerge = filemerge_
144 commands.merge(self.ui, self.repo, rev=rev)
144 commands.merge(self.ui, self.repo, rev=rev)
145 filemerge.filemerge = _filemerge
145 filemerge.filemerge = _filemerge
146
146
147 self.wctx = self.repo.workingctx()
147 self.wctx = self.repo.workingctx()
148 self.save()
148 self.save()
149
149
150 def resume(self):
150 def resume(self):
151 self.load()
151 self.load()
152
152
153 dp = self.repo.dirstate.parents()
153 dp = self.repo.dirstate.parents()
154 p1, p2 = self.wctx.parents()
154 p1, p2 = self.wctx.parents()
155 if p1.node() != dp[0] or p2.node() != dp[1]:
155 if p1.node() != dp[0] or p2.node() != dp[1]:
156 raise util.Abort('imerge state does not match working directory')
156 raise util.Abort('imerge state does not match working directory')
157
157
158 def next(self):
158 def next(self):
159 remaining = self.remaining()
159 remaining = self.remaining()
160 return remaining and remaining[0]
160 return remaining and remaining[0]
161
161
162 def resolve(self, files):
162 def resolve(self, files):
163 resolved = dict.fromkeys(self.resolved)
163 resolved = dict.fromkeys(self.resolved)
164 for fn in files:
164 for fn in files:
165 if fn not in self.conflicts:
165 if fn not in self.conflicts:
166 raise util.Abort('%s is not in the merge set' % fn)
166 raise util.Abort('%s is not in the merge set' % fn)
167 resolved[fn] = True
167 resolved[fn] = True
168 self.resolved = resolved.keys()
168 self.resolved = resolved.keys()
169 self.resolved.sort()
169 self.resolved.sort()
170 self.save()
170 self.save()
171 return 0
171 return 0
172
172
173 def unresolve(self, files):
173 def unresolve(self, files):
174 resolved = dict.fromkeys(self.resolved)
174 resolved = dict.fromkeys(self.resolved)
175 for fn in files:
175 for fn in files:
176 if fn not in resolved:
176 if fn not in resolved:
177 raise util.Abort('%s is not resolved' % fn)
177 raise util.Abort('%s is not resolved' % fn)
178 del resolved[fn]
178 del resolved[fn]
179 self.resolved = resolved.keys()
179 self.resolved = resolved.keys()
180 self.resolved.sort()
180 self.resolved.sort()
181 self.save()
181 self.save()
182 return 0
182 return 0
183
183
184 def pickle(self, dest):
184 def pickle(self, dest):
185 '''write current merge state to file to be resumed elsewhere'''
185 '''write current merge state to file to be resumed elsewhere'''
186 state = ImergeStateFile(self)
186 state = ImergeStateFile(self)
187 return state.save(dest)
187 return state.save(dest)
188
188
189 def unpickle(self, source):
189 def unpickle(self, source):
190 '''read merge state from file'''
190 '''read merge state from file'''
191 state = ImergeStateFile(self)
191 state = ImergeStateFile(self)
192 return state.load(source)
192 return state.load(source)
193
193
194 def load(im, source):
194 def load(im, source):
195 if im.merging():
195 if im.merging():
196 raise util.Abort('there is already a merge in progress '
196 raise util.Abort('there is already a merge in progress '
197 '(update -C <rev> to abort it)' )
197 '(update -C <rev> to abort it)' )
198 m, a, r, d = im.repo.status()[:4]
198 m, a, r, d = im.repo.status()[:4]
199 if m or a or r or d:
199 if m or a or r or d:
200 raise util.Abort('working directory has uncommitted changes')
200 raise util.Abort('working directory has uncommitted changes')
201
201
202 rc = im.unpickle(source)
202 rc = im.unpickle(source)
203 if not rc:
203 if not rc:
204 status(im)
204 status(im)
205 return rc
205 return rc
206
206
207 def merge_(im, filename=None, auto=False):
207 def merge_(im, filename=None, auto=False):
208 success = True
208 success = True
209 if auto and not filename:
209 if auto and not filename:
210 for fn in im.remaining():
210 for fn in im.remaining():
211 rc = im.filemerge(fn, interactive=False)
211 rc = im.filemerge(fn, interactive=False)
212 if rc:
212 if rc:
213 success = False
213 success = False
214 else:
214 else:
215 im.resolve([fn])
215 im.resolve([fn])
216 if success:
216 if success:
217 im.ui.write('all conflicts resolved\n')
217 im.ui.write('all conflicts resolved\n')
218 else:
218 else:
219 status(im)
219 status(im)
220 return 0
220 return 0
221
221
222 if not filename:
222 if not filename:
223 filename = im.next()
223 filename = im.next()
224 if not filename:
224 if not filename:
225 im.ui.write('all conflicts resolved\n')
225 im.ui.write('all conflicts resolved\n')
226 return 0
226 return 0
227
227
228 rc = im.filemerge(filename, interactive=not auto)
228 rc = im.filemerge(filename, interactive=not auto)
229 if not rc:
229 if not rc:
230 im.resolve([filename])
230 im.resolve([filename])
231 if not im.next():
231 if not im.next():
232 im.ui.write('all conflicts resolved\n')
232 im.ui.write('all conflicts resolved\n')
233 return rc
233 return rc
234
234
235 def next(im):
235 def next(im):
236 n = im.next()
236 n = im.next()
237 if n:
237 if n:
238 im.ui.write('%s\n' % n)
238 im.ui.write('%s\n' % n)
239 else:
239 else:
240 im.ui.write('all conflicts resolved\n')
240 im.ui.write('all conflicts resolved\n')
241 return 0
241 return 0
242
242
243 def resolve(im, *files):
243 def resolve(im, *files):
244 if not files:
244 if not files:
245 raise util.Abort('resolve requires at least one filename')
245 raise util.Abort('resolve requires at least one filename')
246 return im.resolve(files)
246 return im.resolve(files)
247
247
248 def save(im, dest):
248 def save(im, dest):
249 return im.pickle(dest)
249 return im.pickle(dest)
250
250
251 def status(im, **opts):
251 def status(im, **opts):
252 if not opts.get('resolved') and not opts.get('unresolved'):
252 if not opts.get('resolved') and not opts.get('unresolved'):
253 opts['resolved'] = True
253 opts['resolved'] = True
254 opts['unresolved'] = True
254 opts['unresolved'] = True
255
255
256 if im.ui.verbose:
256 if im.ui.verbose:
257 p1, p2 = [short(p.node()) for p in im.wctx.parents()]
257 p1, p2 = [short(p.node()) for p in im.wctx.parents()]
258 im.ui.note(_('merging %s and %s\n') % (p1, p2))
258 im.ui.note(_('merging %s and %s\n') % (p1, p2))
259
259
260 conflicts = im.conflicts.keys()
260 conflicts = im.conflicts.keys()
261 conflicts.sort()
261 conflicts.sort()
262 remaining = dict.fromkeys(im.remaining())
262 remaining = dict.fromkeys(im.remaining())
263 st = []
263 st = []
264 for fn in conflicts:
264 for fn in conflicts:
265 if opts.get('no_status'):
265 if opts.get('no_status'):
266 mode = ''
266 mode = ''
267 elif fn in remaining:
267 elif fn in remaining:
268 mode = 'U '
268 mode = 'U '
269 else:
269 else:
270 mode = 'R '
270 mode = 'R '
271 if ((opts.get('resolved') and fn not in remaining)
271 if ((opts.get('resolved') and fn not in remaining)
272 or (opts.get('unresolved') and fn in remaining)):
272 or (opts.get('unresolved') and fn in remaining)):
273 st.append((mode, fn))
273 st.append((mode, fn))
274 st.sort()
274 st.sort()
275 for (mode, fn) in st:
275 for (mode, fn) in st:
276 if im.ui.verbose:
276 if im.ui.verbose:
277 fo, fd = im.conflicts[fn]
277 fo, fd = im.conflicts[fn]
278 if fd != fn:
278 if fd != fn:
279 fn = '%s (%s)' % (fn, fd)
279 fn = '%s (%s)' % (fn, fd)
280 im.ui.write('%s%s\n' % (mode, fn))
280 im.ui.write('%s%s\n' % (mode, fn))
281 if opts.get('unresolved') and not remaining:
281 if opts.get('unresolved') and not remaining:
282 im.ui.write(_('all conflicts resolved\n'))
282 im.ui.write(_('all conflicts resolved\n'))
283
283
284 return 0
284 return 0
285
285
286 def unresolve(im, *files):
286 def unresolve(im, *files):
287 if not files:
287 if not files:
288 raise util.Abort('unresolve requires at least one filename')
288 raise util.Abort('unresolve requires at least one filename')
289 return im.unresolve(files)
289 return im.unresolve(files)
290
290
291 subcmdtable = {
291 subcmdtable = {
292 'load': (load, []),
292 'load': (load, []),
293 'merge':
293 'merge':
294 (merge_,
294 (merge_,
295 [('a', 'auto', None, _('automatically resolve if possible'))]),
295 [('a', 'auto', None, _('automatically resolve if possible'))]),
296 'next': (next, []),
296 'next': (next, []),
297 'resolve': (resolve, []),
297 'resolve': (resolve, []),
298 'save': (save, []),
298 'save': (save, []),
299 'status':
299 'status':
300 (status,
300 (status,
301 [('n', 'no-status', None, _('hide status prefix')),
301 [('n', 'no-status', None, _('hide status prefix')),
302 ('', 'resolved', None, _('only show resolved conflicts')),
302 ('', 'resolved', None, _('only show resolved conflicts')),
303 ('', 'unresolved', None, _('only show unresolved conflicts'))]),
303 ('', 'unresolved', None, _('only show unresolved conflicts'))]),
304 'unresolve': (unresolve, [])
304 'unresolve': (unresolve, [])
305 }
305 }
306
306
307 def dispatch_(im, args, opts):
307 def dispatch_(im, args, opts):
308 def complete(s, choices):
308 def complete(s, choices):
309 candidates = []
309 candidates = []
310 for choice in choices:
310 for choice in choices:
311 if choice.startswith(s):
311 if choice.startswith(s):
312 candidates.append(choice)
312 candidates.append(choice)
313 return candidates
313 return candidates
314
314
315 c, args = args[0], list(args[1:])
315 c, args = args[0], list(args[1:])
316 cmd = complete(c, subcmdtable.keys())
316 cmd = complete(c, subcmdtable.keys())
317 if not cmd:
317 if not cmd:
318 raise cmdutil.UnknownCommand('imerge ' + c)
318 raise cmdutil.UnknownCommand('imerge ' + c)
319 if len(cmd) > 1:
319 if len(cmd) > 1:
320 cmd.sort()
320 cmd.sort()
321 raise cmdutil.AmbiguousCommand('imerge ' + c, cmd)
321 raise cmdutil.AmbiguousCommand('imerge ' + c, cmd)
322 cmd = cmd[0]
322 cmd = cmd[0]
323
323
324 func, optlist = subcmdtable[cmd]
324 func, optlist = subcmdtable[cmd]
325 opts = {}
325 opts = {}
326 try:
326 try:
327 args = fancyopts.fancyopts(args, optlist, opts)
327 args = fancyopts.fancyopts(args, optlist, opts)
328 return func(im, *args, **opts)
328 return func(im, *args, **opts)
329 except fancyopts.getopt.GetoptError, inst:
329 except fancyopts.getopt.GetoptError, inst:
330 raise dispatch.ParseError('imerge', '%s: %s' % (cmd, inst))
330 raise dispatch.ParseError('imerge', '%s: %s' % (cmd, inst))
331 except TypeError:
331 except TypeError:
332 raise dispatch.ParseError('imerge', _('%s: invalid arguments') % cmd)
332 raise dispatch.ParseError('imerge', _('%s: invalid arguments') % cmd)
333
333
334 def imerge(ui, repo, *args, **opts):
334 def imerge(ui, repo, *args, **opts):
335 '''interactive merge
335 '''interactive merge
336
336
337 imerge lets you split a merge into pieces. When you start a merge
337 imerge lets you split a merge into pieces. When you start a merge
338 with imerge, the names of all files with conflicts are recorded.
338 with imerge, the names of all files with conflicts are recorded.
339 You can then merge any of these files, and if the merge is
339 You can then merge any of these files, and if the merge is
340 successful, they will be marked as resolved. When all files are
340 successful, they will be marked as resolved. When all files are
341 resolved, the merge is complete.
341 resolved, the merge is complete.
342
342
343 If no merge is in progress, hg imerge [rev] will merge the working
343 If no merge is in progress, hg imerge [rev] will merge the working
344 directory with rev (defaulting to the other head if the repository
344 directory with rev (defaulting to the other head if the repository
345 only has two heads). You may also resume a saved merge with
345 only has two heads). You may also resume a saved merge with
346 hg imerge load <file>.
346 hg imerge load <file>.
347
347
348 If a merge is in progress, hg imerge will default to merging the
348 If a merge is in progress, hg imerge will default to merging the
349 next unresolved file.
349 next unresolved file.
350
350
351 The following subcommands are available:
351 The following subcommands are available:
352
352
353 status:
353 status:
354 show the current state of the merge
354 show the current state of the merge
355 options:
355 options:
356 -n --no-status: do not print the status prefix
356 -n --no-status: do not print the status prefix
357 --resolved: only print resolved conflicts
357 --resolved: only print resolved conflicts
358 --unresolved: only print unresolved conflicts
358 --unresolved: only print unresolved conflicts
359 next:
359 next:
360 show the next unresolved file merge
360 show the next unresolved file merge
361 merge [<file>]:
361 merge [<file>]:
362 merge <file>. If the file merge is successful, the file will be
362 merge <file>. If the file merge is successful, the file will be
363 recorded as resolved. If no file is given, the next unresolved
363 recorded as resolved. If no file is given, the next unresolved
364 file will be merged.
364 file will be merged.
365 resolve <file>...:
365 resolve <file>...:
366 mark files as successfully merged
366 mark files as successfully merged
367 unresolve <file>...:
367 unresolve <file>...:
368 mark files as requiring merging.
368 mark files as requiring merging.
369 save <file>:
369 save <file>:
370 save the state of the merge to a file to be resumed elsewhere
370 save the state of the merge to a file to be resumed elsewhere
371 load <file>:
371 load <file>:
372 load the state of the merge from a file created by save
372 load the state of the merge from a file created by save
373 '''
373 '''
374
374
375 im = Imerge(ui, repo)
375 im = Imerge(ui, repo)
376
376
377 if im.merging():
377 if im.merging():
378 im.resume()
378 im.resume()
379 else:
379 else:
380 rev = opts.get('rev')
380 rev = opts.get('rev')
381 if rev and args:
381 if rev and args:
382 raise util.Abort('please specify just one revision')
382 raise util.Abort('please specify just one revision')
383
383
384 if len(args) == 2 and args[0] == 'load':
384 if len(args) == 2 and args[0] == 'load':
385 pass
385 pass
386 else:
386 else:
387 if args:
387 if args:
388 rev = args[0]
388 rev = args[0]
389 im.start(rev=rev)
389 im.start(rev=rev)
390 if opts.get('auto'):
390 if opts.get('auto'):
391 args = ['merge', '--auto']
391 args = ['merge', '--auto']
392 else:
392 else:
393 args = ['status']
393 args = ['status']
394
394
395 if not args:
395 if not args:
396 args = ['merge']
396 args = ['merge']
397
397
398 return dispatch_(im, args, opts)
398 return dispatch_(im, args, opts)
399
399
400 cmdtable = {
400 cmdtable = {
401 '^imerge':
401 '^imerge':
402 (imerge,
402 (imerge,
403 [('r', 'rev', '', _('revision to merge')),
403 [('r', 'rev', '', _('revision to merge')),
404 ('a', 'auto', None, _('automatically merge where possible'))],
404 ('a', 'auto', None, _('automatically merge where possible'))],
405 'hg imerge [command]')
405 'hg imerge [command]')
406 }
406 }
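A sketch of a possible imerge session using the subcommands above (the file names and the state-file path are invented):

    hg imerge                       # start merging with the other head, or resume
    hg imerge status                # U = unresolved, R = resolved
    hg imerge merge --auto          # resolve what plain "merge" can handle non-interactively
    hg imerge merge foo.c           # merge one remaining file interactively
    hg imerge resolve foo.c bar.h   # mark files that were fixed up by hand
    hg imerge save ../state.tar.gz  # pickle the merge to resume elsewhere

Once no unresolved files remain, the merge can be committed as usual.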
@@ -1,556 +1,556 b''
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a DSCM
10 # Keyword expansion hack against the grain of a DSCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an audience
14 # files (like LaTeX packages), that are mostly addressed to an audience
15 # not running a version control system.
15 # not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Setup in hgrc:
24 # Setup in hgrc:
25 #
25 #
26 # [extensions]
26 # [extensions]
27 # # enable extension
27 # # enable extension
28 # hgext.keyword =
28 # hgext.keyword =
29 #
29 #
30 # Files to act upon/ignore are specified in the [keyword] section.
30 # Files to act upon/ignore are specified in the [keyword] section.
31 # Customized keyword template mappings in the [keywordmaps] section.
31 # Customized keyword template mappings in the [keywordmaps] section.
32 #
32 #
33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
34
34
35 '''keyword expansion in local repositories
35 '''keyword expansion in local repositories
36
36
37 This extension expands RCS/CVS-like or self-customized $Keywords$
37 This extension expands RCS/CVS-like or self-customized $Keywords$
38 in tracked text files selected by your configuration.
38 in tracked text files selected by your configuration.
39
39
40 Keywords are only expanded in local repositories and not stored in
40 Keywords are only expanded in local repositories and not stored in
41 the change history. The mechanism can be regarded as a convenience
41 the change history. The mechanism can be regarded as a convenience
42 for the current user or for archive distribution.
42 for the current user or for archive distribution.
43
43
44 Configuration is done in the [keyword] and [keywordmaps] sections
44 Configuration is done in the [keyword] and [keywordmaps] sections
45 of hgrc files.
45 of hgrc files.
46
46
47 Example:
47 Example:
48
48
49 [keyword]
49 [keyword]
50 # expand keywords in every python file except those matching "x*"
50 # expand keywords in every python file except those matching "x*"
51 **.py =
51 **.py =
52 x* = ignore
52 x* = ignore
53
53
54 Note: the more specific you are in your filename patterns
54 Note: the more specific you are in your filename patterns
55 the less you lose speed in huge repos.
55 the less you lose speed in huge repos.
56
56
57 For [keywordmaps] template mapping and expansion demonstration and
57 For [keywordmaps] template mapping and expansion demonstration and
58 control run "hg kwdemo".
58 control run "hg kwdemo".
59
59
60 An additional date template filter {date|utcdate} is provided.
60 An additional date template filter {date|utcdate} is provided.
61
61
62 The default template mappings (view with "hg kwdemo -d") can be replaced
62 The default template mappings (view with "hg kwdemo -d") can be replaced
63 with customized keywords and templates.
63 with customized keywords and templates.
64 Again, run "hg kwdemo" to control the results of your config changes.
64 Again, run "hg kwdemo" to control the results of your config changes.
65
65
66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
67 the risk of inadvertently storing expanded keywords in the change history.
67 the risk of inadvertently storing expanded keywords in the change history.
68
68
69 To force expansion after enabling it, or a configuration change, run
69 To force expansion after enabling it, or a configuration change, run
70 "hg kwexpand".
70 "hg kwexpand".
71
71
72 Also, when committing with the record extension or using mq's qrecord, be aware
72 Also, when committing with the record extension or using mq's qrecord, be aware
73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
74 question to update keyword expansions after all changes have been checked in.
74 question to update keyword expansions after all changes have been checked in.
75
75
76 Expansions spanning more than one line and incremental expansions,
76 Expansions spanning more than one line and incremental expansions,
77 like CVS' $Log$, are not supported. A keyword template map
77 like CVS' $Log$, are not supported. A keyword template map
78 "Log = {desc}" expands to the first line of the changeset description.
78 "Log = {desc}" expands to the first line of the changeset description.
79 '''
79 '''
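For instance, a minimal [keywordmaps] sketch replacing the default mappings (the custom keyword LastEditedBy is invented; the template filters are those used by the kwtemplater defaults further down):

    [keywordmaps]
    Id = {file|basename},v {node|short} {date|utcdate} {author|user}
    LastEditedBy = {author}

Note that configuring this section replaces the default maps entirely rather than extending them.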
80
80
81 from mercurial import commands, cmdutil, context, dispatch, filelog, revlog
81 from mercurial import commands, cmdutil, context, dispatch, filelog, revlog
82 from mercurial import patch, localrepo, templater, templatefilters, util
82 from mercurial import patch, localrepo, templater, templatefilters, util
83 from mercurial.hgweb import webcommands
83 from mercurial.hgweb import webcommands
84 from mercurial.node import *
84 from mercurial.node import nullid, hex
85 from mercurial.i18n import _
85 from mercurial.i18n import _
86 import re, shutil, tempfile, time
86 import re, shutil, tempfile, time
87
87
88 commands.optionalrepo += ' kwdemo'
88 commands.optionalrepo += ' kwdemo'
89
89
90 # hg commands that do not act on keywords
90 # hg commands that do not act on keywords
91 nokwcommands = ('add addremove bundle copy export grep incoming init'
91 nokwcommands = ('add addremove bundle copy export grep incoming init'
92 ' log outgoing push rename rollback tip'
92 ' log outgoing push rename rollback tip'
93 ' convert email glog')
93 ' convert email glog')
94
94
95 # hg commands that trigger expansion only when writing to working dir,
95 # hg commands that trigger expansion only when writing to working dir,
96 # not when reading filelog, and unexpand when reading from working dir
96 # not when reading filelog, and unexpand when reading from working dir
97 restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
97 restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
98
98
99 def utcdate(date):
99 def utcdate(date):
100 '''Returns hgdate in cvs-like UTC format.'''
100 '''Returns hgdate in cvs-like UTC format.'''
101 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
101 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
102
102
103
103
104 # make keyword tools accessible
104 # make keyword tools accessible
105 kwtools = {'templater': None, 'hgcmd': None}
105 kwtools = {'templater': None, 'hgcmd': None}
106
106
107 # store originals of monkeypatches
107 # store originals of monkeypatches
108 _patchfile_init = patch.patchfile.__init__
108 _patchfile_init = patch.patchfile.__init__
109 _patch_diff = patch.diff
109 _patch_diff = patch.diff
110 _dispatch_parse = dispatch._parse
110 _dispatch_parse = dispatch._parse
111
111
112 def _kwpatchfile_init(self, ui, fname, missing=False):
112 def _kwpatchfile_init(self, ui, fname, missing=False):
113 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
113 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
114 rejects or conflicts due to expanded keywords in working dir.'''
114 rejects or conflicts due to expanded keywords in working dir.'''
115 _patchfile_init(self, ui, fname, missing=missing)
115 _patchfile_init(self, ui, fname, missing=missing)
116 # shrink keywords read from working dir
116 # shrink keywords read from working dir
117 kwt = kwtools['templater']
117 kwt = kwtools['templater']
118 self.lines = kwt.shrinklines(self.fname, self.lines)
118 self.lines = kwt.shrinklines(self.fname, self.lines)
119
119
120 def _kw_diff(repo, node1=None, node2=None, files=None, match=util.always,
120 def _kw_diff(repo, node1=None, node2=None, files=None, match=util.always,
121 fp=None, changes=None, opts=None):
121 fp=None, changes=None, opts=None):
122 '''Monkeypatch patch.diff to avoid expansion except when
122 '''Monkeypatch patch.diff to avoid expansion except when
123 comparing against working dir.'''
123 comparing against working dir.'''
124 if node2 is not None:
124 if node2 is not None:
125 kwtools['templater'].matcher = util.never
125 kwtools['templater'].matcher = util.never
126 elif node1 is not None and node1 != repo.changectx().node():
126 elif node1 is not None and node1 != repo.changectx().node():
127 kwtools['templater'].restrict = True
127 kwtools['templater'].restrict = True
128 _patch_diff(repo, node1=node1, node2=node2, files=files, match=match,
128 _patch_diff(repo, node1=node1, node2=node2, files=files, match=match,
129 fp=fp, changes=changes, opts=opts)
129 fp=fp, changes=changes, opts=opts)
130
130
131 def _kwweb_changeset(web, req, tmpl):
131 def _kwweb_changeset(web, req, tmpl):
132 '''Wraps webcommands.changeset turning off keyword expansion.'''
132 '''Wraps webcommands.changeset turning off keyword expansion.'''
133 kwtools['templater'].matcher = util.never
133 kwtools['templater'].matcher = util.never
134 return web.changeset(tmpl, web.changectx(req))
134 return web.changeset(tmpl, web.changectx(req))
135
135
136 def _kwweb_filediff(web, req, tmpl):
136 def _kwweb_filediff(web, req, tmpl):
137 '''Wraps webcommands.filediff turning off keyword expansion.'''
137 '''Wraps webcommands.filediff turning off keyword expansion.'''
138 kwtools['templater'].matcher = util.never
138 kwtools['templater'].matcher = util.never
139 return web.filediff(tmpl, web.filectx(req))
139 return web.filediff(tmpl, web.filectx(req))
140
140
141 def _kwdispatch_parse(ui, args):
141 def _kwdispatch_parse(ui, args):
142 '''Monkeypatch dispatch._parse to obtain running hg command.'''
142 '''Monkeypatch dispatch._parse to obtain running hg command.'''
143 cmd, func, args, options, cmdoptions = _dispatch_parse(ui, args)
143 cmd, func, args, options, cmdoptions = _dispatch_parse(ui, args)
144 kwtools['hgcmd'] = cmd
144 kwtools['hgcmd'] = cmd
145 return cmd, func, args, options, cmdoptions
145 return cmd, func, args, options, cmdoptions
146
146
147 # dispatch._parse is run before reposetup, so wrap it here
147 # dispatch._parse is run before reposetup, so wrap it here
148 dispatch._parse = _kwdispatch_parse
148 dispatch._parse = _kwdispatch_parse
149
149
150
150
151 class kwtemplater(object):
151 class kwtemplater(object):
152 '''
152 '''
153 Sets up keyword templates, corresponding keyword regex, and
153 Sets up keyword templates, corresponding keyword regex, and
154 provides keyword substitution functions.
154 provides keyword substitution functions.
155 '''
155 '''
156 templates = {
156 templates = {
157 'Revision': '{node|short}',
157 'Revision': '{node|short}',
158 'Author': '{author|user}',
158 'Author': '{author|user}',
159 'Date': '{date|utcdate}',
159 'Date': '{date|utcdate}',
160 'RCSFile': '{file|basename},v',
160 'RCSFile': '{file|basename},v',
161 'Source': '{root}/{file},v',
161 'Source': '{root}/{file},v',
162 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
162 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
163 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
163 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
164 }
164 }
165
165
166 def __init__(self, ui, repo, inc, exc):
166 def __init__(self, ui, repo, inc, exc):
167 self.ui = ui
167 self.ui = ui
168 self.repo = repo
168 self.repo = repo
169 self.matcher = util.matcher(repo.root, inc=inc, exc=exc)[1]
169 self.matcher = util.matcher(repo.root, inc=inc, exc=exc)[1]
170 self.restrict = kwtools['hgcmd'] in restricted.split()
170 self.restrict = kwtools['hgcmd'] in restricted.split()
171
171
172 kwmaps = self.ui.configitems('keywordmaps')
172 kwmaps = self.ui.configitems('keywordmaps')
173 if kwmaps: # override default templates
173 if kwmaps: # override default templates
174 kwmaps = [(k, templater.parsestring(v, quoted=False))
174 kwmaps = [(k, templater.parsestring(v, quoted=False))
175 for (k, v) in kwmaps]
175 for (k, v) in kwmaps]
176 self.templates = dict(kwmaps)
176 self.templates = dict(kwmaps)
177 escaped = map(re.escape, self.templates.keys())
177 escaped = map(re.escape, self.templates.keys())
178 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
178 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
179 self.re_kw = re.compile(kwpat)
179 self.re_kw = re.compile(kwpat)
180
180
181 templatefilters.filters['utcdate'] = utcdate
181 templatefilters.filters['utcdate'] = utcdate
182 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
182 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
183 False, '', False)
183 False, '', False)
184
184
185 def getnode(self, path, fnode):
185 def getnode(self, path, fnode):
186 '''Derives changenode from file path and filenode.'''
186 '''Derives changenode from file path and filenode.'''
187 # used by kwfilelog.read and kwexpand
187 # used by kwfilelog.read and kwexpand
188 c = context.filectx(self.repo, path, fileid=fnode)
188 c = context.filectx(self.repo, path, fileid=fnode)
189 return c.node()
189 return c.node()
190
190
191 def substitute(self, data, path, node, subfunc):
191 def substitute(self, data, path, node, subfunc):
192 '''Replaces keywords in data with expanded template.'''
192 '''Replaces keywords in data with expanded template.'''
193 def kwsub(mobj):
193 def kwsub(mobj):
194 kw = mobj.group(1)
194 kw = mobj.group(1)
195 self.ct.use_template(self.templates[kw])
195 self.ct.use_template(self.templates[kw])
196 self.ui.pushbuffer()
196 self.ui.pushbuffer()
197 self.ct.show(changenode=node, root=self.repo.root, file=path)
197 self.ct.show(changenode=node, root=self.repo.root, file=path)
198 ekw = templatefilters.firstline(self.ui.popbuffer())
198 ekw = templatefilters.firstline(self.ui.popbuffer())
199 return '$%s: %s $' % (kw, ekw)
199 return '$%s: %s $' % (kw, ekw)
200 return subfunc(kwsub, data)
200 return subfunc(kwsub, data)
201
201
202 def expand(self, path, node, data):
202 def expand(self, path, node, data):
203 '''Returns data with keywords expanded.'''
203 '''Returns data with keywords expanded.'''
204 if not self.restrict and self.matcher(path) and not util.binary(data):
204 if not self.restrict and self.matcher(path) and not util.binary(data):
205 changenode = self.getnode(path, node)
205 changenode = self.getnode(path, node)
206 return self.substitute(data, path, changenode, self.re_kw.sub)
206 return self.substitute(data, path, changenode, self.re_kw.sub)
207 return data
207 return data
208
208
209 def iskwfile(self, path, islink):
209 def iskwfile(self, path, islink):
210 '''Returns true if path matches [keyword] pattern
210 '''Returns true if path matches [keyword] pattern
211 and is not a symbolic link.
211 and is not a symbolic link.
212 Caveat: localrepository._link fails on Windows.'''
212 Caveat: localrepository._link fails on Windows.'''
213 return self.matcher(path) and not islink(path)
213 return self.matcher(path) and not islink(path)
214
214
215 def overwrite(self, node=None, expand=True, files=None):
215 def overwrite(self, node=None, expand=True, files=None):
216 '''Overwrites selected files expanding/shrinking keywords.'''
216 '''Overwrites selected files expanding/shrinking keywords.'''
217 ctx = self.repo.changectx(node)
217 ctx = self.repo.changectx(node)
218 mf = ctx.manifest()
218 mf = ctx.manifest()
219 if node is not None: # commit
219 if node is not None: # commit
220 files = [f for f in ctx.files() if f in mf]
220 files = [f for f in ctx.files() if f in mf]
221 notify = self.ui.debug
221 notify = self.ui.debug
222 else: # kwexpand/kwshrink
222 else: # kwexpand/kwshrink
223 notify = self.ui.note
223 notify = self.ui.note
224 candidates = [f for f in files if self.iskwfile(f, mf.linkf)]
224 candidates = [f for f in files if self.iskwfile(f, mf.linkf)]
225 if candidates:
225 if candidates:
226 self.restrict = True # do not expand when reading
226 self.restrict = True # do not expand when reading
227 candidates.sort()
227 candidates.sort()
228 action = expand and 'expanding' or 'shrinking'
228 action = expand and 'expanding' or 'shrinking'
229 for f in candidates:
229 for f in candidates:
230 fp = self.repo.file(f)
230 fp = self.repo.file(f)
231 data = fp.read(mf[f])
231 data = fp.read(mf[f])
232 if util.binary(data):
232 if util.binary(data):
233 continue
233 continue
234 if expand:
234 if expand:
235 changenode = node or self.getnode(f, mf[f])
235 changenode = node or self.getnode(f, mf[f])
236 data, found = self.substitute(data, f, changenode,
236 data, found = self.substitute(data, f, changenode,
237 self.re_kw.subn)
237 self.re_kw.subn)
238 else:
238 else:
239 found = self.re_kw.search(data)
239 found = self.re_kw.search(data)
240 if found:
240 if found:
241 notify(_('overwriting %s %s keywords\n') % (f, action))
241 notify(_('overwriting %s %s keywords\n') % (f, action))
242 self.repo.wwrite(f, data, mf.flags(f))
242 self.repo.wwrite(f, data, mf.flags(f))
243 self.repo.dirstate.normal(f)
243 self.repo.dirstate.normal(f)
244 self.restrict = False
244 self.restrict = False
245
245
246 def shrinktext(self, text):
246 def shrinktext(self, text):
247 '''Unconditionally removes all keyword substitutions from text.'''
247 '''Unconditionally removes all keyword substitutions from text.'''
248 return self.re_kw.sub(r'$\1$', text)
248 return self.re_kw.sub(r'$\1$', text)
249
249
250 def shrink(self, fname, text):
250 def shrink(self, fname, text):
251 '''Returns text with all keyword substitutions removed.'''
251 '''Returns text with all keyword substitutions removed.'''
252 if self.matcher(fname) and not util.binary(text):
252 if self.matcher(fname) and not util.binary(text):
253 return self.shrinktext(text)
253 return self.shrinktext(text)
254 return text
254 return text
255
255
256 def shrinklines(self, fname, lines):
256 def shrinklines(self, fname, lines):
257 '''Returns lines with keyword substitutions removed.'''
257 '''Returns lines with keyword substitutions removed.'''
258 if self.matcher(fname):
258 if self.matcher(fname):
259 text = ''.join(lines)
259 text = ''.join(lines)
260 if not util.binary(text):
260 if not util.binary(text):
261 return self.shrinktext(text).splitlines(True)
261 return self.shrinktext(text).splitlines(True)
262 return lines
262 return lines
263
263
264 def wread(self, fname, data):
264 def wread(self, fname, data):
265 '''If in restricted mode returns data read from wdir with
265 '''If in restricted mode returns data read from wdir with
266 keyword substitutions removed.'''
266 keyword substitutions removed.'''
267 return self.restrict and self.shrink(fname, data) or data
267 return self.restrict and self.shrink(fname, data) or data
268
268
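The expand/shrink pair above is at bottom a regular-expression rewrite between $Keyword$ and $Keyword: value $ markers. A minimal self-contained sketch of that round trip (the keyword map and pattern below are invented stand-ins for the configured [keywordmaps] and the real re_kw, not the extension's own values):

import re

# hypothetical stand-in for the configured [keywordmaps]
templates = {'Id': 'demo.txt,v 1.0 2007/01/01 00:00:00 alice Exp'}
# matches both the bare '$Id$' form and the expanded '$Id: ... $' form
re_kw = re.compile(r'\$(%s)(?::[^$\n]*?)?\$' % '|'.join(templates))

def expand(data):
    # $Keyword$ -> $Keyword: value $, as kwtemplater.substitute does via kwsub
    return re_kw.sub(lambda m: '$%s: %s $' % (m.group(1), templates[m.group(1)]), data)

def shrink(data):
    # $Keyword: value $ -> $Keyword$, as kwtemplater.shrinktext does
    return re_kw.sub(r'$\1$', data)

text = 'header $Id$\n'
assert shrink(expand(text)) == text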
269 class kwfilelog(filelog.filelog):
269 class kwfilelog(filelog.filelog):
270 '''
270 '''
271 Subclass of filelog to hook into its read, add, cmp methods.
271 Subclass of filelog to hook into its read, add, cmp methods.
272 Keywords are "stored" unexpanded, and processed on reading.
272 Keywords are "stored" unexpanded, and processed on reading.
273 '''
273 '''
274 def __init__(self, opener, path):
274 def __init__(self, opener, path):
275 super(kwfilelog, self).__init__(opener, path)
275 super(kwfilelog, self).__init__(opener, path)
276 self.kwt = kwtools['templater']
276 self.kwt = kwtools['templater']
277 self.path = path
277 self.path = path
278
278
279 def read(self, node):
279 def read(self, node):
280 '''Expands keywords when reading filelog.'''
280 '''Expands keywords when reading filelog.'''
281 data = super(kwfilelog, self).read(node)
281 data = super(kwfilelog, self).read(node)
282 return self.kwt.expand(self.path, node, data)
282 return self.kwt.expand(self.path, node, data)
283
283
284 def add(self, text, meta, tr, link, p1=None, p2=None):
284 def add(self, text, meta, tr, link, p1=None, p2=None):
285 '''Removes keyword substitutions when adding to filelog.'''
285 '''Removes keyword substitutions when adding to filelog.'''
286 text = self.kwt.shrink(self.path, text)
286 text = self.kwt.shrink(self.path, text)
287 return super(kwfilelog, self).add(text, meta, tr, link, p1=p1, p2=p2)
287 return super(kwfilelog, self).add(text, meta, tr, link, p1=p1, p2=p2)
288
288
289 def cmp(self, node, text):
289 def cmp(self, node, text):
290 '''Removes keyword substitutions for comparison.'''
290 '''Removes keyword substitutions for comparison.'''
291 text = self.kwt.shrink(self.path, text)
291 text = self.kwt.shrink(self.path, text)
292 if self.renamed(node):
292 if self.renamed(node):
293 t2 = super(kwfilelog, self).read(node)
293 t2 = super(kwfilelog, self).read(node)
294 return t2 != text
294 return t2 != text
295 return revlog.revlog.cmp(self, node, text)
295 return revlog.revlog.cmp(self, node, text)
296
296
297 def _status(ui, repo, kwt, *pats, **opts):
297 def _status(ui, repo, kwt, *pats, **opts):
298 '''Bails out if [keyword] configuration is not active.
298 '''Bails out if [keyword] configuration is not active.
299 Returns status of working directory.'''
299 Returns status of working directory.'''
300 if kwt:
300 if kwt:
301 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
301 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
302 return repo.status(files=files, match=match, list_clean=True)
302 return repo.status(files=files, match=match, list_clean=True)
303 if ui.configitems('keyword'):
303 if ui.configitems('keyword'):
304 raise util.Abort(_('[keyword] patterns cannot match'))
304 raise util.Abort(_('[keyword] patterns cannot match'))
305 raise util.Abort(_('no [keyword] patterns configured'))
305 raise util.Abort(_('no [keyword] patterns configured'))
306
306
307 def _kwfwrite(ui, repo, expand, *pats, **opts):
307 def _kwfwrite(ui, repo, expand, *pats, **opts):
308 '''Selects files and passes them to kwtemplater.overwrite.'''
308 '''Selects files and passes them to kwtemplater.overwrite.'''
309 kwt = kwtools['templater']
309 kwt = kwtools['templater']
310 status = _status(ui, repo, kwt, *pats, **opts)
310 status = _status(ui, repo, kwt, *pats, **opts)
311 modified, added, removed, deleted, unknown, ignored, clean = status
311 modified, added, removed, deleted, unknown, ignored, clean = status
312 if modified or added or removed or deleted:
312 if modified or added or removed or deleted:
313 raise util.Abort(_('outstanding uncommitted changes in given files'))
313 raise util.Abort(_('outstanding uncommitted changes in given files'))
314 wlock = lock = None
314 wlock = lock = None
315 try:
315 try:
316 wlock = repo.wlock()
316 wlock = repo.wlock()
317 lock = repo.lock()
317 lock = repo.lock()
318 kwt.overwrite(expand=expand, files=clean)
318 kwt.overwrite(expand=expand, files=clean)
319 finally:
319 finally:
320 del wlock, lock
320 del wlock, lock
321
321
322
322
323 def demo(ui, repo, *args, **opts):
323 def demo(ui, repo, *args, **opts):
324 '''print [keywordmaps] configuration and an expansion example
324 '''print [keywordmaps] configuration and an expansion example
325
325
326 Show current, custom, or default keyword template maps
326 Show current, custom, or default keyword template maps
327 and their expansion.
327 and their expansion.
328
328
329 Extend current configuration by specifying maps as arguments
329 Extend current configuration by specifying maps as arguments
330 and optionally by reading from an additional hgrc file.
330 and optionally by reading from an additional hgrc file.
331
331
332 Override current keyword template maps with "default" option.
332 Override current keyword template maps with "default" option.
333 '''
333 '''
334 def demostatus(stat):
334 def demostatus(stat):
335 ui.status(_('\n\t%s\n') % stat)
335 ui.status(_('\n\t%s\n') % stat)
336
336
337 def demoitems(section, items):
337 def demoitems(section, items):
338 ui.write('[%s]\n' % section)
338 ui.write('[%s]\n' % section)
339 for k, v in items:
339 for k, v in items:
340 ui.write('%s = %s\n' % (k, v))
340 ui.write('%s = %s\n' % (k, v))
341
341
342 msg = 'hg keyword config and expansion example'
342 msg = 'hg keyword config and expansion example'
343 kwstatus = 'current'
343 kwstatus = 'current'
344 fn = 'demo.txt'
344 fn = 'demo.txt'
345 branchname = 'demobranch'
345 branchname = 'demobranch'
346 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
346 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
347 ui.note(_('creating temporary repo at %s\n') % tmpdir)
347 ui.note(_('creating temporary repo at %s\n') % tmpdir)
348 repo = localrepo.localrepository(ui, path=tmpdir, create=True)
348 repo = localrepo.localrepository(ui, path=tmpdir, create=True)
349 ui.setconfig('keyword', fn, '')
349 ui.setconfig('keyword', fn, '')
350 if args or opts.get('rcfile'):
350 if args or opts.get('rcfile'):
351 kwstatus = 'custom'
351 kwstatus = 'custom'
352 if opts.get('rcfile'):
352 if opts.get('rcfile'):
353 ui.readconfig(opts.get('rcfile'))
353 ui.readconfig(opts.get('rcfile'))
354 if opts.get('default'):
354 if opts.get('default'):
355 kwstatus = 'default'
355 kwstatus = 'default'
356 kwmaps = kwtemplater.templates
356 kwmaps = kwtemplater.templates
357 if ui.configitems('keywordmaps'):
357 if ui.configitems('keywordmaps'):
358 # override maps from optional rcfile
358 # override maps from optional rcfile
359 for k, v in kwmaps.iteritems():
359 for k, v in kwmaps.iteritems():
360 ui.setconfig('keywordmaps', k, v)
360 ui.setconfig('keywordmaps', k, v)
361 elif args:
361 elif args:
362 # simulate hgrc parsing
362 # simulate hgrc parsing
363 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
363 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
364 fp = repo.opener('hgrc', 'w')
364 fp = repo.opener('hgrc', 'w')
365 fp.writelines(rcmaps)
365 fp.writelines(rcmaps)
366 fp.close()
366 fp.close()
367 ui.readconfig(repo.join('hgrc'))
367 ui.readconfig(repo.join('hgrc'))
368 if not opts.get('default'):
368 if not opts.get('default'):
369 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
369 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
370 reposetup(ui, repo)
370 reposetup(ui, repo)
371 for k, v in ui.configitems('extensions'):
371 for k, v in ui.configitems('extensions'):
372 if k.endswith('keyword'):
372 if k.endswith('keyword'):
373 extension = '%s = %s' % (k, v)
373 extension = '%s = %s' % (k, v)
374 break
374 break
375 demostatus('config using %s keyword template maps' % kwstatus)
375 demostatus('config using %s keyword template maps' % kwstatus)
376 ui.write('[extensions]\n%s\n' % extension)
376 ui.write('[extensions]\n%s\n' % extension)
377 demoitems('keyword', ui.configitems('keyword'))
377 demoitems('keyword', ui.configitems('keyword'))
378 demoitems('keywordmaps', kwmaps.iteritems())
378 demoitems('keywordmaps', kwmaps.iteritems())
379 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
379 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
380 repo.wopener(fn, 'w').write(keywords)
380 repo.wopener(fn, 'w').write(keywords)
381 repo.add([fn])
381 repo.add([fn])
382 path = repo.wjoin(fn)
382 path = repo.wjoin(fn)
383 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
383 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
384 ui.note(keywords)
384 ui.note(keywords)
385 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
385 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
386 # silence branch command if not verbose
386 # silence branch command if not verbose
387 quiet = ui.quiet
387 quiet = ui.quiet
388 ui.quiet = not ui.verbose
388 ui.quiet = not ui.verbose
389 commands.branch(ui, repo, branchname)
389 commands.branch(ui, repo, branchname)
390 ui.quiet = quiet
390 ui.quiet = quiet
391 for name, cmd in ui.configitems('hooks'):
391 for name, cmd in ui.configitems('hooks'):
392 if name.split('.', 1)[0].find('commit') > -1:
392 if name.split('.', 1)[0].find('commit') > -1:
393 repo.ui.setconfig('hooks', name, '')
393 repo.ui.setconfig('hooks', name, '')
394 ui.note(_('unhooked all commit hooks\n'))
394 ui.note(_('unhooked all commit hooks\n'))
395 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
395 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
396 repo.commit(text=msg)
396 repo.commit(text=msg)
397 format = ui.verbose and ' in %s' % path or ''
397 format = ui.verbose and ' in %s' % path or ''
398 demostatus('%s keywords expanded%s' % (kwstatus, format))
398 demostatus('%s keywords expanded%s' % (kwstatus, format))
399 ui.write(repo.wread(fn))
399 ui.write(repo.wread(fn))
400 ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
400 ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
401 shutil.rmtree(tmpdir, ignore_errors=True)
401 shutil.rmtree(tmpdir, ignore_errors=True)
402
402
403 def expand(ui, repo, *pats, **opts):
403 def expand(ui, repo, *pats, **opts):
404 '''expand keywords in working directory
404 '''expand keywords in working directory
405
405
406 Run after (re)enabling keyword expansion.
406 Run after (re)enabling keyword expansion.
407
407
408 kwexpand refuses to run if given files contain local changes.
408 kwexpand refuses to run if given files contain local changes.
409 '''
409 '''
410 # 3rd argument sets expansion to True
410 # 3rd argument sets expansion to True
411 _kwfwrite(ui, repo, True, *pats, **opts)
411 _kwfwrite(ui, repo, True, *pats, **opts)
412
412
413 def files(ui, repo, *pats, **opts):
413 def files(ui, repo, *pats, **opts):
414 '''print files currently configured for keyword expansion
414 '''print files currently configured for keyword expansion
415
415
416 Crosscheck which files in working directory are potential targets for
416 Crosscheck which files in working directory are potential targets for
417 keyword expansion.
417 keyword expansion.
418 That is, files matched by [keyword] config patterns but not symlinks.
418 That is, files matched by [keyword] config patterns but not symlinks.
419 '''
419 '''
420 kwt = kwtools['templater']
420 kwt = kwtools['templater']
421 status = _status(ui, repo, kwt, *pats, **opts)
421 status = _status(ui, repo, kwt, *pats, **opts)
422 modified, added, removed, deleted, unknown, ignored, clean = status
422 modified, added, removed, deleted, unknown, ignored, clean = status
423 files = modified + added + clean
423 files = modified + added + clean
424 if opts.get('untracked'):
424 if opts.get('untracked'):
425 files += unknown
425 files += unknown
426 files.sort()
426 files.sort()
427 wctx = repo.workingctx()
427 wctx = repo.workingctx()
428 islink = lambda p: 'l' in wctx.fileflags(p)
428 islink = lambda p: 'l' in wctx.fileflags(p)
429 kwfiles = [f for f in files if kwt.iskwfile(f, islink)]
429 kwfiles = [f for f in files if kwt.iskwfile(f, islink)]
430 cwd = pats and repo.getcwd() or ''
430 cwd = pats and repo.getcwd() or ''
431 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
431 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
432 if opts.get('all') or opts.get('ignore'):
432 if opts.get('all') or opts.get('ignore'):
433 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
433 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
434 for char, filenames in kwfstats:
434 for char, filenames in kwfstats:
435 format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
435 format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
436 for f in filenames:
436 for f in filenames:
437 ui.write(format % repo.pathto(f, cwd))
437 ui.write(format % repo.pathto(f, cwd))
438
438
439 def shrink(ui, repo, *pats, **opts):
439 def shrink(ui, repo, *pats, **opts):
440 '''revert expanded keywords in working directory
440 '''revert expanded keywords in working directory
441
441
442 Run before changing/disabling active keywords
442 Run before changing/disabling active keywords
443 or if you experience problems with "hg import" or "hg merge".
443 or if you experience problems with "hg import" or "hg merge".
444
444
445 kwshrink refuses to run if given files contain local changes.
445 kwshrink refuses to run if given files contain local changes.
446 '''
446 '''
447 # 3rd argument sets expansion to False
447 # 3rd argument sets expansion to False
448 _kwfwrite(ui, repo, False, *pats, **opts)
448 _kwfwrite(ui, repo, False, *pats, **opts)
449
449
450
450
451 def reposetup(ui, repo):
451 def reposetup(ui, repo):
452 '''Sets up repo as kwrepo for keyword substitution.
452 '''Sets up repo as kwrepo for keyword substitution.
453 Overrides file method to return kwfilelog instead of filelog
453 Overrides file method to return kwfilelog instead of filelog
454 if file matches user configuration.
454 if file matches user configuration.
455 Wraps commit to overwrite configured files with updated
455 Wraps commit to overwrite configured files with updated
456 keyword substitutions.
456 keyword substitutions.
457 This is done for local repos only, and only if there are
457 This is done for local repos only, and only if there are
458 files configured at all for keyword substitution.'''
458 files configured at all for keyword substitution.'''
459
459
460 try:
460 try:
461 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
461 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
462 or '.hg' in util.splitpath(repo.root)
462 or '.hg' in util.splitpath(repo.root)
463 or repo._url.startswith('bundle:')):
463 or repo._url.startswith('bundle:')):
464 return
464 return
465 except AttributeError:
465 except AttributeError:
466 pass
466 pass
467
467
468 inc, exc = [], ['.hg*']
468 inc, exc = [], ['.hg*']
469 for pat, opt in ui.configitems('keyword'):
469 for pat, opt in ui.configitems('keyword'):
470 if opt != 'ignore':
470 if opt != 'ignore':
471 inc.append(pat)
471 inc.append(pat)
472 else:
472 else:
473 exc.append(pat)
473 exc.append(pat)
474 if not inc:
474 if not inc:
475 return
475 return
476
476
477 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
477 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
478
478
479 class kwrepo(repo.__class__):
479 class kwrepo(repo.__class__):
480 def file(self, f):
480 def file(self, f):
481 if f[0] == '/':
481 if f[0] == '/':
482 f = f[1:]
482 f = f[1:]
483 return kwfilelog(self.sopener, f)
483 return kwfilelog(self.sopener, f)
484
484
485 def wread(self, filename):
485 def wread(self, filename):
486 data = super(kwrepo, self).wread(filename)
486 data = super(kwrepo, self).wread(filename)
487 return kwt.wread(filename, data)
487 return kwt.wread(filename, data)
488
488
489 def commit(self, files=None, text='', user=None, date=None,
489 def commit(self, files=None, text='', user=None, date=None,
490 match=util.always, force=False, force_editor=False,
490 match=util.always, force=False, force_editor=False,
491 p1=None, p2=None, extra={}, empty_ok=False):
491 p1=None, p2=None, extra={}, empty_ok=False):
492 wlock = lock = None
492 wlock = lock = None
493 _p1 = _p2 = None
493 _p1 = _p2 = None
494 try:
494 try:
495 wlock = self.wlock()
495 wlock = self.wlock()
496 lock = self.lock()
496 lock = self.lock()
497 # store and postpone commit hooks
497 # store and postpone commit hooks
498 commithooks = {}
498 commithooks = {}
499 for name, cmd in ui.configitems('hooks'):
499 for name, cmd in ui.configitems('hooks'):
500 if name.split('.', 1)[0] == 'commit':
500 if name.split('.', 1)[0] == 'commit':
501 commithooks[name] = cmd
501 commithooks[name] = cmd
502 ui.setconfig('hooks', name, None)
502 ui.setconfig('hooks', name, None)
503 if commithooks:
503 if commithooks:
504 # store parents for commit hook environment
504 # store parents for commit hook environment
505 if p1 is None:
505 if p1 is None:
506 _p1, _p2 = repo.dirstate.parents()
506 _p1, _p2 = repo.dirstate.parents()
507 else:
507 else:
508 _p1, _p2 = p1, p2 or nullid
508 _p1, _p2 = p1, p2 or nullid
509 _p1 = hex(_p1)
509 _p1 = hex(_p1)
510 if _p2 == nullid:
510 if _p2 == nullid:
511 _p2 = ''
511 _p2 = ''
512 else:
512 else:
513 _p2 = hex(_p2)
513 _p2 = hex(_p2)
514
514
515 node = super(kwrepo,
515 node = super(kwrepo,
516 self).commit(files=files, text=text, user=user,
516 self).commit(files=files, text=text, user=user,
517 date=date, match=match, force=force,
517 date=date, match=match, force=force,
518 force_editor=force_editor,
518 force_editor=force_editor,
519 p1=p1, p2=p2, extra=extra,
519 p1=p1, p2=p2, extra=extra,
520 empty_ok=empty_ok)
520 empty_ok=empty_ok)
521
521
522 # restore commit hooks
522 # restore commit hooks
523 for name, cmd in commithooks.iteritems():
523 for name, cmd in commithooks.iteritems():
524 ui.setconfig('hooks', name, cmd)
524 ui.setconfig('hooks', name, cmd)
525 if node is not None:
525 if node is not None:
526 kwt.overwrite(node=node)
526 kwt.overwrite(node=node)
527 repo.hook('commit', node=node, parent1=_p1, parent2=_p2)
527 repo.hook('commit', node=node, parent1=_p1, parent2=_p2)
528 return node
528 return node
529 finally:
529 finally:
530 del wlock, lock
530 del wlock, lock
531
531
532 repo.__class__ = kwrepo
532 repo.__class__ = kwrepo
533 patch.patchfile.__init__ = _kwpatchfile_init
533 patch.patchfile.__init__ = _kwpatchfile_init
534 patch.diff = _kw_diff
534 patch.diff = _kw_diff
535 webcommands.changeset = webcommands.rev = _kwweb_changeset
535 webcommands.changeset = webcommands.rev = _kwweb_changeset
536 webcommands.filediff = webcommands.diff = _kwweb_filediff
536 webcommands.filediff = webcommands.diff = _kwweb_filediff
537
537
538
538
539 cmdtable = {
539 cmdtable = {
540 'kwdemo':
540 'kwdemo':
541 (demo,
541 (demo,
542 [('d', 'default', None, _('show default keyword template maps')),
542 [('d', 'default', None, _('show default keyword template maps')),
543 ('f', 'rcfile', [], _('read maps from rcfile'))],
543 ('f', 'rcfile', [], _('read maps from rcfile'))],
544 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
544 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
545 'kwexpand': (expand, commands.walkopts,
545 'kwexpand': (expand, commands.walkopts,
546 _('hg kwexpand [OPTION]... [FILE]...')),
546 _('hg kwexpand [OPTION]... [FILE]...')),
547 'kwfiles':
547 'kwfiles':
548 (files,
548 (files,
549 [('a', 'all', None, _('show keyword status flags of all files')),
549 [('a', 'all', None, _('show keyword status flags of all files')),
550 ('i', 'ignore', None, _('show files excluded from expansion')),
550 ('i', 'ignore', None, _('show files excluded from expansion')),
551 ('u', 'untracked', None, _('additionally show untracked files')),
551 ('u', 'untracked', None, _('additionally show untracked files')),
552 ] + commands.walkopts,
552 ] + commands.walkopts,
553 _('hg kwfiles [OPTION]... [FILE]...')),
553 _('hg kwfiles [OPTION]... [FILE]...')),
554 'kwshrink': (shrink, commands.walkopts,
554 'kwshrink': (shrink, commands.walkopts,
555 _('hg kwshrink [OPTION]... [FILE]...')),
555 _('hg kwshrink [OPTION]... [FILE]...')),
556 }
556 }
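Each cmdtable entry above is a (function, options, synopsis) tuple keyed by command name, with options given as (short, long, default, help). A toy entry of the same shape, purely illustrative (the 'hello' command and its option do not exist in this extension):

def hello(ui, repo, *pats, **opts):
    '''say hello (hypothetical command, for illustration only)'''
    ui.write('hello, %s\n' % (opts.get('name') or 'world'))

toy_cmdtable = {
    'hello':
        (hello,
         [('n', 'name', '', 'name to greet')],
         'hg hello [-n NAME]'),
}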
@@ -1,285 +1,285 @@
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to email notifications to people when changesets are
8 # hook extension to email notifications to people when changesets are
9 # committed to a repo they subscribe to.
9 # committed to a repo they subscribe to.
10 #
10 #
11 # default mode is to print messages to stdout, for testing and
11 # default mode is to print messages to stdout, for testing and
12 # configuring.
12 # configuring.
13 #
13 #
14 # to use, configure notify extension and enable in hgrc like this:
14 # to use, configure notify extension and enable in hgrc like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.notify =
17 # hgext.notify =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # one email for each incoming changeset
20 # # one email for each incoming changeset
21 # incoming.notify = python:hgext.notify.hook
21 # incoming.notify = python:hgext.notify.hook
22 # # batch emails when many changesets incoming at one time
22 # # batch emails when many changesets incoming at one time
23 # changegroup.notify = python:hgext.notify.hook
23 # changegroup.notify = python:hgext.notify.hook
24 #
24 #
25 # [notify]
25 # [notify]
26 # # config items go in here
26 # # config items go in here
27 #
27 #
28 # config items:
28 # config items:
29 #
29 #
30 # REQUIRED:
30 # REQUIRED:
31 # config = /path/to/file # file containing subscriptions
31 # config = /path/to/file # file containing subscriptions
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # test = True # print messages to stdout for testing
34 # test = True # print messages to stdout for testing
35 # strip = 3 # number of slashes to strip for url paths
35 # strip = 3 # number of slashes to strip for url paths
36 # domain = example.com # domain to use if committer missing domain
36 # domain = example.com # domain to use if committer missing domain
37 # style = ... # style file to use when formatting email
37 # style = ... # style file to use when formatting email
38 # template = ... # template to use when formatting email
38 # template = ... # template to use when formatting email
39 # incoming = ... # template to use when run as incoming hook
39 # incoming = ... # template to use when run as incoming hook
40 # changegroup = ... # template when run as changegroup hook
40 # changegroup = ... # template when run as changegroup hook
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 # maxsubject = 67 # truncate subject line longer than this
42 # maxsubject = 67 # truncate subject line longer than this
43 # diffstat = True # add a diffstat before the diff content
43 # diffstat = True # add a diffstat before the diff content
44 # sources = serve # notify if source of incoming changes in this list
44 # sources = serve # notify if source of incoming changes in this list
45 # # (serve == ssh or http, push, pull, bundle)
45 # # (serve == ssh or http, push, pull, bundle)
46 # [email]
46 # [email]
47 # from = user@host.com # email address to send as if none given
47 # from = user@host.com # email address to send as if none given
48 # [web]
48 # [web]
49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
49 # baseurl = http://hgserver/... # root of hg web site for browsing commits
50 #
50 #
51 # notify config file has same format as regular hgrc. it has two
51 # notify config file has same format as regular hgrc. it has two
52 # sections so you can express subscriptions in whatever way is handier
52 # sections so you can express subscriptions in whatever way is handier
53 # for you.
53 # for you.
54 #
54 #
55 # [usersubs]
55 # [usersubs]
56 # # key is subscriber email, value is ","-separated list of glob patterns
56 # # key is subscriber email, value is ","-separated list of glob patterns
57 # user@host = pattern
57 # user@host = pattern
58 #
58 #
59 # [reposubs]
59 # [reposubs]
60 # # key is glob pattern, value is ","-separated list of subscriber emails
60 # # key is glob pattern, value is ","-separated list of subscriber emails
61 # pattern = user@host
61 # pattern = user@host
62 #
62 #
63 # glob patterns are matched against path to repo root.
63 # glob patterns are matched against path to repo root.
64 #
64 #
65 # if you like, you can put the notify config file in a repo that users can
65 # if you like, you can put the notify config file in a repo that users can
66 # push changes to, so they can manage their own subscriptions.
66 # push changes to, so they can manage their own subscriptions.
67
67
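As described above, a subscription applies when its glob pattern matches the path to the repository root (see subscribers() further down). A small standalone illustration of that matching, with made-up paths and addresses:

import fnmatch

# hypothetical [usersubs] entries: subscriber email -> comma-separated glob patterns
usersubs = {
    'alice@example.com': '/srv/hg/proj*',
    'bob@example.com': '/srv/hg/docs',
}
repo_root = '/srv/hg/project-x'

subs = sorted(user for user, pats in usersubs.items()
              if any(fnmatch.fnmatch(repo_root, pat.strip())
                     for pat in pats.split(',')))
print(subs)   # ['alice@example.com']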
68 from mercurial.i18n import _
68 from mercurial.i18n import _
69 from mercurial.node import *
69 from mercurial.node import bin, short
70 from mercurial import patch, cmdutil, templater, util, mail
70 from mercurial import patch, cmdutil, templater, util, mail
71 import email.Parser, fnmatch, socket, time
71 import email.Parser, fnmatch, socket, time
72
72
73 # template for single changeset can include email headers.
73 # template for single changeset can include email headers.
74 single_template = '''
74 single_template = '''
75 Subject: changeset in {webroot}: {desc|firstline|strip}
75 Subject: changeset in {webroot}: {desc|firstline|strip}
76 From: {author}
76 From: {author}
77
77
78 changeset {node|short} in {root}
78 changeset {node|short} in {root}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
80 description:
80 description:
81 \t{desc|tabindent|strip}
81 \t{desc|tabindent|strip}
82 '''.lstrip()
82 '''.lstrip()
83
83
84 # template for multiple changesets should not contain email headers,
84 # template for multiple changesets should not contain email headers,
85 # because only first set of headers will be used and result will look
85 # because only first set of headers will be used and result will look
86 # strange.
86 # strange.
87 multiple_template = '''
87 multiple_template = '''
88 changeset {node|short} in {root}
88 changeset {node|short} in {root}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
90 summary: {desc|firstline}
90 summary: {desc|firstline}
91 '''
91 '''
92
92
93 deftemplates = {
93 deftemplates = {
94 'changegroup': multiple_template,
94 'changegroup': multiple_template,
95 }
95 }
96
96
97 class notifier(object):
97 class notifier(object):
98 '''email notification class.'''
98 '''email notification class.'''
99
99
100 def __init__(self, ui, repo, hooktype):
100 def __init__(self, ui, repo, hooktype):
101 self.ui = ui
101 self.ui = ui
102 cfg = self.ui.config('notify', 'config')
102 cfg = self.ui.config('notify', 'config')
103 if cfg:
103 if cfg:
104 self.ui.readsections(cfg, 'usersubs', 'reposubs')
104 self.ui.readsections(cfg, 'usersubs', 'reposubs')
105 self.repo = repo
105 self.repo = repo
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
107 self.root = self.strip(self.repo.root)
107 self.root = self.strip(self.repo.root)
108 self.domain = self.ui.config('notify', 'domain')
108 self.domain = self.ui.config('notify', 'domain')
109 self.subs = self.subscribers()
109 self.subs = self.subscribers()
110
110
111 mapfile = self.ui.config('notify', 'style')
111 mapfile = self.ui.config('notify', 'style')
112 template = (self.ui.config('notify', hooktype) or
112 template = (self.ui.config('notify', hooktype) or
113 self.ui.config('notify', 'template'))
113 self.ui.config('notify', 'template'))
114 self.t = cmdutil.changeset_templater(self.ui, self.repo,
114 self.t = cmdutil.changeset_templater(self.ui, self.repo,
115 False, mapfile, False)
115 False, mapfile, False)
116 if not mapfile and not template:
116 if not mapfile and not template:
117 template = deftemplates.get(hooktype) or single_template
117 template = deftemplates.get(hooktype) or single_template
118 if template:
118 if template:
119 template = templater.parsestring(template, quoted=False)
119 template = templater.parsestring(template, quoted=False)
120 self.t.use_template(template)
120 self.t.use_template(template)
121
121
122 def strip(self, path):
122 def strip(self, path):
123 '''strip leading slashes from local path, turn into web-safe path.'''
123 '''strip leading slashes from local path, turn into web-safe path.'''
124
124
125 path = util.pconvert(path)
125 path = util.pconvert(path)
126 count = self.stripcount
126 count = self.stripcount
127 while count > 0:
127 while count > 0:
128 c = path.find('/')
128 c = path.find('/')
129 if c == -1:
129 if c == -1:
130 break
130 break
131 path = path[c+1:]
131 path = path[c+1:]
132 count -= 1
132 count -= 1
133 return path
133 return path
134
134
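With a setting such as strip = 3, the loop above removes three slashes' worth of leading components, turning a local repository root into the short web path used in notifications. The same logic in isolation, on a made-up path:

def strip(path, count):
    # mirrors notifier.strip: drop components up to `count` leading slashes
    while count > 0:
        c = path.find('/')
        if c == -1:
            break
        path = path[c + 1:]
        count -= 1
    return path

print(strip('/var/hg/project-x', 3))   # 'project-x'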
135 def fixmail(self, addr):
135 def fixmail(self, addr):
136 '''try to clean up email addresses.'''
136 '''try to clean up email addresses.'''
137
137
138 addr = util.email(addr.strip())
138 addr = util.email(addr.strip())
139 if self.domain:
139 if self.domain:
140 a = addr.find('@localhost')
140 a = addr.find('@localhost')
141 if a != -1:
141 if a != -1:
142 addr = addr[:a]
142 addr = addr[:a]
143 if '@' not in addr:
143 if '@' not in addr:
144 return addr + '@' + self.domain
144 return addr + '@' + self.domain
145 return addr
145 return addr
146
146
147 def subscribers(self):
147 def subscribers(self):
148 '''return list of email addresses of subscribers to this repo.'''
148 '''return list of email addresses of subscribers to this repo.'''
149
149
150 subs = {}
150 subs = {}
151 for user, pats in self.ui.configitems('usersubs'):
151 for user, pats in self.ui.configitems('usersubs'):
152 for pat in pats.split(','):
152 for pat in pats.split(','):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 subs[self.fixmail(user)] = 1
154 subs[self.fixmail(user)] = 1
155 for pat, users in self.ui.configitems('reposubs'):
155 for pat, users in self.ui.configitems('reposubs'):
156 if fnmatch.fnmatch(self.repo.root, pat):
156 if fnmatch.fnmatch(self.repo.root, pat):
157 for user in users.split(','):
157 for user in users.split(','):
158 subs[self.fixmail(user)] = 1
158 subs[self.fixmail(user)] = 1
159 subs = subs.keys()
159 subs = subs.keys()
160 subs.sort()
160 subs.sort()
161 return subs
161 return subs
162
162
163 def url(self, path=None):
163 def url(self, path=None):
164 return self.ui.config('web', 'baseurl') + (path or self.root)
164 return self.ui.config('web', 'baseurl') + (path or self.root)
165
165
166 def node(self, node):
166 def node(self, node):
167 '''format one changeset.'''
167 '''format one changeset.'''
168
168
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 baseurl=self.ui.config('web', 'baseurl'),
170 baseurl=self.ui.config('web', 'baseurl'),
171 root=self.repo.root,
171 root=self.repo.root,
172 webroot=self.root)
172 webroot=self.root)
173
173
174 def skipsource(self, source):
174 def skipsource(self, source):
175 '''true if incoming changes from this source should be skipped.'''
175 '''true if incoming changes from this source should be skipped.'''
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 return source not in ok_sources
177 return source not in ok_sources
178
178
179 def send(self, node, count, data):
179 def send(self, node, count, data):
180 '''send message.'''
180 '''send message.'''
181
181
182 p = email.Parser.Parser()
182 p = email.Parser.Parser()
183 msg = p.parsestr(data)
183 msg = p.parsestr(data)
184
184
185 def fix_subject():
185 def fix_subject():
186 '''try to make subject line exist and be useful.'''
186 '''try to make subject line exist and be useful.'''
187
187
188 subject = msg['Subject']
188 subject = msg['Subject']
189 if not subject:
189 if not subject:
190 if count > 1:
190 if count > 1:
191 subject = _('%s: %d new changesets') % (self.root, count)
191 subject = _('%s: %d new changesets') % (self.root, count)
192 else:
192 else:
193 changes = self.repo.changelog.read(node)
193 changes = self.repo.changelog.read(node)
194 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
194 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
195 subject = '%s: %s' % (self.root, s)
195 subject = '%s: %s' % (self.root, s)
196 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
196 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
197 if maxsubject and len(subject) > maxsubject:
197 if maxsubject and len(subject) > maxsubject:
198 subject = subject[:maxsubject-3] + '...'
198 subject = subject[:maxsubject-3] + '...'
199 del msg['Subject']
199 del msg['Subject']
200 msg['Subject'] = subject
200 msg['Subject'] = subject
201
201
202 def fix_sender():
202 def fix_sender():
203 '''try to make message have proper sender.'''
203 '''try to make message have proper sender.'''
204
204
205 sender = msg['From']
205 sender = msg['From']
206 if not sender:
206 if not sender:
207 sender = self.ui.config('email', 'from') or self.ui.username()
207 sender = self.ui.config('email', 'from') or self.ui.username()
208 if '@' not in sender or '@localhost' in sender:
208 if '@' not in sender or '@localhost' in sender:
209 sender = self.fixmail(sender)
209 sender = self.fixmail(sender)
210 del msg['From']
210 del msg['From']
211 msg['From'] = sender
211 msg['From'] = sender
212
212
213 msg['Date'] = util.datestr(date=util.makedate(),
213 msg['Date'] = util.datestr(date=util.makedate(),
214 format="%a, %d %b %Y %H:%M:%S",
214 format="%a, %d %b %Y %H:%M:%S",
215 timezone=True)
215 timezone=True)
216 fix_subject()
216 fix_subject()
217 fix_sender()
217 fix_sender()
218
218
219 msg['X-Hg-Notification'] = 'changeset ' + short(node)
219 msg['X-Hg-Notification'] = 'changeset ' + short(node)
220 if not msg['Message-Id']:
220 if not msg['Message-Id']:
221 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
221 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
222 (short(node), int(time.time()),
222 (short(node), int(time.time()),
223 hash(self.repo.root), socket.getfqdn()))
223 hash(self.repo.root), socket.getfqdn()))
224 msg['To'] = ', '.join(self.subs)
224 msg['To'] = ', '.join(self.subs)
225
225
226 msgtext = msg.as_string(0)
226 msgtext = msg.as_string(0)
227 if self.ui.configbool('notify', 'test', True):
227 if self.ui.configbool('notify', 'test', True):
228 self.ui.write(msgtext)
228 self.ui.write(msgtext)
229 if not msgtext.endswith('\n'):
229 if not msgtext.endswith('\n'):
230 self.ui.write('\n')
230 self.ui.write('\n')
231 else:
231 else:
232 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
232 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
233 (len(self.subs), count))
233 (len(self.subs), count))
234 mail.sendmail(self.ui, util.email(msg['From']),
234 mail.sendmail(self.ui, util.email(msg['From']),
235 self.subs, msgtext)
235 self.subs, msgtext)
236
236
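For a changegroup with no template-supplied Subject, fix_subject above synthesizes one from the repo's web path and the changeset count, then caps it at notify.maxsubject characters. A condensed sketch of that branch (the single-changeset branch instead takes the first line of the changeset description from the changelog):

def fix_subject(subject, root, count, maxsubject=67):
    # mirrors the count > 1 branch of notifier.send.fix_subject
    if not subject:
        subject = '%s: %d new changesets' % (root, count)
    if maxsubject and len(subject) > maxsubject:
        subject = subject[:maxsubject - 3] + '...'
    return subject

print(fix_subject('', 'repos/project-x', 5))   # 'repos/project-x: 5 new changesets'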
237 def diff(self, node, ref):
237 def diff(self, node, ref):
238 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
238 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
239 if maxdiff == 0:
239 if maxdiff == 0:
240 return
240 return
241 prev = self.repo.changelog.parents(node)[0]
241 prev = self.repo.changelog.parents(node)[0]
242 self.ui.pushbuffer()
242 self.ui.pushbuffer()
243 patch.diff(self.repo, prev, ref)
243 patch.diff(self.repo, prev, ref)
244 difflines = self.ui.popbuffer().splitlines(1)
244 difflines = self.ui.popbuffer().splitlines(1)
245 if self.ui.configbool('notify', 'diffstat', True):
245 if self.ui.configbool('notify', 'diffstat', True):
246 s = patch.diffstat(difflines)
246 s = patch.diffstat(difflines)
247 # s may be nil, don't include the header if it is
247 # s may be nil, don't include the header if it is
248 if s:
248 if s:
249 self.ui.write('\ndiffstat:\n\n%s' % s)
249 self.ui.write('\ndiffstat:\n\n%s' % s)
250 if maxdiff > 0 and len(difflines) > maxdiff:
250 if maxdiff > 0 and len(difflines) > maxdiff:
251 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
251 self.ui.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
252 (len(difflines), maxdiff))
252 (len(difflines), maxdiff))
253 difflines = difflines[:maxdiff]
253 difflines = difflines[:maxdiff]
254 elif difflines:
254 elif difflines:
255 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
255 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
256 self.ui.write(*difflines)
256 self.ui.write(*difflines)
257
257
258 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
258 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
259 '''send email notifications to interested subscribers.
259 '''send email notifications to interested subscribers.
260
260
261 if used as changegroup hook, send one email for all changesets in
261 if used as changegroup hook, send one email for all changesets in
262 changegroup. else send one email per changeset.'''
262 changegroup. else send one email per changeset.'''
263 n = notifier(ui, repo, hooktype)
263 n = notifier(ui, repo, hooktype)
264 if not n.subs:
264 if not n.subs:
265 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
265 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
266 return
266 return
267 if n.skipsource(source):
267 if n.skipsource(source):
268 ui.debug(_('notify: changes have source "%s" - skipping\n') %
268 ui.debug(_('notify: changes have source "%s" - skipping\n') %
269 source)
269 source)
270 return
270 return
271 node = bin(node)
271 node = bin(node)
272 ui.pushbuffer()
272 ui.pushbuffer()
273 if hooktype == 'changegroup':
273 if hooktype == 'changegroup':
274 start = repo.changelog.rev(node)
274 start = repo.changelog.rev(node)
275 end = repo.changelog.count()
275 end = repo.changelog.count()
276 count = end - start
276 count = end - start
277 for rev in xrange(start, end):
277 for rev in xrange(start, end):
278 n.node(repo.changelog.node(rev))
278 n.node(repo.changelog.node(rev))
279 n.diff(node, repo.changelog.tip())
279 n.diff(node, repo.changelog.tip())
280 else:
280 else:
281 count = 1
281 count = 1
282 n.node(node)
282 n.node(node)
283 n.diff(node, node)
283 n.diff(node, node)
284 data = ui.popbuffer()
284 data = ui.popbuffer()
285 n.send(node, count, data)
285 n.send(node, count, data)
@@ -1,466 +1,466 @@
1 # Command for sending a collection of Mercurial changesets as a series
1 # Command for sending a collection of Mercurial changesets as a series
2 # of patch emails.
2 # of patch emails.
3 #
3 #
4 # The series is started off with a "[PATCH 0 of N]" introduction,
4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 # which describes the series as a whole.
5 # which describes the series as a whole.
6 #
6 #
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 # the first line of the changeset description as the subject text.
8 # the first line of the changeset description as the subject text.
9 # The message contains two or three body parts:
9 # The message contains two or three body parts:
10 #
10 #
11 # The remainder of the changeset description.
11 # The remainder of the changeset description.
12 #
12 #
13 # [Optional] If the diffstat program is installed, the result of
13 # [Optional] If the diffstat program is installed, the result of
14 # running diffstat on the patch.
14 # running diffstat on the patch.
15 #
15 #
16 # The patch itself, as generated by "hg export".
16 # The patch itself, as generated by "hg export".
17 #
17 #
18 # Each message refers to all of its predecessors using the In-Reply-To
18 # Each message refers to all of its predecessors using the In-Reply-To
19 # and References headers, so they will show up as a sequence in
19 # and References headers, so they will show up as a sequence in
20 # threaded mail and news readers, and in mail archives.
20 # threaded mail and news readers, and in mail archives.
21 #
21 #
22 # For each changeset, you will be prompted with a diffstat summary and
22 # For each changeset, you will be prompted with a diffstat summary and
23 # the changeset summary, so you can be sure you are sending the right
23 # the changeset summary, so you can be sure you are sending the right
24 # changes.
24 # changes.
25 #
25 #
26 # To enable this extension:
26 # To enable this extension:
27 #
27 #
28 # [extensions]
28 # [extensions]
29 # hgext.patchbomb =
29 # hgext.patchbomb =
30 #
30 #
31 # To configure other defaults, add a section like this to your hgrc
31 # To configure other defaults, add a section like this to your hgrc
32 # file:
32 # file:
33 #
33 #
34 # [email]
34 # [email]
35 # from = My Name <my@email>
35 # from = My Name <my@email>
36 # to = recipient1, recipient2, ...
36 # to = recipient1, recipient2, ...
37 # cc = cc1, cc2, ...
37 # cc = cc1, cc2, ...
38 # bcc = bcc1, bcc2, ...
38 # bcc = bcc1, bcc2, ...
39 #
39 #
40 # Then you can use the "hg email" command to mail a series of changesets
40 # Then you can use the "hg email" command to mail a series of changesets
41 # as a patchbomb.
41 # as a patchbomb.
42 #
42 #
43 # To avoid sending patches prematurely, it is a good idea to first run
43 # To avoid sending patches prematurely, it is a good idea to first run
44 # the "email" command with the "-n" option (test only). You will be
44 # the "email" command with the "-n" option (test only). You will be
45 # prompted for an email recipient address, a subject and an introductory
45 # prompted for an email recipient address, a subject and an introductory
46 # message describing the patches of your patchbomb. Then when all is
46 # message describing the patches of your patchbomb. Then when all is
47 # done, patchbomb messages are displayed. If PAGER environment variable
47 # done, patchbomb messages are displayed. If PAGER environment variable
48 # is set, your pager will be fired up once for each patchbomb message, so
48 # is set, your pager will be fired up once for each patchbomb message, so
49 # you can verify everything is alright.
49 # you can verify everything is alright.
50 #
50 #
51 # The "-m" (mbox) option is also very useful. Instead of previewing
51 # The "-m" (mbox) option is also very useful. Instead of previewing
52 # each patchbomb message in a pager or sending the messages directly,
52 # each patchbomb message in a pager or sending the messages directly,
53 # it will create a UNIX mailbox file with the patch emails. This
53 # it will create a UNIX mailbox file with the patch emails. This
54 # mailbox file can be previewed with any mail user agent which supports
54 # mailbox file can be previewed with any mail user agent which supports
55 # UNIX mbox files, e.g. with mutt:
55 # UNIX mbox files, e.g. with mutt:
56 #
56 #
57 # % mutt -R -f mbox
57 # % mutt -R -f mbox
58 #
58 #
59 # When you are previewing the patchbomb messages, you can use `formail'
59 # When you are previewing the patchbomb messages, you can use `formail'
60 # (a utility that is commonly installed as part of the procmail package)
60 # (a utility that is commonly installed as part of the procmail package)
61 # to send each message out:
61 # to send each message out:
62 #
62 #
63 # % formail -s sendmail -bm -t < mbox
63 # % formail -s sendmail -bm -t < mbox
64 #
64 #
65 # That should be all. Now your patchbomb is on its way out.
65 # That should be all. Now your patchbomb is on its way out.
66
66
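As the notes above say, the messages of a series are tied together with In-Reply-To and References headers so mail readers thread them under the introductory message. A minimal sketch of that header layout using the standard library (the message-ids and subjects are invented; the extension's own threading code appears further down in this file):

from email.mime.text import MIMEText

intro_id = '<hg.0123456789abcdef0123.1171234567@example.com>'   # "[PATCH 0 of 2]" intro

msgs = []
for i in (1, 2):
    m = MIMEText('patch body %d' % i)
    m['Subject'] = '[PATCH %d of 2] example change %d' % (i, i)
    m['Message-Id'] = '<hg.%020d.1171234567@example.com>' % i
    m['In-Reply-To'] = intro_id
    m['References'] = intro_id
    msgs.append(m)

print(msgs[0]['In-Reply-To'])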
67 import os, errno, socket, tempfile
67 import os, errno, socket, tempfile
68 import email.MIMEMultipart, email.MIMEText, email.MIMEBase
68 import email.MIMEMultipart, email.MIMEText, email.MIMEBase
69 import email.Utils, email.Encoders
69 import email.Utils, email.Encoders
70 from mercurial import cmdutil, commands, hg, mail, ui, patch, util
70 from mercurial import cmdutil, commands, hg, mail, ui, patch, util
71 from mercurial.i18n import _
71 from mercurial.i18n import _
72 from mercurial.node import *
72 from mercurial.node import bin
73
73
74 def patchbomb(ui, repo, *revs, **opts):
74 def patchbomb(ui, repo, *revs, **opts):
75 '''send changesets by email
75 '''send changesets by email
76
76
77 By default, diffs are sent in the format generated by hg export,
77 By default, diffs are sent in the format generated by hg export,
78 one per message. The series starts with a "[PATCH 0 of N]"
78 one per message. The series starts with a "[PATCH 0 of N]"
79 introduction, which describes the series as a whole.
79 introduction, which describes the series as a whole.
80
80
81 Each patch email has a Subject line of "[PATCH M of N] ...", using
81 Each patch email has a Subject line of "[PATCH M of N] ...", using
82 the first line of the changeset description as the subject text.
82 the first line of the changeset description as the subject text.
83 The message contains two or three body parts. First, the rest of
83 The message contains two or three body parts. First, the rest of
84 the changeset description. Next, (optionally) if the diffstat
84 the changeset description. Next, (optionally) if the diffstat
85 program is installed, the result of running diffstat on the patch.
85 program is installed, the result of running diffstat on the patch.
86 Finally, the patch itself, as generated by "hg export".
86 Finally, the patch itself, as generated by "hg export".
87
87
88 With --outgoing, emails will be generated for patches not
88 With --outgoing, emails will be generated for patches not
89 found in the destination repository (or only those which are
89 found in the destination repository (or only those which are
90 ancestors of the specified revisions if any are provided)
90 ancestors of the specified revisions if any are provided)
91
91
92 With --bundle, changesets are selected as for --outgoing,
92 With --bundle, changesets are selected as for --outgoing,
93 but a single email containing a binary Mercurial bundle as an
93 but a single email containing a binary Mercurial bundle as an
94 attachment will be sent.
94 attachment will be sent.
95
95
96 Examples:
96 Examples:
97
97
98 hg email -r 3000 # send patch 3000 only
98 hg email -r 3000 # send patch 3000 only
99 hg email -r 3000 -r 3001 # send patches 3000 and 3001
99 hg email -r 3000 -r 3001 # send patches 3000 and 3001
100 hg email -r 3000:3005 # send patches 3000 through 3005
100 hg email -r 3000:3005 # send patches 3000 through 3005
101 hg email 3000 # send patch 3000 (deprecated)
101 hg email 3000 # send patch 3000 (deprecated)
102
102
103 hg email -o # send all patches not in default
103 hg email -o # send all patches not in default
104 hg email -o DEST # send all patches not in DEST
104 hg email -o DEST # send all patches not in DEST
105 hg email -o -r 3000 # send all ancestors of 3000 not in default
105 hg email -o -r 3000 # send all ancestors of 3000 not in default
106 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
106 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
107
107
108 hg email -b # send bundle of all patches not in default
108 hg email -b # send bundle of all patches not in default
109 hg email -b DEST # send bundle of all patches not in DEST
109 hg email -b DEST # send bundle of all patches not in DEST
110 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
110 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
111 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
111 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
112
112
113 Before using this command, you will need to enable email in your hgrc.
113 Before using this command, you will need to enable email in your hgrc.
114 See the [email] section in hgrc(5) for details.
114 See the [email] section in hgrc(5) for details.
115 '''
115 '''
116
116
117 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
117 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
118 if not ui.interactive:
118 if not ui.interactive:
119 return default
119 return default
120 if default:
120 if default:
121 prompt += ' [%s]' % default
121 prompt += ' [%s]' % default
122 prompt += rest
122 prompt += rest
123 while True:
123 while True:
124 r = ui.prompt(prompt, default=default)
124 r = ui.prompt(prompt, default=default)
125 if r:
125 if r:
126 return r
126 return r
127 if default is not None:
127 if default is not None:
128 return default
128 return default
129 if empty_ok:
129 if empty_ok:
130 return r
130 return r
131 ui.warn(_('Please enter a valid value.\n'))
131 ui.warn(_('Please enter a valid value.\n'))
132
132
133 def confirm(s, denial):
133 def confirm(s, denial):
134 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
134 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
135 raise util.Abort(denial)
135 raise util.Abort(denial)
136
136
137 def cdiffstat(summary, patchlines):
137 def cdiffstat(summary, patchlines):
138 s = patch.diffstat(patchlines)
138 s = patch.diffstat(patchlines)
139 if s:
139 if s:
140 if summary:
140 if summary:
141 ui.write(summary, '\n')
141 ui.write(summary, '\n')
142 ui.write(s, '\n')
142 ui.write(s, '\n')
143 confirm(_('Does the diffstat above look okay'),
143 confirm(_('Does the diffstat above look okay'),
144 _('diffstat rejected'))
144 _('diffstat rejected'))
145 elif s is None:
145 elif s is None:
146 ui.warn(_('No diffstat information available.\n'))
146 ui.warn(_('No diffstat information available.\n'))
147 s = ''
147 s = ''
148 return s
148 return s
149
149
150 def makepatch(patch, idx, total):
150 def makepatch(patch, idx, total):
151 desc = []
151 desc = []
152 node = None
152 node = None
153 body = ''
153 body = ''
154 for line in patch:
154 for line in patch:
155 if line.startswith('#'):
155 if line.startswith('#'):
156 if line.startswith('# Node ID'):
156 if line.startswith('# Node ID'):
157 node = line.split()[-1]
157 node = line.split()[-1]
158 continue
158 continue
159 if line.startswith('diff -r') or line.startswith('diff --git'):
159 if line.startswith('diff -r') or line.startswith('diff --git'):
160 break
160 break
161 desc.append(line)
161 desc.append(line)
162 if not node:
162 if not node:
163 raise ValueError
163 raise ValueError
164
164
165 if opts['attach']:
165 if opts['attach']:
166 body = ('\n'.join(desc[1:]).strip() or
166 body = ('\n'.join(desc[1:]).strip() or
167 'Patch subject is complete summary.')
167 'Patch subject is complete summary.')
168 body += '\n\n\n'
168 body += '\n\n\n'
169
169
170 if opts.get('plain'):
170 if opts.get('plain'):
171 while patch and patch[0].startswith('# '):
171 while patch and patch[0].startswith('# '):
172 patch.pop(0)
172 patch.pop(0)
173 if patch:
173 if patch:
174 patch.pop(0)
174 patch.pop(0)
175 while patch and not patch[0].strip():
175 while patch and not patch[0].strip():
176 patch.pop(0)
176 patch.pop(0)
177 if opts.get('diffstat'):
177 if opts.get('diffstat'):
178 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
178 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
179 if opts.get('attach') or opts.get('inline'):
179 if opts.get('attach') or opts.get('inline'):
180 msg = email.MIMEMultipart.MIMEMultipart()
180 msg = email.MIMEMultipart.MIMEMultipart()
181 if body:
181 if body:
182 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
182 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
183 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
183 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
184 binnode = bin(node)
184 binnode = bin(node)
185 # if node is mq patch, it will have patch file name as tag
185 # if node is mq patch, it will have patch file name as tag
186 patchname = [t for t in repo.nodetags(binnode)
186 patchname = [t for t in repo.nodetags(binnode)
187 if t.endswith('.patch') or t.endswith('.diff')]
187 if t.endswith('.patch') or t.endswith('.diff')]
188 if patchname:
188 if patchname:
189 patchname = patchname[0]
189 patchname = patchname[0]
190 elif total > 1:
190 elif total > 1:
191 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
191 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
192 binnode, idx, total)
192 binnode, idx, total)
193 else:
193 else:
194 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
194 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
195 disposition = 'inline'
195 disposition = 'inline'
196 if opts['attach']:
196 if opts['attach']:
197 disposition = 'attachment'
197 disposition = 'attachment'
198 p['Content-Disposition'] = disposition + '; filename=' + patchname
198 p['Content-Disposition'] = disposition + '; filename=' + patchname
199 msg.attach(p)
199 msg.attach(p)
200 else:
200 else:
201 body += '\n'.join(patch)
201 body += '\n'.join(patch)
202 msg = email.MIMEText.MIMEText(body)
202 msg = email.MIMEText.MIMEText(body)
203
203
204 subj = desc[0].strip().rstrip('. ')
204 subj = desc[0].strip().rstrip('. ')
205 if total == 1:
205 if total == 1:
206 subj = '[PATCH] ' + (opts.get('subject') or subj)
206 subj = '[PATCH] ' + (opts.get('subject') or subj)
207 else:
207 else:
208 tlen = len(str(total))
208 tlen = len(str(total))
209 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)
209 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)
210 msg['Subject'] = subj
210 msg['Subject'] = subj
211 msg['X-Mercurial-Node'] = node
211 msg['X-Mercurial-Node'] = node
212 return msg
212 return msg
213
213
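The series subject built above pads the patch index to the width of the total, so subjects sort and align correctly in long series. The formatting in isolation:

def numbered_subject(idx, total, subj):
    # mirrors makepatch: '[PATCH M of N] ...' with M zero-padded to the width of N
    tlen = len(str(total))
    return '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)

print(numbered_subject(3, 12, 'fix keyword expansion'))   # '[PATCH 03 of 12] fix keyword expansion'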
214 def outgoing(dest, revs):
214 def outgoing(dest, revs):
215 '''Return the revisions present locally but not in dest'''
215 '''Return the revisions present locally but not in dest'''
216 dest = ui.expandpath(dest or 'default-push', dest or 'default')
216 dest = ui.expandpath(dest or 'default-push', dest or 'default')
217 revs = [repo.lookup(rev) for rev in revs]
217 revs = [repo.lookup(rev) for rev in revs]
218 other = hg.repository(ui, dest)
218 other = hg.repository(ui, dest)
219 ui.status(_('comparing with %s\n') % dest)
219 ui.status(_('comparing with %s\n') % dest)
220 o = repo.findoutgoing(other)
220 o = repo.findoutgoing(other)
221 if not o:
221 if not o:
222 ui.status(_("no changes found\n"))
222 ui.status(_("no changes found\n"))
223 return []
223 return []
224 o = repo.changelog.nodesbetween(o, revs or None)[0]
224 o = repo.changelog.nodesbetween(o, revs or None)[0]
225 return [str(repo.changelog.rev(r)) for r in o]
225 return [str(repo.changelog.rev(r)) for r in o]
226
226
227 def getbundle(dest):
227 def getbundle(dest):
228 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
228 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
229 tmpfn = os.path.join(tmpdir, 'bundle')
229 tmpfn = os.path.join(tmpdir, 'bundle')
230 try:
230 try:
231 commands.bundle(ui, repo, tmpfn, dest, **opts)
231 commands.bundle(ui, repo, tmpfn, dest, **opts)
232 return open(tmpfn, 'rb').read()
232 return open(tmpfn, 'rb').read()
233 finally:
233 finally:
234 try:
234 try:
235 os.unlink(tmpfn)
235 os.unlink(tmpfn)
236 except:
236 except:
237 pass
237 pass
238 os.rmdir(tmpdir)
238 os.rmdir(tmpdir)
239
239
240 if not (opts.get('test') or opts.get('mbox')):
240 if not (opts.get('test') or opts.get('mbox')):
241 # really sending
241 # really sending
242 mail.validateconfig(ui)
242 mail.validateconfig(ui)
243
243
244 if not (revs or opts.get('rev')
244 if not (revs or opts.get('rev')
245 or opts.get('outgoing') or opts.get('bundle')):
245 or opts.get('outgoing') or opts.get('bundle')):
246 raise util.Abort(_('specify at least one changeset with -r or -o'))
246 raise util.Abort(_('specify at least one changeset with -r or -o'))
247
247
248 cmdutil.setremoteconfig(ui, opts)
248 cmdutil.setremoteconfig(ui, opts)
249 if opts.get('outgoing') and opts.get('bundle'):
249 if opts.get('outgoing') and opts.get('bundle'):
250 raise util.Abort(_("--outgoing mode always on with --bundle;"
250 raise util.Abort(_("--outgoing mode always on with --bundle;"
251 " do not re-specify --outgoing"))
251 " do not re-specify --outgoing"))
252
252
253 if opts.get('outgoing') or opts.get('bundle'):
253 if opts.get('outgoing') or opts.get('bundle'):
254 if len(revs) > 1:
254 if len(revs) > 1:
255 raise util.Abort(_("too many destinations"))
255 raise util.Abort(_("too many destinations"))
256 dest = revs and revs[0] or None
256 dest = revs and revs[0] or None
257 revs = []
257 revs = []
258
258
259 if opts.get('rev'):
259 if opts.get('rev'):
260 if revs:
260 if revs:
261 raise util.Abort(_('use only one form to specify the revision'))
261 raise util.Abort(_('use only one form to specify the revision'))
262 revs = opts.get('rev')
262 revs = opts.get('rev')
263
263
264 if opts.get('outgoing'):
264 if opts.get('outgoing'):
265 revs = outgoing(dest, opts.get('rev'))
265 revs = outgoing(dest, opts.get('rev'))
266 if opts.get('bundle'):
266 if opts.get('bundle'):
267 opts['revs'] = revs
267 opts['revs'] = revs
268
268
269 # start
269 # start
270 if opts.get('date'):
270 if opts.get('date'):
271 start_time = util.parsedate(opts.get('date'))
271 start_time = util.parsedate(opts.get('date'))
272 else:
272 else:
273 start_time = util.makedate()
273 start_time = util.makedate()
274
274
275 def genmsgid(id):
275 def genmsgid(id):
276 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
276 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
277
277
278 def getdescription(body, sender):
278 def getdescription(body, sender):
279 if opts.get('desc'):
279 if opts.get('desc'):
280 body = open(opts.get('desc')).read()
280 body = open(opts.get('desc')).read()
281 else:
281 else:
282 ui.write(_('\nWrite the introductory message for the '
282 ui.write(_('\nWrite the introductory message for the '
283 'patch series.\n\n'))
283 'patch series.\n\n'))
284 body = ui.edit(body, sender)
284 body = ui.edit(body, sender)
285 return body
285 return body
286
286
287 def getexportmsgs():
287 def getexportmsgs():
288 patches = []
288 patches = []
289
289
290 class exportee:
290 class exportee:
291 def __init__(self, container):
291 def __init__(self, container):
292 self.lines = []
292 self.lines = []
293 self.container = container
293 self.container = container
294 self.name = 'email'
294 self.name = 'email'
295
295
296 def write(self, data):
296 def write(self, data):
297 self.lines.append(data)
297 self.lines.append(data)
298
298
299 def close(self):
299 def close(self):
300 self.container.append(''.join(self.lines).split('\n'))
300 self.container.append(''.join(self.lines).split('\n'))
301 self.lines = []
301 self.lines = []
302
302
303 commands.export(ui, repo, *revs, **{'output': exportee(patches),
303 commands.export(ui, repo, *revs, **{'output': exportee(patches),
304 'switch_parent': False,
304 'switch_parent': False,
305 'text': None,
305 'text': None,
306 'git': opts.get('git')})
306 'git': opts.get('git')})
307
307
308 jumbo = []
308 jumbo = []
309 msgs = []
309 msgs = []
310
310
311 ui.write(_('This patch series consists of %d patches.\n\n')
311 ui.write(_('This patch series consists of %d patches.\n\n')
312 % len(patches))
312 % len(patches))
313
313
314 for p, i in zip(patches, xrange(len(patches))):
314 for p, i in zip(patches, xrange(len(patches))):
315 jumbo.extend(p)
315 jumbo.extend(p)
316 msgs.append(makepatch(p, i + 1, len(patches)))
316 msgs.append(makepatch(p, i + 1, len(patches)))
317
317
318 if len(patches) > 1:
318 if len(patches) > 1:
319 tlen = len(str(len(patches)))
319 tlen = len(str(len(patches)))
320
320
321 subj = '[PATCH %0*d of %d] %s' % (
321 subj = '[PATCH %0*d of %d] %s' % (
322 tlen, 0, len(patches),
322 tlen, 0, len(patches),
323 opts.get('subject') or
323 opts.get('subject') or
324 prompt('Subject:',
324 prompt('Subject:',
325 rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches))))
325 rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches))))
326
326
327 body = ''
327 body = ''
328 if opts.get('diffstat'):
328 if opts.get('diffstat'):
329 d = cdiffstat(_('Final summary:\n'), jumbo)
329 d = cdiffstat(_('Final summary:\n'), jumbo)
330 if d:
330 if d:
331 body = '\n' + d
331 body = '\n' + d
332
332
333 body = getdescription(body, sender)
333 body = getdescription(body, sender)
334 msg = email.MIMEText.MIMEText(body)
334 msg = email.MIMEText.MIMEText(body)
335 msg['Subject'] = subj
335 msg['Subject'] = subj
336
336
337 msgs.insert(0, msg)
337 msgs.insert(0, msg)
338 return msgs
338 return msgs
339
339
340 def getbundlemsgs(bundle):
340 def getbundlemsgs(bundle):
341 subj = (opts.get('subject')
341 subj = (opts.get('subject')
342 or prompt('Subject:', default='A bundle for your repository'))
342 or prompt('Subject:', default='A bundle for your repository'))
343
343
344 body = getdescription('', sender)
344 body = getdescription('', sender)
345 msg = email.MIMEMultipart.MIMEMultipart()
345 msg = email.MIMEMultipart.MIMEMultipart()
346 if body:
346 if body:
347 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
347 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
348 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
348 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
349 datapart.set_payload(bundle)
349 datapart.set_payload(bundle)
350 datapart.add_header('Content-Disposition', 'attachment',
350 datapart.add_header('Content-Disposition', 'attachment',
351 filename='bundle.hg')
351 filename='bundle.hg')
352 email.Encoders.encode_base64(datapart)
352 email.Encoders.encode_base64(datapart)
353 msg.attach(datapart)
353 msg.attach(datapart)
354 msg['Subject'] = subj
354 msg['Subject'] = subj
355 return [msg]
355 return [msg]
356
356
357 sender = (opts.get('from') or ui.config('email', 'from') or
357 sender = (opts.get('from') or ui.config('email', 'from') or
358 ui.config('patchbomb', 'from') or
358 ui.config('patchbomb', 'from') or
359 prompt('From', ui.username()))
359 prompt('From', ui.username()))
360
360
361 if opts.get('bundle'):
361 if opts.get('bundle'):
362 msgs = getbundlemsgs(getbundle(dest))
362 msgs = getbundlemsgs(getbundle(dest))
363 else:
363 else:
364 msgs = getexportmsgs()
364 msgs = getexportmsgs()
365
365
366 def getaddrs(opt, prpt, default = None):
366 def getaddrs(opt, prpt, default = None):
367 addrs = opts.get(opt) or (ui.config('email', opt) or
367 addrs = opts.get(opt) or (ui.config('email', opt) or
368 ui.config('patchbomb', opt) or
368 ui.config('patchbomb', opt) or
369 prompt(prpt, default = default)).split(',')
369 prompt(prpt, default = default)).split(',')
370 return [a.strip() for a in addrs if a.strip()]
370 return [a.strip() for a in addrs if a.strip()]
371
371
372 to = getaddrs('to', 'To')
372 to = getaddrs('to', 'To')
373 cc = getaddrs('cc', 'Cc', '')
373 cc = getaddrs('cc', 'Cc', '')
374
374
375 bcc = opts.get('bcc') or (ui.config('email', 'bcc') or
375 bcc = opts.get('bcc') or (ui.config('email', 'bcc') or
376 ui.config('patchbomb', 'bcc') or '').split(',')
376 ui.config('patchbomb', 'bcc') or '').split(',')
377 bcc = [a.strip() for a in bcc if a.strip()]
377 bcc = [a.strip() for a in bcc if a.strip()]
378
378
379 ui.write('\n')
379 ui.write('\n')
380
380
381 parent = None
381 parent = None
382
382
383 sender_addr = email.Utils.parseaddr(sender)[1]
383 sender_addr = email.Utils.parseaddr(sender)[1]
384 sendmail = None
384 sendmail = None
385 for m in msgs:
385 for m in msgs:
386 try:
386 try:
387 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
387 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
388 except TypeError:
388 except TypeError:
389 m['Message-Id'] = genmsgid('patchbomb')
389 m['Message-Id'] = genmsgid('patchbomb')
390 if parent:
390 if parent:
391 m['In-Reply-To'] = parent
391 m['In-Reply-To'] = parent
392 else:
392 else:
393 parent = m['Message-Id']
393 parent = m['Message-Id']
394 m['Date'] = util.datestr(date=start_time,
394 m['Date'] = util.datestr(date=start_time,
395 format="%a, %d %b %Y %H:%M:%S", timezone=True)
395 format="%a, %d %b %Y %H:%M:%S", timezone=True)
396
396
397 start_time = (start_time[0] + 1, start_time[1])
397 start_time = (start_time[0] + 1, start_time[1])
398 m['From'] = sender
398 m['From'] = sender
399 m['To'] = ', '.join(to)
399 m['To'] = ', '.join(to)
400 if cc:
400 if cc:
401 m['Cc'] = ', '.join(cc)
401 m['Cc'] = ', '.join(cc)
402 if bcc:
402 if bcc:
403 m['Bcc'] = ', '.join(bcc)
403 m['Bcc'] = ', '.join(bcc)
404 if opts.get('test'):
404 if opts.get('test'):
405 ui.status('Displaying ', m['Subject'], ' ...\n')
405 ui.status('Displaying ', m['Subject'], ' ...\n')
406 ui.flush()
406 ui.flush()
407 if 'PAGER' in os.environ:
407 if 'PAGER' in os.environ:
408 fp = os.popen(os.environ['PAGER'], 'w')
408 fp = os.popen(os.environ['PAGER'], 'w')
409 else:
409 else:
410 fp = ui
410 fp = ui
411 try:
411 try:
412 fp.write(m.as_string(0))
412 fp.write(m.as_string(0))
413 fp.write('\n')
413 fp.write('\n')
414 except IOError, inst:
414 except IOError, inst:
415 if inst.errno != errno.EPIPE:
415 if inst.errno != errno.EPIPE:
416 raise
416 raise
417 if fp is not ui:
417 if fp is not ui:
418 fp.close()
418 fp.close()
419 elif opts.get('mbox'):
419 elif opts.get('mbox'):
420 ui.status('Writing ', m['Subject'], ' ...\n')
420 ui.status('Writing ', m['Subject'], ' ...\n')
421 fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
421 fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
422 date = util.datestr(date=start_time,
422 date = util.datestr(date=start_time,
423 format='%a %b %d %H:%M:%S %Y', timezone=False)
423 format='%a %b %d %H:%M:%S %Y', timezone=False)
424 fp.write('From %s %s\n' % (sender_addr, date))
424 fp.write('From %s %s\n' % (sender_addr, date))
425 fp.write(m.as_string(0))
425 fp.write(m.as_string(0))
426 fp.write('\n\n')
426 fp.write('\n\n')
427 fp.close()
427 fp.close()
428 else:
428 else:
429 if not sendmail:
429 if not sendmail:
430 sendmail = mail.connect(ui)
430 sendmail = mail.connect(ui)
431 ui.status('Sending ', m['Subject'], ' ...\n')
431 ui.status('Sending ', m['Subject'], ' ...\n')
432 # Exim does not remove the Bcc field
432 # Exim does not remove the Bcc field
433 del m['Bcc']
433 del m['Bcc']
434 sendmail(sender, to + bcc + cc, m.as_string(0))
434 sendmail(sender, to + bcc + cc, m.as_string(0))
435
435
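The loop above gives each message its own Message-Id and points every follow-up at the first message through In-Reply-To, so mail readers collapse the whole series into one thread under the introductory mail. A minimal standalone sketch of that threading scheme, using the modern email API rather than this module's Python 2 calls (thread_series, make_msgid and the argument names are illustrative, not part of this changeset):

import email.utils
from email.mime.text import MIMEText

def thread_series(bodies, sender, to):
    # every message after the first replies to the first one, mirroring
    # the In-Reply-To handling in the send loop above
    msgs, parent = [], None
    for body in bodies:
        m = MIMEText(body)
        m['Message-Id'] = email.utils.make_msgid()   # stand-in for genmsgid()
        if parent:
            m['In-Reply-To'] = parent
        else:
            parent = m['Message-Id']
        m['From'], m['To'] = sender, to
        msgs.append(m)
    return msgs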
436 cmdtable = {
436 cmdtable = {
437 "email":
437 "email":
438 (patchbomb,
438 (patchbomb,
439 [('a', 'attach', None, _('send patches as attachments')),
439 [('a', 'attach', None, _('send patches as attachments')),
440 ('i', 'inline', None, _('send patches as inline attachments')),
440 ('i', 'inline', None, _('send patches as inline attachments')),
441 ('', 'bcc', [], _('email addresses of blind copy recipients')),
441 ('', 'bcc', [], _('email addresses of blind copy recipients')),
442 ('c', 'cc', [], _('email addresses of copy recipients')),
442 ('c', 'cc', [], _('email addresses of copy recipients')),
443 ('d', 'diffstat', None, _('add diffstat output to messages')),
443 ('d', 'diffstat', None, _('add diffstat output to messages')),
444 ('', 'date', '', _('use the given date as the sending date')),
444 ('', 'date', '', _('use the given date as the sending date')),
445 ('', 'desc', '', _('use the given file as the series description')),
445 ('', 'desc', '', _('use the given file as the series description')),
446 ('g', 'git', None, _('use git extended diff format')),
446 ('g', 'git', None, _('use git extended diff format')),
447 ('f', 'from', '', _('email address of sender')),
447 ('f', 'from', '', _('email address of sender')),
448 ('', 'plain', None, _('omit hg patch header')),
448 ('', 'plain', None, _('omit hg patch header')),
449 ('n', 'test', None, _('print messages that would be sent')),
449 ('n', 'test', None, _('print messages that would be sent')),
450 ('m', 'mbox', '',
450 ('m', 'mbox', '',
451 _('write messages to mbox file instead of sending them')),
451 _('write messages to mbox file instead of sending them')),
452 ('o', 'outgoing', None,
452 ('o', 'outgoing', None,
453 _('send changes not found in the target repository')),
453 _('send changes not found in the target repository')),
454 ('b', 'bundle', None,
454 ('b', 'bundle', None,
455 _('send changes not in target as a binary bundle')),
455 _('send changes not in target as a binary bundle')),
456 ('r', 'rev', [], _('a revision to send')),
456 ('r', 'rev', [], _('a revision to send')),
457 ('s', 'subject', '',
457 ('s', 'subject', '',
458 _('subject of first message (intro or single patch)')),
458 _('subject of first message (intro or single patch)')),
459 ('t', 'to', [], _('email addresses of recipients')),
459 ('t', 'to', [], _('email addresses of recipients')),
460 ('', 'force', None,
460 ('', 'force', None,
461 _('run even when remote repository is unrelated (with -b)')),
461 _('run even when remote repository is unrelated (with -b)')),
462 ('', 'base', [],
462 ('', 'base', [],
463 _('a base changeset to specify instead of a destination (with -b)')),
463 _('a base changeset to specify instead of a destination (with -b)')),
464 ] + commands.remoteopts,
464 ] + commands.remoteopts,
465 _('hg email [OPTION]... [DEST]...'))
465 _('hg email [OPTION]... [DEST]...'))
466 }
466 }
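Taken together, the options table above supports invocations such as the following (with the patchbomb extension enabled; addresses and revision ranges are illustrative):

hg email -n -r 100:tip -t patches@example.com                      # preview the messages without sending
hg email -o -s "fix series" -t patches@example.com default-push    # mail every changeset missing from default-push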
@@ -1,107 +1,107 b''
1 # win32text.py - LF <-> CRLF translation utilities for Windows users
1 # win32text.py - LF <-> CRLF translation utilities for Windows users
2 #
2 #
3 # This software may be used and distributed according to the terms
3 # This software may be used and distributed according to the terms
4 # of the GNU General Public License, incorporated herein by reference.
4 # of the GNU General Public License, incorporated herein by reference.
5 #
5 #
6 # To perform automatic newline conversion, use:
6 # To perform automatic newline conversion, use:
7 #
7 #
8 # [extensions]
8 # [extensions]
9 # hgext.win32text =
9 # hgext.win32text =
10 # [encode]
10 # [encode]
11 # ** = cleverencode:
11 # ** = cleverencode:
12 # [decode]
12 # [decode]
13 # ** = cleverdecode:
13 # ** = cleverdecode:
14 #
14 #
15 # If not doing conversion, to make sure you do not commit CRLF by accident:
15 # If not doing conversion, to make sure you do not commit CRLF by accident:
16 #
16 #
17 # [hooks]
17 # [hooks]
18 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
18 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
19 #
19 #
20 # To do the same check on a server to prevent CRLF from being pushed or pulled:
20 # To do the same check on a server to prevent CRLF from being pushed or pulled:
21 #
21 #
22 # [hooks]
22 # [hooks]
23 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
23 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
24
24
25 from mercurial import util, ui
25 from mercurial import util, ui
26 from mercurial.i18n import gettext as _
26 from mercurial.i18n import gettext as _
27 from mercurial.node import *
27 from mercurial.node import bin, short
28 import re
28 import re
29
29
30 # regexp for single LF without CR preceding.
30 # regexp for single LF without CR preceding.
31 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
31 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
32
32
33 def dumbdecode(s, cmd, ui=None, repo=None, filename=None, **kwargs):
33 def dumbdecode(s, cmd, ui=None, repo=None, filename=None, **kwargs):
34 # warn if already has CRLF in repository.
34 # warn if already has CRLF in repository.
35 # it might cause unexpected eol conversion.
35 # it might cause unexpected eol conversion.
36 # see issue 302:
36 # see issue 302:
37 # http://www.selenic.com/mercurial/bts/issue302
37 # http://www.selenic.com/mercurial/bts/issue302
38 if '\r\n' in s and ui and filename and repo:
38 if '\r\n' in s and ui and filename and repo:
39 ui.warn(_('WARNING: %s already has CRLF line endings\n'
39 ui.warn(_('WARNING: %s already has CRLF line endings\n'
40 'and does not need EOL conversion by the win32text plugin.\n'
40 'and does not need EOL conversion by the win32text plugin.\n'
41 'Before your next commit, please reconsider your '
41 'Before your next commit, please reconsider your '
42 'encode/decode settings in \nMercurial.ini or %s.\n') %
42 'encode/decode settings in \nMercurial.ini or %s.\n') %
43 (filename, repo.join('hgrc')))
43 (filename, repo.join('hgrc')))
44 # replace single LF to CRLF
44 # replace single LF to CRLF
45 return re_single_lf.sub('\\1\r\n', s)
45 return re_single_lf.sub('\\1\r\n', s)
46
46
47 def dumbencode(s, cmd):
47 def dumbencode(s, cmd):
48 return s.replace('\r\n', '\n')
48 return s.replace('\r\n', '\n')
49
49
50 def clevertest(s, cmd):
50 def clevertest(s, cmd):
51 if '\0' in s: return False
51 if '\0' in s: return False
52 return True
52 return True
53
53
54 def cleverdecode(s, cmd, **kwargs):
54 def cleverdecode(s, cmd, **kwargs):
55 if clevertest(s, cmd):
55 if clevertest(s, cmd):
56 return dumbdecode(s, cmd, **kwargs)
56 return dumbdecode(s, cmd, **kwargs)
57 return s
57 return s
58
58
59 def cleverencode(s, cmd):
59 def cleverencode(s, cmd):
60 if clevertest(s, cmd):
60 if clevertest(s, cmd):
61 return dumbencode(s, cmd)
61 return dumbencode(s, cmd)
62 return s
62 return s
63
63
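As a quick standalone check of how re_single_lf and the two dumb filters above interact (the sample string is made up):

import re

re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)

text = 'one\ntwo\r\nthree\n'
crlf = re_single_lf.sub('\\1\r\n', text)        # what dumbdecode does when writing to the working dir
assert crlf == 'one\r\ntwo\r\nthree\r\n'        # lone LFs converted, existing CRLF left alone
assert crlf.replace('\r\n', '\n') == text       # what dumbencode does when storing, round-trips cleanly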
64 _filters = {
64 _filters = {
65 'dumbdecode:': dumbdecode,
65 'dumbdecode:': dumbdecode,
66 'dumbencode:': dumbencode,
66 'dumbencode:': dumbencode,
67 'cleverdecode:': cleverdecode,
67 'cleverdecode:': cleverdecode,
68 'cleverencode:': cleverencode,
68 'cleverencode:': cleverencode,
69 }
69 }
70
70
71 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
71 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
72 halt = False
72 halt = False
73 for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
73 for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
74 c = repo.changectx(rev)
74 c = repo.changectx(rev)
75 for f in c.files():
75 for f in c.files():
76 if f not in c:
76 if f not in c:
77 continue
77 continue
78 data = c[f].data()
78 data = c[f].data()
79 if '\0' not in data and '\r\n' in data:
79 if '\0' not in data and '\r\n' in data:
80 if not halt:
80 if not halt:
81 ui.warn(_('Attempt to commit or push text file(s) '
81 ui.warn(_('Attempt to commit or push text file(s) '
82 'using CRLF line endings\n'))
82 'using CRLF line endings\n'))
83 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
83 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
84 halt = True
84 halt = True
85 if halt and hooktype == 'pretxnchangegroup':
85 if halt and hooktype == 'pretxnchangegroup':
86 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
86 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
87 'add to Mercurial.ini or .hg/hgrc:\n'
87 'add to Mercurial.ini or .hg/hgrc:\n'
88 '\n'
88 '\n'
89 '[hooks]\n'
89 '[hooks]\n'
90 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf\n'
90 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf\n'
91 '\n'
91 '\n'
92 'and also consider adding:\n'
92 'and also consider adding:\n'
93 '\n'
93 '\n'
94 '[extensions]\n'
94 '[extensions]\n'
95 'hgext.win32text =\n'
95 'hgext.win32text =\n'
96 '[encode]\n'
96 '[encode]\n'
97 '** = cleverencode:\n'
97 '** = cleverencode:\n'
98 '[decode]\n'
98 '[decode]\n'
99 '** = cleverdecode:\n'))
99 '** = cleverdecode:\n'))
100 return halt
100 return halt
101
101
102 def reposetup(ui, repo):
102 def reposetup(ui, repo):
103 if not repo.local():
103 if not repo.local():
104 return
104 return
105 for name, fn in _filters.iteritems():
105 for name, fn in _filters.iteritems():
106 repo.adddatafilter(name, fn)
106 repo.adddatafilter(name, fn)
107
107
@@ -1,224 +1,224 b''
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of
5 # This software may be used and distributed according to the terms of
6 # the GNU General Public License, incorporated herein by reference.
6 # the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 from node import *
9 from node import hex
10 import cStringIO, os, stat, tarfile, time, util, zipfile
10 import cStringIO, os, stat, tarfile, time, util, zipfile
11 import zlib, gzip
11 import zlib, gzip
12
12
13 def tidyprefix(dest, prefix, suffixes):
13 def tidyprefix(dest, prefix, suffixes):
14 '''choose prefix to use for names in archive. make sure prefix is
14 '''choose prefix to use for names in archive. make sure prefix is
15 safe for consumers.'''
15 safe for consumers.'''
16
16
17 if prefix:
17 if prefix:
18 prefix = util.normpath(prefix)
18 prefix = util.normpath(prefix)
19 else:
19 else:
20 if not isinstance(dest, str):
20 if not isinstance(dest, str):
21 raise ValueError('dest must be string if no prefix')
21 raise ValueError('dest must be string if no prefix')
22 prefix = os.path.basename(dest)
22 prefix = os.path.basename(dest)
23 lower = prefix.lower()
23 lower = prefix.lower()
24 for sfx in suffixes:
24 for sfx in suffixes:
25 if lower.endswith(sfx):
25 if lower.endswith(sfx):
26 prefix = prefix[:-len(sfx)]
26 prefix = prefix[:-len(sfx)]
27 break
27 break
28 lpfx = os.path.normpath(util.localpath(prefix))
28 lpfx = os.path.normpath(util.localpath(prefix))
29 prefix = util.pconvert(lpfx)
29 prefix = util.pconvert(lpfx)
30 if not prefix.endswith('/'):
30 if not prefix.endswith('/'):
31 prefix += '/'
31 prefix += '/'
32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 raise util.Abort(_('archive prefix contains illegal components'))
33 raise util.Abort(_('archive prefix contains illegal components'))
34 return prefix
34 return prefix
35
35
36 class tarit:
36 class tarit:
37 '''write archive to tar file or stream. can write uncompressed,
37 '''write archive to tar file or stream. can write uncompressed,
38 or compress with gzip or bzip2.'''
38 or compress with gzip or bzip2.'''
39
39
40 class GzipFileWithTime(gzip.GzipFile):
40 class GzipFileWithTime(gzip.GzipFile):
41
41
42 def __init__(self, *args, **kw):
42 def __init__(self, *args, **kw):
43 timestamp = None
43 timestamp = None
44 if 'timestamp' in kw:
44 if 'timestamp' in kw:
45 timestamp = kw.pop('timestamp')
45 timestamp = kw.pop('timestamp')
46 if timestamp == None:
46 if timestamp == None:
47 self.timestamp = time.time()
47 self.timestamp = time.time()
48 else:
48 else:
49 self.timestamp = timestamp
49 self.timestamp = timestamp
50 gzip.GzipFile.__init__(self, *args, **kw)
50 gzip.GzipFile.__init__(self, *args, **kw)
51
51
52 def _write_gzip_header(self):
52 def _write_gzip_header(self):
53 self.fileobj.write('\037\213') # magic header
53 self.fileobj.write('\037\213') # magic header
54 self.fileobj.write('\010') # compression method
54 self.fileobj.write('\010') # compression method
55 fname = self.filename[:-3]
55 fname = self.filename[:-3]
56 flags = 0
56 flags = 0
57 if fname:
57 if fname:
58 flags = gzip.FNAME
58 flags = gzip.FNAME
59 self.fileobj.write(chr(flags))
59 self.fileobj.write(chr(flags))
60 gzip.write32u(self.fileobj, long(self.timestamp))
60 gzip.write32u(self.fileobj, long(self.timestamp))
61 self.fileobj.write('\002')
61 self.fileobj.write('\002')
62 self.fileobj.write('\377')
62 self.fileobj.write('\377')
63 if fname:
63 if fname:
64 self.fileobj.write(fname + '\000')
64 self.fileobj.write(fname + '\000')
65
65
66 def __init__(self, dest, prefix, mtime, kind=''):
66 def __init__(self, dest, prefix, mtime, kind=''):
67 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
67 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
68 '.tgz', '.tbz2'])
68 '.tgz', '.tbz2'])
69 self.mtime = mtime
69 self.mtime = mtime
70
70
71 def taropen(name, mode, fileobj=None):
71 def taropen(name, mode, fileobj=None):
72 if kind == 'gz':
72 if kind == 'gz':
73 mode = mode[0]
73 mode = mode[0]
74 if not fileobj:
74 if not fileobj:
75 fileobj = open(name, mode + 'b')
75 fileobj = open(name, mode + 'b')
76 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
76 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
77 zlib.Z_BEST_COMPRESSION,
77 zlib.Z_BEST_COMPRESSION,
78 fileobj, timestamp=mtime)
78 fileobj, timestamp=mtime)
79 return tarfile.TarFile.taropen(name, mode, gzfileobj)
79 return tarfile.TarFile.taropen(name, mode, gzfileobj)
80 else:
80 else:
81 return tarfile.open(name, mode + kind, fileobj)
81 return tarfile.open(name, mode + kind, fileobj)
82
82
83 if isinstance(dest, str):
83 if isinstance(dest, str):
84 self.z = taropen(dest, mode='w:')
84 self.z = taropen(dest, mode='w:')
85 else:
85 else:
86 # Python 2.5-2.5.1 have a regression that requires a name arg
86 # Python 2.5-2.5.1 have a regression that requires a name arg
87 self.z = taropen(name='', mode='w|', fileobj=dest)
87 self.z = taropen(name='', mode='w|', fileobj=dest)
88
88
89 def addfile(self, name, mode, islink, data):
89 def addfile(self, name, mode, islink, data):
90 i = tarfile.TarInfo(self.prefix + name)
90 i = tarfile.TarInfo(self.prefix + name)
91 i.mtime = self.mtime
91 i.mtime = self.mtime
92 i.size = len(data)
92 i.size = len(data)
93 if islink:
93 if islink:
94 i.type = tarfile.SYMTYPE
94 i.type = tarfile.SYMTYPE
95 i.mode = 0777
95 i.mode = 0777
96 i.linkname = data
96 i.linkname = data
97 data = None
97 data = None
98 else:
98 else:
99 i.mode = mode
99 i.mode = mode
100 data = cStringIO.StringIO(data)
100 data = cStringIO.StringIO(data)
101 self.z.addfile(i, data)
101 self.z.addfile(i, data)
102
102
103 def done(self):
103 def done(self):
104 self.z.close()
104 self.z.close()
105
105
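GzipFileWithTime exists because the gzip module of this era always stamped the member header with the current time, so two archives of the same revision would differ byte-for-byte; the subclass pins the timestamp instead. Later Python versions expose the same control through GzipFile's mtime argument. A standalone sketch under that assumption (the timestamp value is illustrative):

import gzip, io

fixed_mtime = 1199145600    # pin the header timestamp instead of letting gzip use time.time()
buf = io.BytesIO()
with gzip.GzipFile(filename='', mode='wb', fileobj=buf, mtime=fixed_mtime) as gz:
    gz.write(b'archive payload')
# buf.getvalue() is now reproducible for identical input and mtime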
106 class tellable:
106 class tellable:
107 '''provide tell method for zipfile.ZipFile when writing to http
107 '''provide tell method for zipfile.ZipFile when writing to http
108 response file object.'''
108 response file object.'''
109
109
110 def __init__(self, fp):
110 def __init__(self, fp):
111 self.fp = fp
111 self.fp = fp
112 self.offset = 0
112 self.offset = 0
113
113
114 def __getattr__(self, key):
114 def __getattr__(self, key):
115 return getattr(self.fp, key)
115 return getattr(self.fp, key)
116
116
117 def write(self, s):
117 def write(self, s):
118 self.fp.write(s)
118 self.fp.write(s)
119 self.offset += len(s)
119 self.offset += len(s)
120
120
121 def tell(self):
121 def tell(self):
122 return self.offset
122 return self.offset
123
123
124 class zipit:
124 class zipit:
125 '''write archive to zip file or stream. can write uncompressed,
125 '''write archive to zip file or stream. can write uncompressed,
126 or compressed with deflate.'''
126 or compressed with deflate.'''
127
127
128 def __init__(self, dest, prefix, mtime, compress=True):
128 def __init__(self, dest, prefix, mtime, compress=True):
129 self.prefix = tidyprefix(dest, prefix, ('.zip',))
129 self.prefix = tidyprefix(dest, prefix, ('.zip',))
130 if not isinstance(dest, str):
130 if not isinstance(dest, str):
131 try:
131 try:
132 dest.tell()
132 dest.tell()
133 except (AttributeError, IOError):
133 except (AttributeError, IOError):
134 dest = tellable(dest)
134 dest = tellable(dest)
135 self.z = zipfile.ZipFile(dest, 'w',
135 self.z = zipfile.ZipFile(dest, 'w',
136 compress and zipfile.ZIP_DEFLATED or
136 compress and zipfile.ZIP_DEFLATED or
137 zipfile.ZIP_STORED)
137 zipfile.ZIP_STORED)
138 self.date_time = time.gmtime(mtime)[:6]
138 self.date_time = time.gmtime(mtime)[:6]
139
139
140 def addfile(self, name, mode, islink, data):
140 def addfile(self, name, mode, islink, data):
141 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
141 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
142 i.compress_type = self.z.compression
142 i.compress_type = self.z.compression
143 # unzip will not honor unix file modes unless file creator is
143 # unzip will not honor unix file modes unless file creator is
144 # set to unix (id 3).
144 # set to unix (id 3).
145 i.create_system = 3
145 i.create_system = 3
146 ftype = stat.S_IFREG
146 ftype = stat.S_IFREG
147 if islink:
147 if islink:
148 mode = 0777
148 mode = 0777
149 ftype = stat.S_IFLNK
149 ftype = stat.S_IFLNK
150 i.external_attr = (mode | ftype) << 16L
150 i.external_attr = (mode | ftype) << 16L
151 self.z.writestr(i, data)
151 self.z.writestr(i, data)
152
152
153 def done(self):
153 def done(self):
154 self.z.close()
154 self.z.close()
155
155
156 class fileit:
156 class fileit:
157 '''write archive as files in directory.'''
157 '''write archive as files in directory.'''
158
158
159 def __init__(self, name, prefix, mtime):
159 def __init__(self, name, prefix, mtime):
160 if prefix:
160 if prefix:
161 raise util.Abort(_('cannot give prefix when archiving to files'))
161 raise util.Abort(_('cannot give prefix when archiving to files'))
162 self.basedir = name
162 self.basedir = name
163 self.opener = util.opener(self.basedir)
163 self.opener = util.opener(self.basedir)
164
164
165 def addfile(self, name, mode, islink, data):
165 def addfile(self, name, mode, islink, data):
166 if islink:
166 if islink:
167 self.opener.symlink(data, name)
167 self.opener.symlink(data, name)
168 return
168 return
169 f = self.opener(name, "w", atomictemp=True)
169 f = self.opener(name, "w", atomictemp=True)
170 f.write(data)
170 f.write(data)
171 f.rename()
171 f.rename()
172 destfile = os.path.join(self.basedir, name)
172 destfile = os.path.join(self.basedir, name)
173 os.chmod(destfile, mode)
173 os.chmod(destfile, mode)
174
174
175 def done(self):
175 def done(self):
176 pass
176 pass
177
177
178 archivers = {
178 archivers = {
179 'files': fileit,
179 'files': fileit,
180 'tar': tarit,
180 'tar': tarit,
181 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
181 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
182 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
182 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
183 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
183 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
184 'zip': zipit,
184 'zip': zipit,
185 }
185 }
186
186
187 def archive(repo, dest, node, kind, decode=True, matchfn=None,
187 def archive(repo, dest, node, kind, decode=True, matchfn=None,
188 prefix=None, mtime=None):
188 prefix=None, mtime=None):
189 '''create archive of repo as it was at node.
189 '''create archive of repo as it was at node.
190
190
191 dest can be name of directory, name of archive file, or file
191 dest can be name of directory, name of archive file, or file
192 object to write archive to.
192 object to write archive to.
193
193
194 kind is type of archive to create.
194 kind is type of archive to create.
195
195
196 decode tells whether to put files through decode filters from
196 decode tells whether to put files through decode filters from
197 hgrc.
197 hgrc.
198
198
199 matchfn is function to filter names of files to write to archive.
199 matchfn is function to filter names of files to write to archive.
200
200
201 prefix is name of path to put before every archive member.'''
201 prefix is name of path to put before every archive member.'''
202
202
203 def write(name, mode, islink, getdata):
203 def write(name, mode, islink, getdata):
204 if matchfn and not matchfn(name): return
204 if matchfn and not matchfn(name): return
205 data = getdata()
205 data = getdata()
206 if decode:
206 if decode:
207 data = repo.wwritedata(name, data)
207 data = repo.wwritedata(name, data)
208 archiver.addfile(name, mode, islink, data)
208 archiver.addfile(name, mode, islink, data)
209
209
210 ctx = repo.changectx(node)
210 ctx = repo.changectx(node)
211 if kind not in archivers:
211 if kind not in archivers:
212 raise util.Abort(_("unknown archive type '%s'" % kind))
212 raise util.Abort(_("unknown archive type '%s'" % kind))
213 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
213 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
214 m = ctx.manifest()
214 m = ctx.manifest()
215 items = m.items()
215 items = m.items()
216 items.sort()
216 items.sort()
217 if repo.ui.configbool("ui", "archivemeta", True):
217 if repo.ui.configbool("ui", "archivemeta", True):
218 write('.hg_archival.txt', 0644, False,
218 write('.hg_archival.txt', 0644, False,
219 lambda: 'repo: %s\nnode: %s\n' % (
219 lambda: 'repo: %s\nnode: %s\n' % (
220 hex(repo.changelog.node(0)), hex(node)))
220 hex(repo.changelog.node(0)), hex(node)))
221 for filename, filenode in items:
221 for filename, filenode in items:
222 write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
222 write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
223 lambda: repo.file(filename).read(filenode))
223 lambda: repo.file(filename).read(filenode))
224 archiver.done()
224 archiver.done()
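A rough usage sketch of archive() against this era's internal API (module and constructor names may differ in other Mercurial versions; the destination path and prefix are illustrative):

from mercurial import ui as uimod, hg, archival

u = uimod.ui()
repo = hg.repository(u, '.')
# write a gzipped tarball of the tip revision, prefixing every archive member
archival.archive(repo, 'snapshot.tar.gz', repo.lookup('tip'), 'tgz',
                 prefix='snapshot/')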
@@ -1,282 +1,282 b''
1 """
1 """
2 bundlerepo.py - repository class for viewing uncompressed bundles
2 bundlerepo.py - repository class for viewing uncompressed bundles
3
3
4 This provides a read-only repository interface to bundles as if
4 This provides a read-only repository interface to bundles as if
5 they were part of the actual repository.
5 they were part of the actual repository.
6
6
7 Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
7 Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import hex, nullid, short
14 from i18n import _
14 from i18n import _
15 import changegroup, util, os, struct, bz2, tempfile, mdiff
15 import changegroup, util, os, struct, bz2, tempfile, mdiff
16 import localrepo, changelog, manifest, filelog, revlog
16 import localrepo, changelog, manifest, filelog, revlog
17
17
18 class bundlerevlog(revlog.revlog):
18 class bundlerevlog(revlog.revlog):
19 def __init__(self, opener, indexfile, bundlefile,
19 def __init__(self, opener, indexfile, bundlefile,
20 linkmapper=None):
20 linkmapper=None):
21 # How it works:
21 # How it works:
22 # to retrieve a revision, we need to know the offset of
22 # to retrieve a revision, we need to know the offset of
23 # the revision in the bundlefile (an opened file).
23 # the revision in the bundlefile (an opened file).
24 #
24 #
25 # We store this offset in the index (start), to differentiate a
25 # We store this offset in the index (start), to differentiate a
26 # rev in the bundle from a rev in the revlog, we check
26 # rev in the bundle from a rev in the revlog, we check
27 # len(index[r]). If the tuple is bigger than 7, it is a bundle
27 # len(index[r]). If the tuple is bigger than 7, it is a bundle
28 # (it is bigger since we store the node to which the delta is)
28 # (it is bigger since we store the node to which the delta is)
29 #
29 #
30 revlog.revlog.__init__(self, opener, indexfile)
30 revlog.revlog.__init__(self, opener, indexfile)
31 self.bundlefile = bundlefile
31 self.bundlefile = bundlefile
32 self.basemap = {}
32 self.basemap = {}
33 def chunkpositer():
33 def chunkpositer():
34 for chunk in changegroup.chunkiter(bundlefile):
34 for chunk in changegroup.chunkiter(bundlefile):
35 pos = bundlefile.tell()
35 pos = bundlefile.tell()
36 yield chunk, pos - len(chunk)
36 yield chunk, pos - len(chunk)
37 n = self.count()
37 n = self.count()
38 prev = None
38 prev = None
39 for chunk, start in chunkpositer():
39 for chunk, start in chunkpositer():
40 size = len(chunk)
40 size = len(chunk)
41 if size < 80:
41 if size < 80:
42 raise util.Abort("invalid changegroup")
42 raise util.Abort("invalid changegroup")
43 start += 80
43 start += 80
44 size -= 80
44 size -= 80
45 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
45 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
46 if node in self.nodemap:
46 if node in self.nodemap:
47 prev = node
47 prev = node
48 continue
48 continue
49 for p in (p1, p2):
49 for p in (p1, p2):
50 if not p in self.nodemap:
50 if not p in self.nodemap:
51 raise revlog.LookupError(hex(p1), _("unknown parent %s") % short(p1))
51 raise revlog.LookupError(hex(p1), _("unknown parent %s") % short(p1))
52 if linkmapper is None:
52 if linkmapper is None:
53 link = n
53 link = n
54 else:
54 else:
55 link = linkmapper(cs)
55 link = linkmapper(cs)
56
56
57 if not prev:
57 if not prev:
58 prev = p1
58 prev = p1
59 # start, size, full unc. size, base (unused), link, p1, p2, node
59 # start, size, full unc. size, base (unused), link, p1, p2, node
60 e = (revlog.offset_type(start, 0), size, -1, -1, link,
60 e = (revlog.offset_type(start, 0), size, -1, -1, link,
61 self.rev(p1), self.rev(p2), node)
61 self.rev(p1), self.rev(p2), node)
62 self.basemap[n] = prev
62 self.basemap[n] = prev
63 self.index.insert(-1, e)
63 self.index.insert(-1, e)
64 self.nodemap[node] = n
64 self.nodemap[node] = n
65 prev = node
65 prev = node
66 n += 1
66 n += 1
67
67
68 def bundle(self, rev):
68 def bundle(self, rev):
69 """is rev from the bundle"""
69 """is rev from the bundle"""
70 if rev < 0:
70 if rev < 0:
71 return False
71 return False
72 return rev in self.basemap
72 return rev in self.basemap
73 def bundlebase(self, rev): return self.basemap[rev]
73 def bundlebase(self, rev): return self.basemap[rev]
74 def chunk(self, rev, df=None, cachelen=4096):
74 def chunk(self, rev, df=None, cachelen=4096):
75 # Warning: in case of bundle, the diff is against bundlebase,
75 # Warning: in case of bundle, the diff is against bundlebase,
76 # not against rev - 1
76 # not against rev - 1
77 # XXX: could use some caching
77 # XXX: could use some caching
78 if not self.bundle(rev):
78 if not self.bundle(rev):
79 return revlog.revlog.chunk(self, rev, df)
79 return revlog.revlog.chunk(self, rev, df)
80 self.bundlefile.seek(self.start(rev))
80 self.bundlefile.seek(self.start(rev))
81 return self.bundlefile.read(self.length(rev))
81 return self.bundlefile.read(self.length(rev))
82
82
83 def revdiff(self, rev1, rev2):
83 def revdiff(self, rev1, rev2):
84 """return or calculate a delta between two revisions"""
84 """return or calculate a delta between two revisions"""
85 if self.bundle(rev1) and self.bundle(rev2):
85 if self.bundle(rev1) and self.bundle(rev2):
86 # hot path for bundle
86 # hot path for bundle
87 revb = self.rev(self.bundlebase(rev2))
87 revb = self.rev(self.bundlebase(rev2))
88 if revb == rev1:
88 if revb == rev1:
89 return self.chunk(rev2)
89 return self.chunk(rev2)
90 elif not self.bundle(rev1) and not self.bundle(rev2):
90 elif not self.bundle(rev1) and not self.bundle(rev2):
91 return revlog.revlog.revdiff(self, rev1, rev2)
91 return revlog.revlog.revdiff(self, rev1, rev2)
92
92
93 return mdiff.textdiff(self.revision(self.node(rev1)),
93 return mdiff.textdiff(self.revision(self.node(rev1)),
94 self.revision(self.node(rev2)))
94 self.revision(self.node(rev2)))
95
95
96 def revision(self, node):
96 def revision(self, node):
97 """return an uncompressed revision of a given"""
97 """return an uncompressed revision of a given"""
98 if node == nullid: return ""
98 if node == nullid: return ""
99
99
100 text = None
100 text = None
101 chain = []
101 chain = []
102 iter_node = node
102 iter_node = node
103 rev = self.rev(iter_node)
103 rev = self.rev(iter_node)
104 # reconstruct the revision if it is from a changegroup
104 # reconstruct the revision if it is from a changegroup
105 while self.bundle(rev):
105 while self.bundle(rev):
106 if self._cache and self._cache[0] == iter_node:
106 if self._cache and self._cache[0] == iter_node:
107 text = self._cache[2]
107 text = self._cache[2]
108 break
108 break
109 chain.append(rev)
109 chain.append(rev)
110 iter_node = self.bundlebase(rev)
110 iter_node = self.bundlebase(rev)
111 rev = self.rev(iter_node)
111 rev = self.rev(iter_node)
112 if text is None:
112 if text is None:
113 text = revlog.revlog.revision(self, iter_node)
113 text = revlog.revlog.revision(self, iter_node)
114
114
115 while chain:
115 while chain:
116 delta = self.chunk(chain.pop())
116 delta = self.chunk(chain.pop())
117 text = mdiff.patches(text, [delta])
117 text = mdiff.patches(text, [delta])
118
118
119 p1, p2 = self.parents(node)
119 p1, p2 = self.parents(node)
120 if node != revlog.hash(text, p1, p2):
120 if node != revlog.hash(text, p1, p2):
121 raise revlog.RevlogError(_("integrity check failed on %s:%d")
121 raise revlog.RevlogError(_("integrity check failed on %s:%d")
122 % (self.datafile, self.rev(node)))
122 % (self.datafile, self.rev(node)))
123
123
124 self._cache = (node, self.rev(node), text)
124 self._cache = (node, self.rev(node), text)
125 return text
125 return text
126
126
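revision() above rebuilds a text by walking the bundlebase links back to something already stored in the revlog and then replaying the deltas in order. The same idea in a self-contained form (all four callables are placeholders for the repo-specific pieces, not real Mercurial APIs):

def reconstruct(node, base_of, read_fulltext, read_delta, apply_delta):
    # walk back until we leave the bundle, i.e. reach a node whose full
    # text is already available, then re-apply the collected deltas
    chain = []
    while base_of(node) is not None:
        chain.append(node)
        node = base_of(node)
    text = read_fulltext(node)
    while chain:
        text = apply_delta(text, read_delta(chain.pop()))
    return text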
127 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
127 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
128 raise NotImplementedError
128 raise NotImplementedError
129 def addgroup(self, revs, linkmapper, transaction, unique=0):
129 def addgroup(self, revs, linkmapper, transaction, unique=0):
130 raise NotImplementedError
130 raise NotImplementedError
131 def strip(self, rev, minlink):
131 def strip(self, rev, minlink):
132 raise NotImplementedError
132 raise NotImplementedError
133 def checksize(self):
133 def checksize(self):
134 raise NotImplementedError
134 raise NotImplementedError
135
135
136 class bundlechangelog(bundlerevlog, changelog.changelog):
136 class bundlechangelog(bundlerevlog, changelog.changelog):
137 def __init__(self, opener, bundlefile):
137 def __init__(self, opener, bundlefile):
138 changelog.changelog.__init__(self, opener)
138 changelog.changelog.__init__(self, opener)
139 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile)
139 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile)
140
140
141 class bundlemanifest(bundlerevlog, manifest.manifest):
141 class bundlemanifest(bundlerevlog, manifest.manifest):
142 def __init__(self, opener, bundlefile, linkmapper):
142 def __init__(self, opener, bundlefile, linkmapper):
143 manifest.manifest.__init__(self, opener)
143 manifest.manifest.__init__(self, opener)
144 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
144 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
145 linkmapper)
145 linkmapper)
146
146
147 class bundlefilelog(bundlerevlog, filelog.filelog):
147 class bundlefilelog(bundlerevlog, filelog.filelog):
148 def __init__(self, opener, path, bundlefile, linkmapper):
148 def __init__(self, opener, path, bundlefile, linkmapper):
149 filelog.filelog.__init__(self, opener, path)
149 filelog.filelog.__init__(self, opener, path)
150 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
150 bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
151 linkmapper)
151 linkmapper)
152
152
153 class bundlerepository(localrepo.localrepository):
153 class bundlerepository(localrepo.localrepository):
154 def __init__(self, ui, path, bundlename):
154 def __init__(self, ui, path, bundlename):
155 localrepo.localrepository.__init__(self, ui, path)
155 localrepo.localrepository.__init__(self, ui, path)
156
156
157 if path:
157 if path:
158 self._url = 'bundle:' + path + '+' + bundlename
158 self._url = 'bundle:' + path + '+' + bundlename
159 else:
159 else:
160 self._url = 'bundle:' + bundlename
160 self._url = 'bundle:' + bundlename
161
161
162 self.tempfile = None
162 self.tempfile = None
163 self.bundlefile = open(bundlename, "rb")
163 self.bundlefile = open(bundlename, "rb")
164 header = self.bundlefile.read(6)
164 header = self.bundlefile.read(6)
165 if not header.startswith("HG"):
165 if not header.startswith("HG"):
166 raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
166 raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
167 elif not header.startswith("HG10"):
167 elif not header.startswith("HG10"):
168 raise util.Abort(_("%s: unknown bundle version") % bundlename)
168 raise util.Abort(_("%s: unknown bundle version") % bundlename)
169 elif header == "HG10BZ":
169 elif header == "HG10BZ":
170 fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
170 fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
171 suffix=".hg10un", dir=self.path)
171 suffix=".hg10un", dir=self.path)
172 self.tempfile = temp
172 self.tempfile = temp
173 fptemp = os.fdopen(fdtemp, 'wb')
173 fptemp = os.fdopen(fdtemp, 'wb')
174 def generator(f):
174 def generator(f):
175 zd = bz2.BZ2Decompressor()
175 zd = bz2.BZ2Decompressor()
176 zd.decompress("BZ")
176 zd.decompress("BZ")
177 for chunk in f:
177 for chunk in f:
178 yield zd.decompress(chunk)
178 yield zd.decompress(chunk)
179 gen = generator(util.filechunkiter(self.bundlefile, 4096))
179 gen = generator(util.filechunkiter(self.bundlefile, 4096))
180
180
181 try:
181 try:
182 fptemp.write("HG10UN")
182 fptemp.write("HG10UN")
183 for chunk in gen:
183 for chunk in gen:
184 fptemp.write(chunk)
184 fptemp.write(chunk)
185 finally:
185 finally:
186 fptemp.close()
186 fptemp.close()
187 self.bundlefile.close()
187 self.bundlefile.close()
188
188
189 self.bundlefile = open(self.tempfile, "rb")
189 self.bundlefile = open(self.tempfile, "rb")
190 # seek right after the header
190 # seek right after the header
191 self.bundlefile.seek(6)
191 self.bundlefile.seek(6)
192 elif header == "HG10UN":
192 elif header == "HG10UN":
193 # nothing to do
193 # nothing to do
194 pass
194 pass
195 else:
195 else:
196 raise util.Abort(_("%s: unknown bundle compression type")
196 raise util.Abort(_("%s: unknown bundle compression type")
197 % bundlename)
197 % bundlename)
198 # dict with the mapping 'filename' -> position in the bundle
198 # dict with the mapping 'filename' -> position in the bundle
199 self.bundlefilespos = {}
199 self.bundlefilespos = {}
200
200
201 def __getattr__(self, name):
201 def __getattr__(self, name):
202 if name == 'changelog':
202 if name == 'changelog':
203 self.changelog = bundlechangelog(self.sopener, self.bundlefile)
203 self.changelog = bundlechangelog(self.sopener, self.bundlefile)
204 self.manstart = self.bundlefile.tell()
204 self.manstart = self.bundlefile.tell()
205 return self.changelog
205 return self.changelog
206 if name == 'manifest':
206 if name == 'manifest':
207 self.bundlefile.seek(self.manstart)
207 self.bundlefile.seek(self.manstart)
208 self.manifest = bundlemanifest(self.sopener, self.bundlefile,
208 self.manifest = bundlemanifest(self.sopener, self.bundlefile,
209 self.changelog.rev)
209 self.changelog.rev)
210 self.filestart = self.bundlefile.tell()
210 self.filestart = self.bundlefile.tell()
211 return self.manifest
211 return self.manifest
212 if name == 'manstart':
212 if name == 'manstart':
213 self.changelog
213 self.changelog
214 return self.manstart
214 return self.manstart
215 if name == 'filestart':
215 if name == 'filestart':
216 self.manifest
216 self.manifest
217 return self.filestart
217 return self.filestart
218 return localrepo.localrepository.__getattr__(self, name)
218 return localrepo.localrepository.__getattr__(self, name)
219
219
220 def url(self):
220 def url(self):
221 return self._url
221 return self._url
222
222
223 def dev(self):
223 def dev(self):
224 return -1
224 return -1
225
225
226 def file(self, f):
226 def file(self, f):
227 if not self.bundlefilespos:
227 if not self.bundlefilespos:
228 self.bundlefile.seek(self.filestart)
228 self.bundlefile.seek(self.filestart)
229 while 1:
229 while 1:
230 chunk = changegroup.getchunk(self.bundlefile)
230 chunk = changegroup.getchunk(self.bundlefile)
231 if not chunk:
231 if not chunk:
232 break
232 break
233 self.bundlefilespos[chunk] = self.bundlefile.tell()
233 self.bundlefilespos[chunk] = self.bundlefile.tell()
234 for c in changegroup.chunkiter(self.bundlefile):
234 for c in changegroup.chunkiter(self.bundlefile):
235 pass
235 pass
236
236
237 if f[0] == '/':
237 if f[0] == '/':
238 f = f[1:]
238 f = f[1:]
239 if f in self.bundlefilespos:
239 if f in self.bundlefilespos:
240 self.bundlefile.seek(self.bundlefilespos[f])
240 self.bundlefile.seek(self.bundlefilespos[f])
241 return bundlefilelog(self.sopener, f, self.bundlefile,
241 return bundlefilelog(self.sopener, f, self.bundlefile,
242 self.changelog.rev)
242 self.changelog.rev)
243 else:
243 else:
244 return filelog.filelog(self.sopener, f)
244 return filelog.filelog(self.sopener, f)
245
245
246 def close(self):
246 def close(self):
247 """Close assigned bundle file immediately."""
247 """Close assigned bundle file immediately."""
248 self.bundlefile.close()
248 self.bundlefile.close()
249
249
250 def __del__(self):
250 def __del__(self):
251 bundlefile = getattr(self, 'bundlefile', None)
251 bundlefile = getattr(self, 'bundlefile', None)
252 if bundlefile and not bundlefile.closed:
252 if bundlefile and not bundlefile.closed:
253 bundlefile.close()
253 bundlefile.close()
254 tempfile = getattr(self, 'tempfile', None)
254 tempfile = getattr(self, 'tempfile', None)
255 if tempfile is not None:
255 if tempfile is not None:
256 os.unlink(tempfile)
256 os.unlink(tempfile)
257
257
258 def instance(ui, path, create):
258 def instance(ui, path, create):
259 if create:
259 if create:
260 raise util.Abort(_('cannot create new bundle repository'))
260 raise util.Abort(_('cannot create new bundle repository'))
261 parentpath = ui.config("bundle", "mainreporoot", "")
261 parentpath = ui.config("bundle", "mainreporoot", "")
262 if parentpath:
262 if parentpath:
263 # Try to make the full path relative so we get a nice, short URL.
263 # Try to make the full path relative so we get a nice, short URL.
264 # In particular, we don't want temp dir names in test outputs.
264 # In particular, we don't want temp dir names in test outputs.
265 cwd = os.getcwd()
265 cwd = os.getcwd()
266 if parentpath == cwd:
266 if parentpath == cwd:
267 parentpath = ''
267 parentpath = ''
268 else:
268 else:
269 cwd = os.path.join(cwd,'')
269 cwd = os.path.join(cwd,'')
270 if parentpath.startswith(cwd):
270 if parentpath.startswith(cwd):
271 parentpath = parentpath[len(cwd):]
271 parentpath = parentpath[len(cwd):]
272 path = util.drop_scheme('file', path)
272 path = util.drop_scheme('file', path)
273 if path.startswith('bundle:'):
273 if path.startswith('bundle:'):
274 path = util.drop_scheme('bundle', path)
274 path = util.drop_scheme('bundle', path)
275 s = path.split("+", 1)
275 s = path.split("+", 1)
276 if len(s) == 1:
276 if len(s) == 1:
277 repopath, bundlename = parentpath, s[0]
277 repopath, bundlename = parentpath, s[0]
278 else:
278 else:
279 repopath, bundlename = s
279 repopath, bundlename = s
280 else:
280 else:
281 repopath, bundlename = parentpath, path
281 repopath, bundlename = parentpath, path
282 return bundlerepository(ui, repopath, bundlename)
282 return bundlerepository(ui, repopath, bundlename)
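In practice this class is reached through the 'bundle:' URL scheme that instance() parses above, which is how commands such as 'hg incoming --bundle' overlay a bundle on the local repository. A rough sketch against this era's API (paths are illustrative):

from mercurial import ui as uimod, hg

u = uimod.ui()
# read-only view of incoming.hg layered on top of the repository in '.'
overlay = hg.repository(u, 'bundle:.+incoming.hg')
tip = overlay.changelog.count() - 1    # revisions from the bundle are visible here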
@@ -1,192 +1,193 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from node import bin, hex, nullid
9 from revlog import revlog
9 from i18n import _
10 from i18n import _
10 import os, time, util
11 import os, time, util
11
12
12 def _string_escape(text):
13 def _string_escape(text):
13 """
14 """
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 >>> s
17 >>> s
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 >>> res = _string_escape(s)
19 >>> res = _string_escape(s)
19 >>> s == res.decode('string_escape')
20 >>> s == res.decode('string_escape')
20 True
21 True
21 """
22 """
22 # subset of the string_escape codec
23 # subset of the string_escape codec
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 return text.replace('\0', '\\0')
25 return text.replace('\0', '\\0')
25
26
26 class appender:
27 class appender:
27 '''the changelog index must be updated last on disk, so we use this class
28 '''the changelog index must be updated last on disk, so we use this class
28 to delay writes to it'''
29 to delay writes to it'''
29 def __init__(self, fp, buf):
30 def __init__(self, fp, buf):
30 self.data = buf
31 self.data = buf
31 self.fp = fp
32 self.fp = fp
32 self.offset = fp.tell()
33 self.offset = fp.tell()
33 self.size = util.fstat(fp).st_size
34 self.size = util.fstat(fp).st_size
34
35
35 def end(self):
36 def end(self):
36 return self.size + len("".join(self.data))
37 return self.size + len("".join(self.data))
37 def tell(self):
38 def tell(self):
38 return self.offset
39 return self.offset
39 def flush(self):
40 def flush(self):
40 pass
41 pass
41 def close(self):
42 def close(self):
42 self.fp.close()
43 self.fp.close()
43
44
44 def seek(self, offset, whence=0):
45 def seek(self, offset, whence=0):
45 '''virtual file offset spans real file and data'''
46 '''virtual file offset spans real file and data'''
46 if whence == 0:
47 if whence == 0:
47 self.offset = offset
48 self.offset = offset
48 elif whence == 1:
49 elif whence == 1:
49 self.offset += offset
50 self.offset += offset
50 elif whence == 2:
51 elif whence == 2:
51 self.offset = self.end() + offset
52 self.offset = self.end() + offset
52 if self.offset < self.size:
53 if self.offset < self.size:
53 self.fp.seek(self.offset)
54 self.fp.seek(self.offset)
54
55
55 def read(self, count=-1):
56 def read(self, count=-1):
56 '''only trick here is reads that span real file and data'''
57 '''only trick here is reads that span real file and data'''
57 ret = ""
58 ret = ""
58 if self.offset < self.size:
59 if self.offset < self.size:
59 s = self.fp.read(count)
60 s = self.fp.read(count)
60 ret = s
61 ret = s
61 self.offset += len(s)
62 self.offset += len(s)
62 if count > 0:
63 if count > 0:
63 count -= len(s)
64 count -= len(s)
64 if count != 0:
65 if count != 0:
65 doff = self.offset - self.size
66 doff = self.offset - self.size
66 self.data.insert(0, "".join(self.data))
67 self.data.insert(0, "".join(self.data))
67 del self.data[1:]
68 del self.data[1:]
68 s = self.data[0][doff:doff+count]
69 s = self.data[0][doff:doff+count]
69 self.offset += len(s)
70 self.offset += len(s)
70 ret += s
71 ret += s
71 return ret
72 return ret
72
73
73 def write(self, s):
74 def write(self, s):
74 self.data.append(str(s))
75 self.data.append(str(s))
75 self.offset += len(s)
76 self.offset += len(s)
76
77
77 class changelog(revlog):
78 class changelog(revlog):
78 def __init__(self, opener):
79 def __init__(self, opener):
79 revlog.__init__(self, opener, "00changelog.i")
80 revlog.__init__(self, opener, "00changelog.i")
80
81
81 def delayupdate(self):
82 def delayupdate(self):
82 "delay visibility of index updates to other readers"
83 "delay visibility of index updates to other readers"
83 self._realopener = self.opener
84 self._realopener = self.opener
84 self.opener = self._delayopener
85 self.opener = self._delayopener
85 self._delaycount = self.count()
86 self._delaycount = self.count()
86 self._delaybuf = []
87 self._delaybuf = []
87 self._delayname = None
88 self._delayname = None
88
89
89 def finalize(self, tr):
90 def finalize(self, tr):
90 "finalize index updates"
91 "finalize index updates"
91 self.opener = self._realopener
92 self.opener = self._realopener
92 # move redirected index data back into place
93 # move redirected index data back into place
93 if self._delayname:
94 if self._delayname:
94 util.rename(self._delayname + ".a", self._delayname)
95 util.rename(self._delayname + ".a", self._delayname)
95 elif self._delaybuf:
96 elif self._delaybuf:
96 fp = self.opener(self.indexfile, 'a')
97 fp = self.opener(self.indexfile, 'a')
97 fp.write("".join(self._delaybuf))
98 fp.write("".join(self._delaybuf))
98 fp.close()
99 fp.close()
99 del self._delaybuf
100 del self._delaybuf
100 # split when we're done
101 # split when we're done
101 self.checkinlinesize(tr)
102 self.checkinlinesize(tr)
102
103
103 def _delayopener(self, name, mode='r'):
104 def _delayopener(self, name, mode='r'):
104 fp = self._realopener(name, mode)
105 fp = self._realopener(name, mode)
105 # only divert the index
106 # only divert the index
106 if not name == self.indexfile:
107 if not name == self.indexfile:
107 return fp
108 return fp
108 # if we're doing an initial clone, divert to another file
109 # if we're doing an initial clone, divert to another file
109 if self._delaycount == 0:
110 if self._delaycount == 0:
110 self._delayname = fp.name
111 self._delayname = fp.name
111 return self._realopener(name + ".a", mode)
112 return self._realopener(name + ".a", mode)
112 # otherwise, divert to memory
113 # otherwise, divert to memory
113 return appender(fp, self._delaybuf)
114 return appender(fp, self._delaybuf)
114
115
115 def checkinlinesize(self, tr, fp=None):
116 def checkinlinesize(self, tr, fp=None):
116 if self.opener == self._delayopener:
117 if self.opener == self._delayopener:
117 return
118 return
118 return revlog.checkinlinesize(self, tr, fp)
119 return revlog.checkinlinesize(self, tr, fp)
119
120
120 def decode_extra(self, text):
121 def decode_extra(self, text):
121 extra = {}
122 extra = {}
122 for l in text.split('\0'):
123 for l in text.split('\0'):
123 if l:
124 if l:
124 k, v = l.decode('string_escape').split(':', 1)
125 k, v = l.decode('string_escape').split(':', 1)
125 extra[k] = v
126 extra[k] = v
126 return extra
127 return extra
127
128
128 def encode_extra(self, d):
129 def encode_extra(self, d):
129 # keys must be sorted to produce a deterministic changelog entry
130 # keys must be sorted to produce a deterministic changelog entry
130 keys = d.keys()
131 keys = d.keys()
131 keys.sort()
132 keys.sort()
132 items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
133 items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
133 return "\0".join(items)
134 return "\0".join(items)
134
135
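# Worked example of the wire format the two methods above agree on (the key
# 'note' and its value are invented; _string_escape is the helper defined at
# the top of this file). Keys are sorted so equal dicts always serialize,
# and therefore hash, identically.
extra = {'branch': 'stable', 'note': 'a\nb'}
items = ['%s:%s' % (k, extra[k]) for k in sorted(extra)]
encoded = '\0'.join(_string_escape(i) for i in items)
assert encoded == 'branch:stable\x00note:a\\nb'

# decode_extra reverses this: split on NUL, undo the escaping (the real code
# uses Python 2's string_escape codec; the replace() below is a simplified
# stand-in), then split key from value on the first ':'.
decoded = {}
for chunk in encoded.split('\0'):
    key, value = chunk.split(':', 1)
    decoded[key] = value.replace('\\n', '\n')
assert decoded == extra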
135 def read(self, node):
136 def read(self, node):
136 """
137 """
137 format used:
138 format used:
138 nodeid\n : manifest node in ascii
139 nodeid\n : manifest node in ascii
139 user\n : user, no \n or \r allowed
140 user\n : user, no \n or \r allowed
140 time tz extra\n : date (time is int or float, timezone is int)
141 time tz extra\n : date (time is int or float, timezone is int)
141 : extra is metadata, encoded and separated by '\0'
142 : extra is metadata, encoded and separated by '\0'
142 : older versions ignore it
143 : older versions ignore it
143 files\n\n : files modified by the cset, no \n or \r allowed
144 files\n\n : files modified by the cset, no \n or \r allowed
144 (.*) : comment (free text, ideally utf-8)
145 (.*) : comment (free text, ideally utf-8)
145
146
146 changelog v0 doesn't use extra
147 changelog v0 doesn't use extra
147 """
148 """
148 text = self.revision(node)
149 text = self.revision(node)
149 if not text:
150 if not text:
150 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
151 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
151 last = text.index("\n\n")
152 last = text.index("\n\n")
152 desc = util.tolocal(text[last + 2:])
153 desc = util.tolocal(text[last + 2:])
153 l = text[:last].split('\n')
154 l = text[:last].split('\n')
154 manifest = bin(l[0])
155 manifest = bin(l[0])
155 user = util.tolocal(l[1])
156 user = util.tolocal(l[1])
156
157
157 extra_data = l[2].split(' ', 2)
158 extra_data = l[2].split(' ', 2)
158 if len(extra_data) != 3:
159 if len(extra_data) != 3:
159 time = float(extra_data.pop(0))
160 time = float(extra_data.pop(0))
160 try:
161 try:
161 # various tools did silly things with the time zone field.
162 # various tools did silly things with the time zone field.
162 timezone = int(extra_data[0])
163 timezone = int(extra_data[0])
163 except:
164 except:
164 timezone = 0
165 timezone = 0
165 extra = {}
166 extra = {}
166 else:
167 else:
167 time, timezone, extra = extra_data
168 time, timezone, extra = extra_data
168 time, timezone = float(time), int(timezone)
169 time, timezone = float(time), int(timezone)
169 extra = self.decode_extra(extra)
170 extra = self.decode_extra(extra)
170 if not extra.get('branch'):
171 if not extra.get('branch'):
171 extra['branch'] = 'default'
172 extra['branch'] = 'default'
172 files = l[3:]
173 files = l[3:]
173 return (manifest, user, (time, timezone), files, desc, extra)
174 return (manifest, user, (time, timezone), files, desc, extra)
174
175
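# Hedged example of parsing one raw entry in the format documented above;
# the entry text is hand-built for illustration, whereas real entries come
# from self.revision(node). The first blank line separates the header from
# the free-text description.
raw = ('0123456789abcdef0123456789abcdef01234567\n'
       'Jane Doe <jane@example.com>\n'
       '1196000000 -3600 branch:stable\n'
       'hgext/acl.py\n'
       'mercurial/changelog.py\n'
       '\n'
       'expand import * so pyflakes can check names')
header, desc = raw.split('\n\n', 1)
lines = header.split('\n')
manifest_hex, user = lines[0], lines[1]
when = lines[2].split(' ', 2)
time, timezone = float(when[0]), int(when[1])
rawextra = when[2] if len(when) == 3 else ''
files = lines[3:]
assert (time, timezone) == (1196000000.0, -3600)
assert rawextra == 'branch:stable'
assert files == ['hgext/acl.py', 'mercurial/changelog.py']
assert desc.startswith('expand import')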
175 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
176 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
176 user=None, date=None, extra={}):
177 user=None, date=None, extra={}):
177
178
178 user, desc = util.fromlocal(user), util.fromlocal(desc)
179 user, desc = util.fromlocal(user), util.fromlocal(desc)
179
180
180 if date:
181 if date:
181 parseddate = "%d %d" % util.parsedate(date)
182 parseddate = "%d %d" % util.parsedate(date)
182 else:
183 else:
183 parseddate = "%d %d" % util.makedate()
184 parseddate = "%d %d" % util.makedate()
184 if extra and extra.get("branch") in ("default", ""):
185 if extra and extra.get("branch") in ("default", ""):
185 del extra["branch"]
186 del extra["branch"]
186 if extra:
187 if extra:
187 extra = self.encode_extra(extra)
188 extra = self.encode_extra(extra)
188 parseddate = "%s %s" % (parseddate, extra)
189 parseddate = "%s %s" % (parseddate, extra)
189 list.sort()
190 list.sort()
190 l = [hex(manifest), user, parseddate] + list + ["", desc]
191 l = [hex(manifest), user, parseddate] + list + ["", desc]
191 text = "\n".join(l)
192 text = "\n".join(l)
192 return self.addrevision(text, transaction, self.count(), p1, p2)
193 return self.addrevision(text, transaction, self.count(), p1, p2)
@@ -1,1176 +1,1176 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat
10 import os, sys, bisect, stat
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12
12
13 revrangesep = ':'
13 revrangesep = ':'
14
14
15 class UnknownCommand(Exception):
15 class UnknownCommand(Exception):
16 """Exception raised if command is not in the command table."""
16 """Exception raised if command is not in the command table."""
17 class AmbiguousCommand(Exception):
17 class AmbiguousCommand(Exception):
18 """Exception raised if command shortcut matches more than one command."""
18 """Exception raised if command shortcut matches more than one command."""
19
19
20 def findpossible(ui, cmd, table):
20 def findpossible(ui, cmd, table):
21 """
21 """
22 Return cmd -> (aliases, command table entry)
22 Return cmd -> (aliases, command table entry)
23 for each matching command.
23 for each matching command.
24 Return debug commands (or their aliases) only if no normal command matches.
24 Return debug commands (or their aliases) only if no normal command matches.
25 """
25 """
26 choice = {}
26 choice = {}
27 debugchoice = {}
27 debugchoice = {}
28 for e in table.keys():
28 for e in table.keys():
29 aliases = e.lstrip("^").split("|")
29 aliases = e.lstrip("^").split("|")
30 found = None
30 found = None
31 if cmd in aliases:
31 if cmd in aliases:
32 found = cmd
32 found = cmd
33 elif not ui.config("ui", "strict"):
33 elif not ui.config("ui", "strict"):
34 for a in aliases:
34 for a in aliases:
35 if a.startswith(cmd):
35 if a.startswith(cmd):
36 found = a
36 found = a
37 break
37 break
38 if found is not None:
38 if found is not None:
39 if aliases[0].startswith("debug") or found.startswith("debug"):
39 if aliases[0].startswith("debug") or found.startswith("debug"):
40 debugchoice[found] = (aliases, table[e])
40 debugchoice[found] = (aliases, table[e])
41 else:
41 else:
42 choice[found] = (aliases, table[e])
42 choice[found] = (aliases, table[e])
43
43
44 if not choice and debugchoice:
44 if not choice and debugchoice:
45 choice = debugchoice
45 choice = debugchoice
46
46
47 return choice
47 return choice
48
48
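# Toy illustration of the matching rule above, with a made-up command table
# rather than hg's real one: exact aliases match directly, prefixes match
# unless ui.strict is set, and debug commands are only offered when nothing
# else matched. findcmd() below then takes the exact hit or reports ambiguity.
table = {'^status|st': 'status-entry',
         'strip': 'strip-entry',
         'debugstate': 'debugstate-entry'}

def possible(cmd, strict=False):
    choice, debugchoice = {}, {}
    for e in table:
        aliases = e.lstrip('^').split('|')
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            found = next((a for a in aliases if a.startswith(cmd)), None)
        if found is not None:
            bucket = debugchoice if aliases[0].startswith('debug') else choice
            bucket[found] = table[e]
    return choice or debugchoice

assert sorted(possible('sta')) == ['status']             # unique prefix
assert sorted(possible('st')) == ['st', 'strip']         # ambiguous: both offered
assert sorted(possible('debugst')) == ['debugstate']     # debug-only fallback
assert possible('sta', strict=True) == {}                # strict: prefixes rejected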
49 def findcmd(ui, cmd, table):
49 def findcmd(ui, cmd, table):
50 """Return (aliases, command table entry) for command string."""
50 """Return (aliases, command table entry) for command string."""
51 choice = findpossible(ui, cmd, table)
51 choice = findpossible(ui, cmd, table)
52
52
53 if cmd in choice:
53 if cmd in choice:
54 return choice[cmd]
54 return choice[cmd]
55
55
56 if len(choice) > 1:
56 if len(choice) > 1:
57 clist = choice.keys()
57 clist = choice.keys()
58 clist.sort()
58 clist.sort()
59 raise AmbiguousCommand(cmd, clist)
59 raise AmbiguousCommand(cmd, clist)
60
60
61 if choice:
61 if choice:
62 return choice.values()[0]
62 return choice.values()[0]
63
63
64 raise UnknownCommand(cmd)
64 raise UnknownCommand(cmd)
65
65
66 def bail_if_changed(repo):
66 def bail_if_changed(repo):
67 if repo.dirstate.parents()[1] != nullid:
67 if repo.dirstate.parents()[1] != nullid:
68 raise util.Abort(_('outstanding uncommitted merge'))
68 raise util.Abort(_('outstanding uncommitted merge'))
69 modified, added, removed, deleted = repo.status()[:4]
69 modified, added, removed, deleted = repo.status()[:4]
70 if modified or added or removed or deleted:
70 if modified or added or removed or deleted:
71 raise util.Abort(_("outstanding uncommitted changes"))
71 raise util.Abort(_("outstanding uncommitted changes"))
72
72
73 def logmessage(opts):
73 def logmessage(opts):
74 """get the log message according to the -m and -l options"""
74 """get the log message according to the -m and -l options"""
75 message = opts['message']
75 message = opts['message']
76 logfile = opts['logfile']
76 logfile = opts['logfile']
77
77
78 if message and logfile:
78 if message and logfile:
79 raise util.Abort(_('options --message and --logfile are mutually '
79 raise util.Abort(_('options --message and --logfile are mutually '
80 'exclusive'))
80 'exclusive'))
81 if not message and logfile:
81 if not message and logfile:
82 try:
82 try:
83 if logfile == '-':
83 if logfile == '-':
84 message = sys.stdin.read()
84 message = sys.stdin.read()
85 else:
85 else:
86 message = open(logfile).read()
86 message = open(logfile).read()
87 except IOError, inst:
87 except IOError, inst:
88 raise util.Abort(_("can't read commit message '%s': %s") %
88 raise util.Abort(_("can't read commit message '%s': %s") %
89 (logfile, inst.strerror))
89 (logfile, inst.strerror))
90 return message
90 return message
91
91
92 def loglimit(opts):
92 def loglimit(opts):
93 """get the log limit according to option -l/--limit"""
93 """get the log limit according to option -l/--limit"""
94 limit = opts.get('limit')
94 limit = opts.get('limit')
95 if limit:
95 if limit:
96 try:
96 try:
97 limit = int(limit)
97 limit = int(limit)
98 except ValueError:
98 except ValueError:
99 raise util.Abort(_('limit must be a positive integer'))
99 raise util.Abort(_('limit must be a positive integer'))
100 if limit <= 0: raise util.Abort(_('limit must be positive'))
100 if limit <= 0: raise util.Abort(_('limit must be positive'))
101 else:
101 else:
102 limit = sys.maxint
102 limit = sys.maxint
103 return limit
103 return limit
104
104
105 def setremoteconfig(ui, opts):
105 def setremoteconfig(ui, opts):
106 "copy remote options to ui tree"
106 "copy remote options to ui tree"
107 if opts.get('ssh'):
107 if opts.get('ssh'):
108 ui.setconfig("ui", "ssh", opts['ssh'])
108 ui.setconfig("ui", "ssh", opts['ssh'])
109 if opts.get('remotecmd'):
109 if opts.get('remotecmd'):
110 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
110 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
111
111
112 def revpair(repo, revs):
112 def revpair(repo, revs):
113 '''return pair of nodes, given list of revisions. second item can
113 '''return pair of nodes, given list of revisions. second item can
114 be None, meaning use working dir.'''
114 be None, meaning use working dir.'''
115
115
116 def revfix(repo, val, defval):
116 def revfix(repo, val, defval):
117 if not val and val != 0 and defval is not None:
117 if not val and val != 0 and defval is not None:
118 val = defval
118 val = defval
119 return repo.lookup(val)
119 return repo.lookup(val)
120
120
121 if not revs:
121 if not revs:
122 return repo.dirstate.parents()[0], None
122 return repo.dirstate.parents()[0], None
123 end = None
123 end = None
124 if len(revs) == 1:
124 if len(revs) == 1:
125 if revrangesep in revs[0]:
125 if revrangesep in revs[0]:
126 start, end = revs[0].split(revrangesep, 1)
126 start, end = revs[0].split(revrangesep, 1)
127 start = revfix(repo, start, 0)
127 start = revfix(repo, start, 0)
128 end = revfix(repo, end, repo.changelog.count() - 1)
128 end = revfix(repo, end, repo.changelog.count() - 1)
129 else:
129 else:
130 start = revfix(repo, revs[0], None)
130 start = revfix(repo, revs[0], None)
131 elif len(revs) == 2:
131 elif len(revs) == 2:
132 if revrangesep in revs[0] or revrangesep in revs[1]:
132 if revrangesep in revs[0] or revrangesep in revs[1]:
133 raise util.Abort(_('too many revisions specified'))
133 raise util.Abort(_('too many revisions specified'))
134 start = revfix(repo, revs[0], None)
134 start = revfix(repo, revs[0], None)
135 end = revfix(repo, revs[1], None)
135 end = revfix(repo, revs[1], None)
136 else:
136 else:
137 raise util.Abort(_('too many revisions specified'))
137 raise util.Abort(_('too many revisions specified'))
138 return start, end
138 return start, end
139
139
140 def revrange(repo, revs):
140 def revrange(repo, revs):
141 """Return a list of revision numbers from a list of revision specifications."""
141 """Return a list of revision numbers from a list of revision specifications."""
142
142
143 def revfix(repo, val, defval):
143 def revfix(repo, val, defval):
144 if not val and val != 0 and defval is not None:
144 if not val and val != 0 and defval is not None:
145 return defval
145 return defval
146 return repo.changelog.rev(repo.lookup(val))
146 return repo.changelog.rev(repo.lookup(val))
147
147
148 seen, l = {}, []
148 seen, l = {}, []
149 for spec in revs:
149 for spec in revs:
150 if revrangesep in spec:
150 if revrangesep in spec:
151 start, end = spec.split(revrangesep, 1)
151 start, end = spec.split(revrangesep, 1)
152 start = revfix(repo, start, 0)
152 start = revfix(repo, start, 0)
153 end = revfix(repo, end, repo.changelog.count() - 1)
153 end = revfix(repo, end, repo.changelog.count() - 1)
154 step = start > end and -1 or 1
154 step = start > end and -1 or 1
155 for rev in xrange(start, end+step, step):
155 for rev in xrange(start, end+step, step):
156 if rev in seen:
156 if rev in seen:
157 continue
157 continue
158 seen[rev] = 1
158 seen[rev] = 1
159 l.append(rev)
159 l.append(rev)
160 else:
160 else:
161 rev = revfix(repo, spec, None)
161 rev = revfix(repo, spec, None)
162 if rev in seen:
162 if rev in seen:
163 continue
163 continue
164 seen[rev] = 1
164 seen[rev] = 1
165 l.append(rev)
165 l.append(rev)
166
166
167 return l
167 return l
168
168
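# Toy walk-through of how revrange expands specs such as '2:5', '5:2' and '7'
# (repo.lookup and changelog.rev are replaced by plain int() here because
# there is no repository; the tip default is invented).
def toy_revrange(specs, tip=9):
    def revfix(val, defval):
        if not val and val != 0 and defval is not None:
            return defval
        return int(val)
    seen, out = {}, []
    for spec in specs:
        if ':' in spec:
            start, end = spec.split(':', 1)
            start = revfix(start, 0)
            end = revfix(end, tip)
            step = start > end and -1 or 1        # ranges may run backwards
            revs = range(start, end + step, step)
        else:
            revs = [revfix(spec, None)]
        for rev in revs:
            if rev not in seen:
                seen[rev] = 1
                out.append(rev)
    return out

assert toy_revrange(['2:5']) == [2, 3, 4, 5]
assert toy_revrange(['5:2']) == [5, 4, 3, 2]              # descending range
assert toy_revrange([':3', '2', '7']) == [0, 1, 2, 3, 7]  # dupes dropped, order kept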
169 def make_filename(repo, pat, node,
169 def make_filename(repo, pat, node,
170 total=None, seqno=None, revwidth=None, pathname=None):
170 total=None, seqno=None, revwidth=None, pathname=None):
171 node_expander = {
171 node_expander = {
172 'H': lambda: hex(node),
172 'H': lambda: hex(node),
173 'R': lambda: str(repo.changelog.rev(node)),
173 'R': lambda: str(repo.changelog.rev(node)),
174 'h': lambda: short(node),
174 'h': lambda: short(node),
175 }
175 }
176 expander = {
176 expander = {
177 '%': lambda: '%',
177 '%': lambda: '%',
178 'b': lambda: os.path.basename(repo.root),
178 'b': lambda: os.path.basename(repo.root),
179 }
179 }
180
180
181 try:
181 try:
182 if node:
182 if node:
183 expander.update(node_expander)
183 expander.update(node_expander)
184 if node:
184 if node:
185 expander['r'] = (lambda:
185 expander['r'] = (lambda:
186 str(repo.changelog.rev(node)).zfill(revwidth or 0))
186 str(repo.changelog.rev(node)).zfill(revwidth or 0))
187 if total is not None:
187 if total is not None:
188 expander['N'] = lambda: str(total)
188 expander['N'] = lambda: str(total)
189 if seqno is not None:
189 if seqno is not None:
190 expander['n'] = lambda: str(seqno)
190 expander['n'] = lambda: str(seqno)
191 if total is not None and seqno is not None:
191 if total is not None and seqno is not None:
192 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
192 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
193 if pathname is not None:
193 if pathname is not None:
194 expander['s'] = lambda: os.path.basename(pathname)
194 expander['s'] = lambda: os.path.basename(pathname)
195 expander['d'] = lambda: os.path.dirname(pathname) or '.'
195 expander['d'] = lambda: os.path.dirname(pathname) or '.'
196 expander['p'] = lambda: pathname
196 expander['p'] = lambda: pathname
197
197
198 newname = []
198 newname = []
199 patlen = len(pat)
199 patlen = len(pat)
200 i = 0
200 i = 0
201 while i < patlen:
201 while i < patlen:
202 c = pat[i]
202 c = pat[i]
203 if c == '%':
203 if c == '%':
204 i += 1
204 i += 1
205 c = pat[i]
205 c = pat[i]
206 c = expander[c]()
206 c = expander[c]()
207 newname.append(c)
207 newname.append(c)
208 i += 1
208 i += 1
209 return ''.join(newname)
209 return ''.join(newname)
210 except KeyError, inst:
210 except KeyError, inst:
211 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
211 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
212 inst.args[0])
212 inst.args[0])
213
213
214 def make_file(repo, pat, node=None,
214 def make_file(repo, pat, node=None,
215 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
215 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
216 if not pat or pat == '-':
216 if not pat or pat == '-':
217 return 'w' in mode and sys.stdout or sys.stdin
217 return 'w' in mode and sys.stdout or sys.stdin
218 if hasattr(pat, 'write') and 'w' in mode:
218 if hasattr(pat, 'write') and 'w' in mode:
219 return pat
219 return pat
220 if hasattr(pat, 'read') and 'r' in mode:
220 if hasattr(pat, 'read') and 'r' in mode:
221 return pat
221 return pat
222 return open(make_filename(repo, pat, node, total, seqno, revwidth,
222 return open(make_filename(repo, pat, node, total, seqno, revwidth,
223 pathname),
223 pathname),
224 mode)
224 mode)
225
225
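# Toy version of the '%' expansion implemented by make_filename above. Every
# value below is invented; the real expander derives them from the repository
# and the node passed in.
import os

def toy_make_filename(pat, values):
    expander = {
        '%': lambda: '%',
        'b': lambda: values['reponame'],          # basename of repo.root
        'h': lambda: values['shortnode'],
        'r': lambda: str(values['rev']).zfill(values.get('revwidth', 0)),
        'n': lambda: str(values['seqno']).zfill(len(str(values['total']))),
        's': lambda: os.path.basename(values['pathname']),
    }
    out, i = [], 0
    while i < len(pat):
        c = pat[i]
        if c == '%':
            i += 1
            c = expander[pat[i]]()   # KeyError here is the "invalid format spec" abort
        out.append(c)
        i += 1
    return ''.join(out)

values = {'reponame': 'hg', 'shortnode': 'a1b2c3d4e5f6', 'rev': 42,
          'seqno': 3, 'total': 12, 'pathname': 'mercurial/cmdutil.py'}
assert toy_make_filename('%b-%h-%n.patch', values) == 'hg-a1b2c3d4e5f6-03.patch'
assert toy_make_filename('100%%-%s', values) == '100%-cmdutil.py'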
226 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
226 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
227 cwd = repo.getcwd()
227 cwd = repo.getcwd()
228 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
228 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
229 opts.get('exclude'), globbed=globbed,
229 opts.get('exclude'), globbed=globbed,
230 default=default)
230 default=default)
231
231
232 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
232 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
233 default=None):
233 default=None):
234 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
234 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
235 default=default)
235 default=default)
236 exact = dict.fromkeys(files)
236 exact = dict.fromkeys(files)
237 cwd = repo.getcwd()
237 cwd = repo.getcwd()
238 for src, fn in repo.walk(node=node, files=files, match=matchfn,
238 for src, fn in repo.walk(node=node, files=files, match=matchfn,
239 badmatch=badmatch):
239 badmatch=badmatch):
240 yield src, fn, repo.pathto(fn, cwd), fn in exact
240 yield src, fn, repo.pathto(fn, cwd), fn in exact
241
241
242 def findrenames(repo, added=None, removed=None, threshold=0.5):
242 def findrenames(repo, added=None, removed=None, threshold=0.5):
243 '''find renamed files -- yields (before, after, score) tuples'''
243 '''find renamed files -- yields (before, after, score) tuples'''
244 if added is None or removed is None:
244 if added is None or removed is None:
245 added, removed = repo.status()[1:3]
245 added, removed = repo.status()[1:3]
246 ctx = repo.changectx()
246 ctx = repo.changectx()
247 for a in added:
247 for a in added:
248 aa = repo.wread(a)
248 aa = repo.wread(a)
249 bestname, bestscore = None, threshold
249 bestname, bestscore = None, threshold
250 for r in removed:
250 for r in removed:
251 rr = ctx.filectx(r).data()
251 rr = ctx.filectx(r).data()
252
252
253 # bdiff.blocks() returns blocks of matching lines
253 # bdiff.blocks() returns blocks of matching lines
254 # count the number of bytes in each
254 # count the number of bytes in each
255 equal = 0
255 equal = 0
256 alines = mdiff.splitnewlines(aa)
256 alines = mdiff.splitnewlines(aa)
257 matches = bdiff.blocks(aa, rr)
257 matches = bdiff.blocks(aa, rr)
258 for x1,x2,y1,y2 in matches:
258 for x1,x2,y1,y2 in matches:
259 for line in alines[x1:x2]:
259 for line in alines[x1:x2]:
260 equal += len(line)
260 equal += len(line)
261
261
262 lengths = len(aa) + len(rr)
262 lengths = len(aa) + len(rr)
263 if lengths:
263 if lengths:
264 myscore = equal*2.0 / lengths
264 myscore = equal*2.0 / lengths
265 if myscore >= bestscore:
265 if myscore >= bestscore:
266 bestname, bestscore = r, myscore
266 bestname, bestscore = r, myscore
267 if bestname:
267 if bestname:
268 yield bestname, a, bestscore
268 yield bestname, a, bestscore
269
269
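# Sketch of the similarity score computed above: matching bytes count double
# against the combined size, so 1.0 is identical and 0.0 shares nothing.
# difflib stands in for Mercurial's bdiff.blocks (bdiff matches whole lines,
# so real scores differ slightly); the 0.5 threshold corresponds to the
# default used by findrenames and to 'hg addremove --similarity 50'.
import difflib

def similarity(old, new):
    if not old and not new:
        return 1.0
    matcher = difflib.SequenceMatcher(None, old, new)
    equal = sum(size for _, _, size in matcher.get_matching_blocks())
    return equal * 2.0 / (len(old) + len(new))

old = 'def hello():\n    return 42\n'
new = 'def hello():\n    return 43\n'
assert similarity(old, new) > 0.5    # would be reported as a rename candidate
assert similarity(old, '') == 0.0    # nothing in common with an empty file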
270 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
270 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
271 if dry_run is None:
271 if dry_run is None:
272 dry_run = opts.get('dry_run')
272 dry_run = opts.get('dry_run')
273 if similarity is None:
273 if similarity is None:
274 similarity = float(opts.get('similarity') or 0)
274 similarity = float(opts.get('similarity') or 0)
275 add, remove = [], []
275 add, remove = [], []
276 mapping = {}
276 mapping = {}
277 for src, abs, rel, exact in walk(repo, pats, opts):
277 for src, abs, rel, exact in walk(repo, pats, opts):
278 target = repo.wjoin(abs)
278 target = repo.wjoin(abs)
279 if src == 'f' and abs not in repo.dirstate:
279 if src == 'f' and abs not in repo.dirstate:
280 add.append(abs)
280 add.append(abs)
281 mapping[abs] = rel, exact
281 mapping[abs] = rel, exact
282 if repo.ui.verbose or not exact:
282 if repo.ui.verbose or not exact:
283 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
283 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
284 if repo.dirstate[abs] != 'r' and (not util.lexists(target)
284 if repo.dirstate[abs] != 'r' and (not util.lexists(target)
285 or (os.path.isdir(target) and not os.path.islink(target))):
285 or (os.path.isdir(target) and not os.path.islink(target))):
286 remove.append(abs)
286 remove.append(abs)
287 mapping[abs] = rel, exact
287 mapping[abs] = rel, exact
288 if repo.ui.verbose or not exact:
288 if repo.ui.verbose or not exact:
289 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
289 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
290 if not dry_run:
290 if not dry_run:
291 repo.remove(remove)
291 repo.remove(remove)
292 repo.add(add)
292 repo.add(add)
293 if similarity > 0:
293 if similarity > 0:
294 for old, new, score in findrenames(repo, add, remove, similarity):
294 for old, new, score in findrenames(repo, add, remove, similarity):
295 oldrel, oldexact = mapping[old]
295 oldrel, oldexact = mapping[old]
296 newrel, newexact = mapping[new]
296 newrel, newexact = mapping[new]
297 if repo.ui.verbose or not oldexact or not newexact:
297 if repo.ui.verbose or not oldexact or not newexact:
298 repo.ui.status(_('recording removal of %s as rename to %s '
298 repo.ui.status(_('recording removal of %s as rename to %s '
299 '(%d%% similar)\n') %
299 '(%d%% similar)\n') %
300 (oldrel, newrel, score * 100))
300 (oldrel, newrel, score * 100))
301 if not dry_run:
301 if not dry_run:
302 repo.copy(old, new)
302 repo.copy(old, new)
303
303
304 def copy(ui, repo, pats, opts, rename=False):
304 def copy(ui, repo, pats, opts, rename=False):
305 # called with the repo lock held
305 # called with the repo lock held
306 #
306 #
307 # hgsep => pathname that uses "/" to separate directories
307 # hgsep => pathname that uses "/" to separate directories
308 # ossep => pathname that uses os.sep to separate directories
308 # ossep => pathname that uses os.sep to separate directories
309 cwd = repo.getcwd()
309 cwd = repo.getcwd()
310 targets = {}
310 targets = {}
311 after = opts.get("after")
311 after = opts.get("after")
312 dryrun = opts.get("dry_run")
312 dryrun = opts.get("dry_run")
313
313
314 def walkpat(pat):
314 def walkpat(pat):
315 srcs = []
315 srcs = []
316 for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
316 for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
317 state = repo.dirstate[abs]
317 state = repo.dirstate[abs]
318 if state in '?r':
318 if state in '?r':
319 if exact and state == '?':
319 if exact and state == '?':
320 ui.warn(_('%s: not copying - file is not managed\n') % rel)
320 ui.warn(_('%s: not copying - file is not managed\n') % rel)
321 if exact and state == 'r':
321 if exact and state == 'r':
322 ui.warn(_('%s: not copying - file has been marked for'
322 ui.warn(_('%s: not copying - file has been marked for'
323 ' remove\n') % rel)
323 ' remove\n') % rel)
324 continue
324 continue
325 # abs: hgsep
325 # abs: hgsep
326 # rel: ossep
326 # rel: ossep
327 srcs.append((abs, rel, exact))
327 srcs.append((abs, rel, exact))
328 return srcs
328 return srcs
329
329
330 # abssrc: hgsep
330 # abssrc: hgsep
331 # relsrc: ossep
331 # relsrc: ossep
332 # otarget: ossep
332 # otarget: ossep
333 def copyfile(abssrc, relsrc, otarget, exact):
333 def copyfile(abssrc, relsrc, otarget, exact):
334 abstarget = util.canonpath(repo.root, cwd, otarget)
334 abstarget = util.canonpath(repo.root, cwd, otarget)
335 reltarget = repo.pathto(abstarget, cwd)
335 reltarget = repo.pathto(abstarget, cwd)
336 target = repo.wjoin(abstarget)
336 target = repo.wjoin(abstarget)
337 src = repo.wjoin(abssrc)
337 src = repo.wjoin(abssrc)
338 state = repo.dirstate[abstarget]
338 state = repo.dirstate[abstarget]
339
339
340 # check for collisions
340 # check for collisions
341 prevsrc = targets.get(abstarget)
341 prevsrc = targets.get(abstarget)
342 if prevsrc is not None:
342 if prevsrc is not None:
343 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
343 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
344 (reltarget, repo.pathto(abssrc, cwd),
344 (reltarget, repo.pathto(abssrc, cwd),
345 repo.pathto(prevsrc, cwd)))
345 repo.pathto(prevsrc, cwd)))
346 return
346 return
347
347
348 # check for overwrites
348 # check for overwrites
349 exists = os.path.exists(target)
349 exists = os.path.exists(target)
350 if (not after and exists or after and state in 'mn'):
350 if (not after and exists or after and state in 'mn'):
351 if not opts['force']:
351 if not opts['force']:
352 ui.warn(_('%s: not overwriting - file exists\n') %
352 ui.warn(_('%s: not overwriting - file exists\n') %
353 reltarget)
353 reltarget)
354 return
354 return
355
355
356 if after:
356 if after:
357 if not exists:
357 if not exists:
358 return
358 return
359 elif not dryrun:
359 elif not dryrun:
360 try:
360 try:
361 if exists:
361 if exists:
362 os.unlink(target)
362 os.unlink(target)
363 targetdir = os.path.dirname(target) or '.'
363 targetdir = os.path.dirname(target) or '.'
364 if not os.path.isdir(targetdir):
364 if not os.path.isdir(targetdir):
365 os.makedirs(targetdir)
365 os.makedirs(targetdir)
366 util.copyfile(src, target)
366 util.copyfile(src, target)
367 except IOError, inst:
367 except IOError, inst:
368 if inst.errno == errno.ENOENT:
368 if inst.errno == errno.ENOENT:
369 ui.warn(_('%s: deleted in working copy\n') % relsrc)
369 ui.warn(_('%s: deleted in working copy\n') % relsrc)
370 else:
370 else:
371 ui.warn(_('%s: cannot copy - %s\n') %
371 ui.warn(_('%s: cannot copy - %s\n') %
372 (relsrc, inst.strerror))
372 (relsrc, inst.strerror))
373 return True # report a failure
373 return True # report a failure
374
374
375 if ui.verbose or not exact:
375 if ui.verbose or not exact:
376 action = rename and "moving" or "copying"
376 action = rename and "moving" or "copying"
377 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
377 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
378
378
379 targets[abstarget] = abssrc
379 targets[abstarget] = abssrc
380
380
381 # fix up dirstate
381 # fix up dirstate
382 origsrc = repo.dirstate.copied(abssrc) or abssrc
382 origsrc = repo.dirstate.copied(abssrc) or abssrc
383 if abstarget == origsrc: # copying back a copy?
383 if abstarget == origsrc: # copying back a copy?
384 if state not in 'mn' and not dryrun:
384 if state not in 'mn' and not dryrun:
385 repo.dirstate.normallookup(abstarget)
385 repo.dirstate.normallookup(abstarget)
386 else:
386 else:
387 if repo.dirstate[origsrc] == 'a':
387 if repo.dirstate[origsrc] == 'a':
388 if not ui.quiet:
388 if not ui.quiet:
389 ui.warn(_("%s has not been committed yet, so no copy "
389 ui.warn(_("%s has not been committed yet, so no copy "
390 "data will be stored for %s.\n")
390 "data will be stored for %s.\n")
391 % (repo.pathto(origsrc, cwd), reltarget))
391 % (repo.pathto(origsrc, cwd), reltarget))
392 if abstarget not in repo.dirstate and not dryrun:
392 if abstarget not in repo.dirstate and not dryrun:
393 repo.add([abstarget])
393 repo.add([abstarget])
394 elif not dryrun:
394 elif not dryrun:
395 repo.copy(origsrc, abstarget)
395 repo.copy(origsrc, abstarget)
396
396
397 if rename and not dryrun:
397 if rename and not dryrun:
398 repo.remove([abssrc], True)
398 repo.remove([abssrc], True)
399
399
400 # pat: ossep
400 # pat: ossep
401 # dest: ossep
401 # dest: ossep
402 # srcs: list of (hgsep, hgsep, ossep, bool)
402 # srcs: list of (hgsep, hgsep, ossep, bool)
403 # return: function that takes hgsep and returns ossep
403 # return: function that takes hgsep and returns ossep
404 def targetpathfn(pat, dest, srcs):
404 def targetpathfn(pat, dest, srcs):
405 if os.path.isdir(pat):
405 if os.path.isdir(pat):
406 abspfx = util.canonpath(repo.root, cwd, pat)
406 abspfx = util.canonpath(repo.root, cwd, pat)
407 abspfx = util.localpath(abspfx)
407 abspfx = util.localpath(abspfx)
408 if destdirexists:
408 if destdirexists:
409 striplen = len(os.path.split(abspfx)[0])
409 striplen = len(os.path.split(abspfx)[0])
410 else:
410 else:
411 striplen = len(abspfx)
411 striplen = len(abspfx)
412 if striplen:
412 if striplen:
413 striplen += len(os.sep)
413 striplen += len(os.sep)
414 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
414 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
415 elif destdirexists:
415 elif destdirexists:
416 res = lambda p: os.path.join(dest,
416 res = lambda p: os.path.join(dest,
417 os.path.basename(util.localpath(p)))
417 os.path.basename(util.localpath(p)))
418 else:
418 else:
419 res = lambda p: dest
419 res = lambda p: dest
420 return res
420 return res
421
421
422 # pat: ossep
422 # pat: ossep
423 # dest: ossep
423 # dest: ossep
424 # srcs: list of (hgsep, hgsep, ossep, bool)
424 # srcs: list of (hgsep, hgsep, ossep, bool)
425 # return: function that takes hgsep and returns ossep
425 # return: function that takes hgsep and returns ossep
426 def targetpathafterfn(pat, dest, srcs):
426 def targetpathafterfn(pat, dest, srcs):
427 if util.patkind(pat, None)[0]:
427 if util.patkind(pat, None)[0]:
428 # a mercurial pattern
428 # a mercurial pattern
429 res = lambda p: os.path.join(dest,
429 res = lambda p: os.path.join(dest,
430 os.path.basename(util.localpath(p)))
430 os.path.basename(util.localpath(p)))
431 else:
431 else:
432 abspfx = util.canonpath(repo.root, cwd, pat)
432 abspfx = util.canonpath(repo.root, cwd, pat)
433 if len(abspfx) < len(srcs[0][0]):
433 if len(abspfx) < len(srcs[0][0]):
434 # A directory. Either the target path contains the last
434 # A directory. Either the target path contains the last
435 # component of the source path or it does not.
435 # component of the source path or it does not.
436 def evalpath(striplen):
436 def evalpath(striplen):
437 score = 0
437 score = 0
438 for s in srcs:
438 for s in srcs:
439 t = os.path.join(dest, util.localpath(s[0])[striplen:])
439 t = os.path.join(dest, util.localpath(s[0])[striplen:])
440 if os.path.exists(t):
440 if os.path.exists(t):
441 score += 1
441 score += 1
442 return score
442 return score
443
443
444 abspfx = util.localpath(abspfx)
444 abspfx = util.localpath(abspfx)
445 striplen = len(abspfx)
445 striplen = len(abspfx)
446 if striplen:
446 if striplen:
447 striplen += len(os.sep)
447 striplen += len(os.sep)
448 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
448 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
449 score = evalpath(striplen)
449 score = evalpath(striplen)
450 striplen1 = len(os.path.split(abspfx)[0])
450 striplen1 = len(os.path.split(abspfx)[0])
451 if striplen1:
451 if striplen1:
452 striplen1 += len(os.sep)
452 striplen1 += len(os.sep)
453 if evalpath(striplen1) > score:
453 if evalpath(striplen1) > score:
454 striplen = striplen1
454 striplen = striplen1
455 res = lambda p: os.path.join(dest,
455 res = lambda p: os.path.join(dest,
456 util.localpath(p)[striplen:])
456 util.localpath(p)[striplen:])
457 else:
457 else:
458 # a file
458 # a file
459 if destdirexists:
459 if destdirexists:
460 res = lambda p: os.path.join(dest,
460 res = lambda p: os.path.join(dest,
461 os.path.basename(util.localpath(p)))
461 os.path.basename(util.localpath(p)))
462 else:
462 else:
463 res = lambda p: dest
463 res = lambda p: dest
464 return res
464 return res
465
465
466
466
467 pats = util.expand_glob(pats)
467 pats = util.expand_glob(pats)
468 if not pats:
468 if not pats:
469 raise util.Abort(_('no source or destination specified'))
469 raise util.Abort(_('no source or destination specified'))
470 if len(pats) == 1:
470 if len(pats) == 1:
471 raise util.Abort(_('no destination specified'))
471 raise util.Abort(_('no destination specified'))
472 dest = pats.pop()
472 dest = pats.pop()
473 destdirexists = os.path.isdir(dest)
473 destdirexists = os.path.isdir(dest)
474 if not destdirexists:
474 if not destdirexists:
475 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
475 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
476 raise util.Abort(_('with multiple sources, destination must be an '
476 raise util.Abort(_('with multiple sources, destination must be an '
477 'existing directory'))
477 'existing directory'))
478 if util.endswithsep(dest):
478 if util.endswithsep(dest):
479 raise util.Abort(_('destination %s is not a directory') % dest)
479 raise util.Abort(_('destination %s is not a directory') % dest)
480
480
481 tfn = targetpathfn
481 tfn = targetpathfn
482 if after:
482 if after:
483 tfn = targetpathafterfn
483 tfn = targetpathafterfn
484 copylist = []
484 copylist = []
485 for pat in pats:
485 for pat in pats:
486 srcs = walkpat(pat)
486 srcs = walkpat(pat)
487 if not srcs:
487 if not srcs:
488 continue
488 continue
489 copylist.append((tfn(pat, dest, srcs), srcs))
489 copylist.append((tfn(pat, dest, srcs), srcs))
490 if not copylist:
490 if not copylist:
491 raise util.Abort(_('no files to copy'))
491 raise util.Abort(_('no files to copy'))
492
492
493 errors = 0
493 errors = 0
494 for targetpath, srcs in copylist:
494 for targetpath, srcs in copylist:
495 for abssrc, relsrc, exact in srcs:
495 for abssrc, relsrc, exact in srcs:
496 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
496 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
497 errors += 1
497 errors += 1
498
498
499 if errors:
499 if errors:
500 ui.warn(_('(consider using --after)\n'))
500 ui.warn(_('(consider using --after)\n'))
501
501
502 return errors
502 return errors
503
503
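# Simplified, POSIX-path model of the striplen logic in targetpathfn above
# (directory and file names are invented; patterns and the --after variant
# are ignored): copying a directory into an existing directory keeps the
# source directory's name inside it, while copying to a new name makes that
# name the root of the copied tree.
import os

def toy_target(srcdir, srcfile, dest, destdirexists):
    if destdirexists:
        striplen = len(os.path.split(srcdir)[0])   # keep the 'srcdir/' component
    else:
        striplen = len(srcdir)                     # drop it; dest replaces it
    if striplen:
        striplen += len(os.sep)
    return os.path.join(dest, srcfile[striplen:])

assert toy_target('docs', 'docs/index.txt', 'website', True) == 'website/docs/index.txt'
assert toy_target('docs', 'docs/index.txt', 'website', False) == 'website/index.txt'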
504 def service(opts, parentfn=None, initfn=None, runfn=None):
504 def service(opts, parentfn=None, initfn=None, runfn=None):
505 '''Run a command as a service.'''
505 '''Run a command as a service.'''
506
506
507 if opts['daemon'] and not opts['daemon_pipefds']:
507 if opts['daemon'] and not opts['daemon_pipefds']:
508 rfd, wfd = os.pipe()
508 rfd, wfd = os.pipe()
509 args = sys.argv[:]
509 args = sys.argv[:]
510 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
510 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
511 # Don't pass --cwd to the child process, because we've already
511 # Don't pass --cwd to the child process, because we've already
512 # changed directory.
512 # changed directory.
513 for i in xrange(1,len(args)):
513 for i in xrange(1,len(args)):
514 if args[i].startswith('--cwd='):
514 if args[i].startswith('--cwd='):
515 del args[i]
515 del args[i]
516 break
516 break
517 elif args[i].startswith('--cwd'):
517 elif args[i].startswith('--cwd'):
518 del args[i:i+2]
518 del args[i:i+2]
519 break
519 break
520 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
520 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
521 args[0], args)
521 args[0], args)
522 os.close(wfd)
522 os.close(wfd)
523 os.read(rfd, 1)
523 os.read(rfd, 1)
524 if parentfn:
524 if parentfn:
525 return parentfn(pid)
525 return parentfn(pid)
526 else:
526 else:
527 os._exit(0)
527 os._exit(0)
528
528
529 if initfn:
529 if initfn:
530 initfn()
530 initfn()
531
531
532 if opts['pid_file']:
532 if opts['pid_file']:
533 fp = open(opts['pid_file'], 'w')
533 fp = open(opts['pid_file'], 'w')
534 fp.write(str(os.getpid()) + '\n')
534 fp.write(str(os.getpid()) + '\n')
535 fp.close()
535 fp.close()
536
536
537 if opts['daemon_pipefds']:
537 if opts['daemon_pipefds']:
538 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
538 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
539 os.close(rfd)
539 os.close(rfd)
540 try:
540 try:
541 os.setsid()
541 os.setsid()
542 except AttributeError:
542 except AttributeError:
543 pass
543 pass
544 os.write(wfd, 'y')
544 os.write(wfd, 'y')
545 os.close(wfd)
545 os.close(wfd)
546 sys.stdout.flush()
546 sys.stdout.flush()
547 sys.stderr.flush()
547 sys.stderr.flush()
548 fd = os.open(util.nulldev, os.O_RDWR)
548 fd = os.open(util.nulldev, os.O_RDWR)
549 if fd != 0: os.dup2(fd, 0)
549 if fd != 0: os.dup2(fd, 0)
550 if fd != 1: os.dup2(fd, 1)
550 if fd != 1: os.dup2(fd, 1)
551 if fd != 2: os.dup2(fd, 2)
551 if fd != 2: os.dup2(fd, 2)
552 if fd not in (0, 1, 2): os.close(fd)
552 if fd not in (0, 1, 2): os.close(fd)
553
553
554 if runfn:
554 if runfn:
555 return runfn()
555 return runfn()
556
556
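# Minimal POSIX sketch of the readiness handshake used above (os.fork stands
# in for the spawnvp + --daemon-pipefds re-exec, so this is a simplification):
# the parent blocks on the read end of a pipe until the detached child has
# finished initialising and writes a single byte.
import os, time

def daemonize_sketch():
    rfd, wfd = os.pipe()
    pid = os.fork()
    if pid:                       # parent: wait for the child's "ready" byte
        os.close(wfd)
        os.read(rfd, 1)
        os.close(rfd)
        return pid
    # child: detach, do the slow setup, then release the waiting parent
    os.close(rfd)
    os.setsid()
    time.sleep(0.1)               # stand-in for initfn() doing real work
    os.write(wfd, b'y')
    os.close(wfd)
    fd = os.open(os.devnull, os.O_RDWR)    # silence stdio like the code above
    for std in (0, 1, 2):
        if fd != std:
            os.dup2(fd, std)
    if fd not in (0, 1, 2):
        os.close(fd)
    os._exit(0)                   # a real daemon would call runfn() here

child = daemonize_sketch()
os.waitpid(child, 0)              # reap the demo child; hg's child keeps serving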
557 class changeset_printer(object):
557 class changeset_printer(object):
558 '''show changeset information when templating not requested.'''
558 '''show changeset information when templating not requested.'''
559
559
560 def __init__(self, ui, repo, patch, buffered):
560 def __init__(self, ui, repo, patch, buffered):
561 self.ui = ui
561 self.ui = ui
562 self.repo = repo
562 self.repo = repo
563 self.buffered = buffered
563 self.buffered = buffered
564 self.patch = patch
564 self.patch = patch
565 self.header = {}
565 self.header = {}
566 self.hunk = {}
566 self.hunk = {}
567 self.lastheader = None
567 self.lastheader = None
568
568
569 def flush(self, rev):
569 def flush(self, rev):
570 if rev in self.header:
570 if rev in self.header:
571 h = self.header[rev]
571 h = self.header[rev]
572 if h != self.lastheader:
572 if h != self.lastheader:
573 self.lastheader = h
573 self.lastheader = h
574 self.ui.write(h)
574 self.ui.write(h)
575 del self.header[rev]
575 del self.header[rev]
576 if rev in self.hunk:
576 if rev in self.hunk:
577 self.ui.write(self.hunk[rev])
577 self.ui.write(self.hunk[rev])
578 del self.hunk[rev]
578 del self.hunk[rev]
579 return 1
579 return 1
580 return 0
580 return 0
581
581
582 def show(self, rev=0, changenode=None, copies=(), **props):
582 def show(self, rev=0, changenode=None, copies=(), **props):
583 if self.buffered:
583 if self.buffered:
584 self.ui.pushbuffer()
584 self.ui.pushbuffer()
585 self._show(rev, changenode, copies, props)
585 self._show(rev, changenode, copies, props)
586 self.hunk[rev] = self.ui.popbuffer()
586 self.hunk[rev] = self.ui.popbuffer()
587 else:
587 else:
588 self._show(rev, changenode, copies, props)
588 self._show(rev, changenode, copies, props)
589
589
590 def _show(self, rev, changenode, copies, props):
590 def _show(self, rev, changenode, copies, props):
591 '''show a single changeset or file revision'''
591 '''show a single changeset or file revision'''
592 log = self.repo.changelog
592 log = self.repo.changelog
593 if changenode is None:
593 if changenode is None:
594 changenode = log.node(rev)
594 changenode = log.node(rev)
595 elif not rev:
595 elif not rev:
596 rev = log.rev(changenode)
596 rev = log.rev(changenode)
597
597
598 if self.ui.quiet:
598 if self.ui.quiet:
599 self.ui.write("%d:%s\n" % (rev, short(changenode)))
599 self.ui.write("%d:%s\n" % (rev, short(changenode)))
600 return
600 return
601
601
602 changes = log.read(changenode)
602 changes = log.read(changenode)
603 date = util.datestr(changes[2])
603 date = util.datestr(changes[2])
604 extra = changes[5]
604 extra = changes[5]
605 branch = extra.get("branch")
605 branch = extra.get("branch")
606
606
607 hexfunc = self.ui.debugflag and hex or short
607 hexfunc = self.ui.debugflag and hex or short
608
608
609 parents = [(p, hexfunc(log.node(p)))
609 parents = [(p, hexfunc(log.node(p)))
610 for p in self._meaningful_parentrevs(log, rev)]
610 for p in self._meaningful_parentrevs(log, rev)]
611
611
612 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
612 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
613
613
614 # don't show the default branch name
614 # don't show the default branch name
615 if branch != 'default':
615 if branch != 'default':
616 branch = util.tolocal(branch)
616 branch = util.tolocal(branch)
617 self.ui.write(_("branch: %s\n") % branch)
617 self.ui.write(_("branch: %s\n") % branch)
618 for tag in self.repo.nodetags(changenode):
618 for tag in self.repo.nodetags(changenode):
619 self.ui.write(_("tag: %s\n") % tag)
619 self.ui.write(_("tag: %s\n") % tag)
620 for parent in parents:
620 for parent in parents:
621 self.ui.write(_("parent: %d:%s\n") % parent)
621 self.ui.write(_("parent: %d:%s\n") % parent)
622
622
623 if self.ui.debugflag:
623 if self.ui.debugflag:
624 self.ui.write(_("manifest: %d:%s\n") %
624 self.ui.write(_("manifest: %d:%s\n") %
625 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
625 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
626 self.ui.write(_("user: %s\n") % changes[1])
626 self.ui.write(_("user: %s\n") % changes[1])
627 self.ui.write(_("date: %s\n") % date)
627 self.ui.write(_("date: %s\n") % date)
628
628
629 if self.ui.debugflag:
629 if self.ui.debugflag:
630 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
630 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
631 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
631 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
632 files):
632 files):
633 if value:
633 if value:
634 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
634 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
635 elif changes[3] and self.ui.verbose:
635 elif changes[3] and self.ui.verbose:
636 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
636 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
637 if copies and self.ui.verbose:
637 if copies and self.ui.verbose:
638 copies = ['%s (%s)' % c for c in copies]
638 copies = ['%s (%s)' % c for c in copies]
639 self.ui.write(_("copies: %s\n") % ' '.join(copies))
639 self.ui.write(_("copies: %s\n") % ' '.join(copies))
640
640
641 if extra and self.ui.debugflag:
641 if extra and self.ui.debugflag:
642 extraitems = extra.items()
642 extraitems = extra.items()
643 extraitems.sort()
643 extraitems.sort()
644 for key, value in extraitems:
644 for key, value in extraitems:
645 self.ui.write(_("extra: %s=%s\n")
645 self.ui.write(_("extra: %s=%s\n")
646 % (key, value.encode('string_escape')))
646 % (key, value.encode('string_escape')))
647
647
648 description = changes[4].strip()
648 description = changes[4].strip()
649 if description:
649 if description:
650 if self.ui.verbose:
650 if self.ui.verbose:
651 self.ui.write(_("description:\n"))
651 self.ui.write(_("description:\n"))
652 self.ui.write(description)
652 self.ui.write(description)
653 self.ui.write("\n\n")
653 self.ui.write("\n\n")
654 else:
654 else:
655 self.ui.write(_("summary: %s\n") %
655 self.ui.write(_("summary: %s\n") %
656 description.splitlines()[0])
656 description.splitlines()[0])
657 self.ui.write("\n")
657 self.ui.write("\n")
658
658
659 self.showpatch(changenode)
659 self.showpatch(changenode)
660
660
661 def showpatch(self, node):
661 def showpatch(self, node):
662 if self.patch:
662 if self.patch:
663 prev = self.repo.changelog.parents(node)[0]
663 prev = self.repo.changelog.parents(node)[0]
664 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
664 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
665 opts=patch.diffopts(self.ui))
665 opts=patch.diffopts(self.ui))
666 self.ui.write("\n")
666 self.ui.write("\n")
667
667
668 def _meaningful_parentrevs(self, log, rev):
668 def _meaningful_parentrevs(self, log, rev):
669 """Return list of meaningful (or all if debug) parentrevs for rev.
669 """Return list of meaningful (or all if debug) parentrevs for rev.
670
670
671 For merges (two non-nullrev revisions) both parents are meaningful.
671 For merges (two non-nullrev revisions) both parents are meaningful.
672 Otherwise the first parent revision is considered meaningful if it
672 Otherwise the first parent revision is considered meaningful if it
673 is not the preceding revision.
673 is not the preceding revision.
674 """
674 """
675 parents = log.parentrevs(rev)
675 parents = log.parentrevs(rev)
676 if not self.ui.debugflag and parents[1] == nullrev:
676 if not self.ui.debugflag and parents[1] == nullrev:
677 if parents[0] >= rev - 1:
677 if parents[0] >= rev - 1:
678 parents = []
678 parents = []
679 else:
679 else:
680 parents = [parents[0]]
680 parents = [parents[0]]
681 return parents
681 return parents
682
682
683
683
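# Self-contained illustration of the rule documented in _meaningful_parentrevs
# (nullrev is -1 in Mercurial; the parent tuples below are invented): merges
# keep both parents, a child of the immediately preceding revision shows none,
# and anything else shows its single non-trivial parent.
nullrev = -1

def meaningful_parents(parentrevs, rev, debug=False):
    if not debug and parentrevs[1] == nullrev:
        if parentrevs[0] >= rev - 1:
            return []
        return [parentrevs[0]]
    return list(parentrevs)

assert meaningful_parents((4, nullrev), 5) == []       # linear: parent is rev 4
assert meaningful_parents((2, nullrev), 5) == [2]      # revisions skipped: show it
assert meaningful_parents((3, 4), 5) == [3, 4]         # merge: both parents shown
assert meaningful_parents((4, nullrev), 5, debug=True) == [4, -1]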
684 class changeset_templater(changeset_printer):
684 class changeset_templater(changeset_printer):
685 '''format changeset information.'''
685 '''format changeset information.'''
686
686
687 def __init__(self, ui, repo, patch, mapfile, buffered):
687 def __init__(self, ui, repo, patch, mapfile, buffered):
688 changeset_printer.__init__(self, ui, repo, patch, buffered)
688 changeset_printer.__init__(self, ui, repo, patch, buffered)
689 filters = templatefilters.filters.copy()
689 filters = templatefilters.filters.copy()
690 filters['formatnode'] = (ui.debugflag and (lambda x: x)
690 filters['formatnode'] = (ui.debugflag and (lambda x: x)
691 or (lambda x: x[:12]))
691 or (lambda x: x[:12]))
692 self.t = templater.templater(mapfile, filters,
692 self.t = templater.templater(mapfile, filters,
693 cache={
693 cache={
694 'parent': '{rev}:{node|formatnode} ',
694 'parent': '{rev}:{node|formatnode} ',
695 'manifest': '{rev}:{node|formatnode}',
695 'manifest': '{rev}:{node|formatnode}',
696 'filecopy': '{name} ({source})'})
696 'filecopy': '{name} ({source})'})
697
697
698 def use_template(self, t):
698 def use_template(self, t):
699 '''set template string to use'''
699 '''set template string to use'''
700 self.t.cache['changeset'] = t
700 self.t.cache['changeset'] = t
701
701
702 def _show(self, rev, changenode, copies, props):
702 def _show(self, rev, changenode, copies, props):
703 '''show a single changeset or file revision'''
703 '''show a single changeset or file revision'''
704 log = self.repo.changelog
704 log = self.repo.changelog
705 if changenode is None:
705 if changenode is None:
706 changenode = log.node(rev)
706 changenode = log.node(rev)
707 elif not rev:
707 elif not rev:
708 rev = log.rev(changenode)
708 rev = log.rev(changenode)
709
709
710 changes = log.read(changenode)
710 changes = log.read(changenode)
711
711
712 def showlist(name, values, plural=None, **args):
712 def showlist(name, values, plural=None, **args):
713 '''expand set of values.
713 '''expand set of values.
714 name is name of key in template map.
714 name is name of key in template map.
715 values is list of strings or dicts.
715 values is list of strings or dicts.
716 plural is plural of name, if not simply name + 's'.
716 plural is plural of name, if not simply name + 's'.
717
717
718 expansion works like this, given name 'foo'.
718 expansion works like this, given name 'foo'.
719
719
720 if values is empty, expand 'no_foos'.
720 if values is empty, expand 'no_foos'.
721
721
722 if 'foo' not in template map, return values as a string,
722 if 'foo' not in template map, return values as a string,
723 joined by space.
723 joined by space.
724
724
725 expand 'start_foos'.
725 expand 'start_foos'.
726
726
727 for each value, expand 'foo'. if 'last_foo' in template
727 for each value, expand 'foo'. if 'last_foo' in template
728 map, expand it instead of 'foo' for last key.
728 map, expand it instead of 'foo' for last key.
729
729
730 expand 'end_foos'.
730 expand 'end_foos'.
731 '''
731 '''
732 if plural: names = plural
732 if plural: names = plural
733 else: names = name + 's'
733 else: names = name + 's'
734 if not values:
734 if not values:
735 noname = 'no_' + names
735 noname = 'no_' + names
736 if noname in self.t:
736 if noname in self.t:
737 yield self.t(noname, **args)
737 yield self.t(noname, **args)
738 return
738 return
739 if name not in self.t:
739 if name not in self.t:
740 if isinstance(values[0], str):
740 if isinstance(values[0], str):
741 yield ' '.join(values)
741 yield ' '.join(values)
742 else:
742 else:
743 for v in values:
743 for v in values:
744 yield dict(v, **args)
744 yield dict(v, **args)
745 return
745 return
746 startname = 'start_' + names
746 startname = 'start_' + names
747 if startname in self.t:
747 if startname in self.t:
748 yield self.t(startname, **args)
748 yield self.t(startname, **args)
749 vargs = args.copy()
749 vargs = args.copy()
750 def one(v, tag=name):
750 def one(v, tag=name):
751 try:
751 try:
752 vargs.update(v)
752 vargs.update(v)
753 except (AttributeError, ValueError):
753 except (AttributeError, ValueError):
754 try:
754 try:
755 for a, b in v:
755 for a, b in v:
756 vargs[a] = b
756 vargs[a] = b
757 except ValueError:
757 except ValueError:
758 vargs[name] = v
758 vargs[name] = v
759 return self.t(tag, **vargs)
759 return self.t(tag, **vargs)
760 lastname = 'last_' + name
760 lastname = 'last_' + name
761 if lastname in self.t:
761 if lastname in self.t:
762 last = values.pop()
762 last = values.pop()
763 else:
763 else:
764 last = None
764 last = None
765 for v in values:
765 for v in values:
766 yield one(v)
766 yield one(v)
767 if last is not None:
767 if last is not None:
768 yield one(last, tag=lastname)
768 yield one(last, tag=lastname)
769 endname = 'end_' + names
769 endname = 'end_' + names
770 if endname in self.t:
770 if endname in self.t:
771 yield self.t(endname, **args)
771 yield self.t(endname, **args)
772
772
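# Tiny dict-backed stand-in for self.t (illustrative only, not the real
# templater API) showing the expansion order showlist documents for a key
# named 'tag': start_tags, then 'tag' per value with 'last_tag' for the
# final one, then end_tags; an empty list expands 'no_tags' instead.
toy_map = {'start_tags': 'tags: ',
           'tag': '{tag}, ',
           'last_tag': '{tag}',
           'end_tags': '\n',
           'no_tags': '(no tags)\n'}

def expand_tags(values):
    if not values:
        return toy_map['no_tags']
    out = [toy_map['start_tags']]
    for v in values[:-1]:
        out.append(toy_map['tag'].format(tag=v))
    out.append(toy_map['last_tag'].format(tag=values[-1]))
    out.append(toy_map['end_tags'])
    return ''.join(out)

assert expand_tags(['tip', 'v1.0']) == 'tags: tip, v1.0\n'
assert expand_tags([]) == '(no tags)\n'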
773 def showbranches(**args):
773 def showbranches(**args):
774 branch = changes[5].get("branch")
774 branch = changes[5].get("branch")
775 if branch != 'default':
775 if branch != 'default':
776 branch = util.tolocal(branch)
776 branch = util.tolocal(branch)
777 return showlist('branch', [branch], plural='branches', **args)
777 return showlist('branch', [branch], plural='branches', **args)
778
778
779 def showparents(**args):
779 def showparents(**args):
780 parents = [[('rev', p), ('node', hex(log.node(p)))]
780 parents = [[('rev', p), ('node', hex(log.node(p)))]
781 for p in self._meaningful_parentrevs(log, rev)]
781 for p in self._meaningful_parentrevs(log, rev)]
782 return showlist('parent', parents, **args)
782 return showlist('parent', parents, **args)
783
783
784 def showtags(**args):
784 def showtags(**args):
785 return showlist('tag', self.repo.nodetags(changenode), **args)
785 return showlist('tag', self.repo.nodetags(changenode), **args)
786
786
787 def showextras(**args):
787 def showextras(**args):
788 extras = changes[5].items()
788 extras = changes[5].items()
789 extras.sort()
789 extras.sort()
790 for key, value in extras:
790 for key, value in extras:
791 args = args.copy()
791 args = args.copy()
792 args.update(dict(key=key, value=value))
792 args.update(dict(key=key, value=value))
793 yield self.t('extra', **args)
793 yield self.t('extra', **args)
794
794
795 def showcopies(**args):
795 def showcopies(**args):
796 c = [{'name': x[0], 'source': x[1]} for x in copies]
796 c = [{'name': x[0], 'source': x[1]} for x in copies]
797 return showlist('file_copy', c, plural='file_copies', **args)
797 return showlist('file_copy', c, plural='file_copies', **args)
798
798
799 files = []
799 files = []
800 def getfiles():
800 def getfiles():
801 if not files:
801 if not files:
802 files[:] = self.repo.status(
802 files[:] = self.repo.status(
803 log.parents(changenode)[0], changenode)[:3]
803 log.parents(changenode)[0], changenode)[:3]
804 return files
804 return files
805 def showfiles(**args):
805 def showfiles(**args):
806 return showlist('file', changes[3], **args)
806 return showlist('file', changes[3], **args)
807 def showmods(**args):
807 def showmods(**args):
808 return showlist('file_mod', getfiles()[0], **args)
808 return showlist('file_mod', getfiles()[0], **args)
809 def showadds(**args):
809 def showadds(**args):
810 return showlist('file_add', getfiles()[1], **args)
810 return showlist('file_add', getfiles()[1], **args)
811 def showdels(**args):
811 def showdels(**args):
812 return showlist('file_del', getfiles()[2], **args)
812 return showlist('file_del', getfiles()[2], **args)
813 def showmanifest(**args):
813 def showmanifest(**args):
814 args = args.copy()
814 args = args.copy()
815 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
815 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
816 node=hex(changes[0])))
816 node=hex(changes[0])))
817 return self.t('manifest', **args)
817 return self.t('manifest', **args)
818
818
819 defprops = {
819 defprops = {
820 'author': changes[1],
820 'author': changes[1],
821 'branches': showbranches,
821 'branches': showbranches,
822 'date': changes[2],
822 'date': changes[2],
823 'desc': changes[4].strip(),
823 'desc': changes[4].strip(),
824 'file_adds': showadds,
824 'file_adds': showadds,
825 'file_dels': showdels,
825 'file_dels': showdels,
826 'file_mods': showmods,
826 'file_mods': showmods,
827 'files': showfiles,
827 'files': showfiles,
828 'file_copies': showcopies,
828 'file_copies': showcopies,
829 'manifest': showmanifest,
829 'manifest': showmanifest,
830 'node': hex(changenode),
830 'node': hex(changenode),
831 'parents': showparents,
831 'parents': showparents,
832 'rev': rev,
832 'rev': rev,
833 'tags': showtags,
833 'tags': showtags,
834 'extras': showextras,
834 'extras': showextras,
835 }
835 }
836 props = props.copy()
836 props = props.copy()
837 props.update(defprops)
837 props.update(defprops)
838
838
839 try:
839 try:
840 if self.ui.debugflag and 'header_debug' in self.t:
840 if self.ui.debugflag and 'header_debug' in self.t:
841 key = 'header_debug'
841 key = 'header_debug'
842 elif self.ui.quiet and 'header_quiet' in self.t:
842 elif self.ui.quiet and 'header_quiet' in self.t:
843 key = 'header_quiet'
843 key = 'header_quiet'
844 elif self.ui.verbose and 'header_verbose' in self.t:
844 elif self.ui.verbose and 'header_verbose' in self.t:
845 key = 'header_verbose'
845 key = 'header_verbose'
846 elif 'header' in self.t:
846 elif 'header' in self.t:
847 key = 'header'
847 key = 'header'
848 else:
848 else:
849 key = ''
849 key = ''
850 if key:
850 if key:
851 h = templater.stringify(self.t(key, **props))
851 h = templater.stringify(self.t(key, **props))
852 if self.buffered:
852 if self.buffered:
853 self.header[rev] = h
853 self.header[rev] = h
854 else:
854 else:
855 self.ui.write(h)
855 self.ui.write(h)
856 if self.ui.debugflag and 'changeset_debug' in self.t:
856 if self.ui.debugflag and 'changeset_debug' in self.t:
857 key = 'changeset_debug'
857 key = 'changeset_debug'
858 elif self.ui.quiet and 'changeset_quiet' in self.t:
858 elif self.ui.quiet and 'changeset_quiet' in self.t:
859 key = 'changeset_quiet'
859 key = 'changeset_quiet'
860 elif self.ui.verbose and 'changeset_verbose' in self.t:
860 elif self.ui.verbose and 'changeset_verbose' in self.t:
861 key = 'changeset_verbose'
861 key = 'changeset_verbose'
862 else:
862 else:
863 key = 'changeset'
863 key = 'changeset'
864 self.ui.write(templater.stringify(self.t(key, **props)))
864 self.ui.write(templater.stringify(self.t(key, **props)))
865 self.showpatch(changenode)
865 self.showpatch(changenode)
866 except KeyError, inst:
866 except KeyError, inst:
867 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
867 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
868 inst.args[0]))
868 inst.args[0]))
869 except SyntaxError, inst:
869 except SyntaxError, inst:
870 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
870 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
871
871
872 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
872 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
873 """show one changeset using template or regular display.
873 """show one changeset using template or regular display.
874
874
875 Display format will be the first non-empty hit of:
875 Display format will be the first non-empty hit of:
876 1. option 'template'
876 1. option 'template'
877 2. option 'style'
877 2. option 'style'
878 3. [ui] setting 'logtemplate'
878 3. [ui] setting 'logtemplate'
879 4. [ui] setting 'style'
879 4. [ui] setting 'style'
880 If all of these values are either unset or the empty string,
880 If all of these values are either unset or the empty string,
881 regular display via changeset_printer() is done.
881 regular display via changeset_printer() is done.
882 """
882 """
883 # options
883 # options
884 patch = False
884 patch = False
885 if opts.get('patch'):
885 if opts.get('patch'):
886 patch = matchfn or util.always
886 patch = matchfn or util.always
887
887
888 tmpl = opts.get('template')
888 tmpl = opts.get('template')
889 mapfile = None
889 mapfile = None
890 if tmpl:
890 if tmpl:
891 tmpl = templater.parsestring(tmpl, quoted=False)
891 tmpl = templater.parsestring(tmpl, quoted=False)
892 else:
892 else:
893 mapfile = opts.get('style')
893 mapfile = opts.get('style')
894 # ui settings
894 # ui settings
895 if not mapfile:
895 if not mapfile:
896 tmpl = ui.config('ui', 'logtemplate')
896 tmpl = ui.config('ui', 'logtemplate')
897 if tmpl:
897 if tmpl:
898 tmpl = templater.parsestring(tmpl)
898 tmpl = templater.parsestring(tmpl)
899 else:
899 else:
900 mapfile = ui.config('ui', 'style')
900 mapfile = ui.config('ui', 'style')
901
901
902 if tmpl or mapfile:
902 if tmpl or mapfile:
903 if mapfile:
903 if mapfile:
904 if not os.path.split(mapfile)[0]:
904 if not os.path.split(mapfile)[0]:
905 mapname = (templater.templatepath('map-cmdline.' + mapfile)
905 mapname = (templater.templatepath('map-cmdline.' + mapfile)
906 or templater.templatepath(mapfile))
906 or templater.templatepath(mapfile))
907 if mapname: mapfile = mapname
907 if mapname: mapfile = mapname
908 try:
908 try:
909 t = changeset_templater(ui, repo, patch, mapfile, buffered)
909 t = changeset_templater(ui, repo, patch, mapfile, buffered)
910 except SyntaxError, inst:
910 except SyntaxError, inst:
911 raise util.Abort(inst.args[0])
911 raise util.Abort(inst.args[0])
912 if tmpl: t.use_template(tmpl)
912 if tmpl: t.use_template(tmpl)
913 return t
913 return t
914 return changeset_printer(ui, repo, patch, buffered)
914 return changeset_printer(ui, repo, patch, buffered)
915
915
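show_changeset() above spells out a four-level precedence for choosing the display format. A minimal standalone sketch of that precedence, with the command options and [ui] settings stubbed as plain dictionaries (illustrative names, not Mercurial API):

    def pick_format(opts, uicfg):
        # Return ('template', value) or ('style', value) following the
        # documented precedence, or None to fall back to the plain printer.
        if opts.get('template'):
            return 'template', opts['template']
        if opts.get('style'):
            return 'style', opts['style']
        if uicfg.get('logtemplate'):
            return 'template', uicfg['logtemplate']
        if uicfg.get('style'):
            return 'style', uicfg['style']
        return None

    # pick_format({'template': '{rev}\n'}, {'style': 'compact'}) -> ('template', '{rev}\n')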
916 def finddate(ui, repo, date):
916 def finddate(ui, repo, date):
917 """Find the tipmost changeset that matches the given date spec"""
917 """Find the tipmost changeset that matches the given date spec"""
918 df = util.matchdate(date)
918 df = util.matchdate(date)
919 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
919 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
920 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
920 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
921 results = {}
921 results = {}
922 for st, rev, fns in changeiter:
922 for st, rev, fns in changeiter:
923 if st == 'add':
923 if st == 'add':
924 d = get(rev)[2]
924 d = get(rev)[2]
925 if df(d[0]):
925 if df(d[0]):
926 results[rev] = d
926 results[rev] = d
927 elif st == 'iter':
927 elif st == 'iter':
928 if rev in results:
928 if rev in results:
929 ui.status("Found revision %s from %s\n" %
929 ui.status("Found revision %s from %s\n" %
930 (rev, util.datestr(results[rev])))
930 (rev, util.datestr(results[rev])))
931 return str(rev)
931 return str(rev)
932
932
933 raise util.Abort(_("revision matching date not found"))
933 raise util.Abort(_("revision matching date not found"))
934
934
935 def walkchangerevs(ui, repo, pats, change, opts):
935 def walkchangerevs(ui, repo, pats, change, opts):
936 '''Iterate over files and the revs they changed in.
936 '''Iterate over files and the revs they changed in.
937
937
938 Callers most commonly need to iterate backwards over the history
938 Callers most commonly need to iterate backwards over the history
939 they are interested in. Doing so has awful (quadratic-looking)
939 they are interested in. Doing so has awful (quadratic-looking)
940 performance, so we use iterators in a "windowed" way.
940 performance, so we use iterators in a "windowed" way.
941
941
942 We walk a window of revisions in the desired order. Within the
942 We walk a window of revisions in the desired order. Within the
943 window, we first walk forwards to gather data, then in the desired
943 window, we first walk forwards to gather data, then in the desired
944 order (usually backwards) to display it.
944 order (usually backwards) to display it.
945
945
946 This function returns an (iterator, matchfn) tuple. The iterator
946 This function returns an (iterator, matchfn) tuple. The iterator
947 yields 3-tuples. They will be of one of the following forms:
947 yields 3-tuples. They will be of one of the following forms:
948
948
949 "window", incrementing, lastrev: stepping through a window,
949 "window", incrementing, lastrev: stepping through a window,
950 positive if walking forwards through revs, last rev in the
950 positive if walking forwards through revs, last rev in the
951 sequence iterated over - use to reset state for the current window
951 sequence iterated over - use to reset state for the current window
952
952
953 "add", rev, fns: out-of-order traversal of the given file names
953 "add", rev, fns: out-of-order traversal of the given file names
954 fns, which changed during revision rev - use to gather data for
954 fns, which changed during revision rev - use to gather data for
955 possible display
955 possible display
956
956
957 "iter", rev, None: in-order traversal of the revs earlier iterated
957 "iter", rev, None: in-order traversal of the revs earlier iterated
958 over with "add" - use to display data'''
958 over with "add" - use to display data'''
959
959
960 def increasing_windows(start, end, windowsize=8, sizelimit=512):
960 def increasing_windows(start, end, windowsize=8, sizelimit=512):
961 if start < end:
961 if start < end:
962 while start < end:
962 while start < end:
963 yield start, min(windowsize, end-start)
963 yield start, min(windowsize, end-start)
964 start += windowsize
964 start += windowsize
965 if windowsize < sizelimit:
965 if windowsize < sizelimit:
966 windowsize *= 2
966 windowsize *= 2
967 else:
967 else:
968 while start > end:
968 while start > end:
969 yield start, min(windowsize, start-end-1)
969 yield start, min(windowsize, start-end-1)
970 start -= windowsize
970 start -= windowsize
971 if windowsize < sizelimit:
971 if windowsize < sizelimit:
972 windowsize *= 2
972 windowsize *= 2
973
973
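increasing_windows() above walks the revision range in chunks that double in size up to a cap, which keeps the reverse history walk from degrading too badly. The same idea as a standalone generator, stripped of the descending case (names here are illustrative, not part of Mercurial's API):

    def growing_windows(start, end, windowsize=8, sizelimit=512):
        # Yield (start, size) pairs covering [start, end), doubling the
        # window each step until the size limit is reached.
        while start < end:
            yield start, min(windowsize, end - start)
            start += windowsize
            if windowsize < sizelimit:
                windowsize *= 2

    # list(growing_windows(0, 30)) -> [(0, 8), (8, 16), (24, 6)]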
974 files, matchfn, anypats = matchpats(repo, pats, opts)
974 files, matchfn, anypats = matchpats(repo, pats, opts)
975 follow = opts.get('follow') or opts.get('follow_first')
975 follow = opts.get('follow') or opts.get('follow_first')
976
976
977 if repo.changelog.count() == 0:
977 if repo.changelog.count() == 0:
978 return [], matchfn
978 return [], matchfn
979
979
980 if follow:
980 if follow:
981 defrange = '%s:0' % repo.changectx().rev()
981 defrange = '%s:0' % repo.changectx().rev()
982 else:
982 else:
983 defrange = '-1:0'
983 defrange = '-1:0'
984 revs = revrange(repo, opts['rev'] or [defrange])
984 revs = revrange(repo, opts['rev'] or [defrange])
985 wanted = {}
985 wanted = {}
986 slowpath = anypats or opts.get('removed')
986 slowpath = anypats or opts.get('removed')
987 fncache = {}
987 fncache = {}
988
988
989 if not slowpath and not files:
989 if not slowpath and not files:
990 # No files, no patterns. Display all revs.
990 # No files, no patterns. Display all revs.
991 wanted = dict.fromkeys(revs)
991 wanted = dict.fromkeys(revs)
992 copies = []
992 copies = []
993 if not slowpath:
993 if not slowpath:
994 # Only files, no patterns. Check the history of each file.
994 # Only files, no patterns. Check the history of each file.
995 def filerevgen(filelog, node):
995 def filerevgen(filelog, node):
996 cl_count = repo.changelog.count()
996 cl_count = repo.changelog.count()
997 if node is None:
997 if node is None:
998 last = filelog.count() - 1
998 last = filelog.count() - 1
999 else:
999 else:
1000 last = filelog.rev(node)
1000 last = filelog.rev(node)
1001 for i, window in increasing_windows(last, nullrev):
1001 for i, window in increasing_windows(last, nullrev):
1002 revs = []
1002 revs = []
1003 for j in xrange(i - window, i + 1):
1003 for j in xrange(i - window, i + 1):
1004 n = filelog.node(j)
1004 n = filelog.node(j)
1005 revs.append((filelog.linkrev(n),
1005 revs.append((filelog.linkrev(n),
1006 follow and filelog.renamed(n)))
1006 follow and filelog.renamed(n)))
1007 revs.reverse()
1007 revs.reverse()
1008 for rev in revs:
1008 for rev in revs:
1009 # only yield revs for which we have the changelog; this can
1009 # only yield revs for which we have the changelog; this can
1010 # happen while doing "hg log" during a pull or commit
1010 # happen while doing "hg log" during a pull or commit
1011 if rev[0] < cl_count:
1011 if rev[0] < cl_count:
1012 yield rev
1012 yield rev
1013 def iterfiles():
1013 def iterfiles():
1014 for filename in files:
1014 for filename in files:
1015 yield filename, None
1015 yield filename, None
1016 for filename_node in copies:
1016 for filename_node in copies:
1017 yield filename_node
1017 yield filename_node
1018 minrev, maxrev = min(revs), max(revs)
1018 minrev, maxrev = min(revs), max(revs)
1019 for file_, node in iterfiles():
1019 for file_, node in iterfiles():
1020 filelog = repo.file(file_)
1020 filelog = repo.file(file_)
1021 # A zero count may be a directory or deleted file, so
1021 # A zero count may be a directory or deleted file, so
1022 # try to find matching entries on the slow path.
1022 # try to find matching entries on the slow path.
1023 if filelog.count() == 0:
1023 if filelog.count() == 0:
1024 slowpath = True
1024 slowpath = True
1025 break
1025 break
1026 for rev, copied in filerevgen(filelog, node):
1026 for rev, copied in filerevgen(filelog, node):
1027 if rev <= maxrev:
1027 if rev <= maxrev:
1028 if rev < minrev:
1028 if rev < minrev:
1029 break
1029 break
1030 fncache.setdefault(rev, [])
1030 fncache.setdefault(rev, [])
1031 fncache[rev].append(file_)
1031 fncache[rev].append(file_)
1032 wanted[rev] = 1
1032 wanted[rev] = 1
1033 if follow and copied:
1033 if follow and copied:
1034 copies.append(copied)
1034 copies.append(copied)
1035 if slowpath:
1035 if slowpath:
1036 if follow:
1036 if follow:
1037 raise util.Abort(_('can only follow copies/renames for explicit '
1037 raise util.Abort(_('can only follow copies/renames for explicit '
1038 'file names'))
1038 'file names'))
1039
1039
1040 # The slow path checks files modified in every changeset.
1040 # The slow path checks files modified in every changeset.
1041 def changerevgen():
1041 def changerevgen():
1042 for i, window in increasing_windows(repo.changelog.count()-1,
1042 for i, window in increasing_windows(repo.changelog.count()-1,
1043 nullrev):
1043 nullrev):
1044 for j in xrange(i - window, i + 1):
1044 for j in xrange(i - window, i + 1):
1045 yield j, change(j)[3]
1045 yield j, change(j)[3]
1046
1046
1047 for rev, changefiles in changerevgen():
1047 for rev, changefiles in changerevgen():
1048 matches = filter(matchfn, changefiles)
1048 matches = filter(matchfn, changefiles)
1049 if matches:
1049 if matches:
1050 fncache[rev] = matches
1050 fncache[rev] = matches
1051 wanted[rev] = 1
1051 wanted[rev] = 1
1052
1052
1053 class followfilter:
1053 class followfilter:
1054 def __init__(self, onlyfirst=False):
1054 def __init__(self, onlyfirst=False):
1055 self.startrev = nullrev
1055 self.startrev = nullrev
1056 self.roots = []
1056 self.roots = []
1057 self.onlyfirst = onlyfirst
1057 self.onlyfirst = onlyfirst
1058
1058
1059 def match(self, rev):
1059 def match(self, rev):
1060 def realparents(rev):
1060 def realparents(rev):
1061 if self.onlyfirst:
1061 if self.onlyfirst:
1062 return repo.changelog.parentrevs(rev)[0:1]
1062 return repo.changelog.parentrevs(rev)[0:1]
1063 else:
1063 else:
1064 return filter(lambda x: x != nullrev,
1064 return filter(lambda x: x != nullrev,
1065 repo.changelog.parentrevs(rev))
1065 repo.changelog.parentrevs(rev))
1066
1066
1067 if self.startrev == nullrev:
1067 if self.startrev == nullrev:
1068 self.startrev = rev
1068 self.startrev = rev
1069 return True
1069 return True
1070
1070
1071 if rev > self.startrev:
1071 if rev > self.startrev:
1072 # forward: all descendants
1072 # forward: all descendants
1073 if not self.roots:
1073 if not self.roots:
1074 self.roots.append(self.startrev)
1074 self.roots.append(self.startrev)
1075 for parent in realparents(rev):
1075 for parent in realparents(rev):
1076 if parent in self.roots:
1076 if parent in self.roots:
1077 self.roots.append(rev)
1077 self.roots.append(rev)
1078 return True
1078 return True
1079 else:
1079 else:
1080 # backwards: all parents
1080 # backwards: all parents
1081 if not self.roots:
1081 if not self.roots:
1082 self.roots.extend(realparents(self.startrev))
1082 self.roots.extend(realparents(self.startrev))
1083 if rev in self.roots:
1083 if rev in self.roots:
1084 self.roots.remove(rev)
1084 self.roots.remove(rev)
1085 self.roots.extend(realparents(rev))
1085 self.roots.extend(realparents(rev))
1086 return True
1086 return True
1087
1087
1088 return False
1088 return False
1089
1089
1090 # it might be worthwhile to do this in the iterator if the rev range
1090 # it might be worthwhile to do this in the iterator if the rev range
1091 # is descending and the prune args are all within that range
1091 # is descending and the prune args are all within that range
1092 for rev in opts.get('prune', ()):
1092 for rev in opts.get('prune', ()):
1093 rev = repo.changelog.rev(repo.lookup(rev))
1093 rev = repo.changelog.rev(repo.lookup(rev))
1094 ff = followfilter()
1094 ff = followfilter()
1095 stop = min(revs[0], revs[-1])
1095 stop = min(revs[0], revs[-1])
1096 for x in xrange(rev, stop-1, -1):
1096 for x in xrange(rev, stop-1, -1):
1097 if ff.match(x) and x in wanted:
1097 if ff.match(x) and x in wanted:
1098 del wanted[x]
1098 del wanted[x]
1099
1099
1100 def iterate():
1100 def iterate():
1101 if follow and not files:
1101 if follow and not files:
1102 ff = followfilter(onlyfirst=opts.get('follow_first'))
1102 ff = followfilter(onlyfirst=opts.get('follow_first'))
1103 def want(rev):
1103 def want(rev):
1104 if ff.match(rev) and rev in wanted:
1104 if ff.match(rev) and rev in wanted:
1105 return True
1105 return True
1106 return False
1106 return False
1107 else:
1107 else:
1108 def want(rev):
1108 def want(rev):
1109 return rev in wanted
1109 return rev in wanted
1110
1110
1111 for i, window in increasing_windows(0, len(revs)):
1111 for i, window in increasing_windows(0, len(revs)):
1112 yield 'window', revs[0] < revs[-1], revs[-1]
1112 yield 'window', revs[0] < revs[-1], revs[-1]
1113 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1113 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1114 srevs = list(nrevs)
1114 srevs = list(nrevs)
1115 srevs.sort()
1115 srevs.sort()
1116 for rev in srevs:
1116 for rev in srevs:
1117 fns = fncache.get(rev)
1117 fns = fncache.get(rev)
1118 if not fns:
1118 if not fns:
1119 def fns_generator():
1119 def fns_generator():
1120 for f in change(rev)[3]:
1120 for f in change(rev)[3]:
1121 if matchfn(f):
1121 if matchfn(f):
1122 yield f
1122 yield f
1123 fns = fns_generator()
1123 fns = fns_generator()
1124 yield 'add', rev, fns
1124 yield 'add', rev, fns
1125 for rev in nrevs:
1125 for rev in nrevs:
1126 yield 'iter', rev, None
1126 yield 'iter', rev, None
1127 return iterate(), matchfn
1127 return iterate(), matchfn
1128
1128
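Consumers of walkchangerevs() usually gather per-revision data on the out-of-order 'add' events and emit output on the in-order 'iter' events, resetting any bookkeeping at each 'window' boundary. A hedged sketch of that consumption pattern (display is a caller-supplied stand-in, not a Mercurial object):

    def consume(changeiter, display):
        # Gather file names on 'add', show them on 'iter', and reset
        # per-window state whenever a new window starts.
        collected = {}
        for st, rev, fns in changeiter:
            if st == 'window':
                collected.clear()
            elif st == 'add':
                collected[rev] = list(fns)
            elif st == 'iter':
                if rev in collected:
                    display(rev, collected[rev])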
1129 def commit(ui, repo, commitfunc, pats, opts):
1129 def commit(ui, repo, commitfunc, pats, opts):
1130 '''commit the specified files or all outstanding changes'''
1130 '''commit the specified files or all outstanding changes'''
1131 date = opts.get('date')
1131 date = opts.get('date')
1132 if date:
1132 if date:
1133 opts['date'] = util.parsedate(date)
1133 opts['date'] = util.parsedate(date)
1134 message = logmessage(opts)
1134 message = logmessage(opts)
1135
1135
1136 # extract addremove carefully -- this function can be called from a command
1136 # extract addremove carefully -- this function can be called from a command
1137 # that doesn't support addremove
1137 # that doesn't support addremove
1138 if opts.get('addremove'):
1138 if opts.get('addremove'):
1139 addremove(repo, pats, opts)
1139 addremove(repo, pats, opts)
1140
1140
1141 fns, match, anypats = matchpats(repo, pats, opts)
1141 fns, match, anypats = matchpats(repo, pats, opts)
1142 if pats:
1142 if pats:
1143 status = repo.status(files=fns, match=match)
1143 status = repo.status(files=fns, match=match)
1144 modified, added, removed, deleted, unknown = status[:5]
1144 modified, added, removed, deleted, unknown = status[:5]
1145 files = modified + added + removed
1145 files = modified + added + removed
1146 slist = None
1146 slist = None
1147 for f in fns:
1147 for f in fns:
1148 if f == '.':
1148 if f == '.':
1149 continue
1149 continue
1150 if f not in files:
1150 if f not in files:
1151 rf = repo.wjoin(f)
1151 rf = repo.wjoin(f)
1152 rel = repo.pathto(f)
1152 rel = repo.pathto(f)
1153 try:
1153 try:
1154 mode = os.lstat(rf)[stat.ST_MODE]
1154 mode = os.lstat(rf)[stat.ST_MODE]
1155 except OSError:
1155 except OSError:
1156 raise util.Abort(_("file %s not found!") % rel)
1156 raise util.Abort(_("file %s not found!") % rel)
1157 if stat.S_ISDIR(mode):
1157 if stat.S_ISDIR(mode):
1158 name = f + '/'
1158 name = f + '/'
1159 if slist is None:
1159 if slist is None:
1160 slist = list(files)
1160 slist = list(files)
1161 slist.sort()
1161 slist.sort()
1162 i = bisect.bisect(slist, name)
1162 i = bisect.bisect(slist, name)
1163 if i >= len(slist) or not slist[i].startswith(name):
1163 if i >= len(slist) or not slist[i].startswith(name):
1164 raise util.Abort(_("no match under directory %s!")
1164 raise util.Abort(_("no match under directory %s!")
1165 % rel)
1165 % rel)
1166 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1166 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1167 raise util.Abort(_("can't commit %s: "
1167 raise util.Abort(_("can't commit %s: "
1168 "unsupported file type!") % rel)
1168 "unsupported file type!") % rel)
1169 elif f not in repo.dirstate:
1169 elif f not in repo.dirstate:
1170 raise util.Abort(_("file %s not tracked!") % rel)
1170 raise util.Abort(_("file %s not tracked!") % rel)
1171 else:
1171 else:
1172 files = []
1172 files = []
1173 try:
1173 try:
1174 return commitfunc(ui, repo, files, message, match, opts)
1174 return commitfunc(ui, repo, files, message, match, opts)
1175 except ValueError, inst:
1175 except ValueError, inst:
1176 raise util.Abort(str(inst))
1176 raise util.Abort(str(inst))
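The directory test in commit() above relies on the fact that, in a sorted list of paths, anything under 'dir/' sorts immediately after the string 'dir/' itself, so a single bisect probe answers "is any changed file below this directory?". A small self-contained sketch of that check:

    import bisect

    def has_file_under(sorted_files, directory):
        # True if some entry of the sorted path list lies below 'directory'.
        name = directory + '/'
        i = bisect.bisect(sorted_files, name)
        return i < len(sorted_files) and sorted_files[i].startswith(name)

    # has_file_under(['a.txt', 'docs/guide.txt'], 'docs') -> True
    # has_file_under(['a.txt', 'docs/guide.txt'], 'src')  -> False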
@@ -1,3179 +1,3179 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, re, sys, urllib
10 import os, re, sys, urllib
11 import hg, util, revlog, bundlerepo, extensions
11 import hg, util, revlog, bundlerepo, extensions
12 import difflib, patch, time, help, mdiff, tempfile
12 import difflib, patch, time, help, mdiff, tempfile
13 import errno, version, socket
13 import errno, version, socket
14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15
15
16 # Commands start here, listed alphabetically
16 # Commands start here, listed alphabetically
17
17
18 def add(ui, repo, *pats, **opts):
18 def add(ui, repo, *pats, **opts):
19 """add the specified files on the next commit
19 """add the specified files on the next commit
20
20
21 Schedule files to be version controlled and added to the repository.
21 Schedule files to be version controlled and added to the repository.
22
22
23 The files will be added to the repository at the next commit. To
23 The files will be added to the repository at the next commit. To
24 undo an add before that, see hg revert.
24 undo an add before that, see hg revert.
25
25
26 If no names are given, add all files in the repository.
26 If no names are given, add all files in the repository.
27 """
27 """
28
28
29 rejected = None
29 rejected = None
30 exacts = {}
30 exacts = {}
31 names = []
31 names = []
32 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
32 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
33 badmatch=util.always):
33 badmatch=util.always):
34 if exact:
34 if exact:
35 if ui.verbose:
35 if ui.verbose:
36 ui.status(_('adding %s\n') % rel)
36 ui.status(_('adding %s\n') % rel)
37 names.append(abs)
37 names.append(abs)
38 exacts[abs] = 1
38 exacts[abs] = 1
39 elif abs not in repo.dirstate:
39 elif abs not in repo.dirstate:
40 ui.status(_('adding %s\n') % rel)
40 ui.status(_('adding %s\n') % rel)
41 names.append(abs)
41 names.append(abs)
42 if not opts.get('dry_run'):
42 if not opts.get('dry_run'):
43 rejected = repo.add(names)
43 rejected = repo.add(names)
44 rejected = [p for p in rejected if p in exacts]
44 rejected = [p for p in rejected if p in exacts]
45 return rejected and 1 or 0
45 return rejected and 1 or 0
46
46
47 def addremove(ui, repo, *pats, **opts):
47 def addremove(ui, repo, *pats, **opts):
48 """add all new files, delete all missing files
48 """add all new files, delete all missing files
49
49
50 Add all new files and remove all missing files from the repository.
50 Add all new files and remove all missing files from the repository.
51
51
52 New files are ignored if they match any of the patterns in .hgignore. As
52 New files are ignored if they match any of the patterns in .hgignore. As
53 with add, these changes take effect at the next commit.
53 with add, these changes take effect at the next commit.
54
54
55 Use the -s option to detect renamed files. With a parameter > 0,
55 Use the -s option to detect renamed files. With a parameter > 0,
56 this compares every removed file with every added file and records
56 this compares every removed file with every added file and records
57 those similar enough as renames. This option takes a percentage
57 those similar enough as renames. This option takes a percentage
58 between 0 (disabled) and 100 (files must be identical) as its
58 between 0 (disabled) and 100 (files must be identical) as its
59 parameter. Detecting renamed files this way can be expensive.
59 parameter. Detecting renamed files this way can be expensive.
60 """
60 """
61 try:
61 try:
62 sim = float(opts.get('similarity') or 0)
62 sim = float(opts.get('similarity') or 0)
63 except ValueError:
63 except ValueError:
64 raise util.Abort(_('similarity must be a number'))
64 raise util.Abort(_('similarity must be a number'))
65 if sim < 0 or sim > 100:
65 if sim < 0 or sim > 100:
66 raise util.Abort(_('similarity must be between 0 and 100'))
66 raise util.Abort(_('similarity must be between 0 and 100'))
67 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
67 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
68
68
69 def annotate(ui, repo, *pats, **opts):
69 def annotate(ui, repo, *pats, **opts):
70 """show changeset information per file line
70 """show changeset information per file line
71
71
72 List changes in files, showing the revision id responsible for each line
72 List changes in files, showing the revision id responsible for each line
73
73
74 This command is useful to discover who did a change or when a change took
74 This command is useful to discover who did a change or when a change took
75 place.
75 place.
76
76
77 Without the -a option, annotate will avoid processing files it
77 Without the -a option, annotate will avoid processing files it
78 detects as binary. With -a, annotate will generate an annotation
78 detects as binary. With -a, annotate will generate an annotation
79 anyway, probably with undesirable results.
79 anyway, probably with undesirable results.
80 """
80 """
81 datefunc = ui.quiet and util.shortdate or util.datestr
81 datefunc = ui.quiet and util.shortdate or util.datestr
82 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
82 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
83
83
84 if not pats:
84 if not pats:
85 raise util.Abort(_('at least one file name or pattern required'))
85 raise util.Abort(_('at least one file name or pattern required'))
86
86
87 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
87 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
88 ('number', lambda x: str(x[0].rev())),
88 ('number', lambda x: str(x[0].rev())),
89 ('changeset', lambda x: short(x[0].node())),
89 ('changeset', lambda x: short(x[0].node())),
90 ('date', getdate),
90 ('date', getdate),
91 ('follow', lambda x: x[0].path()),
91 ('follow', lambda x: x[0].path()),
92 ]
92 ]
93
93
94 if (not opts['user'] and not opts['changeset'] and not opts['date']
94 if (not opts['user'] and not opts['changeset'] and not opts['date']
95 and not opts['follow']):
95 and not opts['follow']):
96 opts['number'] = 1
96 opts['number'] = 1
97
97
98 linenumber = opts.get('line_number') is not None
98 linenumber = opts.get('line_number') is not None
99 if (linenumber and (not opts['changeset']) and (not opts['number'])):
99 if (linenumber and (not opts['changeset']) and (not opts['number'])):
100 raise util.Abort(_('at least one of -n/-c is required for -l'))
100 raise util.Abort(_('at least one of -n/-c is required for -l'))
101
101
102 funcmap = [func for op, func in opmap if opts.get(op)]
102 funcmap = [func for op, func in opmap if opts.get(op)]
103 if linenumber:
103 if linenumber:
104 lastfunc = funcmap[-1]
104 lastfunc = funcmap[-1]
105 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
105 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
106
106
107 ctx = repo.changectx(opts['rev'])
107 ctx = repo.changectx(opts['rev'])
108
108
109 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
109 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
110 node=ctx.node()):
110 node=ctx.node()):
111 fctx = ctx.filectx(abs)
111 fctx = ctx.filectx(abs)
112 if not opts['text'] and util.binary(fctx.data()):
112 if not opts['text'] and util.binary(fctx.data()):
113 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
113 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
114 continue
114 continue
115
115
116 lines = fctx.annotate(follow=opts.get('follow'),
116 lines = fctx.annotate(follow=opts.get('follow'),
117 linenumber=linenumber)
117 linenumber=linenumber)
118 pieces = []
118 pieces = []
119
119
120 for f in funcmap:
120 for f in funcmap:
121 l = [f(n) for n, dummy in lines]
121 l = [f(n) for n, dummy in lines]
122 if l:
122 if l:
123 m = max(map(len, l))
123 m = max(map(len, l))
124 pieces.append(["%*s" % (m, x) for x in l])
124 pieces.append(["%*s" % (m, x) for x in l])
125
125
126 if pieces:
126 if pieces:
127 for p, l in zip(zip(*pieces), lines):
127 for p, l in zip(zip(*pieces), lines):
128 ui.write("%s: %s" % (" ".join(p), l[1]))
128 ui.write("%s: %s" % (" ".join(p), l[1]))
129
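annotate right-aligns each requested metadata column to the width of its longest entry before joining the columns line by line. The alignment step in isolation, on plain lists of strings:

    def align_columns(columns):
        # Right-justify every column to its widest entry, then join row-wise.
        padded = []
        for col in columns:
            width = max(len(x) for x in col)
            padded.append(["%*s" % (width, x) for x in col])
        return [" ".join(row) for row in zip(*padded)]

    # align_columns([['3', '12'], ['mpm', 'joe']]) -> [' 3 mpm', '12 joe']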
129
130 def archive(ui, repo, dest, **opts):
130 def archive(ui, repo, dest, **opts):
131 '''create unversioned archive of a repository revision
131 '''create unversioned archive of a repository revision
132
132
133 By default, the revision used is the parent of the working
133 By default, the revision used is the parent of the working
134 directory; use "-r" to specify a different revision.
134 directory; use "-r" to specify a different revision.
135
135
136 To specify the type of archive to create, use "-t". Valid
136 To specify the type of archive to create, use "-t". Valid
137 types are:
137 types are:
138
138
139 "files" (default): a directory full of files
139 "files" (default): a directory full of files
140 "tar": tar archive, uncompressed
140 "tar": tar archive, uncompressed
141 "tbz2": tar archive, compressed using bzip2
141 "tbz2": tar archive, compressed using bzip2
142 "tgz": tar archive, compressed using gzip
142 "tgz": tar archive, compressed using gzip
143 "uzip": zip archive, uncompressed
143 "uzip": zip archive, uncompressed
144 "zip": zip archive, compressed using deflate
144 "zip": zip archive, compressed using deflate
145
145
146 The exact name of the destination archive or directory is given
146 The exact name of the destination archive or directory is given
147 using a format string; see "hg help export" for details.
147 using a format string; see "hg help export" for details.
148
148
149 Each member added to an archive file has a directory prefix
149 Each member added to an archive file has a directory prefix
150 prepended. Use "-p" to specify a format string for the prefix.
150 prepended. Use "-p" to specify a format string for the prefix.
151 The default is the basename of the archive, with suffixes removed.
151 The default is the basename of the archive, with suffixes removed.
152 '''
152 '''
153
153
154 ctx = repo.changectx(opts['rev'])
154 ctx = repo.changectx(opts['rev'])
155 if not ctx:
155 if not ctx:
156 raise util.Abort(_('repository has no revisions'))
156 raise util.Abort(_('repository has no revisions'))
157 node = ctx.node()
157 node = ctx.node()
158 dest = cmdutil.make_filename(repo, dest, node)
158 dest = cmdutil.make_filename(repo, dest, node)
159 if os.path.realpath(dest) == repo.root:
159 if os.path.realpath(dest) == repo.root:
160 raise util.Abort(_('repository root cannot be destination'))
160 raise util.Abort(_('repository root cannot be destination'))
161 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
161 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
162 kind = opts.get('type') or 'files'
162 kind = opts.get('type') or 'files'
163 prefix = opts['prefix']
163 prefix = opts['prefix']
164 if dest == '-':
164 if dest == '-':
165 if kind == 'files':
165 if kind == 'files':
166 raise util.Abort(_('cannot archive plain files to stdout'))
166 raise util.Abort(_('cannot archive plain files to stdout'))
167 dest = sys.stdout
167 dest = sys.stdout
168 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
168 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
169 prefix = cmdutil.make_filename(repo, prefix, node)
169 prefix = cmdutil.make_filename(repo, prefix, node)
170 archival.archive(repo, dest, node, kind, not opts['no_decode'],
170 archival.archive(repo, dest, node, kind, not opts['no_decode'],
171 matchfn, prefix)
171 matchfn, prefix)
172
172
173 def backout(ui, repo, node=None, rev=None, **opts):
173 def backout(ui, repo, node=None, rev=None, **opts):
174 '''reverse effect of earlier changeset
174 '''reverse effect of earlier changeset
175
175
176 Commit the backed out changes as a new changeset. The new
176 Commit the backed out changes as a new changeset. The new
177 changeset is a child of the backed out changeset.
177 changeset is a child of the backed out changeset.
178
178
179 If you back out a changeset other than the tip, a new head is
179 If you back out a changeset other than the tip, a new head is
180 created. This head will be the new tip and you should merge this
180 created. This head will be the new tip and you should merge this
181 backout changeset with another head (current one by default).
181 backout changeset with another head (current one by default).
182
182
183 The --merge option remembers the parent of the working directory
183 The --merge option remembers the parent of the working directory
184 before starting the backout, then merges the new head with that
184 before starting the backout, then merges the new head with that
185 changeset afterwards. This saves you from doing the merge by
185 changeset afterwards. This saves you from doing the merge by
186 hand. The result of this merge is not committed, as for a normal
186 hand. The result of this merge is not committed, as for a normal
187 merge.
187 merge.
188
188
189 See 'hg help dates' for a list of formats valid for -d/--date.
189 See 'hg help dates' for a list of formats valid for -d/--date.
190 '''
190 '''
191 if rev and node:
191 if rev and node:
192 raise util.Abort(_("please specify just one revision"))
192 raise util.Abort(_("please specify just one revision"))
193
193
194 if not rev:
194 if not rev:
195 rev = node
195 rev = node
196
196
197 if not rev:
197 if not rev:
198 raise util.Abort(_("please specify a revision to backout"))
198 raise util.Abort(_("please specify a revision to backout"))
199
199
200 date = opts.get('date')
200 date = opts.get('date')
201 if date:
201 if date:
202 opts['date'] = util.parsedate(date)
202 opts['date'] = util.parsedate(date)
203
203
204 cmdutil.bail_if_changed(repo)
204 cmdutil.bail_if_changed(repo)
205 node = repo.lookup(rev)
205 node = repo.lookup(rev)
206
206
207 op1, op2 = repo.dirstate.parents()
207 op1, op2 = repo.dirstate.parents()
208 a = repo.changelog.ancestor(op1, node)
208 a = repo.changelog.ancestor(op1, node)
209 if a != node:
209 if a != node:
210 raise util.Abort(_('cannot back out change on a different branch'))
210 raise util.Abort(_('cannot back out change on a different branch'))
211
211
212 p1, p2 = repo.changelog.parents(node)
212 p1, p2 = repo.changelog.parents(node)
213 if p1 == nullid:
213 if p1 == nullid:
214 raise util.Abort(_('cannot back out a change with no parents'))
214 raise util.Abort(_('cannot back out a change with no parents'))
215 if p2 != nullid:
215 if p2 != nullid:
216 if not opts['parent']:
216 if not opts['parent']:
217 raise util.Abort(_('cannot back out a merge changeset without '
217 raise util.Abort(_('cannot back out a merge changeset without '
218 '--parent'))
218 '--parent'))
219 p = repo.lookup(opts['parent'])
219 p = repo.lookup(opts['parent'])
220 if p not in (p1, p2):
220 if p not in (p1, p2):
221 raise util.Abort(_('%s is not a parent of %s') %
221 raise util.Abort(_('%s is not a parent of %s') %
222 (short(p), short(node)))
222 (short(p), short(node)))
223 parent = p
223 parent = p
224 else:
224 else:
225 if opts['parent']:
225 if opts['parent']:
226 raise util.Abort(_('cannot use --parent on non-merge changeset'))
226 raise util.Abort(_('cannot use --parent on non-merge changeset'))
227 parent = p1
227 parent = p1
228
228
229 hg.clean(repo, node, show_stats=False)
229 hg.clean(repo, node, show_stats=False)
230 revert_opts = opts.copy()
230 revert_opts = opts.copy()
231 revert_opts['date'] = None
231 revert_opts['date'] = None
232 revert_opts['all'] = True
232 revert_opts['all'] = True
233 revert_opts['rev'] = hex(parent)
233 revert_opts['rev'] = hex(parent)
234 revert_opts['no_backup'] = None
234 revert_opts['no_backup'] = None
235 revert(ui, repo, **revert_opts)
235 revert(ui, repo, **revert_opts)
236 commit_opts = opts.copy()
236 commit_opts = opts.copy()
237 commit_opts['addremove'] = False
237 commit_opts['addremove'] = False
238 if not commit_opts['message'] and not commit_opts['logfile']:
238 if not commit_opts['message'] and not commit_opts['logfile']:
239 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
239 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
240 commit_opts['force_editor'] = True
240 commit_opts['force_editor'] = True
241 commit(ui, repo, **commit_opts)
241 commit(ui, repo, **commit_opts)
242 def nice(node):
242 def nice(node):
243 return '%d:%s' % (repo.changelog.rev(node), short(node))
243 return '%d:%s' % (repo.changelog.rev(node), short(node))
244 ui.status(_('changeset %s backs out changeset %s\n') %
244 ui.status(_('changeset %s backs out changeset %s\n') %
245 (nice(repo.changelog.tip()), nice(node)))
245 (nice(repo.changelog.tip()), nice(node)))
246 if op1 != node:
246 if op1 != node:
247 hg.clean(repo, op1, show_stats=False)
247 hg.clean(repo, op1, show_stats=False)
248 if opts['merge']:
248 if opts['merge']:
249 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
249 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
250 hg.merge(repo, hex(repo.changelog.tip()))
250 hg.merge(repo, hex(repo.changelog.tip()))
251 else:
251 else:
252 ui.status(_('the backout changeset is a new head - '
252 ui.status(_('the backout changeset is a new head - '
253 'do not forget to merge\n'))
253 'do not forget to merge\n'))
254 ui.status(_('(use "backout --merge" '
254 ui.status(_('(use "backout --merge" '
255 'if you want to auto-merge)\n'))
255 'if you want to auto-merge)\n'))
256
256
257 def bisect(ui, repo, rev=None, extra=None,
257 def bisect(ui, repo, rev=None, extra=None,
258 reset=None, good=None, bad=None, skip=None, noupdate=None):
258 reset=None, good=None, bad=None, skip=None, noupdate=None):
259 """subdivision search of changesets
259 """subdivision search of changesets
260
260
261 This command helps to find changesets which introduce problems.
261 This command helps to find changesets which introduce problems.
262 To use, mark the earliest changeset you know exhibits the problem
262 To use, mark the earliest changeset you know exhibits the problem
263 as bad, then mark the latest changeset which is free from the
263 as bad, then mark the latest changeset which is free from the
264 problem as good. Bisect will update your working directory to a
264 problem as good. Bisect will update your working directory to a
265 revision for testing. Once you have performed tests, mark the
265 revision for testing. Once you have performed tests, mark the
266 working directory as bad or good and bisect will either update to
266 working directory as bad or good and bisect will either update to
267 another candidate changeset or announce that it has found the bad
267 another candidate changeset or announce that it has found the bad
268 revision.
268 revision.
269 """
269 """
270 # backward compatibility
270 # backward compatibility
271 if rev in "good bad reset init".split():
271 if rev in "good bad reset init".split():
272 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
272 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
273 cmd, rev, extra = rev, extra, None
273 cmd, rev, extra = rev, extra, None
274 if cmd == "good":
274 if cmd == "good":
275 good = True
275 good = True
276 elif cmd == "bad":
276 elif cmd == "bad":
277 bad = True
277 bad = True
278 else:
278 else:
279 reset = True
279 reset = True
280 elif extra or good + bad + skip + reset > 1:
280 elif extra or good + bad + skip + reset > 1:
281 raise util.Abort("Incompatible arguments")
281 raise util.Abort("Incompatible arguments")
282
282
283 if reset:
283 if reset:
284 p = repo.join("bisect.state")
284 p = repo.join("bisect.state")
285 if os.path.exists(p):
285 if os.path.exists(p):
286 os.unlink(p)
286 os.unlink(p)
287 return
287 return
288
288
289 # load state
289 # load state
290 state = {'good': [], 'bad': [], 'skip': []}
290 state = {'good': [], 'bad': [], 'skip': []}
291 if os.path.exists(repo.join("bisect.state")):
291 if os.path.exists(repo.join("bisect.state")):
292 for l in repo.opener("bisect.state"):
292 for l in repo.opener("bisect.state"):
293 kind, node = l[:-1].split()
293 kind, node = l[:-1].split()
294 node = repo.lookup(node)
294 node = repo.lookup(node)
295 if kind not in state:
295 if kind not in state:
296 raise util.Abort(_("unknown bisect kind %s") % kind)
296 raise util.Abort(_("unknown bisect kind %s") % kind)
297 state[kind].append(node)
297 state[kind].append(node)
298
298
299 # update state
299 # update state
300 node = repo.lookup(rev or '.')
300 node = repo.lookup(rev or '.')
301 if good:
301 if good:
302 state['good'].append(node)
302 state['good'].append(node)
303 elif bad:
303 elif bad:
304 state['bad'].append(node)
304 state['bad'].append(node)
305 elif skip:
305 elif skip:
306 state['skip'].append(node)
306 state['skip'].append(node)
307
307
308 # save state
308 # save state
309 f = repo.opener("bisect.state", "w", atomictemp=True)
309 f = repo.opener("bisect.state", "w", atomictemp=True)
310 wlock = repo.wlock()
310 wlock = repo.wlock()
311 try:
311 try:
312 for kind in state:
312 for kind in state:
313 for node in state[kind]:
313 for node in state[kind]:
314 f.write("%s %s\n" % (kind, hg.hex(node)))
314 f.write("%s %s\n" % (kind, hg.hex(node)))
315 f.rename()
315 f.rename()
316 finally:
316 finally:
317 del wlock
317 del wlock
318
318
319 if not state['good'] or not state['bad']:
319 if not state['good'] or not state['bad']:
320 return
320 return
321
321
322 # actually bisect
322 # actually bisect
323 node, changesets, good = hbisect.bisect(repo.changelog, state)
323 node, changesets, good = hbisect.bisect(repo.changelog, state)
324 if changesets == 0:
324 if changesets == 0:
325 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
325 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
326 displayer = cmdutil.show_changeset(ui, repo, {})
326 displayer = cmdutil.show_changeset(ui, repo, {})
327 displayer.show(changenode=node)
327 displayer.show(changenode=node)
328 elif node is not None:
328 elif node is not None:
329 # compute the approximate number of remaining tests
329 # compute the approximate number of remaining tests
330 tests, size = 0, 2
330 tests, size = 0, 2
331 while size <= changesets:
331 while size <= changesets:
332 tests, size = tests + 1, size * 2
332 tests, size = tests + 1, size * 2
333 rev = repo.changelog.rev(node)
333 rev = repo.changelog.rev(node)
334 ui.write(_("Testing changeset %s:%s "
334 ui.write(_("Testing changeset %s:%s "
335 "(%s changesets remaining, ~%s tests)\n")
335 "(%s changesets remaining, ~%s tests)\n")
336 % (rev, hg.short(node), changesets, tests))
336 % (rev, hg.short(node), changesets, tests))
337 if not noupdate:
337 if not noupdate:
338 cmdutil.bail_if_changed(repo)
338 cmdutil.bail_if_changed(repo)
339 return hg.clean(repo, node)
339 return hg.clean(repo, node)
340
340
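The "~N tests" figure that bisect prints is just the number of halvings needed before the remaining candidate set would shrink to a single changeset, i.e. roughly log2 of the candidate count. The same estimate in isolation:

    def remaining_tests(changesets):
        # How many more good/bad verdicts bisect expects to need before it
        # can name the culprit: the number of doublings of 2 that stay
        # within the candidate count.
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        return tests

    # remaining_tests(1) -> 0, remaining_tests(8) -> 3, remaining_tests(100) -> 6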
341 def branch(ui, repo, label=None, **opts):
341 def branch(ui, repo, label=None, **opts):
342 """set or show the current branch name
342 """set or show the current branch name
343
343
344 With no argument, show the current branch name. With one argument,
344 With no argument, show the current branch name. With one argument,
345 set the working directory branch name (the branch does not exist in
345 set the working directory branch name (the branch does not exist in
346 the repository until the next commit).
346 the repository until the next commit).
347
347
348 Unless --force is specified, branch will not let you set a
348 Unless --force is specified, branch will not let you set a
349 branch name that shadows an existing branch.
349 branch name that shadows an existing branch.
350
350
351 Use the command 'hg update' to switch to an existing branch.
351 Use the command 'hg update' to switch to an existing branch.
352 """
352 """
353
353
354 if label:
354 if label:
355 if not opts.get('force') and label in repo.branchtags():
355 if not opts.get('force') and label in repo.branchtags():
356 if label not in [p.branch() for p in repo.workingctx().parents()]:
356 if label not in [p.branch() for p in repo.workingctx().parents()]:
357 raise util.Abort(_('a branch of the same name already exists'
357 raise util.Abort(_('a branch of the same name already exists'
358 ' (use --force to override)'))
358 ' (use --force to override)'))
359 repo.dirstate.setbranch(util.fromlocal(label))
359 repo.dirstate.setbranch(util.fromlocal(label))
360 ui.status(_('marked working directory as branch %s\n') % label)
360 ui.status(_('marked working directory as branch %s\n') % label)
361 else:
361 else:
362 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
362 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
363
363
364 def branches(ui, repo, active=False):
364 def branches(ui, repo, active=False):
365 """list repository named branches
365 """list repository named branches
366
366
367 List the repository's named branches, indicating which ones are
367 List the repository's named branches, indicating which ones are
368 inactive. If active is specified, only show active branches.
368 inactive. If active is specified, only show active branches.
369
369
370 A branch is considered active if it contains unmerged heads.
370 A branch is considered active if it contains unmerged heads.
371
371
372 Use the command 'hg update' to switch to an existing branch.
372 Use the command 'hg update' to switch to an existing branch.
373 """
373 """
374 b = repo.branchtags()
374 b = repo.branchtags()
375 heads = dict.fromkeys(repo.heads(), 1)
375 heads = dict.fromkeys(repo.heads(), 1)
376 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
376 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
377 l.sort()
377 l.sort()
378 l.reverse()
378 l.reverse()
379 for ishead, r, n, t in l:
379 for ishead, r, n, t in l:
380 if active and not ishead:
380 if active and not ishead:
381 # If we're only displaying active branches, abort the loop on
381 # If we're only displaying active branches, abort the loop on
382 # encountering the first inactive head
382 # encountering the first inactive head
383 break
383 break
384 else:
384 else:
385 hexfunc = ui.debugflag and hex or short
385 hexfunc = ui.debugflag and hex or short
386 if ui.quiet:
386 if ui.quiet:
387 ui.write("%s\n" % t)
387 ui.write("%s\n" % t)
388 else:
388 else:
389 spaces = " " * (30 - util.locallen(t))
389 spaces = " " * (30 - util.locallen(t))
390 # The code only gets here if inactive branches are being
390 # The code only gets here if inactive branches are being
391 # displayed or the branch is active.
391 # displayed or the branch is active.
392 isinactive = ((not ishead) and " (inactive)") or ''
392 isinactive = ((not ishead) and " (inactive)") or ''
393 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
393 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
394
394
395 def bundle(ui, repo, fname, dest=None, **opts):
395 def bundle(ui, repo, fname, dest=None, **opts):
396 """create a changegroup file
396 """create a changegroup file
397
397
398 Generate a compressed changegroup file collecting changesets not
398 Generate a compressed changegroup file collecting changesets not
399 found in the other repository.
399 found in the other repository.
400
400
401 If no destination repository is specified the destination is
401 If no destination repository is specified the destination is
402 assumed to have all the nodes specified by one or more --base
402 assumed to have all the nodes specified by one or more --base
403 parameters. To create a bundle containing all changesets, use
403 parameters. To create a bundle containing all changesets, use
404 --all (or --base null).
404 --all (or --base null).
405
405
406 The bundle file can then be transferred using conventional means and
406 The bundle file can then be transferred using conventional means and
407 applied to another repository with the unbundle or pull command.
407 applied to another repository with the unbundle or pull command.
408 This is useful when direct push and pull are not available or when
408 This is useful when direct push and pull are not available or when
409 exporting an entire repository is undesirable.
409 exporting an entire repository is undesirable.
410
410
411 Applying bundles preserves all changeset contents including
411 Applying bundles preserves all changeset contents including
412 permissions, copy/rename information, and revision history.
412 permissions, copy/rename information, and revision history.
413 """
413 """
414 revs = opts.get('rev') or None
414 revs = opts.get('rev') or None
415 if revs:
415 if revs:
416 revs = [repo.lookup(rev) for rev in revs]
416 revs = [repo.lookup(rev) for rev in revs]
417 if opts.get('all'):
417 if opts.get('all'):
418 base = ['null']
418 base = ['null']
419 else:
419 else:
420 base = opts.get('base')
420 base = opts.get('base')
421 if base:
421 if base:
422 if dest:
422 if dest:
423 raise util.Abort(_("--base is incompatible with specifying "
423 raise util.Abort(_("--base is incompatible with specifying "
424 "a destination"))
424 "a destination"))
425 base = [repo.lookup(rev) for rev in base]
425 base = [repo.lookup(rev) for rev in base]
426 # create the right base
426 # create the right base
427 # XXX: nodesbetween / changegroup* should be "fixed" instead
427 # XXX: nodesbetween / changegroup* should be "fixed" instead
428 o = []
428 o = []
429 has = {nullid: None}
429 has = {nullid: None}
430 for n in base:
430 for n in base:
431 has.update(repo.changelog.reachable(n))
431 has.update(repo.changelog.reachable(n))
432 if revs:
432 if revs:
433 visit = list(revs)
433 visit = list(revs)
434 else:
434 else:
435 visit = repo.changelog.heads()
435 visit = repo.changelog.heads()
436 seen = {}
436 seen = {}
437 while visit:
437 while visit:
438 n = visit.pop(0)
438 n = visit.pop(0)
439 parents = [p for p in repo.changelog.parents(n) if p not in has]
439 parents = [p for p in repo.changelog.parents(n) if p not in has]
440 if len(parents) == 0:
440 if len(parents) == 0:
441 o.insert(0, n)
441 o.insert(0, n)
442 else:
442 else:
443 for p in parents:
443 for p in parents:
444 if p not in seen:
444 if p not in seen:
445 seen[p] = 1
445 seen[p] = 1
446 visit.append(p)
446 visit.append(p)
447 else:
447 else:
448 cmdutil.setremoteconfig(ui, opts)
448 cmdutil.setremoteconfig(ui, opts)
449 dest, revs, checkout = hg.parseurl(
449 dest, revs, checkout = hg.parseurl(
450 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
450 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
451 other = hg.repository(ui, dest)
451 other = hg.repository(ui, dest)
452 o = repo.findoutgoing(other, force=opts['force'])
452 o = repo.findoutgoing(other, force=opts['force'])
453
453
454 if revs:
454 if revs:
455 cg = repo.changegroupsubset(o, revs, 'bundle')
455 cg = repo.changegroupsubset(o, revs, 'bundle')
456 else:
456 else:
457 cg = repo.changegroup(o, 'bundle')
457 cg = repo.changegroup(o, 'bundle')
458 changegroup.writebundle(cg, fname, "HG10BZ")
458 changegroup.writebundle(cg, fname, "HG10BZ")
459
459
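When --base is given, bundle() works out the roots of the outgoing changegroup by walking back from the requested heads until every parent is already reachable from a base node. A simplified sketch of that walk over a plain parent map (the dictionary graph is a stand-in for the changelog):

    def find_roots(heads, parentmap, has):
        # Nodes whose parents are all in 'has' become roots of the bundle;
        # otherwise keep walking back through the missing parents.
        roots, seen, visit = [], set(), list(heads)
        while visit:
            n = visit.pop(0)
            missing = [p for p in parentmap.get(n, []) if p not in has]
            if not missing:
                roots.insert(0, n)
            else:
                for p in missing:
                    if p not in seen:
                        seen.add(p)
                        visit.append(p)
        return roots

    # find_roots(['c'], {'c': ['b'], 'b': ['a'], 'a': []}, set(['a'])) -> ['b']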
460 def cat(ui, repo, file1, *pats, **opts):
460 def cat(ui, repo, file1, *pats, **opts):
461 """output the current or given revision of files
461 """output the current or given revision of files
462
462
463 Print the specified files as they were at the given revision.
463 Print the specified files as they were at the given revision.
464 If no revision is given, the parent of the working directory is used,
464 If no revision is given, the parent of the working directory is used,
465 or tip if no revision is checked out.
465 or tip if no revision is checked out.
466
466
467 Output may be to a file, in which case the name of the file is
467 Output may be to a file, in which case the name of the file is
468 given using a format string. The formatting rules are the same as
468 given using a format string. The formatting rules are the same as
469 for the export command, with the following additions:
469 for the export command, with the following additions:
470
470
471 %s basename of file being printed
471 %s basename of file being printed
472 %d dirname of file being printed, or '.' if in repo root
472 %d dirname of file being printed, or '.' if in repo root
473 %p root-relative path name of file being printed
473 %p root-relative path name of file being printed
474 """
474 """
475 ctx = repo.changectx(opts['rev'])
475 ctx = repo.changectx(opts['rev'])
476 err = 1
476 err = 1
477 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
477 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
478 ctx.node()):
478 ctx.node()):
479 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
479 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
480 data = ctx.filectx(abs).data()
480 data = ctx.filectx(abs).data()
481 if opts.get('decode'):
481 if opts.get('decode'):
482 data = repo.wwritedata(abs, data)
482 data = repo.wwritedata(abs, data)
483 fp.write(data)
483 fp.write(data)
484 err = 0
484 err = 0
485 return err
485 return err
486
486
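cat's docstring above lists three extra placeholders accepted in the --output format string. A rough sketch of how such a name could be expanded for one file (the real cmdutil.make_file supports more escapes; this only covers the three listed here):

    import os

    def expand_output_name(fmt, path):
        # Fill the %s / %d / %p placeholders documented for 'hg cat'.
        out = fmt.replace('%p', path)
        out = out.replace('%d', os.path.dirname(path) or '.')
        out = out.replace('%s', os.path.basename(path))
        return out

    # expand_output_name('out/%d/%s', 'docs/readme.txt') -> 'out/docs/readme.txt'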
487 def clone(ui, source, dest=None, **opts):
487 def clone(ui, source, dest=None, **opts):
488 """make a copy of an existing repository
488 """make a copy of an existing repository
489
489
490 Create a copy of an existing repository in a new directory.
490 Create a copy of an existing repository in a new directory.
491
491
492 If no destination directory name is specified, it defaults to the
492 If no destination directory name is specified, it defaults to the
493 basename of the source.
493 basename of the source.
494
494
495 The location of the source is added to the new repository's
495 The location of the source is added to the new repository's
496 .hg/hgrc file, as the default to be used for future pulls.
496 .hg/hgrc file, as the default to be used for future pulls.
497
497
498 For efficiency, hardlinks are used for cloning whenever the source
498 For efficiency, hardlinks are used for cloning whenever the source
499 and destination are on the same filesystem (note this applies only
499 and destination are on the same filesystem (note this applies only
500 to the repository data, not to the checked out files). Some
500 to the repository data, not to the checked out files). Some
501 filesystems, such as AFS, implement hardlinking incorrectly, but
501 filesystems, such as AFS, implement hardlinking incorrectly, but
502 do not report errors. In these cases, use the --pull option to
502 do not report errors. In these cases, use the --pull option to
503 avoid hardlinking.
503 avoid hardlinking.
504
504
505 You can safely clone repositories and checked out files using full
505 You can safely clone repositories and checked out files using full
506 hardlinks with
506 hardlinks with
507
507
508 $ cp -al REPO REPOCLONE
508 $ cp -al REPO REPOCLONE
509
509
510 which is the fastest way to clone. However, the operation is not
510 which is the fastest way to clone. However, the operation is not
511 atomic (making sure REPO is not modified during the operation is
511 atomic (making sure REPO is not modified during the operation is
512 up to you) and you have to make sure your editor breaks hardlinks
512 up to you) and you have to make sure your editor breaks hardlinks
513 (Emacs and most Linux Kernel tools do so).
513 (Emacs and most Linux Kernel tools do so).
514
514
515 If you use the -r option to clone up to a specific revision, no
515 If you use the -r option to clone up to a specific revision, no
516 subsequent revisions will be present in the cloned repository.
516 subsequent revisions will be present in the cloned repository.
517 This option implies --pull, even on local repositories.
517 This option implies --pull, even on local repositories.
518
518
519 See pull for valid source format details.
519 See pull for valid source format details.
520
520
521 It is possible to specify an ssh:// URL as the destination, but no
521 It is possible to specify an ssh:// URL as the destination, but no
522 .hg/hgrc and working directory will be created on the remote side.
522 .hg/hgrc and working directory will be created on the remote side.
523 Look at the help text for the pull command for important details
523 Look at the help text for the pull command for important details
524 about ssh:// URLs.
524 about ssh:// URLs.
525 """
525 """
526 cmdutil.setremoteconfig(ui, opts)
526 cmdutil.setremoteconfig(ui, opts)
527 hg.clone(ui, source, dest,
527 hg.clone(ui, source, dest,
528 pull=opts['pull'],
528 pull=opts['pull'],
529 stream=opts['uncompressed'],
529 stream=opts['uncompressed'],
530 rev=opts['rev'],
530 rev=opts['rev'],
531 update=not opts['noupdate'])
531 update=not opts['noupdate'])
532
532
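# --- Illustrative sketch (not part of this changeset): the idea behind
# --- hardlink-based cloning on a single filesystem, similar in spirit to
# --- `cp -al REPO REPOCLONE` above -- directories are recreated and each
# --- file is os.link()ed instead of copied.
def _hardlink_tree(src, dst):
    import os
    for root, dirs, files in os.walk(src):
        rel = root[len(src):].lstrip(os.sep)
        target = rel and os.path.join(dst, rel) or dst
        if not os.path.isdir(target):
            os.makedirs(target)
        for name in files:
            os.link(os.path.join(root, name), os.path.join(target, name))
# Like `cp -al`, this is not atomic: the source must not change during the
# walk, and editors must break hardlinks when the clone is modified.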
533 def commit(ui, repo, *pats, **opts):
533 def commit(ui, repo, *pats, **opts):
534 """commit the specified files or all outstanding changes
534 """commit the specified files or all outstanding changes
535
535
536 Commit changes to the given files into the repository.
536 Commit changes to the given files into the repository.
537
537
538 If a list of files is omitted, all changes reported by "hg status"
538 If a list of files is omitted, all changes reported by "hg status"
539 will be committed.
539 will be committed.
540
540
541 If no commit message is specified, the configured editor is started to
541 If no commit message is specified, the configured editor is started to
542 enter a message.
542 enter a message.
543
543
544 See 'hg help dates' for a list of formats valid for -d/--date.
544 See 'hg help dates' for a list of formats valid for -d/--date.
545 """
545 """
546 def commitfunc(ui, repo, files, message, match, opts):
546 def commitfunc(ui, repo, files, message, match, opts):
547 return repo.commit(files, message, opts['user'], opts['date'], match,
547 return repo.commit(files, message, opts['user'], opts['date'], match,
548 force_editor=opts.get('force_editor'))
548 force_editor=opts.get('force_editor'))
549 cmdutil.commit(ui, repo, commitfunc, pats, opts)
549 cmdutil.commit(ui, repo, commitfunc, pats, opts)
550
550
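# --- Illustrative sketch (not part of this changeset): the callback
# --- pattern used above, where commit() only supplies a small commitfunc
# --- closure and cmdutil.commit drives the rest. Both helpers below are
# --- hypothetical stand-ins.
def _generic_commit_driver(commitfunc, files, message):
    if not message:
        raise ValueError("empty commit message")
    return commitfunc(files, message)

def _make_commitfunc(user, date):
    def commitfunc(files, message):
        # the closure carries user/date so the driver stays generic
        return (files, message, user, date)
    return commitfunc
# _generic_commit_driver(_make_commitfunc("alice", "2008-03-11"),
#                        ["a.txt"], "fix typo")
# -> (['a.txt'], 'fix typo', 'alice', '2008-03-11')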
551 def copy(ui, repo, *pats, **opts):
551 def copy(ui, repo, *pats, **opts):
552 """mark files as copied for the next commit
552 """mark files as copied for the next commit
553
553
554 Mark dest as having copies of source files. If dest is a
554 Mark dest as having copies of source files. If dest is a
555 directory, copies are put in that directory. If dest is a file,
555 directory, copies are put in that directory. If dest is a file,
556 there can only be one source.
556 there can only be one source.
557
557
558 By default, this command copies the contents of files as they
558 By default, this command copies the contents of files as they
559 stand in the working directory. If invoked with --after, the
559 stand in the working directory. If invoked with --after, the
560 operation is recorded, but no copying is performed.
560 operation is recorded, but no copying is performed.
561
561
562 This command takes effect in the next commit. To undo a copy
562 This command takes effect in the next commit. To undo a copy
563 before that, see hg revert.
563 before that, see hg revert.
564 """
564 """
565 wlock = repo.wlock(False)
565 wlock = repo.wlock(False)
566 try:
566 try:
567 return cmdutil.copy(ui, repo, pats, opts)
567 return cmdutil.copy(ui, repo, pats, opts)
568 finally:
568 finally:
569 del wlock
569 del wlock
570
570
571 def debugancestor(ui, repo, *args):
571 def debugancestor(ui, repo, *args):
572 """find the ancestor revision of two revisions in a given index"""
572 """find the ancestor revision of two revisions in a given index"""
573 if len(args) == 3:
573 if len(args) == 3:
574 index, rev1, rev2 = args
574 index, rev1, rev2 = args
575 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
575 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
576 elif len(args) == 2:
576 elif len(args) == 2:
577 if not repo:
577 if not repo:
578 raise util.Abort(_("There is no Mercurial repository here "
578 raise util.Abort(_("There is no Mercurial repository here "
579 "(.hg not found)"))
579 "(.hg not found)"))
580 rev1, rev2 = args
580 rev1, rev2 = args
581 r = repo.changelog
581 r = repo.changelog
582 else:
582 else:
583 raise util.Abort(_('either two or three arguments required'))
583 raise util.Abort(_('either two or three arguments required'))
584 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
584 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
585 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
585 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
586
586
587 def debugcomplete(ui, cmd='', **opts):
587 def debugcomplete(ui, cmd='', **opts):
588 """returns the completion list associated with the given command"""
588 """returns the completion list associated with the given command"""
589
589
590 if opts['options']:
590 if opts['options']:
591 options = []
591 options = []
592 otables = [globalopts]
592 otables = [globalopts]
593 if cmd:
593 if cmd:
594 aliases, entry = cmdutil.findcmd(ui, cmd, table)
594 aliases, entry = cmdutil.findcmd(ui, cmd, table)
595 otables.append(entry[1])
595 otables.append(entry[1])
596 for t in otables:
596 for t in otables:
597 for o in t:
597 for o in t:
598 if o[0]:
598 if o[0]:
599 options.append('-%s' % o[0])
599 options.append('-%s' % o[0])
600 options.append('--%s' % o[1])
600 options.append('--%s' % o[1])
601 ui.write("%s\n" % "\n".join(options))
601 ui.write("%s\n" % "\n".join(options))
602 return
602 return
603
603
604 clist = cmdutil.findpossible(ui, cmd, table).keys()
604 clist = cmdutil.findpossible(ui, cmd, table).keys()
605 clist.sort()
605 clist.sort()
606 ui.write("%s\n" % "\n".join(clist))
606 ui.write("%s\n" % "\n".join(clist))
607
607
608 def debugfsinfo(ui, path = "."):
608 def debugfsinfo(ui, path = "."):
609 file('.debugfsinfo', 'w').write('')
609 file('.debugfsinfo', 'w').write('')
610 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
610 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
611 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
611 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
612 ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
612 ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
613 and 'yes' or 'no'))
613 and 'yes' or 'no'))
614 os.unlink('.debugfsinfo')
614 os.unlink('.debugfsinfo')
615
615
616 def debugrebuildstate(ui, repo, rev=""):
616 def debugrebuildstate(ui, repo, rev=""):
617 """rebuild the dirstate as it would look like for the given revision"""
617 """rebuild the dirstate as it would look like for the given revision"""
618 if rev == "":
618 if rev == "":
619 rev = repo.changelog.tip()
619 rev = repo.changelog.tip()
620 ctx = repo.changectx(rev)
620 ctx = repo.changectx(rev)
621 files = ctx.manifest()
621 files = ctx.manifest()
622 wlock = repo.wlock()
622 wlock = repo.wlock()
623 try:
623 try:
624 repo.dirstate.rebuild(rev, files)
624 repo.dirstate.rebuild(rev, files)
625 finally:
625 finally:
626 del wlock
626 del wlock
627
627
628 def debugcheckstate(ui, repo):
628 def debugcheckstate(ui, repo):
629 """validate the correctness of the current dirstate"""
629 """validate the correctness of the current dirstate"""
630 parent1, parent2 = repo.dirstate.parents()
630 parent1, parent2 = repo.dirstate.parents()
631 m1 = repo.changectx(parent1).manifest()
631 m1 = repo.changectx(parent1).manifest()
632 m2 = repo.changectx(parent2).manifest()
632 m2 = repo.changectx(parent2).manifest()
633 errors = 0
633 errors = 0
634 for f in repo.dirstate:
634 for f in repo.dirstate:
635 state = repo.dirstate[f]
635 state = repo.dirstate[f]
636 if state in "nr" and f not in m1:
636 if state in "nr" and f not in m1:
637 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
637 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
638 errors += 1
638 errors += 1
639 if state in "a" and f in m1:
639 if state in "a" and f in m1:
640 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
640 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
641 errors += 1
641 errors += 1
642 if state in "m" and f not in m1 and f not in m2:
642 if state in "m" and f not in m1 and f not in m2:
643 ui.warn(_("%s in state %s, but not in either manifest\n") %
643 ui.warn(_("%s in state %s, but not in either manifest\n") %
644 (f, state))
644 (f, state))
645 errors += 1
645 errors += 1
646 for f in m1:
646 for f in m1:
647 state = repo.dirstate[f]
647 state = repo.dirstate[f]
648 if state not in "nrm":
648 if state not in "nrm":
649 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
649 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
650 errors += 1
650 errors += 1
651 if errors:
651 if errors:
652 error = _(".hg/dirstate inconsistent with current parent's manifest")
652 error = _(".hg/dirstate inconsistent with current parent's manifest")
653 raise util.Abort(error)
653 raise util.Abort(error)
654
654
655 def showconfig(ui, repo, *values, **opts):
655 def showconfig(ui, repo, *values, **opts):
656 """show combined config settings from all hgrc files
656 """show combined config settings from all hgrc files
657
657
658 With no args, print names and values of all config items.
658 With no args, print names and values of all config items.
659
659
660 With one arg of the form section.name, print just the value of
660 With one arg of the form section.name, print just the value of
661 that config item.
661 that config item.
662
662
663 With multiple args, print names and values of all config items
663 With multiple args, print names and values of all config items
664 with matching section names."""
664 with matching section names."""
665
665
666 untrusted = bool(opts.get('untrusted'))
666 untrusted = bool(opts.get('untrusted'))
667 if values:
667 if values:
668 if len([v for v in values if '.' in v]) > 1:
668 if len([v for v in values if '.' in v]) > 1:
669 raise util.Abort(_('only one config item permitted'))
669 raise util.Abort(_('only one config item permitted'))
670 for section, name, value in ui.walkconfig(untrusted=untrusted):
670 for section, name, value in ui.walkconfig(untrusted=untrusted):
671 sectname = section + '.' + name
671 sectname = section + '.' + name
672 if values:
672 if values:
673 for v in values:
673 for v in values:
674 if v == section:
674 if v == section:
675 ui.write('%s=%s\n' % (sectname, value))
675 ui.write('%s=%s\n' % (sectname, value))
676 elif v == sectname:
676 elif v == sectname:
677 ui.write(value, '\n')
677 ui.write(value, '\n')
678 else:
678 else:
679 ui.write('%s=%s\n' % (sectname, value))
679 ui.write('%s=%s\n' % (sectname, value))
680
680
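# --- Illustrative sketch (not part of this changeset): the matching rule
# --- used by showconfig above -- an argument may name a whole section
# --- ("ui") or a single item ("ui.username"). config_items stands in for
# --- ui.walkconfig() and is hypothetical.
def _filter_config(config_items, args):
    for section, name, value in config_items:
        sectname = section + '.' + name
        if not args or section in args or sectname in args:
            yield sectname, value
# list(_filter_config([('ui', 'username', 'alice')], ['ui']))
# -> [('ui.username', 'alice')]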
681 def debugsetparents(ui, repo, rev1, rev2=None):
681 def debugsetparents(ui, repo, rev1, rev2=None):
682 """manually set the parents of the current working directory
682 """manually set the parents of the current working directory
683
683
684 This is useful for writing repository conversion tools, but should
684 This is useful for writing repository conversion tools, but should
685 be used with care.
685 be used with care.
686 """
686 """
687
687
688 if not rev2:
688 if not rev2:
689 rev2 = hex(nullid)
689 rev2 = hex(nullid)
690
690
691 wlock = repo.wlock()
691 wlock = repo.wlock()
692 try:
692 try:
693 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
693 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
694 finally:
694 finally:
695 del wlock
695 del wlock
696
696
697 def debugstate(ui, repo):
697 def debugstate(ui, repo):
698 """show the contents of the current dirstate"""
698 """show the contents of the current dirstate"""
699 k = repo.dirstate._map.items()
699 k = repo.dirstate._map.items()
700 k.sort()
700 k.sort()
701 for file_, ent in k:
701 for file_, ent in k:
702 if ent[3] == -1:
702 if ent[3] == -1:
703 # Pad or slice to locale representation
703 # Pad or slice to locale representation
704 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
704 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
705 timestr = 'unset'
705 timestr = 'unset'
706 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
706 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
707 else:
707 else:
708 timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
708 timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
709 if ent[1] & 020000:
709 if ent[1] & 020000:
710 mode = 'lnk'
710 mode = 'lnk'
711 else:
711 else:
712 mode = '%3o' % (ent[1] & 0777)
712 mode = '%3o' % (ent[1] & 0777)
713 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
713 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
714 for f in repo.dirstate.copies():
714 for f in repo.dirstate.copies():
715 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
715 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
716
716
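# --- Illustrative sketch (not part of this changeset): what the raw mode
# --- test and time formatting in debugstate correspond to, written with
# --- the stat module (020000 is stat.S_IFLNK, 0777 the permission bits).
import stat, time

def _describe_entry(mode, mtime):
    if stat.S_ISLNK(mode):
        modestr = 'lnk'
    else:
        modestr = '%3o' % (mode & 0o777)
    return modestr, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(mtime))
# _describe_entry(0o100644, 0) -> ('644', '1970-01-01 ...') in UTC locales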
717 def debugdata(ui, file_, rev):
717 def debugdata(ui, file_, rev):
718 """dump the contents of a data file revision"""
718 """dump the contents of a data file revision"""
719 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
719 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
720 try:
720 try:
721 ui.write(r.revision(r.lookup(rev)))
721 ui.write(r.revision(r.lookup(rev)))
722 except KeyError:
722 except KeyError:
723 raise util.Abort(_('invalid revision identifier %s') % rev)
723 raise util.Abort(_('invalid revision identifier %s') % rev)
724
724
725 def debugdate(ui, date, range=None, **opts):
725 def debugdate(ui, date, range=None, **opts):
726 """parse and display a date"""
726 """parse and display a date"""
727 if opts["extended"]:
727 if opts["extended"]:
728 d = util.parsedate(date, util.extendeddateformats)
728 d = util.parsedate(date, util.extendeddateformats)
729 else:
729 else:
730 d = util.parsedate(date)
730 d = util.parsedate(date)
731 ui.write("internal: %s %s\n" % d)
731 ui.write("internal: %s %s\n" % d)
732 ui.write("standard: %s\n" % util.datestr(d))
732 ui.write("standard: %s\n" % util.datestr(d))
733 if range:
733 if range:
734 m = util.matchdate(range)
734 m = util.matchdate(range)
735 ui.write("match: %s\n" % m(d[0]))
735 ui.write("match: %s\n" % m(d[0]))
736
736
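# --- Illustrative sketch (not part of this changeset): the idea behind
# --- util.parsedate -- try a list of known strftime formats in turn and
# --- return a (unixtime, tzoffset) pair. The format list is an example,
# --- not Mercurial's actual table of date formats.
import calendar, time

def _parse_date(s, formats=("%Y-%m-%d %H:%M:%S", "%Y-%m-%d", "%b %d %Y")):
    for fmt in formats:
        try:
            when = calendar.timegm(time.strptime(s, fmt))
            return when, 0  # offset 0: input treated as UTC in this sketch
        except ValueError:
            continue
    raise ValueError("invalid date: %r" % s)
# _parse_date("2008-03-11") -> (1205193600, 0)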
737 def debugindex(ui, file_):
737 def debugindex(ui, file_):
738 """dump the contents of an index file"""
738 """dump the contents of an index file"""
739 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
739 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
740 ui.write(" rev offset length base linkrev" +
740 ui.write(" rev offset length base linkrev" +
741 " nodeid p1 p2\n")
741 " nodeid p1 p2\n")
742 for i in xrange(r.count()):
742 for i in xrange(r.count()):
743 node = r.node(i)
743 node = r.node(i)
744 try:
744 try:
745 pp = r.parents(node)
745 pp = r.parents(node)
746 except:
746 except:
747 pp = [nullid, nullid]
747 pp = [nullid, nullid]
748 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
748 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
749 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
749 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
750 short(node), short(pp[0]), short(pp[1])))
750 short(node), short(pp[0]), short(pp[1])))
751
751
752 def debugindexdot(ui, file_):
752 def debugindexdot(ui, file_):
753 """dump an index DAG as a .dot file"""
753 """dump an index DAG as a .dot file"""
754 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
754 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
755 ui.write("digraph G {\n")
755 ui.write("digraph G {\n")
756 for i in xrange(r.count()):
756 for i in xrange(r.count()):
757 node = r.node(i)
757 node = r.node(i)
758 pp = r.parents(node)
758 pp = r.parents(node)
759 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
759 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
760 if pp[1] != nullid:
760 if pp[1] != nullid:
761 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
761 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
762 ui.write("}\n")
762 ui.write("}\n")
763
763
764 def debuginstall(ui):
764 def debuginstall(ui):
765 '''test Mercurial installation'''
765 '''test Mercurial installation'''
766
766
767 def writetemp(contents):
767 def writetemp(contents):
768 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
768 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
769 f = os.fdopen(fd, "wb")
769 f = os.fdopen(fd, "wb")
770 f.write(contents)
770 f.write(contents)
771 f.close()
771 f.close()
772 return name
772 return name
773
773
774 problems = 0
774 problems = 0
775
775
776 # encoding
776 # encoding
777 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
777 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
778 try:
778 try:
779 util.fromlocal("test")
779 util.fromlocal("test")
780 except util.Abort, inst:
780 except util.Abort, inst:
781 ui.write(" %s\n" % inst)
781 ui.write(" %s\n" % inst)
782 ui.write(_(" (check that your locale is properly set)\n"))
782 ui.write(_(" (check that your locale is properly set)\n"))
783 problems += 1
783 problems += 1
784
784
785 # compiled modules
785 # compiled modules
786 ui.status(_("Checking extensions...\n"))
786 ui.status(_("Checking extensions...\n"))
787 try:
787 try:
788 import bdiff, mpatch, base85
788 import bdiff, mpatch, base85
789 except Exception, inst:
789 except Exception, inst:
790 ui.write(" %s\n" % inst)
790 ui.write(" %s\n" % inst)
791 ui.write(_(" One or more extensions could not be found"))
791 ui.write(_(" One or more extensions could not be found"))
792 ui.write(_(" (check that you compiled the extensions)\n"))
792 ui.write(_(" (check that you compiled the extensions)\n"))
793 problems += 1
793 problems += 1
794
794
795 # templates
795 # templates
796 ui.status(_("Checking templates...\n"))
796 ui.status(_("Checking templates...\n"))
797 try:
797 try:
798 import templater
798 import templater
799 t = templater.templater(templater.templatepath("map-cmdline.default"))
799 t = templater.templater(templater.templatepath("map-cmdline.default"))
800 except Exception, inst:
800 except Exception, inst:
801 ui.write(" %s\n" % inst)
801 ui.write(" %s\n" % inst)
802 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
802 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
803 problems += 1
803 problems += 1
804
804
805 # patch
805 # patch
806 ui.status(_("Checking patch...\n"))
806 ui.status(_("Checking patch...\n"))
807 patchproblems = 0
807 patchproblems = 0
808 a = "1\n2\n3\n4\n"
808 a = "1\n2\n3\n4\n"
809 b = "1\n2\n3\ninsert\n4\n"
809 b = "1\n2\n3\ninsert\n4\n"
810 fa = writetemp(a)
810 fa = writetemp(a)
811 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
811 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
812 os.path.basename(fa))
812 os.path.basename(fa))
813 fd = writetemp(d)
813 fd = writetemp(d)
814
814
815 files = {}
815 files = {}
816 try:
816 try:
817 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
817 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
818 except util.Abort, e:
818 except util.Abort, e:
819 ui.write(_(" patch call failed:\n"))
819 ui.write(_(" patch call failed:\n"))
820 ui.write(" " + str(e) + "\n")
820 ui.write(" " + str(e) + "\n")
821 patchproblems += 1
821 patchproblems += 1
822 else:
822 else:
823 if list(files) != [os.path.basename(fa)]:
823 if list(files) != [os.path.basename(fa)]:
824 ui.write(_(" unexpected patch output!\n"))
824 ui.write(_(" unexpected patch output!\n"))
825 patchproblems += 1
825 patchproblems += 1
826 a = file(fa).read()
826 a = file(fa).read()
827 if a != b:
827 if a != b:
828 ui.write(_(" patch test failed!\n"))
828 ui.write(_(" patch test failed!\n"))
829 patchproblems += 1
829 patchproblems += 1
830
830
831 if patchproblems:
831 if patchproblems:
832 if ui.config('ui', 'patch'):
832 if ui.config('ui', 'patch'):
833 ui.write(_(" (Current patch tool may be incompatible with patch,"
833 ui.write(_(" (Current patch tool may be incompatible with patch,"
834 " or misconfigured. Please check your .hgrc file)\n"))
834 " or misconfigured. Please check your .hgrc file)\n"))
835 else:
835 else:
836 ui.write(_(" Internal patcher failure, please report this error"
836 ui.write(_(" Internal patcher failure, please report this error"
837 " to http://www.selenic.com/mercurial/bts\n"))
837 " to http://www.selenic.com/mercurial/bts\n"))
838 problems += patchproblems
838 problems += patchproblems
839
839
840 os.unlink(fa)
840 os.unlink(fa)
841 os.unlink(fd)
841 os.unlink(fd)
842
842
843 # editor
843 # editor
844 ui.status(_("Checking commit editor...\n"))
844 ui.status(_("Checking commit editor...\n"))
845 editor = ui.geteditor()
845 editor = ui.geteditor()
846 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
846 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
847 if not cmdpath:
847 if not cmdpath:
848 if editor == 'vi':
848 if editor == 'vi':
849 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
849 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
850 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
850 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
851 else:
851 else:
852 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
852 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
853 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
853 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
854 problems += 1
854 problems += 1
855
855
856 # check username
856 # check username
857 ui.status(_("Checking username...\n"))
857 ui.status(_("Checking username...\n"))
858 user = os.environ.get("HGUSER")
858 user = os.environ.get("HGUSER")
859 if user is None:
859 if user is None:
860 user = ui.config("ui", "username")
860 user = ui.config("ui", "username")
861 if user is None:
861 if user is None:
862 user = os.environ.get("EMAIL")
862 user = os.environ.get("EMAIL")
863 if not user:
863 if not user:
864 ui.warn(" ")
864 ui.warn(" ")
865 ui.username()
865 ui.username()
866 ui.write(_(" (specify a username in your .hgrc file)\n"))
866 ui.write(_(" (specify a username in your .hgrc file)\n"))
867
867
868 if not problems:
868 if not problems:
869 ui.status(_("No problems detected\n"))
869 ui.status(_("No problems detected\n"))
870 else:
870 else:
871 ui.write(_("%s problems detected,"
871 ui.write(_("%s problems detected,"
872 " please check your install!\n") % problems)
872 " please check your install!\n") % problems)
873
873
874 return problems
874 return problems
875
875
876 def debugrename(ui, repo, file1, *pats, **opts):
876 def debugrename(ui, repo, file1, *pats, **opts):
877 """dump rename information"""
877 """dump rename information"""
878
878
879 ctx = repo.changectx(opts.get('rev', 'tip'))
879 ctx = repo.changectx(opts.get('rev', 'tip'))
880 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
880 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
881 ctx.node()):
881 ctx.node()):
882 fctx = ctx.filectx(abs)
882 fctx = ctx.filectx(abs)
883 m = fctx.filelog().renamed(fctx.filenode())
883 m = fctx.filelog().renamed(fctx.filenode())
884 if m:
884 if m:
885 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
885 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
886 else:
886 else:
887 ui.write(_("%s not renamed\n") % rel)
887 ui.write(_("%s not renamed\n") % rel)
888
888
889 def debugwalk(ui, repo, *pats, **opts):
889 def debugwalk(ui, repo, *pats, **opts):
890 """show how files match on given patterns"""
890 """show how files match on given patterns"""
891 items = list(cmdutil.walk(repo, pats, opts))
891 items = list(cmdutil.walk(repo, pats, opts))
892 if not items:
892 if not items:
893 return
893 return
894 fmt = '%%s %%-%ds %%-%ds %%s' % (
894 fmt = '%%s %%-%ds %%-%ds %%s' % (
895 max([len(abs) for (src, abs, rel, exact) in items]),
895 max([len(abs) for (src, abs, rel, exact) in items]),
896 max([len(rel) for (src, abs, rel, exact) in items]))
896 max([len(rel) for (src, abs, rel, exact) in items]))
897 for src, abs, rel, exact in items:
897 for src, abs, rel, exact in items:
898 line = fmt % (src, abs, rel, exact and 'exact' or '')
898 line = fmt % (src, abs, rel, exact and 'exact' or '')
899 ui.write("%s\n" % line.rstrip())
899 ui.write("%s\n" % line.rstrip())
900
900
901 def diff(ui, repo, *pats, **opts):
901 def diff(ui, repo, *pats, **opts):
902 """diff repository (or selected files)
902 """diff repository (or selected files)
903
903
904 Show differences between revisions for the specified files.
904 Show differences between revisions for the specified files.
905
905
906 Differences between files are shown using the unified diff format.
906 Differences between files are shown using the unified diff format.
907
907
908 NOTE: diff may generate unexpected results for merges, as it will
908 NOTE: diff may generate unexpected results for merges, as it will
909 default to comparing against the working directory's first parent
909 default to comparing against the working directory's first parent
910 changeset if no revisions are specified.
910 changeset if no revisions are specified.
911
911
912 When two revision arguments are given, then changes are shown
912 When two revision arguments are given, then changes are shown
913 between those revisions. If only one revision is specified then
913 between those revisions. If only one revision is specified then
914 that revision is compared to the working directory, and, when no
914 that revision is compared to the working directory, and, when no
915 revisions are specified, the working directory is compared
915 revisions are specified, the working directory is compared
916 to its first parent.
916 to its first parent.
917
917
918 Without the -a option, diff will avoid generating diffs of files
918 Without the -a option, diff will avoid generating diffs of files
919 it detects as binary. With -a, diff will generate a diff anyway,
919 it detects as binary. With -a, diff will generate a diff anyway,
920 probably with undesirable results.
920 probably with undesirable results.
921 """
921 """
922 node1, node2 = cmdutil.revpair(repo, opts['rev'])
922 node1, node2 = cmdutil.revpair(repo, opts['rev'])
923
923
924 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
924 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
925
925
926 patch.diff(repo, node1, node2, fns, match=matchfn,
926 patch.diff(repo, node1, node2, fns, match=matchfn,
927 opts=patch.diffopts(ui, opts))
927 opts=patch.diffopts(ui, opts))
928
928
929 def export(ui, repo, *changesets, **opts):
929 def export(ui, repo, *changesets, **opts):
930 """dump the header and diffs for one or more changesets
930 """dump the header and diffs for one or more changesets
931
931
932 Print the changeset header and diffs for one or more revisions.
932 Print the changeset header and diffs for one or more revisions.
933
933
934 The information shown in the changeset header is: author,
934 The information shown in the changeset header is: author,
935 changeset hash, parent(s) and commit comment.
935 changeset hash, parent(s) and commit comment.
936
936
937 NOTE: export may generate unexpected diff output for merge changesets,
937 NOTE: export may generate unexpected diff output for merge changesets,
938 as it will compare the merge changeset against its first parent only.
938 as it will compare the merge changeset against its first parent only.
939
939
940 Output may be to a file, in which case the name of the file is
940 Output may be to a file, in which case the name of the file is
941 given using a format string. The formatting rules are as follows:
941 given using a format string. The formatting rules are as follows:
942
942
943 %% literal "%" character
943 %% literal "%" character
944 %H changeset hash (40 bytes of hexadecimal)
944 %H changeset hash (40 bytes of hexadecimal)
945 %N number of patches being generated
945 %N number of patches being generated
946 %R changeset revision number
946 %R changeset revision number
947 %b basename of the exporting repository
947 %b basename of the exporting repository
948 %h short-form changeset hash (12 bytes of hexadecimal)
948 %h short-form changeset hash (12 bytes of hexadecimal)
949 %n zero-padded sequence number, starting at 1
949 %n zero-padded sequence number, starting at 1
950 %r zero-padded changeset revision number
950 %r zero-padded changeset revision number
951
951
952 Without the -a option, export will avoid generating diffs of files
952 Without the -a option, export will avoid generating diffs of files
953 it detects as binary. With -a, export will generate a diff anyway,
953 it detects as binary. With -a, export will generate a diff anyway,
954 probably with undesirable results.
954 probably with undesirable results.
955
955
956 With the --switch-parent option, the diff will be against the second
956 With the --switch-parent option, the diff will be against the second
957 parent. This can be useful for reviewing a merge.
957 parent. This can be useful for reviewing a merge.
958 """
958 """
959 if not changesets:
959 if not changesets:
960 raise util.Abort(_("export requires at least one changeset"))
960 raise util.Abort(_("export requires at least one changeset"))
961 revs = cmdutil.revrange(repo, changesets)
961 revs = cmdutil.revrange(repo, changesets)
962 if len(revs) > 1:
962 if len(revs) > 1:
963 ui.note(_('exporting patches:\n'))
963 ui.note(_('exporting patches:\n'))
964 else:
964 else:
965 ui.note(_('exporting patch:\n'))
965 ui.note(_('exporting patch:\n'))
966 patch.export(repo, revs, template=opts['output'],
966 patch.export(repo, revs, template=opts['output'],
967 switch_parent=opts['switch_parent'],
967 switch_parent=opts['switch_parent'],
968 opts=patch.diffopts(ui, opts))
968 opts=patch.diffopts(ui, opts))
969
969
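# --- Illustrative sketch (not part of this changeset): expanding an
# --- export filename template such as "%R-%h.patch" using the rules
# --- listed in the docstring above. The real expansion is done inside
# --- patch.export/cmdutil; %b is omitted and %% is handled naively here.
def _expand_export_template(template, rev, node_hex, seqno, total):
    width = len(str(total))
    replacements = [
        ('%H', node_hex),                 # full 40-hex changeset hash
        ('%h', node_hex[:12]),            # short-form hash
        ('%R', str(rev)),                 # changeset revision number
        ('%r', str(rev).zfill(width)),    # zero-padded revision number
        ('%N', str(total)),               # number of patches generated
        ('%n', str(seqno).zfill(width)),  # zero-padded sequence number
        ('%%', '%'),                      # literal percent, handled last
    ]
    out = template
    for key, value in replacements:
        out = out.replace(key, value)
    return out
# _expand_export_template('%R-%h.patch', 1437, '9f8e' * 10, 1, 1)
# -> '1437-9f8e9f8e9f8e.patch'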
970 def grep(ui, repo, pattern, *pats, **opts):
970 def grep(ui, repo, pattern, *pats, **opts):
971 """search for a pattern in specified files and revisions
971 """search for a pattern in specified files and revisions
972
972
973 Search revisions of files for a regular expression.
973 Search revisions of files for a regular expression.
974
974
975 This command behaves differently than Unix grep. It only accepts
975 This command behaves differently than Unix grep. It only accepts
976 Python/Perl regexps. It searches repository history, not the
976 Python/Perl regexps. It searches repository history, not the
977 working directory. It always prints the revision number in which
977 working directory. It always prints the revision number in which
978 a match appears.
978 a match appears.
979
979
980 By default, grep only prints output for the first revision of a
980 By default, grep only prints output for the first revision of a
981 file in which it finds a match. To get it to print every revision
981 file in which it finds a match. To get it to print every revision
982 that contains a change in match status ("-" for a match that
982 that contains a change in match status ("-" for a match that
983 becomes a non-match, or "+" for a non-match that becomes a match),
983 becomes a non-match, or "+" for a non-match that becomes a match),
984 use the --all flag.
984 use the --all flag.
985 """
985 """
986 reflags = 0
986 reflags = 0
987 if opts['ignore_case']:
987 if opts['ignore_case']:
988 reflags |= re.I
988 reflags |= re.I
989 try:
989 try:
990 regexp = re.compile(pattern, reflags)
990 regexp = re.compile(pattern, reflags)
991 except Exception, inst:
991 except Exception, inst:
992 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
992 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
993 return None
993 return None
994 sep, eol = ':', '\n'
994 sep, eol = ':', '\n'
995 if opts['print0']:
995 if opts['print0']:
996 sep = eol = '\0'
996 sep = eol = '\0'
997
997
998 fcache = {}
998 fcache = {}
999 def getfile(fn):
999 def getfile(fn):
1000 if fn not in fcache:
1000 if fn not in fcache:
1001 fcache[fn] = repo.file(fn)
1001 fcache[fn] = repo.file(fn)
1002 return fcache[fn]
1002 return fcache[fn]
1003
1003
1004 def matchlines(body):
1004 def matchlines(body):
1005 begin = 0
1005 begin = 0
1006 linenum = 0
1006 linenum = 0
1007 while True:
1007 while True:
1008 match = regexp.search(body, begin)
1008 match = regexp.search(body, begin)
1009 if not match:
1009 if not match:
1010 break
1010 break
1011 mstart, mend = match.span()
1011 mstart, mend = match.span()
1012 linenum += body.count('\n', begin, mstart) + 1
1012 linenum += body.count('\n', begin, mstart) + 1
1013 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1013 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1014 lend = body.find('\n', mend)
1014 lend = body.find('\n', mend)
1015 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1015 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1016 begin = lend + 1
1016 begin = lend + 1
1017
1017
1018 class linestate(object):
1018 class linestate(object):
1019 def __init__(self, line, linenum, colstart, colend):
1019 def __init__(self, line, linenum, colstart, colend):
1020 self.line = line
1020 self.line = line
1021 self.linenum = linenum
1021 self.linenum = linenum
1022 self.colstart = colstart
1022 self.colstart = colstart
1023 self.colend = colend
1023 self.colend = colend
1024
1024
1025 def __eq__(self, other):
1025 def __eq__(self, other):
1026 return self.line == other.line
1026 return self.line == other.line
1027
1027
1028 matches = {}
1028 matches = {}
1029 copies = {}
1029 copies = {}
1030 def grepbody(fn, rev, body):
1030 def grepbody(fn, rev, body):
1031 matches[rev].setdefault(fn, [])
1031 matches[rev].setdefault(fn, [])
1032 m = matches[rev][fn]
1032 m = matches[rev][fn]
1033 for lnum, cstart, cend, line in matchlines(body):
1033 for lnum, cstart, cend, line in matchlines(body):
1034 s = linestate(line, lnum, cstart, cend)
1034 s = linestate(line, lnum, cstart, cend)
1035 m.append(s)
1035 m.append(s)
1036
1036
1037 def difflinestates(a, b):
1037 def difflinestates(a, b):
1038 sm = difflib.SequenceMatcher(None, a, b)
1038 sm = difflib.SequenceMatcher(None, a, b)
1039 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1039 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1040 if tag == 'insert':
1040 if tag == 'insert':
1041 for i in xrange(blo, bhi):
1041 for i in xrange(blo, bhi):
1042 yield ('+', b[i])
1042 yield ('+', b[i])
1043 elif tag == 'delete':
1043 elif tag == 'delete':
1044 for i in xrange(alo, ahi):
1044 for i in xrange(alo, ahi):
1045 yield ('-', a[i])
1045 yield ('-', a[i])
1046 elif tag == 'replace':
1046 elif tag == 'replace':
1047 for i in xrange(alo, ahi):
1047 for i in xrange(alo, ahi):
1048 yield ('-', a[i])
1048 yield ('-', a[i])
1049 for i in xrange(blo, bhi):
1049 for i in xrange(blo, bhi):
1050 yield ('+', b[i])
1050 yield ('+', b[i])
1051
1051
1052 prev = {}
1052 prev = {}
1053 def display(fn, rev, states, prevstates):
1053 def display(fn, rev, states, prevstates):
1054 datefunc = ui.quiet and util.shortdate or util.datestr
1054 datefunc = ui.quiet and util.shortdate or util.datestr
1055 found = False
1055 found = False
1056 filerevmatches = {}
1056 filerevmatches = {}
1057 r = prev.get(fn, -1)
1057 r = prev.get(fn, -1)
1058 if opts['all']:
1058 if opts['all']:
1059 iter = difflinestates(states, prevstates)
1059 iter = difflinestates(states, prevstates)
1060 else:
1060 else:
1061 iter = [('', l) for l in prevstates]
1061 iter = [('', l) for l in prevstates]
1062 for change, l in iter:
1062 for change, l in iter:
1063 cols = [fn, str(r)]
1063 cols = [fn, str(r)]
1064 if opts['line_number']:
1064 if opts['line_number']:
1065 cols.append(str(l.linenum))
1065 cols.append(str(l.linenum))
1066 if opts['all']:
1066 if opts['all']:
1067 cols.append(change)
1067 cols.append(change)
1068 if opts['user']:
1068 if opts['user']:
1069 cols.append(ui.shortuser(get(r)[1]))
1069 cols.append(ui.shortuser(get(r)[1]))
1070 if opts.get('date'):
1070 if opts.get('date'):
1071 cols.append(datefunc(get(r)[2]))
1071 cols.append(datefunc(get(r)[2]))
1072 if opts['files_with_matches']:
1072 if opts['files_with_matches']:
1073 c = (fn, r)
1073 c = (fn, r)
1074 if c in filerevmatches:
1074 if c in filerevmatches:
1075 continue
1075 continue
1076 filerevmatches[c] = 1
1076 filerevmatches[c] = 1
1077 else:
1077 else:
1078 cols.append(l.line)
1078 cols.append(l.line)
1079 ui.write(sep.join(cols), eol)
1079 ui.write(sep.join(cols), eol)
1080 found = True
1080 found = True
1081 return found
1081 return found
1082
1082
1083 fstate = {}
1083 fstate = {}
1084 skip = {}
1084 skip = {}
1085 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1085 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1086 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1086 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1087 found = False
1087 found = False
1088 follow = opts.get('follow')
1088 follow = opts.get('follow')
1089 for st, rev, fns in changeiter:
1089 for st, rev, fns in changeiter:
1090 if st == 'window':
1090 if st == 'window':
1091 matches.clear()
1091 matches.clear()
1092 elif st == 'add':
1092 elif st == 'add':
1093 ctx = repo.changectx(rev)
1093 ctx = repo.changectx(rev)
1094 matches[rev] = {}
1094 matches[rev] = {}
1095 for fn in fns:
1095 for fn in fns:
1096 if fn in skip:
1096 if fn in skip:
1097 continue
1097 continue
1098 try:
1098 try:
1099 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1099 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1100 fstate.setdefault(fn, [])
1100 fstate.setdefault(fn, [])
1101 if follow:
1101 if follow:
1102 copied = getfile(fn).renamed(ctx.filenode(fn))
1102 copied = getfile(fn).renamed(ctx.filenode(fn))
1103 if copied:
1103 if copied:
1104 copies.setdefault(rev, {})[fn] = copied[0]
1104 copies.setdefault(rev, {})[fn] = copied[0]
1105 except revlog.LookupError:
1105 except revlog.LookupError:
1106 pass
1106 pass
1107 elif st == 'iter':
1107 elif st == 'iter':
1108 states = matches[rev].items()
1108 states = matches[rev].items()
1109 states.sort()
1109 states.sort()
1110 for fn, m in states:
1110 for fn, m in states:
1111 copy = copies.get(rev, {}).get(fn)
1111 copy = copies.get(rev, {}).get(fn)
1112 if fn in skip:
1112 if fn in skip:
1113 if copy:
1113 if copy:
1114 skip[copy] = True
1114 skip[copy] = True
1115 continue
1115 continue
1116 if fn in prev or fstate[fn]:
1116 if fn in prev or fstate[fn]:
1117 r = display(fn, rev, m, fstate[fn])
1117 r = display(fn, rev, m, fstate[fn])
1118 found = found or r
1118 found = found or r
1119 if r and not opts['all']:
1119 if r and not opts['all']:
1120 skip[fn] = True
1120 skip[fn] = True
1121 if copy:
1121 if copy:
1122 skip[copy] = True
1122 skip[copy] = True
1123 fstate[fn] = m
1123 fstate[fn] = m
1124 if copy:
1124 if copy:
1125 fstate[copy] = m
1125 fstate[copy] = m
1126 prev[fn] = rev
1126 prev[fn] = rev
1127
1127
1128 fstate = fstate.items()
1128 fstate = fstate.items()
1129 fstate.sort()
1129 fstate.sort()
1130 for fn, state in fstate:
1130 for fn, state in fstate:
1131 if fn in skip:
1131 if fn in skip:
1132 continue
1132 continue
1133 if fn not in copies.get(prev[fn], {}):
1133 if fn not in copies.get(prev[fn], {}):
1134 found = display(fn, rev, {}, state) or found
1134 found = display(fn, rev, {}, state) or found
1135 return (not found and 1) or 0
1135 return (not found and 1) or 0
1136
1136
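# --- Illustrative sketch (not part of this changeset): what
# --- difflinestates() above does with difflib.SequenceMatcher opcodes --
# --- turning insert/delete/replace ranges into the '+'/'-' tags that
# --- `hg grep --all` prints when a match appears or disappears.
import difflib

def _diff_states(old, new):
    sm = difflib.SequenceMatcher(None, old, new)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in old[alo:ahi]:
                yield ('-', line)
        if tag in ('insert', 'replace'):
            for line in new[blo:bhi]:
                yield ('+', line)
# list(_diff_states(['foo()'], ['foo()', 'bar()'])) -> [('+', 'bar()')]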
1137 def heads(ui, repo, *branchrevs, **opts):
1137 def heads(ui, repo, *branchrevs, **opts):
1138 """show current repository heads or show branch heads
1138 """show current repository heads or show branch heads
1139
1139
1140 With no arguments, show all repository head changesets.
1140 With no arguments, show all repository head changesets.
1141
1141
1142 If branch or revision names are given, this will show the heads of
1142 If branch or revision names are given, this will show the heads of
1143 the specified branches or the branches those revisions are tagged
1143 the specified branches or the branches those revisions are tagged
1144 with.
1144 with.
1145
1145
1146 Repository "heads" are changesets that don't have child
1146 Repository "heads" are changesets that don't have child
1147 changesets. They are where development generally takes place and
1147 changesets. They are where development generally takes place and
1148 are the usual targets for update and merge operations.
1148 are the usual targets for update and merge operations.
1149
1149
1150 Branch heads are changesets that have a given branch tag, but have
1150 Branch heads are changesets that have a given branch tag, but have
1151 no child changesets with that tag. They are usually where
1151 no child changesets with that tag. They are usually where
1152 development on the given branch takes place.
1152 development on the given branch takes place.
1153 """
1153 """
1154 if opts['rev']:
1154 if opts['rev']:
1155 start = repo.lookup(opts['rev'])
1155 start = repo.lookup(opts['rev'])
1156 else:
1156 else:
1157 start = None
1157 start = None
1158 if not branchrevs:
1158 if not branchrevs:
1159 # Assume we're looking at repo-wide heads if no revs were specified.
1159 # Assume we're looking at repo-wide heads if no revs were specified.
1160 heads = repo.heads(start)
1160 heads = repo.heads(start)
1161 else:
1161 else:
1162 heads = []
1162 heads = []
1163 visitedset = util.set()
1163 visitedset = util.set()
1164 for branchrev in branchrevs:
1164 for branchrev in branchrevs:
1165 branch = repo.changectx(branchrev).branch()
1165 branch = repo.changectx(branchrev).branch()
1166 if branch in visitedset:
1166 if branch in visitedset:
1167 continue
1167 continue
1168 visitedset.add(branch)
1168 visitedset.add(branch)
1169 bheads = repo.branchheads(branch, start)
1169 bheads = repo.branchheads(branch, start)
1170 if not bheads:
1170 if not bheads:
1171 if branch != branchrev:
1171 if branch != branchrev:
1172 ui.warn(_("no changes on branch %s containing %s are "
1172 ui.warn(_("no changes on branch %s containing %s are "
1173 "reachable from %s\n")
1173 "reachable from %s\n")
1174 % (branch, branchrev, opts['rev']))
1174 % (branch, branchrev, opts['rev']))
1175 else:
1175 else:
1176 ui.warn(_("no changes on branch %s are reachable from %s\n")
1176 ui.warn(_("no changes on branch %s are reachable from %s\n")
1177 % (branch, opts['rev']))
1177 % (branch, opts['rev']))
1178 heads.extend(bheads)
1178 heads.extend(bheads)
1179 if not heads:
1179 if not heads:
1180 return 1
1180 return 1
1181 displayer = cmdutil.show_changeset(ui, repo, opts)
1181 displayer = cmdutil.show_changeset(ui, repo, opts)
1182 for n in heads:
1182 for n in heads:
1183 displayer.show(changenode=n)
1183 displayer.show(changenode=n)
1184
1184
1185 def help_(ui, name=None, with_version=False):
1185 def help_(ui, name=None, with_version=False):
1186 """show help for a command, extension, or list of commands
1186 """show help for a command, extension, or list of commands
1187
1187
1188 With no arguments, print a list of commands and short help.
1188 With no arguments, print a list of commands and short help.
1189
1189
1190 Given a command name, print help for that command.
1190 Given a command name, print help for that command.
1191
1191
1192 Given an extension name, print help for that extension, and the
1192 Given an extension name, print help for that extension, and the
1193 commands it provides."""
1193 commands it provides."""
1194 option_lists = []
1194 option_lists = []
1195
1195
1196 def addglobalopts(aliases):
1196 def addglobalopts(aliases):
1197 if ui.verbose:
1197 if ui.verbose:
1198 option_lists.append((_("global options:"), globalopts))
1198 option_lists.append((_("global options:"), globalopts))
1199 if name == 'shortlist':
1199 if name == 'shortlist':
1200 option_lists.append((_('use "hg help" for the full list '
1200 option_lists.append((_('use "hg help" for the full list '
1201 'of commands'), ()))
1201 'of commands'), ()))
1202 else:
1202 else:
1203 if name == 'shortlist':
1203 if name == 'shortlist':
1204 msg = _('use "hg help" for the full list of commands '
1204 msg = _('use "hg help" for the full list of commands '
1205 'or "hg -v" for details')
1205 'or "hg -v" for details')
1206 elif aliases:
1206 elif aliases:
1207 msg = _('use "hg -v help%s" to show aliases and '
1207 msg = _('use "hg -v help%s" to show aliases and '
1208 'global options') % (name and " " + name or "")
1208 'global options') % (name and " " + name or "")
1209 else:
1209 else:
1210 msg = _('use "hg -v help %s" to show global options') % name
1210 msg = _('use "hg -v help %s" to show global options') % name
1211 option_lists.append((msg, ()))
1211 option_lists.append((msg, ()))
1212
1212
1213 def helpcmd(name):
1213 def helpcmd(name):
1214 if with_version:
1214 if with_version:
1215 version_(ui)
1215 version_(ui)
1216 ui.write('\n')
1216 ui.write('\n')
1217 aliases, i = cmdutil.findcmd(ui, name, table)
1217 aliases, i = cmdutil.findcmd(ui, name, table)
1218 # synopsis
1218 # synopsis
1219 ui.write("%s\n" % i[2])
1219 ui.write("%s\n" % i[2])
1220
1220
1221 # aliases
1221 # aliases
1222 if not ui.quiet and len(aliases) > 1:
1222 if not ui.quiet and len(aliases) > 1:
1223 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1223 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1224
1224
1225 # description
1225 # description
1226 doc = i[0].__doc__
1226 doc = i[0].__doc__
1227 if not doc:
1227 if not doc:
1228 doc = _("(No help text available)")
1228 doc = _("(No help text available)")
1229 if ui.quiet:
1229 if ui.quiet:
1230 doc = doc.splitlines(0)[0]
1230 doc = doc.splitlines(0)[0]
1231 ui.write("\n%s\n" % doc.rstrip())
1231 ui.write("\n%s\n" % doc.rstrip())
1232
1232
1233 if not ui.quiet:
1233 if not ui.quiet:
1234 # options
1234 # options
1235 if i[1]:
1235 if i[1]:
1236 option_lists.append((_("options:\n"), i[1]))
1236 option_lists.append((_("options:\n"), i[1]))
1237
1237
1238 addglobalopts(False)
1238 addglobalopts(False)
1239
1239
1240 def helplist(header, select=None):
1240 def helplist(header, select=None):
1241 h = {}
1241 h = {}
1242 cmds = {}
1242 cmds = {}
1243 for c, e in table.items():
1243 for c, e in table.items():
1244 f = c.split("|", 1)[0]
1244 f = c.split("|", 1)[0]
1245 if select and not select(f):
1245 if select and not select(f):
1246 continue
1246 continue
1247 if name == "shortlist" and not f.startswith("^"):
1247 if name == "shortlist" and not f.startswith("^"):
1248 continue
1248 continue
1249 f = f.lstrip("^")
1249 f = f.lstrip("^")
1250 if not ui.debugflag and f.startswith("debug"):
1250 if not ui.debugflag and f.startswith("debug"):
1251 continue
1251 continue
1252 doc = e[0].__doc__
1252 doc = e[0].__doc__
1253 if not doc:
1253 if not doc:
1254 doc = _("(No help text available)")
1254 doc = _("(No help text available)")
1255 h[f] = doc.splitlines(0)[0].rstrip()
1255 h[f] = doc.splitlines(0)[0].rstrip()
1256 cmds[f] = c.lstrip("^")
1256 cmds[f] = c.lstrip("^")
1257
1257
1258 if not h:
1258 if not h:
1259 ui.status(_('no commands defined\n'))
1259 ui.status(_('no commands defined\n'))
1260 return
1260 return
1261
1261
1262 ui.status(header)
1262 ui.status(header)
1263 fns = h.keys()
1263 fns = h.keys()
1264 fns.sort()
1264 fns.sort()
1265 m = max(map(len, fns))
1265 m = max(map(len, fns))
1266 for f in fns:
1266 for f in fns:
1267 if ui.verbose:
1267 if ui.verbose:
1268 commands = cmds[f].replace("|",", ")
1268 commands = cmds[f].replace("|",", ")
1269 ui.write(" %s:\n %s\n"%(commands, h[f]))
1269 ui.write(" %s:\n %s\n"%(commands, h[f]))
1270 else:
1270 else:
1271 ui.write(' %-*s %s\n' % (m, f, h[f]))
1271 ui.write(' %-*s %s\n' % (m, f, h[f]))
1272
1272
1273 if not ui.quiet:
1273 if not ui.quiet:
1274 addglobalopts(True)
1274 addglobalopts(True)
1275
1275
1276 def helptopic(name):
1276 def helptopic(name):
1277 v = None
1277 v = None
1278 for i in help.helptable:
1278 for i in help.helptable:
1279 l = i.split('|')
1279 l = i.split('|')
1280 if name in l:
1280 if name in l:
1281 v = i
1281 v = i
1282 header = l[-1]
1282 header = l[-1]
1283 if not v:
1283 if not v:
1284 raise cmdutil.UnknownCommand(name)
1284 raise cmdutil.UnknownCommand(name)
1285
1285
1286 # description
1286 # description
1287 doc = help.helptable[v]
1287 doc = help.helptable[v]
1288 if not doc:
1288 if not doc:
1289 doc = _("(No help text available)")
1289 doc = _("(No help text available)")
1290 if callable(doc):
1290 if callable(doc):
1291 doc = doc()
1291 doc = doc()
1292
1292
1293 ui.write("%s\n" % header)
1293 ui.write("%s\n" % header)
1294 ui.write("%s\n" % doc.rstrip())
1294 ui.write("%s\n" % doc.rstrip())
1295
1295
1296 def helpext(name):
1296 def helpext(name):
1297 try:
1297 try:
1298 mod = extensions.find(name)
1298 mod = extensions.find(name)
1299 except KeyError:
1299 except KeyError:
1300 raise cmdutil.UnknownCommand(name)
1300 raise cmdutil.UnknownCommand(name)
1301
1301
1302 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1302 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1303 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1303 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1304 for d in doc[1:]:
1304 for d in doc[1:]:
1305 ui.write(d, '\n')
1305 ui.write(d, '\n')
1306
1306
1307 ui.status('\n')
1307 ui.status('\n')
1308
1308
1309 try:
1309 try:
1310 ct = mod.cmdtable
1310 ct = mod.cmdtable
1311 except AttributeError:
1311 except AttributeError:
1312 ct = {}
1312 ct = {}
1313
1313
1314 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1314 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1315 helplist(_('list of commands:\n\n'), modcmds.has_key)
1315 helplist(_('list of commands:\n\n'), modcmds.has_key)
1316
1316
1317 if name and name != 'shortlist':
1317 if name and name != 'shortlist':
1318 i = None
1318 i = None
1319 for f in (helpcmd, helptopic, helpext):
1319 for f in (helpcmd, helptopic, helpext):
1320 try:
1320 try:
1321 f(name)
1321 f(name)
1322 i = None
1322 i = None
1323 break
1323 break
1324 except cmdutil.UnknownCommand, inst:
1324 except cmdutil.UnknownCommand, inst:
1325 i = inst
1325 i = inst
1326 if i:
1326 if i:
1327 raise i
1327 raise i
1328
1328
1329 else:
1329 else:
1330 # program name
1330 # program name
1331 if ui.verbose or with_version:
1331 if ui.verbose or with_version:
1332 version_(ui)
1332 version_(ui)
1333 else:
1333 else:
1334 ui.status(_("Mercurial Distributed SCM\n"))
1334 ui.status(_("Mercurial Distributed SCM\n"))
1335 ui.status('\n')
1335 ui.status('\n')
1336
1336
1337 # list of commands
1337 # list of commands
1338 if name == "shortlist":
1338 if name == "shortlist":
1339 header = _('basic commands:\n\n')
1339 header = _('basic commands:\n\n')
1340 else:
1340 else:
1341 header = _('list of commands:\n\n')
1341 header = _('list of commands:\n\n')
1342
1342
1343 helplist(header)
1343 helplist(header)
1344
1344
1345 # list all option lists
1345 # list all option lists
1346 opt_output = []
1346 opt_output = []
1347 for title, options in option_lists:
1347 for title, options in option_lists:
1348 opt_output.append(("\n%s" % title, None))
1348 opt_output.append(("\n%s" % title, None))
1349 for shortopt, longopt, default, desc in options:
1349 for shortopt, longopt, default, desc in options:
1350 if "DEPRECATED" in desc and not ui.verbose: continue
1350 if "DEPRECATED" in desc and not ui.verbose: continue
1351 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1351 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1352 longopt and " --%s" % longopt),
1352 longopt and " --%s" % longopt),
1353 "%s%s" % (desc,
1353 "%s%s" % (desc,
1354 default
1354 default
1355 and _(" (default: %s)") % default
1355 and _(" (default: %s)") % default
1356 or "")))
1356 or "")))
1357
1357
1358 if opt_output:
1358 if opt_output:
1359 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1359 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1360 for first, second in opt_output:
1360 for first, second in opt_output:
1361 if second:
1361 if second:
1362 ui.write(" %-*s %s\n" % (opts_len, first, second))
1362 ui.write(" %-*s %s\n" % (opts_len, first, second))
1363 else:
1363 else:
1364 ui.write("%s\n" % first)
1364 ui.write("%s\n" % first)
1365
1365
1366 def identify(ui, repo, source=None,
1366 def identify(ui, repo, source=None,
1367 rev=None, num=None, id=None, branch=None, tags=None):
1367 rev=None, num=None, id=None, branch=None, tags=None):
1368 """identify the working copy or specified revision
1368 """identify the working copy or specified revision
1369
1369
1370 With no revision, print a summary of the current state of the repo.
1370 With no revision, print a summary of the current state of the repo.
1371
1371
1372 With a path, do a lookup in another repository.
1372 With a path, do a lookup in another repository.
1373
1373
1374 This summary identifies the repository state using one or two parent
1374 This summary identifies the repository state using one or two parent
1375 hash identifiers, followed by a "+" if there are uncommitted changes
1375 hash identifiers, followed by a "+" if there are uncommitted changes
1376 in the working directory, a list of tags for this revision and a branch
1376 in the working directory, a list of tags for this revision and a branch
1377 name for non-default branches.
1377 name for non-default branches.
1378 """
1378 """
1379
1379
1380 if not repo and not source:
1380 if not repo and not source:
1381 raise util.Abort(_("There is no Mercurial repository here "
1381 raise util.Abort(_("There is no Mercurial repository here "
1382 "(.hg not found)"))
1382 "(.hg not found)"))
1383
1383
1384 hexfunc = ui.debugflag and hex or short
1384 hexfunc = ui.debugflag and hex or short
1385 default = not (num or id or branch or tags)
1385 default = not (num or id or branch or tags)
1386 output = []
1386 output = []
1387
1387
1388 if source:
1388 if source:
1389 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1389 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1390 srepo = hg.repository(ui, source)
1390 srepo = hg.repository(ui, source)
1391 if not rev and revs:
1391 if not rev and revs:
1392 rev = revs[0]
1392 rev = revs[0]
1393 if not rev:
1393 if not rev:
1394 rev = "tip"
1394 rev = "tip"
1395 if num or branch or tags:
1395 if num or branch or tags:
1396 raise util.Abort(
1396 raise util.Abort(
1397 "can't query remote revision number, branch, or tags")
1397 "can't query remote revision number, branch, or tags")
1398 output = [hexfunc(srepo.lookup(rev))]
1398 output = [hexfunc(srepo.lookup(rev))]
1399 elif not rev:
1399 elif not rev:
1400 ctx = repo.workingctx()
1400 ctx = repo.workingctx()
1401 parents = ctx.parents()
1401 parents = ctx.parents()
1402 changed = False
1402 changed = False
1403 if default or id or num:
1403 if default or id or num:
1404 changed = ctx.files() + ctx.deleted()
1404 changed = ctx.files() + ctx.deleted()
1405 if default or id:
1405 if default or id:
1406 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1406 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1407 (changed) and "+" or "")]
1407 (changed) and "+" or "")]
1408 if num:
1408 if num:
1409 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1409 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1410 (changed) and "+" or ""))
1410 (changed) and "+" or ""))
1411 else:
1411 else:
1412 ctx = repo.changectx(rev)
1412 ctx = repo.changectx(rev)
1413 if default or id:
1413 if default or id:
1414 output = [hexfunc(ctx.node())]
1414 output = [hexfunc(ctx.node())]
1415 if num:
1415 if num:
1416 output.append(str(ctx.rev()))
1416 output.append(str(ctx.rev()))
1417
1417
1418 if not source and default and not ui.quiet:
1418 if not source and default and not ui.quiet:
1419 b = util.tolocal(ctx.branch())
1419 b = util.tolocal(ctx.branch())
1420 if b != 'default':
1420 if b != 'default':
1421 output.append("(%s)" % b)
1421 output.append("(%s)" % b)
1422
1422
1423 # multiple tags for a single parent separated by '/'
1423 # multiple tags for a single parent separated by '/'
1424 t = "/".join(ctx.tags())
1424 t = "/".join(ctx.tags())
1425 if t:
1425 if t:
1426 output.append(t)
1426 output.append(t)
1427
1427
1428 if branch:
1428 if branch:
1429 output.append(util.tolocal(ctx.branch()))
1429 output.append(util.tolocal(ctx.branch()))
1430
1430
1431 if tags:
1431 if tags:
1432 output.extend(ctx.tags())
1432 output.extend(ctx.tags())
1433
1433
1434 ui.write("%s\n" % ' '.join(output))
1434 ui.write("%s\n" % ' '.join(output))
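# Illustrative sketch (not part of commands.py): how the summary line described
# in the identify docstring above is assembled -- parent hashes joined by "+",
# a trailing "+" for uncommitted changes, then branch and tags. The helper name
# and inputs are hypothetical; the real command derives them from the contexts.
def _format_identify(parent_hexes, dirty, branch, tags):
    out = ['+'.join(parent_hexes) + ('+' if dirty else '')]
    if branch != 'default':
        out.append('(%s)' % branch)
    if tags:
        out.append('/'.join(tags))
    return ' '.join(out)

# Example: _format_identify(['d4f0d2909abc'], True, 'stable', ['tip'])
# returns 'd4f0d2909abc+ (stable) tip'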
1435
1435
1436 def import_(ui, repo, patch1, *patches, **opts):
1436 def import_(ui, repo, patch1, *patches, **opts):
1437 """import an ordered set of patches
1437 """import an ordered set of patches
1438
1438
1439 Import a list of patches and commit them individually.
1439 Import a list of patches and commit them individually.
1440
1440
1441 If there are outstanding changes in the working directory, import
1441 If there are outstanding changes in the working directory, import
1442 will abort unless given the -f flag.
1442 will abort unless given the -f flag.
1443
1443
1444 You can import a patch straight from a mail message. Even patches
1444 You can import a patch straight from a mail message. Even patches
1445 as attachments work (the body part must be of type text/plain or
1445 as attachments work (the body part must be of type text/plain or
1446 text/x-patch to be used). The From and Subject headers of the email
1446 text/x-patch to be used). The From and Subject headers of the email
1447 message are used as the default committer and commit message. All
1447 message are used as the default committer and commit message. All
1448 text/plain body parts before the first diff are added to the commit
1448 text/plain body parts before the first diff are added to the commit
1449 message.
1449 message.
1450
1450
1451 If the imported patch was generated by hg export, the user and description
1451 If the imported patch was generated by hg export, the user and description
1452 from the patch override values from the message headers and body. Values
1452 from the patch override values from the message headers and body. Values
1453 given on the command line with -m and -u override these.
1453 given on the command line with -m and -u override these.
1454
1454
1455 If --exact is specified, import will set the working directory
1455 If --exact is specified, import will set the working directory
1456 to the parent of each patch before applying it, and will abort
1456 to the parent of each patch before applying it, and will abort
1457 if the resulting changeset has a different ID than the one
1457 if the resulting changeset has a different ID than the one
1458 recorded in the patch. This may happen due to character set
1458 recorded in the patch. This may happen due to character set
1459 problems or other deficiencies in the text patch format.
1459 problems or other deficiencies in the text patch format.
1460
1460
1461 To read a patch from standard input, use patch name "-".
1461 To read a patch from standard input, use patch name "-".
1462 See 'hg help dates' for a list of formats valid for -d/--date.
1462 See 'hg help dates' for a list of formats valid for -d/--date.
1463 """
1463 """
1464 patches = (patch1,) + patches
1464 patches = (patch1,) + patches
1465
1465
1466 date = opts.get('date')
1466 date = opts.get('date')
1467 if date:
1467 if date:
1468 opts['date'] = util.parsedate(date)
1468 opts['date'] = util.parsedate(date)
1469
1469
1470 if opts.get('exact') or not opts['force']:
1470 if opts.get('exact') or not opts['force']:
1471 cmdutil.bail_if_changed(repo)
1471 cmdutil.bail_if_changed(repo)
1472
1472
1473 d = opts["base"]
1473 d = opts["base"]
1474 strip = opts["strip"]
1474 strip = opts["strip"]
1475 wlock = lock = None
1475 wlock = lock = None
1476 try:
1476 try:
1477 wlock = repo.wlock()
1477 wlock = repo.wlock()
1478 lock = repo.lock()
1478 lock = repo.lock()
1479 for p in patches:
1479 for p in patches:
1480 pf = os.path.join(d, p)
1480 pf = os.path.join(d, p)
1481
1481
1482 if pf == '-':
1482 if pf == '-':
1483 ui.status(_("applying patch from stdin\n"))
1483 ui.status(_("applying patch from stdin\n"))
1484 data = patch.extract(ui, sys.stdin)
1484 data = patch.extract(ui, sys.stdin)
1485 else:
1485 else:
1486 ui.status(_("applying %s\n") % p)
1486 ui.status(_("applying %s\n") % p)
1487 if os.path.exists(pf):
1487 if os.path.exists(pf):
1488 data = patch.extract(ui, file(pf, 'rb'))
1488 data = patch.extract(ui, file(pf, 'rb'))
1489 else:
1489 else:
1490 data = patch.extract(ui, urllib.urlopen(pf))
1490 data = patch.extract(ui, urllib.urlopen(pf))
1491 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1491 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1492
1492
1493 if tmpname is None:
1493 if tmpname is None:
1494 raise util.Abort(_('no diffs found'))
1494 raise util.Abort(_('no diffs found'))
1495
1495
1496 try:
1496 try:
1497 cmdline_message = cmdutil.logmessage(opts)
1497 cmdline_message = cmdutil.logmessage(opts)
1498 if cmdline_message:
1498 if cmdline_message:
1499 # pickup the cmdline msg
1499 # pickup the cmdline msg
1500 message = cmdline_message
1500 message = cmdline_message
1501 elif message:
1501 elif message:
1502 # pickup the patch msg
1502 # pickup the patch msg
1503 message = message.strip()
1503 message = message.strip()
1504 else:
1504 else:
1505 # launch the editor
1505 # launch the editor
1506 message = None
1506 message = None
1507 ui.debug(_('message:\n%s\n') % message)
1507 ui.debug(_('message:\n%s\n') % message)
1508
1508
1509 wp = repo.workingctx().parents()
1509 wp = repo.workingctx().parents()
1510 if opts.get('exact'):
1510 if opts.get('exact'):
1511 if not nodeid or not p1:
1511 if not nodeid or not p1:
1512 raise util.Abort(_('not a mercurial patch'))
1512 raise util.Abort(_('not a mercurial patch'))
1513 p1 = repo.lookup(p1)
1513 p1 = repo.lookup(p1)
1514 p2 = repo.lookup(p2 or hex(nullid))
1514 p2 = repo.lookup(p2 or hex(nullid))
1515
1515
1516 if p1 != wp[0].node():
1516 if p1 != wp[0].node():
1517 hg.clean(repo, p1)
1517 hg.clean(repo, p1)
1518 repo.dirstate.setparents(p1, p2)
1518 repo.dirstate.setparents(p1, p2)
1519 elif p2:
1519 elif p2:
1520 try:
1520 try:
1521 p1 = repo.lookup(p1)
1521 p1 = repo.lookup(p1)
1522 p2 = repo.lookup(p2)
1522 p2 = repo.lookup(p2)
1523 if p1 == wp[0].node():
1523 if p1 == wp[0].node():
1524 repo.dirstate.setparents(p1, p2)
1524 repo.dirstate.setparents(p1, p2)
1525 except hg.RepoError:
1525 except hg.RepoError:
1526 pass
1526 pass
1527 if opts.get('exact') or opts.get('import_branch'):
1527 if opts.get('exact') or opts.get('import_branch'):
1528 repo.dirstate.setbranch(branch or 'default')
1528 repo.dirstate.setbranch(branch or 'default')
1529
1529
1530 files = {}
1530 files = {}
1531 try:
1531 try:
1532 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1532 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1533 files=files)
1533 files=files)
1534 finally:
1534 finally:
1535 files = patch.updatedir(ui, repo, files)
1535 files = patch.updatedir(ui, repo, files)
1536 if not opts.get('no_commit'):
1536 if not opts.get('no_commit'):
1537 n = repo.commit(files, message, opts.get('user') or user,
1537 n = repo.commit(files, message, opts.get('user') or user,
1538 opts.get('date') or date)
1538 opts.get('date') or date)
1539 if opts.get('exact'):
1539 if opts.get('exact'):
1540 if hex(n) != nodeid:
1540 if hex(n) != nodeid:
1541 repo.rollback()
1541 repo.rollback()
1542 raise util.Abort(_('patch is damaged'
1542 raise util.Abort(_('patch is damaged'
1543 ' or loses information'))
1543 ' or loses information'))
1544 # Force a dirstate write so that the next transaction
1544 # Force a dirstate write so that the next transaction
1545 # backs up an up-to-date file.
1545 # backs up an up-to-date file.
1546 repo.dirstate.write()
1546 repo.dirstate.write()
1547 finally:
1547 finally:
1548 os.unlink(tmpname)
1548 os.unlink(tmpname)
1549 finally:
1549 finally:
1550 del lock, wlock
1550 del lock, wlock
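# Illustrative sketch (not part of commands.py): the patch-source selection used
# in the import loop above -- "-" reads from stdin and an existing path is
# opened directly (the real code also falls back to urllib for URLs, omitted
# here). The helper name is hypothetical.
import os
import sys

def _open_patch(name, base=''):
    pf = os.path.join(base, name)
    if pf == '-':
        return sys.stdin           # patch piped on standard input
    return open(pf, 'rb')          # patch stored on the local filesystem

# Example: _open_patch('fix.patch', base='/tmp').read() reads /tmp/fix.patch.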
1551
1551
1552 def incoming(ui, repo, source="default", **opts):
1552 def incoming(ui, repo, source="default", **opts):
1553 """show new changesets found in source
1553 """show new changesets found in source
1554
1554
1555 Show new changesets found in the specified path/URL or the default
1555 Show new changesets found in the specified path/URL or the default
1556 pull location. These are the changesets that would be pulled if a pull
1556 pull location. These are the changesets that would be pulled if a pull
1557 was requested.
1557 was requested.
1558
1558
1559 For a remote repository, using --bundle avoids downloading the changesets
1559 For a remote repository, using --bundle avoids downloading the changesets
1560 twice if the incoming is followed by a pull.
1560 twice if the incoming is followed by a pull.
1561
1561
1562 See pull for valid source format details.
1562 See pull for valid source format details.
1563 """
1563 """
1564 limit = cmdutil.loglimit(opts)
1564 limit = cmdutil.loglimit(opts)
1565 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1565 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1566 cmdutil.setremoteconfig(ui, opts)
1566 cmdutil.setremoteconfig(ui, opts)
1567
1567
1568 other = hg.repository(ui, source)
1568 other = hg.repository(ui, source)
1569 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1569 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1570 if revs:
1570 if revs:
1571 revs = [other.lookup(rev) for rev in revs]
1571 revs = [other.lookup(rev) for rev in revs]
1572 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1572 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1573 if not incoming:
1573 if not incoming:
1574 try:
1574 try:
1575 os.unlink(opts["bundle"])
1575 os.unlink(opts["bundle"])
1576 except:
1576 except:
1577 pass
1577 pass
1578 ui.status(_("no changes found\n"))
1578 ui.status(_("no changes found\n"))
1579 return 1
1579 return 1
1580
1580
1581 cleanup = None
1581 cleanup = None
1582 try:
1582 try:
1583 fname = opts["bundle"]
1583 fname = opts["bundle"]
1584 if fname or not other.local():
1584 if fname or not other.local():
1585 # create a bundle (uncompressed if other repo is not local)
1585 # create a bundle (uncompressed if other repo is not local)
1586 if revs is None:
1586 if revs is None:
1587 cg = other.changegroup(incoming, "incoming")
1587 cg = other.changegroup(incoming, "incoming")
1588 else:
1588 else:
1589 cg = other.changegroupsubset(incoming, revs, 'incoming')
1589 cg = other.changegroupsubset(incoming, revs, 'incoming')
1590 bundletype = other.local() and "HG10BZ" or "HG10UN"
1590 bundletype = other.local() and "HG10BZ" or "HG10UN"
1591 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1591 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1592 # keep written bundle?
1592 # keep written bundle?
1593 if opts["bundle"]:
1593 if opts["bundle"]:
1594 cleanup = None
1594 cleanup = None
1595 if not other.local():
1595 if not other.local():
1596 # use the created uncompressed bundlerepo
1596 # use the created uncompressed bundlerepo
1597 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1597 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1598
1598
1599 o = other.changelog.nodesbetween(incoming, revs)[0]
1599 o = other.changelog.nodesbetween(incoming, revs)[0]
1600 if opts['newest_first']:
1600 if opts['newest_first']:
1601 o.reverse()
1601 o.reverse()
1602 displayer = cmdutil.show_changeset(ui, other, opts)
1602 displayer = cmdutil.show_changeset(ui, other, opts)
1603 count = 0
1603 count = 0
1604 for n in o:
1604 for n in o:
1605 if count >= limit:
1605 if count >= limit:
1606 break
1606 break
1607 parents = [p for p in other.changelog.parents(n) if p != nullid]
1607 parents = [p for p in other.changelog.parents(n) if p != nullid]
1608 if opts['no_merges'] and len(parents) == 2:
1608 if opts['no_merges'] and len(parents) == 2:
1609 continue
1609 continue
1610 count += 1
1610 count += 1
1611 displayer.show(changenode=n)
1611 displayer.show(changenode=n)
1612 finally:
1612 finally:
1613 if hasattr(other, 'close'):
1613 if hasattr(other, 'close'):
1614 other.close()
1614 other.close()
1615 if cleanup:
1615 if cleanup:
1616 os.unlink(cleanup)
1616 os.unlink(cleanup)
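# Illustrative sketch (not part of commands.py): the bundle-type choice made
# above when 'hg incoming --bundle' writes a bundle file -- bzip2-compressed
# ("HG10BZ") for a local peer, uncompressed ("HG10UN") otherwise, matching the
# comment in the code.
def _bundle_type(peer_is_local):
    return "HG10BZ" if peer_is_local else "HG10UN"

# Example: _bundle_type(False) returns "HG10UN" (uncompressed, for a remote peer)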
1617
1617
1618 def init(ui, dest=".", **opts):
1618 def init(ui, dest=".", **opts):
1619 """create a new repository in the given directory
1619 """create a new repository in the given directory
1620
1620
1621 Initialize a new repository in the given directory. If the given
1621 Initialize a new repository in the given directory. If the given
1622 directory does not exist, it is created.
1622 directory does not exist, it is created.
1623
1623
1624 If no directory is given, the current directory is used.
1624 If no directory is given, the current directory is used.
1625
1625
1626 It is possible to specify an ssh:// URL as the destination.
1626 It is possible to specify an ssh:// URL as the destination.
1627 Look at the help text for the pull command for important details
1627 Look at the help text for the pull command for important details
1628 about ssh:// URLs.
1628 about ssh:// URLs.
1629 """
1629 """
1630 cmdutil.setremoteconfig(ui, opts)
1630 cmdutil.setremoteconfig(ui, opts)
1631 hg.repository(ui, dest, create=1)
1631 hg.repository(ui, dest, create=1)
1632
1632
1633 def locate(ui, repo, *pats, **opts):
1633 def locate(ui, repo, *pats, **opts):
1634 """locate files matching specific patterns
1634 """locate files matching specific patterns
1635
1635
1636 Print all files under Mercurial control whose names match the
1636 Print all files under Mercurial control whose names match the
1637 given patterns.
1637 given patterns.
1638
1638
1639 This command searches the entire repository by default. To search
1639 This command searches the entire repository by default. To search
1640 just the current directory and its subdirectories, use
1640 just the current directory and its subdirectories, use
1641 "--include .".
1641 "--include .".
1642
1642
1643 If no patterns are given to match, this command prints all file
1643 If no patterns are given to match, this command prints all file
1644 names.
1644 names.
1645
1645
1646 If you want to feed the output of this command into the "xargs"
1646 If you want to feed the output of this command into the "xargs"
1647 command, use the "-0" option to both this command and "xargs".
1647 command, use the "-0" option to both this command and "xargs".
1648 This will avoid the problem of "xargs" treating single filenames
1648 This will avoid the problem of "xargs" treating single filenames
1649 that contain white space as multiple filenames.
1649 that contain white space as multiple filenames.
1650 """
1650 """
1651 end = opts['print0'] and '\0' or '\n'
1651 end = opts['print0'] and '\0' or '\n'
1652 rev = opts['rev']
1652 rev = opts['rev']
1653 if rev:
1653 if rev:
1654 node = repo.lookup(rev)
1654 node = repo.lookup(rev)
1655 else:
1655 else:
1656 node = None
1656 node = None
1657
1657
1658 ret = 1
1658 ret = 1
1659 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1659 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1660 badmatch=util.always,
1660 badmatch=util.always,
1661 default='relglob'):
1661 default='relglob'):
1662 if src == 'b':
1662 if src == 'b':
1663 continue
1663 continue
1664 if not node and abs not in repo.dirstate:
1664 if not node and abs not in repo.dirstate:
1665 continue
1665 continue
1666 if opts['fullpath']:
1666 if opts['fullpath']:
1667 ui.write(os.path.join(repo.root, abs), end)
1667 ui.write(os.path.join(repo.root, abs), end)
1668 else:
1668 else:
1669 ui.write(((pats and rel) or abs), end)
1669 ui.write(((pats and rel) or abs), end)
1670 ret = 0
1670 ret = 0
1671
1671
1672 return ret
1672 return ret
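# Illustrative sketch (not part of commands.py): the -0/--print0 behaviour the
# locate docstring describes -- terminate each name with NUL instead of newline
# so "xargs -0" handles file names that contain whitespace.
def _join_names(names, print0=False):
    end = '\0' if print0 else '\n'
    return ''.join(name + end for name in names)

# Example: _join_names(['a file.txt', 'b.txt'], print0=True)
# returns 'a file.txt\x00b.txt\x00'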
1673
1673
1674 def log(ui, repo, *pats, **opts):
1674 def log(ui, repo, *pats, **opts):
1675 """show revision history of entire repository or files
1675 """show revision history of entire repository or files
1676
1676
1677 Print the revision history of the specified files or the entire
1677 Print the revision history of the specified files or the entire
1678 project.
1678 project.
1679
1679
1680 File history is shown without following rename or copy history of
1680 File history is shown without following rename or copy history of
1681 files. Use -f/--follow with a file name to follow history across
1681 files. Use -f/--follow with a file name to follow history across
1682 renames and copies. --follow without a file name will only show
1682 renames and copies. --follow without a file name will only show
1683 ancestors or descendants of the starting revision. --follow-first
1683 ancestors or descendants of the starting revision. --follow-first
1684 only follows the first parent of merge revisions.
1684 only follows the first parent of merge revisions.
1685
1685
1686 If no revision range is specified, the default is tip:0 unless
1686 If no revision range is specified, the default is tip:0 unless
1687 --follow is set, in which case the working directory parent is
1687 --follow is set, in which case the working directory parent is
1688 used as the starting revision.
1688 used as the starting revision.
1689
1689
1690 See 'hg help dates' for a list of formats valid for -d/--date.
1690 See 'hg help dates' for a list of formats valid for -d/--date.
1691
1691
1692 By default this command outputs: changeset id and hash, tags,
1692 By default this command outputs: changeset id and hash, tags,
1693 non-trivial parents, user, date and time, and a summary for each
1693 non-trivial parents, user, date and time, and a summary for each
1694 commit. When the -v/--verbose switch is used, the list of changed
1694 commit. When the -v/--verbose switch is used, the list of changed
1695 files and the full commit message are shown.
1695 files and the full commit message are shown.
1696
1696
1697 NOTE: log -p may generate unexpected diff output for merge
1697 NOTE: log -p may generate unexpected diff output for merge
1698 changesets, as it will compare the merge changeset against its
1698 changesets, as it will compare the merge changeset against its
1699 first parent only. Also, the files: list will only reflect files
1699 first parent only. Also, the files: list will only reflect files
1700 that are different from BOTH parents.
1700 that are different from BOTH parents.
1701
1701
1702 """
1702 """
1703
1703
1704 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1704 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1705 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1705 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1706
1706
1707 limit = cmdutil.loglimit(opts)
1707 limit = cmdutil.loglimit(opts)
1708 count = 0
1708 count = 0
1709
1709
1710 if opts['copies'] and opts['rev']:
1710 if opts['copies'] and opts['rev']:
1711 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1711 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1712 else:
1712 else:
1713 endrev = repo.changelog.count()
1713 endrev = repo.changelog.count()
1714 rcache = {}
1714 rcache = {}
1715 ncache = {}
1715 ncache = {}
1716 def getrenamed(fn, rev):
1716 def getrenamed(fn, rev):
1717 '''looks up all renames for a file (up to endrev) the first
1717 '''looks up all renames for a file (up to endrev) the first
1718 time the file is given. It indexes on the changerev and only
1718 time the file is given. It indexes on the changerev and only
1719 parses the manifest if linkrev != changerev.
1719 parses the manifest if linkrev != changerev.
1720 Returns rename info for fn at changerev rev.'''
1720 Returns rename info for fn at changerev rev.'''
1721 if fn not in rcache:
1721 if fn not in rcache:
1722 rcache[fn] = {}
1722 rcache[fn] = {}
1723 ncache[fn] = {}
1723 ncache[fn] = {}
1724 fl = repo.file(fn)
1724 fl = repo.file(fn)
1725 for i in xrange(fl.count()):
1725 for i in xrange(fl.count()):
1726 node = fl.node(i)
1726 node = fl.node(i)
1727 lr = fl.linkrev(node)
1727 lr = fl.linkrev(node)
1728 renamed = fl.renamed(node)
1728 renamed = fl.renamed(node)
1729 rcache[fn][lr] = renamed
1729 rcache[fn][lr] = renamed
1730 if renamed:
1730 if renamed:
1731 ncache[fn][node] = renamed
1731 ncache[fn][node] = renamed
1732 if lr >= endrev:
1732 if lr >= endrev:
1733 break
1733 break
1734 if rev in rcache[fn]:
1734 if rev in rcache[fn]:
1735 return rcache[fn][rev]
1735 return rcache[fn][rev]
1736
1736
1737 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1737 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1738 # filectx logic.
1738 # filectx logic.
1739
1739
1740 try:
1740 try:
1741 return repo.changectx(rev).filectx(fn).renamed()
1741 return repo.changectx(rev).filectx(fn).renamed()
1742 except revlog.LookupError:
1742 except revlog.LookupError:
1743 pass
1743 pass
1744 return None
1744 return None
1745
1745
1746 df = False
1746 df = False
1747 if opts["date"]:
1747 if opts["date"]:
1748 df = util.matchdate(opts["date"])
1748 df = util.matchdate(opts["date"])
1749
1749
1750 only_branches = opts['only_branch']
1750 only_branches = opts['only_branch']
1751
1751
1752 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1752 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1753 for st, rev, fns in changeiter:
1753 for st, rev, fns in changeiter:
1754 if st == 'add':
1754 if st == 'add':
1755 changenode = repo.changelog.node(rev)
1755 changenode = repo.changelog.node(rev)
1756 parents = [p for p in repo.changelog.parentrevs(rev)
1756 parents = [p for p in repo.changelog.parentrevs(rev)
1757 if p != nullrev]
1757 if p != nullrev]
1758 if opts['no_merges'] and len(parents) == 2:
1758 if opts['no_merges'] and len(parents) == 2:
1759 continue
1759 continue
1760 if opts['only_merges'] and len(parents) != 2:
1760 if opts['only_merges'] and len(parents) != 2:
1761 continue
1761 continue
1762
1762
1763 if only_branches:
1763 if only_branches:
1764 revbranch = get(rev)[5]['branch']
1764 revbranch = get(rev)[5]['branch']
1765 if revbranch not in only_branches:
1765 if revbranch not in only_branches:
1766 continue
1766 continue
1767
1767
1768 if df:
1768 if df:
1769 changes = get(rev)
1769 changes = get(rev)
1770 if not df(changes[2][0]):
1770 if not df(changes[2][0]):
1771 continue
1771 continue
1772
1772
1773 if opts['keyword']:
1773 if opts['keyword']:
1774 changes = get(rev)
1774 changes = get(rev)
1775 miss = 0
1775 miss = 0
1776 for k in [kw.lower() for kw in opts['keyword']]:
1776 for k in [kw.lower() for kw in opts['keyword']]:
1777 if not (k in changes[1].lower() or
1777 if not (k in changes[1].lower() or
1778 k in changes[4].lower() or
1778 k in changes[4].lower() or
1779 k in " ".join(changes[3]).lower()):
1779 k in " ".join(changes[3]).lower()):
1780 miss = 1
1780 miss = 1
1781 break
1781 break
1782 if miss:
1782 if miss:
1783 continue
1783 continue
1784
1784
1785 copies = []
1785 copies = []
1786 if opts.get('copies') and rev:
1786 if opts.get('copies') and rev:
1787 for fn in get(rev)[3]:
1787 for fn in get(rev)[3]:
1788 rename = getrenamed(fn, rev)
1788 rename = getrenamed(fn, rev)
1789 if rename:
1789 if rename:
1790 copies.append((fn, rename[0]))
1790 copies.append((fn, rename[0]))
1791 displayer.show(rev, changenode, copies=copies)
1791 displayer.show(rev, changenode, copies=copies)
1792 elif st == 'iter':
1792 elif st == 'iter':
1793 if count == limit: break
1793 if count == limit: break
1794 if displayer.flush(rev):
1794 if displayer.flush(rev):
1795 count += 1
1795 count += 1
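# Illustrative sketch (not part of commands.py): the --keyword filter applied in
# the loop above -- a revision is kept only if every keyword occurs, case-
# insensitively, in its committer, its description, or its file list.
def _matches_keywords(keywords, user, description, files):
    haystacks = (user.lower(), description.lower(), " ".join(files).lower())
    return all(any(k.lower() in h for h in haystacks) for k in keywords)

# Example: _matches_keywords(['bug'], 'alice', 'Fix bug in parser', ['parser.py'])
# returns True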
1796
1796
1797 def manifest(ui, repo, node=None, rev=None):
1797 def manifest(ui, repo, node=None, rev=None):
1798 """output the current or given revision of the project manifest
1798 """output the current or given revision of the project manifest
1799
1799
1800 Print a list of version controlled files for the given revision.
1800 Print a list of version controlled files for the given revision.
1801 If no revision is given, the parent of the working directory is used,
1801 If no revision is given, the parent of the working directory is used,
1802 or tip if no revision is checked out.
1802 or tip if no revision is checked out.
1803
1803
1804 The manifest is the list of files being version controlled. If no revision
1804 The manifest is the list of files being version controlled. If no revision
1805 is given then the first parent of the working directory is used.
1805 is given then the first parent of the working directory is used.
1806
1806
1807 With -v flag, print file permissions, symlink and executable bits. With
1807 With -v flag, print file permissions, symlink and executable bits. With
1808 --debug flag, print file revision hashes.
1808 --debug flag, print file revision hashes.
1809 """
1809 """
1810
1810
1811 if rev and node:
1811 if rev and node:
1812 raise util.Abort(_("please specify just one revision"))
1812 raise util.Abort(_("please specify just one revision"))
1813
1813
1814 if not node:
1814 if not node:
1815 node = rev
1815 node = rev
1816
1816
1817 m = repo.changectx(node).manifest()
1817 m = repo.changectx(node).manifest()
1818 files = m.keys()
1818 files = m.keys()
1819 files.sort()
1819 files.sort()
1820
1820
1821 for f in files:
1821 for f in files:
1822 if ui.debugflag:
1822 if ui.debugflag:
1823 ui.write("%40s " % hex(m[f]))
1823 ui.write("%40s " % hex(m[f]))
1824 if ui.verbose:
1824 if ui.verbose:
1825 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1825 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1826 perm = m.execf(f) and "755" or "644"
1826 perm = m.execf(f) and "755" or "644"
1827 ui.write("%3s %1s " % (perm, type))
1827 ui.write("%3s %1s " % (perm, type))
1828 ui.write("%s\n" % f)
1828 ui.write("%s\n" % f)
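# Illustrative sketch (not part of commands.py): the --verbose formatting used
# above -- "755 *" for executable files, "644 @" for symlinks, "644  " for
# regular files, followed by the file name.
def _manifest_flags(is_exec, is_link):
    kind = '*' if is_exec else ('@' if is_link else ' ')
    perm = '755' if is_exec else '644'
    return "%3s %1s " % (perm, kind)

# Example: _manifest_flags(True, False) returns '755 * '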
1829
1829
1830 def merge(ui, repo, node=None, force=None, rev=None):
1830 def merge(ui, repo, node=None, force=None, rev=None):
1831 """merge working directory with another revision
1831 """merge working directory with another revision
1832
1832
1833 Merge the contents of the current working directory and the
1833 Merge the contents of the current working directory and the
1834 requested revision. Files that changed between either parent are
1834 requested revision. Files that changed between either parent are
1835 marked as changed for the next commit and a commit must be
1835 marked as changed for the next commit and a commit must be
1836 performed before any further updates are allowed.
1836 performed before any further updates are allowed.
1837
1837
1838 If no revision is specified, the working directory's parent is a
1838 If no revision is specified, the working directory's parent is a
1839 head revision, and the repository contains exactly one other head,
1839 head revision, and the repository contains exactly one other head,
1840 the other head is merged with by default. Otherwise, an explicit
1840 the other head is merged with by default. Otherwise, an explicit
1841 revision to merge with must be provided.
1841 revision to merge with must be provided.
1842 """
1842 """
1843
1843
1844 if rev and node:
1844 if rev and node:
1845 raise util.Abort(_("please specify just one revision"))
1845 raise util.Abort(_("please specify just one revision"))
1846 if not node:
1846 if not node:
1847 node = rev
1847 node = rev
1848
1848
1849 if not node:
1849 if not node:
1850 heads = repo.heads()
1850 heads = repo.heads()
1851 if len(heads) > 2:
1851 if len(heads) > 2:
1852 raise util.Abort(_('repo has %d heads - '
1852 raise util.Abort(_('repo has %d heads - '
1853 'please merge with an explicit rev') %
1853 'please merge with an explicit rev') %
1854 len(heads))
1854 len(heads))
1855 parent = repo.dirstate.parents()[0]
1855 parent = repo.dirstate.parents()[0]
1856 if len(heads) == 1:
1856 if len(heads) == 1:
1857 msg = _('there is nothing to merge')
1857 msg = _('there is nothing to merge')
1858 if parent != repo.lookup(repo.workingctx().branch()):
1858 if parent != repo.lookup(repo.workingctx().branch()):
1859 msg = _('%s - use "hg update" instead') % msg
1859 msg = _('%s - use "hg update" instead') % msg
1860 raise util.Abort(msg)
1860 raise util.Abort(msg)
1861
1861
1862 if parent not in heads:
1862 if parent not in heads:
1863 raise util.Abort(_('working dir not at a head rev - '
1863 raise util.Abort(_('working dir not at a head rev - '
1864 'use "hg update" or merge with an explicit rev'))
1864 'use "hg update" or merge with an explicit rev'))
1865 node = parent == heads[0] and heads[-1] or heads[0]
1865 node = parent == heads[0] and heads[-1] or heads[0]
1866 return hg.merge(repo, node, force=force)
1866 return hg.merge(repo, node, force=force)
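# Illustrative sketch (not part of commands.py): how the implicit merge target
# is picked above when no revision is given -- with exactly two heads, merge
# with whichever head is not the working directory's parent.
def _other_head(heads, parent):
    assert len(heads) == 2 and parent in heads
    return heads[1] if parent == heads[0] else heads[0]

# Example: _other_head(['a1b2', 'c3d4'], 'a1b2') returns 'c3d4'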
1867
1867
1868 def outgoing(ui, repo, dest=None, **opts):
1868 def outgoing(ui, repo, dest=None, **opts):
1869 """show changesets not found in destination
1869 """show changesets not found in destination
1870
1870
1871 Show changesets not found in the specified destination repository or
1871 Show changesets not found in the specified destination repository or
1872 the default push location. These are the changesets that would be pushed
1872 the default push location. These are the changesets that would be pushed
1873 if a push was requested.
1873 if a push was requested.
1874
1874
1875 See pull for valid destination format details.
1875 See pull for valid destination format details.
1876 """
1876 """
1877 limit = cmdutil.loglimit(opts)
1877 limit = cmdutil.loglimit(opts)
1878 dest, revs, checkout = hg.parseurl(
1878 dest, revs, checkout = hg.parseurl(
1879 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1879 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1880 cmdutil.setremoteconfig(ui, opts)
1880 cmdutil.setremoteconfig(ui, opts)
1881 if revs:
1881 if revs:
1882 revs = [repo.lookup(rev) for rev in revs]
1882 revs = [repo.lookup(rev) for rev in revs]
1883
1883
1884 other = hg.repository(ui, dest)
1884 other = hg.repository(ui, dest)
1885 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1885 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1886 o = repo.findoutgoing(other, force=opts['force'])
1886 o = repo.findoutgoing(other, force=opts['force'])
1887 if not o:
1887 if not o:
1888 ui.status(_("no changes found\n"))
1888 ui.status(_("no changes found\n"))
1889 return 1
1889 return 1
1890 o = repo.changelog.nodesbetween(o, revs)[0]
1890 o = repo.changelog.nodesbetween(o, revs)[0]
1891 if opts['newest_first']:
1891 if opts['newest_first']:
1892 o.reverse()
1892 o.reverse()
1893 displayer = cmdutil.show_changeset(ui, repo, opts)
1893 displayer = cmdutil.show_changeset(ui, repo, opts)
1894 count = 0
1894 count = 0
1895 for n in o:
1895 for n in o:
1896 if count >= limit:
1896 if count >= limit:
1897 break
1897 break
1898 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1898 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1899 if opts['no_merges'] and len(parents) == 2:
1899 if opts['no_merges'] and len(parents) == 2:
1900 continue
1900 continue
1901 count += 1
1901 count += 1
1902 displayer.show(changenode=n)
1902 displayer.show(changenode=n)
1903
1903
1904 def parents(ui, repo, file_=None, **opts):
1904 def parents(ui, repo, file_=None, **opts):
1905 """show the parents of the working dir or revision
1905 """show the parents of the working dir or revision
1906
1906
1907 Print the working directory's parent revisions. If a
1907 Print the working directory's parent revisions. If a
1908 revision is given via --rev, the parent of that revision
1908 revision is given via --rev, the parent of that revision
1909 will be printed. If a file argument is given, the revision in
1909 will be printed. If a file argument is given, the revision in
1910 which the file was last changed (before the working directory
1910 which the file was last changed (before the working directory
1911 revision, or the argument to --rev if given) is printed.
1911 revision, or the argument to --rev if given) is printed.
1912 """
1912 """
1913 rev = opts.get('rev')
1913 rev = opts.get('rev')
1914 if rev:
1914 if rev:
1915 ctx = repo.changectx(rev)
1915 ctx = repo.changectx(rev)
1916 else:
1916 else:
1917 ctx = repo.workingctx()
1917 ctx = repo.workingctx()
1918
1918
1919 if file_:
1919 if file_:
1920 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1920 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1921 if anypats or len(files) != 1:
1921 if anypats or len(files) != 1:
1922 raise util.Abort(_('can only specify an explicit file name'))
1922 raise util.Abort(_('can only specify an explicit file name'))
1923 file_ = files[0]
1923 file_ = files[0]
1924 filenodes = []
1924 filenodes = []
1925 for cp in ctx.parents():
1925 for cp in ctx.parents():
1926 if not cp:
1926 if not cp:
1927 continue
1927 continue
1928 try:
1928 try:
1929 filenodes.append(cp.filenode(file_))
1929 filenodes.append(cp.filenode(file_))
1930 except revlog.LookupError:
1930 except revlog.LookupError:
1931 pass
1931 pass
1932 if not filenodes:
1932 if not filenodes:
1933 raise util.Abort(_("'%s' not found in manifest!") % file_)
1933 raise util.Abort(_("'%s' not found in manifest!") % file_)
1934 fl = repo.file(file_)
1934 fl = repo.file(file_)
1935 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1935 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1936 else:
1936 else:
1937 p = [cp.node() for cp in ctx.parents()]
1937 p = [cp.node() for cp in ctx.parents()]
1938
1938
1939 displayer = cmdutil.show_changeset(ui, repo, opts)
1939 displayer = cmdutil.show_changeset(ui, repo, opts)
1940 for n in p:
1940 for n in p:
1941 if n != nullid:
1941 if n != nullid:
1942 displayer.show(changenode=n)
1942 displayer.show(changenode=n)
1943
1943
1944 def paths(ui, repo, search=None):
1944 def paths(ui, repo, search=None):
1945 """show definition of symbolic path names
1945 """show definition of symbolic path names
1946
1946
1947 Show definition of symbolic path name NAME. If no name is given, show
1947 Show definition of symbolic path name NAME. If no name is given, show
1948 definition of available names.
1948 definition of available names.
1949
1949
1950 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1950 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1951 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1951 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1952 """
1952 """
1953 if search:
1953 if search:
1954 for name, path in ui.configitems("paths"):
1954 for name, path in ui.configitems("paths"):
1955 if name == search:
1955 if name == search:
1956 ui.write("%s\n" % path)
1956 ui.write("%s\n" % path)
1957 return
1957 return
1958 ui.warn(_("not found!\n"))
1958 ui.warn(_("not found!\n"))
1959 return 1
1959 return 1
1960 else:
1960 else:
1961 for name, path in ui.configitems("paths"):
1961 for name, path in ui.configitems("paths"):
1962 ui.write("%s = %s\n" % (name, path))
1962 ui.write("%s = %s\n" % (name, path))
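# Illustrative sketch (not part of commands.py): the lookup performed above,
# with the [paths] section given as (name, path) pairs -- either resolve one
# symbolic name or list every definition.
def _show_paths(path_items, search=None):
    if search is None:
        return ["%s = %s" % (name, path) for name, path in path_items]
    return [path for name, path in path_items if name == search]

# Example: _show_paths([('default', 'http://example.com/repo')], 'default')
# returns ['http://example.com/repo']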
1963
1963
1964 def postincoming(ui, repo, modheads, optupdate, checkout):
1964 def postincoming(ui, repo, modheads, optupdate, checkout):
1965 if modheads == 0:
1965 if modheads == 0:
1966 return
1966 return
1967 if optupdate:
1967 if optupdate:
1968 if modheads <= 1 or checkout:
1968 if modheads <= 1 or checkout:
1969 return hg.update(repo, checkout)
1969 return hg.update(repo, checkout)
1970 else:
1970 else:
1971 ui.status(_("not updating, since new heads added\n"))
1971 ui.status(_("not updating, since new heads added\n"))
1972 if modheads > 1:
1972 if modheads > 1:
1973 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1973 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1974 else:
1974 else:
1975 ui.status(_("(run 'hg update' to get a working copy)\n"))
1975 ui.status(_("(run 'hg update' to get a working copy)\n"))
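# Illustrative sketch (not part of commands.py): the decision postincoming()
# makes after a pull, reduced to a pure function -- with --update the working
# copy is updated only when at most one head was added (or a checkout was
# named); otherwise the user is pointed at 'hg heads'/'hg merge' or 'hg update'.
def _post_pull_action(modheads, optupdate, checkout):
    if modheads == 0:
        return 'nothing to do'
    if optupdate and (modheads <= 1 or checkout):
        return 'update working copy'
    if modheads > 1:
        return "suggest 'hg heads' and 'hg merge'"
    return "suggest 'hg update'"

# Example: _post_pull_action(2, True, None) returns "suggest 'hg heads' and 'hg merge'"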
1976
1976
1977 def pull(ui, repo, source="default", **opts):
1977 def pull(ui, repo, source="default", **opts):
1978 """pull changes from the specified source
1978 """pull changes from the specified source
1979
1979
1980 Pull changes from a remote repository to a local one.
1980 Pull changes from a remote repository to a local one.
1981
1981
1982 This finds all changes from the repository at the specified path
1982 This finds all changes from the repository at the specified path
1983 or URL and adds them to the local repository. By default, this
1983 or URL and adds them to the local repository. By default, this
1984 does not update the copy of the project in the working directory.
1984 does not update the copy of the project in the working directory.
1985
1985
1986 Valid URLs are of the form:
1986 Valid URLs are of the form:
1987
1987
1988 local/filesystem/path (or file://local/filesystem/path)
1988 local/filesystem/path (or file://local/filesystem/path)
1989 http://[user@]host[:port]/[path]
1989 http://[user@]host[:port]/[path]
1990 https://[user@]host[:port]/[path]
1990 https://[user@]host[:port]/[path]
1991 ssh://[user@]host[:port]/[path]
1991 ssh://[user@]host[:port]/[path]
1992 static-http://host[:port]/[path]
1992 static-http://host[:port]/[path]
1993
1993
1994 Paths in the local filesystem can either point to Mercurial
1994 Paths in the local filesystem can either point to Mercurial
1995 repositories or to bundle files (as created by 'hg bundle' or
1995 repositories or to bundle files (as created by 'hg bundle' or
1996 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1996 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1997 allows access to a Mercurial repository where you simply use a web
1997 allows access to a Mercurial repository where you simply use a web
1998 server to publish the .hg directory as static content.
1998 server to publish the .hg directory as static content.
1999
1999
2000 An optional identifier after # indicates a particular branch, tag,
2000 An optional identifier after # indicates a particular branch, tag,
2001 or changeset to pull.
2001 or changeset to pull.
2002
2002
2003 Some notes about using SSH with Mercurial:
2003 Some notes about using SSH with Mercurial:
2004 - SSH requires an accessible shell account on the destination machine
2004 - SSH requires an accessible shell account on the destination machine
2005 and a copy of hg in the remote path, or one specified with the --remotecmd option.
2005 and a copy of hg in the remote path, or one specified with the --remotecmd option.
2006 - path is relative to the remote user's home directory by default.
2006 - path is relative to the remote user's home directory by default.
2007 Use an extra slash at the start of a path to specify an absolute path:
2007 Use an extra slash at the start of a path to specify an absolute path:
2008 ssh://example.com//tmp/repository
2008 ssh://example.com//tmp/repository
2009 - Mercurial doesn't use its own compression via SSH; the right thing
2009 - Mercurial doesn't use its own compression via SSH; the right thing
2010 to do is to configure it in your ~/.ssh/config, e.g.:
2010 to do is to configure it in your ~/.ssh/config, e.g.:
2011 Host *.mylocalnetwork.example.com
2011 Host *.mylocalnetwork.example.com
2012 Compression no
2012 Compression no
2013 Host *
2013 Host *
2014 Compression yes
2014 Compression yes
2015 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2015 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2016 with the --ssh command line option.
2016 with the --ssh command line option.
2017 """
2017 """
2018 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
2018 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
2019 cmdutil.setremoteconfig(ui, opts)
2019 cmdutil.setremoteconfig(ui, opts)
2020
2020
2021 other = hg.repository(ui, source)
2021 other = hg.repository(ui, source)
2022 ui.status(_('pulling from %s\n') % util.hidepassword(source))
2022 ui.status(_('pulling from %s\n') % util.hidepassword(source))
2023 if revs:
2023 if revs:
2024 try:
2024 try:
2025 revs = [other.lookup(rev) for rev in revs]
2025 revs = [other.lookup(rev) for rev in revs]
2026 except repo.NoCapability:
2026 except repo.NoCapability:
2027 error = _("Other repository doesn't support revision lookup, "
2027 error = _("Other repository doesn't support revision lookup, "
2028 "so a rev cannot be specified.")
2028 "so a rev cannot be specified.")
2029 raise util.Abort(error)
2029 raise util.Abort(error)
2030
2030
2031 modheads = repo.pull(other, heads=revs, force=opts['force'])
2031 modheads = repo.pull(other, heads=revs, force=opts['force'])
2032 return postincoming(ui, repo, modheads, opts['update'], checkout)
2032 return postincoming(ui, repo, modheads, opts['update'], checkout)
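# Illustrative sketch (not part of commands.py): the "#identifier" convention
# the pull docstring describes -- split the fragment off a source URL to get
# the repository location plus an optional branch/tag/changeset. A simplified
# stand-in for hg.parseurl(), not its real signature.
def _split_fragment(url):
    if '#' in url:
        base, ident = url.rsplit('#', 1)
        return base, ident
    return url, None

# Example: _split_fragment('http://example.com/repo#stable')
# returns ('http://example.com/repo', 'stable')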
2033
2033
2034 def push(ui, repo, dest=None, **opts):
2034 def push(ui, repo, dest=None, **opts):
2035 """push changes to the specified destination
2035 """push changes to the specified destination
2036
2036
2037 Push changes from the local repository to the given destination.
2037 Push changes from the local repository to the given destination.
2038
2038
2039 This is the symmetrical operation for pull. It helps to move
2039 This is the symmetrical operation for pull. It helps to move
2040 changes from the current repository to a different one. If the
2040 changes from the current repository to a different one. If the
2041 destination is local this is identical to a pull in that directory
2041 destination is local this is identical to a pull in that directory
2042 from the current one.
2042 from the current one.
2043
2043
2044 By default, push will refuse to run if it detects the result would
2044 By default, push will refuse to run if it detects the result would
2045 increase the number of remote heads. This generally indicates that
2045 increase the number of remote heads. This generally indicates that
2046 the client has forgotten to sync and merge before pushing.
2046 the client has forgotten to sync and merge before pushing.
2047
2047
2048 Valid URLs are of the form:
2048 Valid URLs are of the form:
2049
2049
2050 local/filesystem/path (or file://local/filesystem/path)
2050 local/filesystem/path (or file://local/filesystem/path)
2051 ssh://[user@]host[:port]/[path]
2051 ssh://[user@]host[:port]/[path]
2052 http://[user@]host[:port]/[path]
2052 http://[user@]host[:port]/[path]
2053 https://[user@]host[:port]/[path]
2053 https://[user@]host[:port]/[path]
2054
2054
2055 An optional identifier after # indicates a particular branch, tag,
2055 An optional identifier after # indicates a particular branch, tag,
2056 or changeset to push.
2056 or changeset to push.
2057
2057
2058 Look at the help text for the pull command for important details
2058 Look at the help text for the pull command for important details
2059 about ssh:// URLs.
2059 about ssh:// URLs.
2060
2060
2061 Pushing to http:// and https:// URLs is only possible if this
2061 Pushing to http:// and https:// URLs is only possible if this
2062 feature is explicitly enabled on the remote Mercurial server.
2062 feature is explicitly enabled on the remote Mercurial server.
2063 """
2063 """
2064 dest, revs, checkout = hg.parseurl(
2064 dest, revs, checkout = hg.parseurl(
2065 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2065 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2066 cmdutil.setremoteconfig(ui, opts)
2066 cmdutil.setremoteconfig(ui, opts)
2067
2067
2068 other = hg.repository(ui, dest)
2068 other = hg.repository(ui, dest)
2069 ui.status('pushing to %s\n' % util.hidepassword(dest))
2069 ui.status('pushing to %s\n' % util.hidepassword(dest))
2070 if revs:
2070 if revs:
2071 revs = [repo.lookup(rev) for rev in revs]
2071 revs = [repo.lookup(rev) for rev in revs]
2072 r = repo.push(other, opts['force'], revs=revs)
2072 r = repo.push(other, opts['force'], revs=revs)
2073 return r == 0
2073 return r == 0
2074
2074
2075 def rawcommit(ui, repo, *pats, **opts):
2075 def rawcommit(ui, repo, *pats, **opts):
2076 """raw commit interface (DEPRECATED)
2076 """raw commit interface (DEPRECATED)
2077
2077
2078 (DEPRECATED)
2078 (DEPRECATED)
2079 Lowlevel commit, for use in helper scripts.
2079 Lowlevel commit, for use in helper scripts.
2080
2080
2081 This command is not intended to be used by normal users, as it is
2081 This command is not intended to be used by normal users, as it is
2082 primarily useful for importing from other SCMs.
2082 primarily useful for importing from other SCMs.
2083
2083
2084 This command is now deprecated and will be removed in a future
2084 This command is now deprecated and will be removed in a future
2085 release, please use debugsetparents and commit instead.
2085 release, please use debugsetparents and commit instead.
2086 """
2086 """
2087
2087
2088 ui.warn(_("(the rawcommit command is deprecated)\n"))
2088 ui.warn(_("(the rawcommit command is deprecated)\n"))
2089
2089
2090 message = cmdutil.logmessage(opts)
2090 message = cmdutil.logmessage(opts)
2091
2091
2092 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2092 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2093 if opts['files']:
2093 if opts['files']:
2094 files += open(opts['files']).read().splitlines()
2094 files += open(opts['files']).read().splitlines()
2095
2095
2096 parents = [repo.lookup(p) for p in opts['parent']]
2096 parents = [repo.lookup(p) for p in opts['parent']]
2097
2097
2098 try:
2098 try:
2099 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2099 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2100 except ValueError, inst:
2100 except ValueError, inst:
2101 raise util.Abort(str(inst))
2101 raise util.Abort(str(inst))
2102
2102
2103 def recover(ui, repo):
2103 def recover(ui, repo):
2104 """roll back an interrupted transaction
2104 """roll back an interrupted transaction
2105
2105
2106 Recover from an interrupted commit or pull.
2106 Recover from an interrupted commit or pull.
2107
2107
2108 This command tries to fix the repository status after an interrupted
2108 This command tries to fix the repository status after an interrupted
2109 operation. It should only be necessary when Mercurial suggests it.
2109 operation. It should only be necessary when Mercurial suggests it.
2110 """
2110 """
2111 if repo.recover():
2111 if repo.recover():
2112 return hg.verify(repo)
2112 return hg.verify(repo)
2113 return 1
2113 return 1
2114
2114
2115 def remove(ui, repo, *pats, **opts):
2115 def remove(ui, repo, *pats, **opts):
2116 """remove the specified files on the next commit
2116 """remove the specified files on the next commit
2117
2117
2118 Schedule the indicated files for removal from the repository.
2118 Schedule the indicated files for removal from the repository.
2119
2119
2120 This only removes files from the current branch, not from the
2120 This only removes files from the current branch, not from the
2121 entire project history. If the files still exist in the working
2121 entire project history. If the files still exist in the working
2122 directory, they will be deleted from it. If invoked with --after,
2122 directory, they will be deleted from it. If invoked with --after,
2123 files are marked as removed, but not actually unlinked unless --force
2123 files are marked as removed, but not actually unlinked unless --force
2124 is also given. Without exact file names, --after will only mark
2124 is also given. Without exact file names, --after will only mark
2125 files as removed if they are no longer in the working directory.
2125 files as removed if they are no longer in the working directory.
2126
2126
2127 This command schedules the files to be removed at the next commit.
2127 This command schedules the files to be removed at the next commit.
2128 To undo a remove before that, see hg revert.
2128 To undo a remove before that, see hg revert.
2129
2129
2130 Modified files and added files are not removed by default. To
2130 Modified files and added files are not removed by default. To
2131 remove them, use the -f/--force option.
2131 remove them, use the -f/--force option.
2132 """
2132 """
2133 if not opts['after'] and not pats:
2133 if not opts['after'] and not pats:
2134 raise util.Abort(_('no files specified'))
2134 raise util.Abort(_('no files specified'))
2135 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2135 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2136 exact = dict.fromkeys(files)
2136 exact = dict.fromkeys(files)
2137 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2137 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2138 modified, added, removed, deleted, unknown = mardu
2138 modified, added, removed, deleted, unknown = mardu
2139 remove, forget = [], []
2139 remove, forget = [], []
2140 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2140 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2141 reason = None
2141 reason = None
2142 if abs in modified and not opts['force']:
2142 if abs in modified and not opts['force']:
2143 reason = _('is modified (use -f to force removal)')
2143 reason = _('is modified (use -f to force removal)')
2144 elif abs in added:
2144 elif abs in added:
2145 if opts['force']:
2145 if opts['force']:
2146 forget.append(abs)
2146 forget.append(abs)
2147 continue
2147 continue
2148 reason = _('has been marked for add (use -f to force removal)')
2148 reason = _('has been marked for add (use -f to force removal)')
2149 exact = 1 # force the message
2149 exact = 1 # force the message
2150 elif abs not in repo.dirstate:
2150 elif abs not in repo.dirstate:
2151 reason = _('is not managed')
2151 reason = _('is not managed')
2152 elif opts['after'] and not exact and abs not in deleted:
2152 elif opts['after'] and not exact and abs not in deleted:
2153 continue
2153 continue
2154 elif abs in removed:
2154 elif abs in removed:
2155 continue
2155 continue
2156 if reason:
2156 if reason:
2157 if exact:
2157 if exact:
2158 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2158 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2159 else:
2159 else:
2160 if ui.verbose or not exact:
2160 if ui.verbose or not exact:
2161 ui.status(_('removing %s\n') % rel)
2161 ui.status(_('removing %s\n') % rel)
2162 remove.append(abs)
2162 remove.append(abs)
2163 repo.forget(forget)
2163 repo.forget(forget)
2164 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2164 repo.remove(remove, unlink=opts['force'] or not opts['after'])
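# Illustrative sketch (not part of commands.py): the per-file rules applied in
# the removal walk above, reduced to a pure function. State names are
# illustrative, and the --after/exact interplay is simplified.
def _remove_action(state, force=False, after=False):
    """Return 'remove', 'forget', None (skip), or a reason for refusing."""
    if state == 'modified' and not force:
        return 'is modified (use -f to force removal)'
    if state == 'added':
        if force:
            return 'forget'
        return 'has been marked for add (use -f to force removal)'
    if state == 'unmanaged':
        return 'is not managed'
    if state == 'removed':
        return None                      # already scheduled, nothing to do
    if after and state != 'deleted':
        return None                      # --after only removes missing files
    return 'remove'

# Example: _remove_action('added', force=True) returns 'forget'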
2165
2165
2166 def rename(ui, repo, *pats, **opts):
2166 def rename(ui, repo, *pats, **opts):
2167 """rename files; equivalent of copy + remove
2167 """rename files; equivalent of copy + remove
2168
2168
2169 Mark dest as copies of sources; mark sources for deletion. If
2169 Mark dest as copies of sources; mark sources for deletion. If
2170 dest is a directory, copies are put in that directory. If dest is
2170 dest is a directory, copies are put in that directory. If dest is
2171 a file, there can only be one source.
2171 a file, there can only be one source.
2172
2172
2173 By default, this command copies the contents of files as they
2173 By default, this command copies the contents of files as they
2174 stand in the working directory. If invoked with --after, the
2174 stand in the working directory. If invoked with --after, the
2175 operation is recorded, but no copying is performed.
2175 operation is recorded, but no copying is performed.
2176
2176
2177 This command takes effect in the next commit. To undo a rename
2177 This command takes effect in the next commit. To undo a rename
2178 before that, see hg revert.
2178 before that, see hg revert.
2179 """
2179 """
2180 wlock = repo.wlock(False)
2180 wlock = repo.wlock(False)
2181 try:
2181 try:
2182 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2182 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2183 finally:
2183 finally:
2184 del wlock
2184 del wlock
2185
2185
2186 def revert(ui, repo, *pats, **opts):
2186 def revert(ui, repo, *pats, **opts):
2187 """restore individual files or dirs to an earlier state
2187 """restore individual files or dirs to an earlier state
2188
2188
2189 (use update -r to check out earlier revisions, revert does not
2189 (use update -r to check out earlier revisions, revert does not
2190 change the working dir parents)
2190 change the working dir parents)
2191
2191
2192 With no revision specified, revert the named files or directories
2192 With no revision specified, revert the named files or directories
2193 to the contents they had in the parent of the working directory.
2193 to the contents they had in the parent of the working directory.
2194 This restores the contents of the affected files to an unmodified
2194 This restores the contents of the affected files to an unmodified
2195 state and unschedules adds, removes, copies, and renames. If the
2195 state and unschedules adds, removes, copies, and renames. If the
2196 working directory has two parents, you must explicitly specify the
2196 working directory has two parents, you must explicitly specify the
2197 revision to revert to.
2197 revision to revert to.
2198
2198
2199 Using the -r option, revert the given files or directories to their
2199 Using the -r option, revert the given files or directories to their
2200 contents as of a specific revision. This can be helpful to "roll
2200 contents as of a specific revision. This can be helpful to "roll
2201 back" some or all of an earlier change.
2201 back" some or all of an earlier change.
2202 See 'hg help dates' for a list of formats valid for -d/--date.
2202 See 'hg help dates' for a list of formats valid for -d/--date.
2203
2203
2204 Revert modifies the working directory. It does not commit any
2204 Revert modifies the working directory. It does not commit any
2205 changes, or change the parent of the working directory. If you
2205 changes, or change the parent of the working directory. If you
2206 revert to a revision other than the parent of the working
2206 revert to a revision other than the parent of the working
2207 directory, the reverted files will thus appear modified
2207 directory, the reverted files will thus appear modified
2208 afterwards.
2208 afterwards.
2209
2209
2210 If a file has been deleted, it is restored. If the executable
2210 If a file has been deleted, it is restored. If the executable
2211 mode of a file was changed, it is reset.
2211 mode of a file was changed, it is reset.
2212
2212
2213 If names are given, all files matching the names are reverted.
2213 If names are given, all files matching the names are reverted.
2214 If no arguments are given, no files are reverted.
2214 If no arguments are given, no files are reverted.
2215
2215
2216 Modified files are saved with a .orig suffix before reverting.
2216 Modified files are saved with a .orig suffix before reverting.
2217 To disable these backups, use --no-backup.
2217 To disable these backups, use --no-backup.
2218 """
2218 """
2219
2219
2220 if opts["date"]:
2220 if opts["date"]:
2221 if opts["rev"]:
2221 if opts["rev"]:
2222 raise util.Abort(_("you can't specify a revision and a date"))
2222 raise util.Abort(_("you can't specify a revision and a date"))
2223 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2223 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2224
2224
2225 if not pats and not opts['all']:
2225 if not pats and not opts['all']:
2226 raise util.Abort(_('no files or directories specified; '
2226 raise util.Abort(_('no files or directories specified; '
2227 'use --all to revert the whole repo'))
2227 'use --all to revert the whole repo'))
2228
2228
2229 parent, p2 = repo.dirstate.parents()
2229 parent, p2 = repo.dirstate.parents()
2230 if not opts['rev'] and p2 != nullid:
2230 if not opts['rev'] and p2 != nullid:
2231 raise util.Abort(_('uncommitted merge - please provide a '
2231 raise util.Abort(_('uncommitted merge - please provide a '
2232 'specific revision'))
2232 'specific revision'))
2233 ctx = repo.changectx(opts['rev'])
2233 ctx = repo.changectx(opts['rev'])
2234 node = ctx.node()
2234 node = ctx.node()
2235 mf = ctx.manifest()
2235 mf = ctx.manifest()
2236 if node == parent:
2236 if node == parent:
2237 pmf = mf
2237 pmf = mf
2238 else:
2238 else:
2239 pmf = None
2239 pmf = None
2240
2240
2241 # need all matching names in dirstate and manifest of target rev,
2241 # need all matching names in dirstate and manifest of target rev,
2242 # so have to walk both. do not print errors if files exist in one
2242 # so have to walk both. do not print errors if files exist in one
2243 # but not other.
2243 # but not other.
2244
2244
2245 names = {}
2245 names = {}
2246
2246
2247 wlock = repo.wlock()
2247 wlock = repo.wlock()
2248 try:
2248 try:
2249 # walk dirstate.
2249 # walk dirstate.
2250 files = []
2250 files = []
2251 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2251 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2252 badmatch=mf.has_key):
2252 badmatch=mf.has_key):
2253 names[abs] = (rel, exact)
2253 names[abs] = (rel, exact)
2254 if src != 'b':
2254 if src != 'b':
2255 files.append(abs)
2255 files.append(abs)
2256
2256
2257 # walk target manifest.
2257 # walk target manifest.
2258
2258
2259 def badmatch(path):
2259 def badmatch(path):
2260 if path in names:
2260 if path in names:
2261 return True
2261 return True
2262 path_ = path + '/'
2262 path_ = path + '/'
2263 for f in names:
2263 for f in names:
2264 if f.startswith(path_):
2264 if f.startswith(path_):
2265 return True
2265 return True
2266 return False
2266 return False
2267
2267
2268 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2268 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2269 badmatch=badmatch):
2269 badmatch=badmatch):
2270 if abs in names or src == 'b':
2270 if abs in names or src == 'b':
2271 continue
2271 continue
2272 names[abs] = (rel, exact)
2272 names[abs] = (rel, exact)
2273
2273
2274 changes = repo.status(files=files, match=names.has_key)[:4]
2274 changes = repo.status(files=files, match=names.has_key)[:4]
2275 modified, added, removed, deleted = map(dict.fromkeys, changes)
2275 modified, added, removed, deleted = map(dict.fromkeys, changes)
2276
2276
2277 # if f is a rename, also revert the source
2277 # if f is a rename, also revert the source
2278 cwd = repo.getcwd()
2278 cwd = repo.getcwd()
2279 for f in added:
2279 for f in added:
2280 src = repo.dirstate.copied(f)
2280 src = repo.dirstate.copied(f)
2281 if src and src not in names and repo.dirstate[src] == 'r':
2281 if src and src not in names and repo.dirstate[src] == 'r':
2282 removed[src] = None
2282 removed[src] = None
2283 names[src] = (repo.pathto(src, cwd), True)
2283 names[src] = (repo.pathto(src, cwd), True)
2284
2284
2285 def removeforget(abs):
2285 def removeforget(abs):
2286 if repo.dirstate[abs] == 'a':
2286 if repo.dirstate[abs] == 'a':
2287 return _('forgetting %s\n')
2287 return _('forgetting %s\n')
2288 return _('removing %s\n')
2288 return _('removing %s\n')
2289
2289
2290 revert = ([], _('reverting %s\n'))
2290 revert = ([], _('reverting %s\n'))
2291 add = ([], _('adding %s\n'))
2291 add = ([], _('adding %s\n'))
2292 remove = ([], removeforget)
2292 remove = ([], removeforget)
2293 undelete = ([], _('undeleting %s\n'))
2293 undelete = ([], _('undeleting %s\n'))
2294
2294
2295 disptable = (
2295 disptable = (
2296 # dispatch table:
2296 # dispatch table:
2297 # file state
2297 # file state
2298 # action if in target manifest
2298 # action if in target manifest
2299 # action if not in target manifest
2299 # action if not in target manifest
2300 # make backup if in target manifest
2300 # make backup if in target manifest
2301 # make backup if not in target manifest
2301 # make backup if not in target manifest
2302 (modified, revert, remove, True, True),
2302 (modified, revert, remove, True, True),
2303 (added, revert, remove, True, False),
2303 (added, revert, remove, True, False),
2304 (removed, undelete, None, False, False),
2304 (removed, undelete, None, False, False),
2305 (deleted, revert, remove, False, False),
2305 (deleted, revert, remove, False, False),
2306 )
2306 )
2307
2307
2308 entries = names.items()
2308 entries = names.items()
2309 entries.sort()
2309 entries.sort()
2310
2310
2311 for abs, (rel, exact) in entries:
2311 for abs, (rel, exact) in entries:
2312 mfentry = mf.get(abs)
2312 mfentry = mf.get(abs)
2313 target = repo.wjoin(abs)
2313 target = repo.wjoin(abs)
2314 def handle(xlist, dobackup):
2314 def handle(xlist, dobackup):
2315 xlist[0].append(abs)
2315 xlist[0].append(abs)
2316 if dobackup and not opts['no_backup'] and util.lexists(target):
2316 if dobackup and not opts['no_backup'] and util.lexists(target):
2317 bakname = "%s.orig" % rel
2317 bakname = "%s.orig" % rel
2318 ui.note(_('saving current version of %s as %s\n') %
2318 ui.note(_('saving current version of %s as %s\n') %
2319 (rel, bakname))
2319 (rel, bakname))
2320 if not opts.get('dry_run'):
2320 if not opts.get('dry_run'):
2321 util.copyfile(target, bakname)
2321 util.copyfile(target, bakname)
2322 if ui.verbose or not exact:
2322 if ui.verbose or not exact:
2323 msg = xlist[1]
2323 msg = xlist[1]
2324 if not isinstance(msg, basestring):
2324 if not isinstance(msg, basestring):
2325 msg = msg(abs)
2325 msg = msg(abs)
2326 ui.status(msg % rel)
2326 ui.status(msg % rel)
2327 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2327 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2328 if abs not in table: continue
2328 if abs not in table: continue
2329 # file has changed in dirstate
2329 # file has changed in dirstate
2330 if mfentry:
2330 if mfentry:
2331 handle(hitlist, backuphit)
2331 handle(hitlist, backuphit)
2332 elif misslist is not None:
2332 elif misslist is not None:
2333 handle(misslist, backupmiss)
2333 handle(misslist, backupmiss)
2334 break
2334 break
2335 else:
2335 else:
2336 if abs not in repo.dirstate:
2336 if abs not in repo.dirstate:
2337 if mfentry:
2337 if mfentry:
2338 handle(add, True)
2338 handle(add, True)
2339 elif exact:
2339 elif exact:
2340 ui.warn(_('file not managed: %s\n') % rel)
2340 ui.warn(_('file not managed: %s\n') % rel)
2341 continue
2341 continue
2342 # file has not changed in dirstate
2342 # file has not changed in dirstate
2343 if node == parent:
2343 if node == parent:
2344 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2344 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2345 continue
2345 continue
2346 if pmf is None:
2346 if pmf is None:
2347 # only need parent manifest in this unlikely case,
2347 # only need parent manifest in this unlikely case,
2348 # so do not read by default
2348 # so do not read by default
2349 pmf = repo.changectx(parent).manifest()
2349 pmf = repo.changectx(parent).manifest()
2350 if abs in pmf:
2350 if abs in pmf:
2351 if mfentry:
2351 if mfentry:
2352 # if version of file is same in parent and target
2352 # if version of file is same in parent and target
2353 # manifests, do nothing
2353 # manifests, do nothing
2354 if (pmf[abs] != mfentry or
2354 if (pmf[abs] != mfentry or
2355 pmf.flags(abs) != mf.flags(abs)):
2355 pmf.flags(abs) != mf.flags(abs)):
2356 handle(revert, False)
2356 handle(revert, False)
2357 else:
2357 else:
2358 handle(remove, False)
2358 handle(remove, False)
2359
2359
2360 if not opts.get('dry_run'):
2360 if not opts.get('dry_run'):
2361 def checkout(f):
2361 def checkout(f):
2362 fc = ctx[f]
2362 fc = ctx[f]
2363 repo.wwrite(f, fc.data(), fc.fileflags())
2363 repo.wwrite(f, fc.data(), fc.fileflags())
2364
2364
2365 audit_path = util.path_auditor(repo.root)
2365 audit_path = util.path_auditor(repo.root)
2366 for f in remove[0]:
2366 for f in remove[0]:
2367 if repo.dirstate[f] == 'a':
2367 if repo.dirstate[f] == 'a':
2368 repo.dirstate.forget(f)
2368 repo.dirstate.forget(f)
2369 continue
2369 continue
2370 audit_path(f)
2370 audit_path(f)
2371 try:
2371 try:
2372 util.unlink(repo.wjoin(f))
2372 util.unlink(repo.wjoin(f))
2373 except OSError:
2373 except OSError:
2374 pass
2374 pass
2375 repo.dirstate.remove(f)
2375 repo.dirstate.remove(f)
2376
2376
2377 for f in revert[0]:
2377 for f in revert[0]:
2378 checkout(f)
2378 checkout(f)
2379
2379
2380 for f in add[0]:
2380 for f in add[0]:
2381 checkout(f)
2381 checkout(f)
2382 repo.dirstate.add(f)
2382 repo.dirstate.add(f)
2383
2383
2384 normal = repo.dirstate.normallookup
2384 normal = repo.dirstate.normallookup
2385 if node == parent and p2 == nullid:
2385 if node == parent and p2 == nullid:
2386 normal = repo.dirstate.normal
2386 normal = repo.dirstate.normal
2387 for f in undelete[0]:
2387 for f in undelete[0]:
2388 checkout(f)
2388 checkout(f)
2389 normal(f)
2389 normal(f)
2390
2390
2391 finally:
2391 finally:
2392 del wlock
2392 del wlock
2393
2393
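# Editor's note: the dispatch table above maps each dirstate status to the
# action taken when the file is, or is not, present in the target manifest,
# plus whether a .orig backup is made.  The following is a standalone,
# illustrative sketch of that pattern only -- it is not part of commands.py
# and the helper name is hypothetical.
def _revert_plan(status, in_target):
    # each row: (action if file in target manifest, action if not,
    #            backup on hit, backup on miss)
    disptable = {
        'modified': ('revert',   'remove', True,  True),
        'added':    ('revert',   'remove', True,  False),
        'removed':  ('undelete', None,     False, False),
        'deleted':  ('revert',   'remove', False, False),
    }
    hit, miss, bakhit, bakmiss = disptable[status]
    if in_target:
        return hit, bakhit
    return miss, bakmiss

# e.g. _revert_plan('added', False) -> ('remove', False): a file added in the
# working directory but absent from the target revision is removed, no backup.
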
2394 def rollback(ui, repo):
2394 def rollback(ui, repo):
2395 """roll back the last transaction
2395 """roll back the last transaction
2396
2396
2397 This command should be used with care. There is only one level of
2397 This command should be used with care. There is only one level of
2398 rollback, and there is no way to undo a rollback. It will also
2398 rollback, and there is no way to undo a rollback. It will also
2399 restore the dirstate at the time of the last transaction, losing
2399 restore the dirstate at the time of the last transaction, losing
2400 any dirstate changes since that time.
2400 any dirstate changes since that time.
2401
2401
2402 Transactions are used to encapsulate the effects of all commands
2402 Transactions are used to encapsulate the effects of all commands
2403 that create new changesets or propagate existing changesets into a
2403 that create new changesets or propagate existing changesets into a
2404 repository. For example, the following commands are transactional,
2404 repository. For example, the following commands are transactional,
2405 and their effects can be rolled back:
2405 and their effects can be rolled back:
2406
2406
2407 commit
2407 commit
2408 import
2408 import
2409 pull
2409 pull
2410 push (with this repository as destination)
2410 push (with this repository as destination)
2411 unbundle
2411 unbundle
2412
2412
2413 This command is not intended for use on public repositories. Once
2413 This command is not intended for use on public repositories. Once
2414 changes are visible for pull by other users, rolling a transaction
2414 changes are visible for pull by other users, rolling a transaction
2415 back locally is ineffective (someone else may already have pulled
2415 back locally is ineffective (someone else may already have pulled
2416 the changes). Furthermore, a race is possible with readers of the
2416 the changes). Furthermore, a race is possible with readers of the
2417 repository; for example, an in-progress pull from the repository
2417 repository; for example, an in-progress pull from the repository
2418 may fail if a rollback is performed.
2418 may fail if a rollback is performed.
2419 """
2419 """
2420 repo.rollback()
2420 repo.rollback()
2421
2421
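# Editor's note: "one level of rollback" means only the most recent
# transaction can be undone, and undoing it consumes the undo information.
# A toy, purely illustrative sketch of that behaviour (not Mercurial code):
class _onelevelundo(object):
    def __init__(self):
        self.state = 0
        self.undo = None
    def transact(self, newstate):
        # a transaction records the previous state so it can be rolled back
        self.undo = self.state
        self.state = newstate
    def rollback(self):
        if self.undo is None:
            raise Exception('no rollback information available')
        self.state, self.undo = self.undo, None
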
2422 def root(ui, repo):
2422 def root(ui, repo):
2423 """print the root (top) of the current working dir
2423 """print the root (top) of the current working dir
2424
2424
2425 Print the root directory of the current repository.
2425 Print the root directory of the current repository.
2426 """
2426 """
2427 ui.write(repo.root + "\n")
2427 ui.write(repo.root + "\n")
2428
2428
2429 def serve(ui, repo, **opts):
2429 def serve(ui, repo, **opts):
2430 """export the repository via HTTP
2430 """export the repository via HTTP
2431
2431
2432 Start a local HTTP repository browser and pull server.
2432 Start a local HTTP repository browser and pull server.
2433
2433
2434 By default, the server logs accesses to stdout and errors to
2434 By default, the server logs accesses to stdout and errors to
2435 stderr. Use the "-A" and "-E" options to log to files.
2435 stderr. Use the "-A" and "-E" options to log to files.
2436 """
2436 """
2437
2437
2438 if opts["stdio"]:
2438 if opts["stdio"]:
2439 if repo is None:
2439 if repo is None:
2440 raise hg.RepoError(_("There is no Mercurial repository here"
2440 raise hg.RepoError(_("There is no Mercurial repository here"
2441 " (.hg not found)"))
2441 " (.hg not found)"))
2442 s = sshserver.sshserver(ui, repo)
2442 s = sshserver.sshserver(ui, repo)
2443 s.serve_forever()
2443 s.serve_forever()
2444
2444
2445 parentui = ui.parentui or ui
2445 parentui = ui.parentui or ui
2446 optlist = ("name templates style address port prefix ipv6"
2446 optlist = ("name templates style address port prefix ipv6"
2447 " accesslog errorlog webdir_conf certificate")
2447 " accesslog errorlog webdir_conf certificate")
2448 for o in optlist.split():
2448 for o in optlist.split():
2449 if opts[o]:
2449 if opts[o]:
2450 parentui.setconfig("web", o, str(opts[o]))
2450 parentui.setconfig("web", o, str(opts[o]))
2451 if (repo is not None) and (repo.ui != parentui):
2451 if (repo is not None) and (repo.ui != parentui):
2452 repo.ui.setconfig("web", o, str(opts[o]))
2452 repo.ui.setconfig("web", o, str(opts[o]))
2453
2453
2454 if repo is None and not ui.config("web", "webdir_conf"):
2454 if repo is None and not ui.config("web", "webdir_conf"):
2455 raise hg.RepoError(_("There is no Mercurial repository here"
2455 raise hg.RepoError(_("There is no Mercurial repository here"
2456 " (.hg not found)"))
2456 " (.hg not found)"))
2457
2457
2458 class service:
2458 class service:
2459 def init(self):
2459 def init(self):
2460 util.set_signal_handler()
2460 util.set_signal_handler()
2461 try:
2461 try:
2462 self.httpd = hgweb.server.create_server(parentui, repo)
2462 self.httpd = hgweb.server.create_server(parentui, repo)
2463 except socket.error, inst:
2463 except socket.error, inst:
2464 raise util.Abort(_('cannot start server: ') + inst.args[1])
2464 raise util.Abort(_('cannot start server: ') + inst.args[1])
2465
2465
2466 if not ui.verbose: return
2466 if not ui.verbose: return
2467
2467
2468 if self.httpd.prefix:
2468 if self.httpd.prefix:
2469 prefix = self.httpd.prefix.strip('/') + '/'
2469 prefix = self.httpd.prefix.strip('/') + '/'
2470 else:
2470 else:
2471 prefix = ''
2471 prefix = ''
2472
2472
2473 if self.httpd.port != 80:
2473 if self.httpd.port != 80:
2474 ui.status(_('listening at http://%s:%d/%s\n') %
2474 ui.status(_('listening at http://%s:%d/%s\n') %
2475 (self.httpd.addr, self.httpd.port, prefix))
2475 (self.httpd.addr, self.httpd.port, prefix))
2476 else:
2476 else:
2477 ui.status(_('listening at http://%s/%s\n') %
2477 ui.status(_('listening at http://%s/%s\n') %
2478 (self.httpd.addr, prefix))
2478 (self.httpd.addr, prefix))
2479
2479
2480 def run(self):
2480 def run(self):
2481 self.httpd.serve_forever()
2481 self.httpd.serve_forever()
2482
2482
2483 service = service()
2483 service = service()
2484
2484
2485 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2485 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2486
2486
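# Editor's note: serve() hands cmdutil.service() an object with init() and
# run() methods; init() does the setup (here: creating the HTTP server) and
# run() enters the blocking main loop.  A minimal standalone sketch of that
# shape follows, with hypothetical names -- it is not the real cmdutil API.
class _exampleservice(object):
    def init(self):
        # acquire resources here (sockets, signal handlers, ...)
        self.jobs = [1, 2, 3]
    def run(self):
        # blocking main loop
        for job in self.jobs:
            pass

def _runservice(svc):
    # stand-in for cmdutil.service(opts, initfn=svc.init, runfn=svc.run)
    svc.init()
    svc.run()

_runservice(_exampleservice())
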
2487 def status(ui, repo, *pats, **opts):
2487 def status(ui, repo, *pats, **opts):
2488 """show changed files in the working directory
2488 """show changed files in the working directory
2489
2489
2490 Show status of files in the repository. If names are given, only
2490 Show status of files in the repository. If names are given, only
2491 files that match are shown. Files that are clean or ignored or
2491 files that match are shown. Files that are clean or ignored or
2492 source of a copy/move operation are not listed unless -c (clean),
2492 source of a copy/move operation are not listed unless -c (clean),
2493 -i (ignored), -C (copies) or -A is given. Unless options described
2493 -i (ignored), -C (copies) or -A is given. Unless options described
2494 with "show only ..." are given, the options -mardu are used.
2494 with "show only ..." are given, the options -mardu are used.
2495
2495
2496 Option -q/--quiet hides untracked (unknown and ignored) files
2496 Option -q/--quiet hides untracked (unknown and ignored) files
2497 unless explicitly requested with -u/--unknown or -i/--ignored.
2497 unless explicitly requested with -u/--unknown or -i/--ignored.
2498
2498
2499 NOTE: status may appear to disagree with diff if permissions have
2499 NOTE: status may appear to disagree with diff if permissions have
2500 changed or a merge has occurred. The standard diff format does not
2500 changed or a merge has occurred. The standard diff format does not
2501 report permission changes and diff only reports changes relative
2501 report permission changes and diff only reports changes relative
2502 to one merge parent.
2502 to one merge parent.
2503
2503
2504 If one revision is given, it is used as the base revision.
2504 If one revision is given, it is used as the base revision.
2505 If two revisions are given, the difference between them is shown.
2505 If two revisions are given, the difference between them is shown.
2506
2506
2507 The codes used to show the status of files are:
2507 The codes used to show the status of files are:
2508 M = modified
2508 M = modified
2509 A = added
2509 A = added
2510 R = removed
2510 R = removed
2511 C = clean
2511 C = clean
2512 ! = deleted, but still tracked
2512 ! = deleted, but still tracked
2513 ? = not tracked
2513 ? = not tracked
2514 I = ignored
2514 I = ignored
2515 = the previous added file was copied from here
2515 = the previous added file was copied from here
2516 """
2516 """
2517
2517
2518 all = opts['all']
2518 all = opts['all']
2519 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2519 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2520
2520
2521 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2521 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2522 cwd = (pats and repo.getcwd()) or ''
2522 cwd = (pats and repo.getcwd()) or ''
2523 modified, added, removed, deleted, unknown, ignored, clean = [
2523 modified, added, removed, deleted, unknown, ignored, clean = [
2524 n for n in repo.status(node1=node1, node2=node2, files=files,
2524 n for n in repo.status(node1=node1, node2=node2, files=files,
2525 match=matchfn,
2525 match=matchfn,
2526 list_ignored=opts['ignored']
2526 list_ignored=opts['ignored']
2527 or all and not ui.quiet,
2527 or all and not ui.quiet,
2528 list_clean=opts['clean'] or all,
2528 list_clean=opts['clean'] or all,
2529 list_unknown=opts['unknown']
2529 list_unknown=opts['unknown']
2530 or not (ui.quiet or
2530 or not (ui.quiet or
2531 opts['modified'] or
2531 opts['modified'] or
2532 opts['added'] or
2532 opts['added'] or
2533 opts['removed'] or
2533 opts['removed'] or
2534 opts['deleted'] or
2534 opts['deleted'] or
2535 opts['ignored']))]
2535 opts['ignored']))]
2536
2536
2537 changetypes = (('modified', 'M', modified),
2537 changetypes = (('modified', 'M', modified),
2538 ('added', 'A', added),
2538 ('added', 'A', added),
2539 ('removed', 'R', removed),
2539 ('removed', 'R', removed),
2540 ('deleted', '!', deleted),
2540 ('deleted', '!', deleted),
2541 ('unknown', '?', unknown),
2541 ('unknown', '?', unknown),
2542 ('ignored', 'I', ignored))
2542 ('ignored', 'I', ignored))
2543
2543
2544 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2544 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2545
2545
2546 end = opts['print0'] and '\0' or '\n'
2546 end = opts['print0'] and '\0' or '\n'
2547
2547
2548 for opt, char, changes in ([ct for ct in explicit_changetypes
2548 for opt, char, changes in ([ct for ct in explicit_changetypes
2549 if all or opts[ct[0]]]
2549 if all or opts[ct[0]]]
2550 or changetypes):
2550 or changetypes):
2551
2551
2552 if opts['no_status']:
2552 if opts['no_status']:
2553 format = "%%s%s" % end
2553 format = "%%s%s" % end
2554 else:
2554 else:
2555 format = "%s %%s%s" % (char, end)
2555 format = "%s %%s%s" % (char, end)
2556
2556
2557 for f in changes:
2557 for f in changes:
2558 ui.write(format % repo.pathto(f, cwd))
2558 ui.write(format % repo.pathto(f, cwd))
2559 if ((all or opts.get('copies')) and not opts.get('no_status')):
2559 if ((all or opts.get('copies')) and not opts.get('no_status')):
2560 copied = repo.dirstate.copied(f)
2560 copied = repo.dirstate.copied(f)
2561 if copied:
2561 if copied:
2562 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2562 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2563
2563
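# Editor's note: the status lines above are produced with a two-stage format
# string -- the status letter and terminator are substituted first, leaving a
# "%s" slot for the file path.  A small illustrative sketch (hypothetical
# helper, not part of commands.py):
def _statusline(char, path, end='\n'):
    # "%s %%s%s" % ('M', '\n')  ->  "M %s\n"; then the path fills the slot
    fmt = "%s %%s%s" % (char, end)
    return fmt % path

# _statusline('M', 'mercurial/commands.py') -> 'M mercurial/commands.py\n'
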
2564 def tag(ui, repo, name, rev_=None, **opts):
2564 def tag(ui, repo, name, rev_=None, **opts):
2565 """add a tag for the current or given revision
2565 """add a tag for the current or given revision
2566
2566
2567 Name a particular revision using <name>.
2567 Name a particular revision using <name>.
2568
2568
2569 Tags are used to name particular revisions of the repository and are
2569 Tags are used to name particular revisions of the repository and are
2570 very useful to compare different revisions, to go back to significant
2570 very useful to compare different revisions, to go back to significant
2571 earlier versions or to mark branch points as releases, etc.
2571 earlier versions or to mark branch points as releases, etc.
2572
2572
2573 If no revision is given, the parent of the working directory is used,
2573 If no revision is given, the parent of the working directory is used,
2574 or tip if no revision is checked out.
2574 or tip if no revision is checked out.
2575
2575
2576 To facilitate version control, distribution, and merging of tags,
2576 To facilitate version control, distribution, and merging of tags,
2577 they are stored as a file named ".hgtags" which is managed
2577 they are stored as a file named ".hgtags" which is managed
2578 similarly to other project files and can be hand-edited if
2578 similarly to other project files and can be hand-edited if
2579 necessary. The file '.hg/localtags' is used for local tags (not
2579 necessary. The file '.hg/localtags' is used for local tags (not
2580 shared among repositories).
2580 shared among repositories).
2581
2581
2582 See 'hg help dates' for a list of formats valid for -d/--date.
2582 See 'hg help dates' for a list of formats valid for -d/--date.
2583 """
2583 """
2584 if name in ['tip', '.', 'null']:
2584 if name in ['tip', '.', 'null']:
2585 raise util.Abort(_("the name '%s' is reserved") % name)
2585 raise util.Abort(_("the name '%s' is reserved") % name)
2586 if rev_ is not None:
2586 if rev_ is not None:
2587 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2587 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2588 "please use 'hg tag [-r REV] NAME' instead\n"))
2588 "please use 'hg tag [-r REV] NAME' instead\n"))
2589 if opts['rev']:
2589 if opts['rev']:
2590 raise util.Abort(_("use only one form to specify the revision"))
2590 raise util.Abort(_("use only one form to specify the revision"))
2591 if opts['rev'] and opts['remove']:
2591 if opts['rev'] and opts['remove']:
2592 raise util.Abort(_("--rev and --remove are incompatible"))
2592 raise util.Abort(_("--rev and --remove are incompatible"))
2593 if opts['rev']:
2593 if opts['rev']:
2594 rev_ = opts['rev']
2594 rev_ = opts['rev']
2595 message = opts['message']
2595 message = opts['message']
2596 if opts['remove']:
2596 if opts['remove']:
2597 tagtype = repo.tagtype(name)
2597 tagtype = repo.tagtype(name)
2598
2598
2599 if not tagtype:
2599 if not tagtype:
2600 raise util.Abort(_('tag %s does not exist') % name)
2600 raise util.Abort(_('tag %s does not exist') % name)
2601 if opts['local'] and tagtype == 'global':
2601 if opts['local'] and tagtype == 'global':
2602 raise util.Abort(_('%s tag is global') % name)
2602 raise util.Abort(_('%s tag is global') % name)
2603 if not opts['local'] and tagtype == 'local':
2603 if not opts['local'] and tagtype == 'local':
2604 raise util.Abort(_('%s tag is local') % name)
2604 raise util.Abort(_('%s tag is local') % name)
2605
2605
2606 rev_ = nullid
2606 rev_ = nullid
2607 if not message:
2607 if not message:
2608 message = _('Removed tag %s') % name
2608 message = _('Removed tag %s') % name
2609 elif name in repo.tags() and not opts['force']:
2609 elif name in repo.tags() and not opts['force']:
2610 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2610 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2611 % name)
2611 % name)
2612 if not rev_ and repo.dirstate.parents()[1] != nullid:
2612 if not rev_ and repo.dirstate.parents()[1] != nullid:
2613 raise util.Abort(_('uncommitted merge - please provide a '
2613 raise util.Abort(_('uncommitted merge - please provide a '
2614 'specific revision'))
2614 'specific revision'))
2615 r = repo.changectx(rev_).node()
2615 r = repo.changectx(rev_).node()
2616
2616
2617 if not message:
2617 if not message:
2618 message = _('Added tag %s for changeset %s') % (name, short(r))
2618 message = _('Added tag %s for changeset %s') % (name, short(r))
2619
2619
2620 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2620 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2621
2621
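# Editor's note: a compact restatement of the argument checks above, as a
# standalone sketch (hypothetical helper, not part of commands.py): the tag
# name must not shadow a reserved symbol, and the revision may be given
# positionally or with -r, but not both.
def _checktagargs(name, rev_, optrev):
    if name in ('tip', '.', 'null'):
        raise ValueError("the name '%s' is reserved" % name)
    if rev_ is not None and optrev:
        raise ValueError('use only one form to specify the revision')
    return optrev or rev_
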
2622 def tags(ui, repo):
2622 def tags(ui, repo):
2623 """list repository tags
2623 """list repository tags
2624
2624
2625 List the repository tags.
2625 List the repository tags.
2626
2626
2627 This lists both regular and local tags. When the -v/--verbose switch
2627 This lists both regular and local tags. When the -v/--verbose switch
2628 is used, a third column "local" is printed for local tags.
2628 is used, a third column "local" is printed for local tags.
2629 """
2629 """
2630
2630
2631 l = repo.tagslist()
2631 l = repo.tagslist()
2632 l.reverse()
2632 l.reverse()
2633 hexfunc = ui.debugflag and hex or short
2633 hexfunc = ui.debugflag and hex or short
2634 tagtype = ""
2634 tagtype = ""
2635
2635
2636 for t, n in l:
2636 for t, n in l:
2637 if ui.quiet:
2637 if ui.quiet:
2638 ui.write("%s\n" % t)
2638 ui.write("%s\n" % t)
2639 continue
2639 continue
2640
2640
2641 try:
2641 try:
2642 hn = hexfunc(n)
2642 hn = hexfunc(n)
2643 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2643 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2644 except revlog.LookupError:
2644 except revlog.LookupError:
2645 r = " ?:%s" % hn
2645 r = " ?:%s" % hn
2646 else:
2646 else:
2647 spaces = " " * (30 - util.locallen(t))
2647 spaces = " " * (30 - util.locallen(t))
2648 if ui.verbose:
2648 if ui.verbose:
2649 if repo.tagtype(t) == 'local':
2649 if repo.tagtype(t) == 'local':
2650 tagtype = " local"
2650 tagtype = " local"
2651 else:
2651 else:
2652 tagtype = ""
2652 tagtype = ""
2653 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2653 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2654
2654
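# Editor's note: the listing above pads tag names to a 30-character column
# before the "rev:node" field.  util.locallen is locale-aware; the purely
# illustrative sketch below uses plain len(), and the helper name is
# hypothetical.
def _tagline(tag, rev, shortnode):
    spaces = " " * max(30 - len(tag), 0)
    return "%s%s %5d:%s" % (tag, spaces, rev, shortnode)

# e.g. _tagline('tip', 2048, 'a1b2c3d4e5f6')
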
2655 def tip(ui, repo, **opts):
2655 def tip(ui, repo, **opts):
2656 """show the tip revision
2656 """show the tip revision
2657
2657
2658 Show the tip revision.
2658 Show the tip revision.
2659 """
2659 """
2660 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2660 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2661
2661
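# Editor's note: nullrev is -1, so nullrev + repo.changelog.count() is simply
# "number of changesets minus one", i.e. the revision number of tip.
# Illustrative arithmetic only; the count below is hypothetical.
nullrev_example = -1
changeset_count = 1500
tiprev_example = nullrev_example + changeset_count   # -> 1499, the tip revision
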
2662 def unbundle(ui, repo, fname1, *fnames, **opts):
2662 def unbundle(ui, repo, fname1, *fnames, **opts):
2663 """apply one or more changegroup files
2663 """apply one or more changegroup files
2664
2664
2665 Apply one or more compressed changegroup files generated by the
2665 Apply one or more compressed changegroup files generated by the
2666 bundle command.
2666 bundle command.
2667 """
2667 """
2668 fnames = (fname1,) + fnames
2668 fnames = (fname1,) + fnames
2669
2669
2670 lock = None
2670 lock = None
2671 try:
2671 try:
2672 lock = repo.lock()
2672 lock = repo.lock()
2673 for fname in fnames:
2673 for fname in fnames:
2674 if os.path.exists(fname):
2674 if os.path.exists(fname):
2675 f = open(fname, "rb")
2675 f = open(fname, "rb")
2676 else:
2676 else:
2677 f = urllib.urlopen(fname)
2677 f = urllib.urlopen(fname)
2678 gen = changegroup.readbundle(f, fname)
2678 gen = changegroup.readbundle(f, fname)
2679 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2679 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2680 finally:
2680 finally:
2681 del lock
2681 del lock
2682
2682
2683 return postincoming(ui, repo, modheads, opts['update'], None)
2683 return postincoming(ui, repo, modheads, opts['update'], None)
2684
2684
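# Editor's note: bundle names may be local paths or URLs; the loop above opens
# whichever applies.  A standalone sketch of just that choice (Python 2 urllib,
# matching the code above; the helper name is hypothetical):
import os, urllib

def _openbundle(fname):
    if os.path.exists(fname):
        return open(fname, "rb")
    return urllib.urlopen(fname)
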
2685 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2685 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2686 """update working directory
2686 """update working directory
2687
2687
2688 Update the working directory to the specified revision, or the
2688 Update the working directory to the specified revision, or the
2689 tip of the current branch if none is specified.
2689 tip of the current branch if none is specified.
2690 See 'hg help dates' for a list of formats valid for -d/--date.
2690 See 'hg help dates' for a list of formats valid for -d/--date.
2691
2691
2692 If there are no outstanding changes in the working directory and
2692 If there are no outstanding changes in the working directory and
2693 there is a linear relationship between the current version and the
2693 there is a linear relationship between the current version and the
2694 requested version, the result is the requested version.
2694 requested version, the result is the requested version.
2695
2695
2696 To merge the working directory with another revision, use the
2696 To merge the working directory with another revision, use the
2697 merge command.
2697 merge command.
2698
2698
2699 By default, update will refuse to run if doing so would require
2699 By default, update will refuse to run if doing so would require
2700 discarding local changes.
2700 discarding local changes.
2701 """
2701 """
2702 if rev and node:
2702 if rev and node:
2703 raise util.Abort(_("please specify just one revision"))
2703 raise util.Abort(_("please specify just one revision"))
2704
2704
2705 if not rev:
2705 if not rev:
2706 rev = node
2706 rev = node
2707
2707
2708 if date:
2708 if date:
2709 if rev:
2709 if rev:
2710 raise util.Abort(_("you can't specify a revision and a date"))
2710 raise util.Abort(_("you can't specify a revision and a date"))
2711 rev = cmdutil.finddate(ui, repo, date)
2711 rev = cmdutil.finddate(ui, repo, date)
2712
2712
2713 if clean:
2713 if clean:
2714 return hg.clean(repo, rev)
2714 return hg.clean(repo, rev)
2715 else:
2715 else:
2716 return hg.update(repo, rev)
2716 return hg.update(repo, rev)
2717
2717
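# Editor's note: a condensed sketch of the option handling above (hypothetical
# helper; finddate stands in for cmdutil.finddate): a positional revision and
# -r are interchangeable but exclusive, --date resolves to a revision, and
# --clean picks the discarding code path.
def _updateplan(node, rev, clean, date, finddate):
    if rev and node:
        raise ValueError('please specify just one revision')
    if not rev:
        rev = node
    if date:
        if rev:
            raise ValueError("you can't specify a revision and a date")
        rev = finddate(date)
    if clean:
        return ('clean', rev)
    return ('update', rev)
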
2718 def verify(ui, repo):
2718 def verify(ui, repo):
2719 """verify the integrity of the repository
2719 """verify the integrity of the repository
2720
2720
2721 Verify the integrity of the current repository.
2721 Verify the integrity of the current repository.
2722
2722
2723 This will perform an extensive check of the repository's
2723 This will perform an extensive check of the repository's
2724 integrity, validating the hashes and checksums of each entry in
2724 integrity, validating the hashes and checksums of each entry in
2725 the changelog, manifest, and tracked files, as well as the
2725 the changelog, manifest, and tracked files, as well as the
2726 integrity of their crosslinks and indices.
2726 integrity of their crosslinks and indices.
2727 """
2727 """
2728 return hg.verify(repo)
2728 return hg.verify(repo)
2729
2729
2730 def version_(ui):
2730 def version_(ui):
2731 """output version and copyright information"""
2731 """output version and copyright information"""
2732 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2732 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2733 % version.get_version())
2733 % version.get_version())
2734 ui.status(_(
2734 ui.status(_(
2735 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2735 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2736 "This is free software; see the source for copying conditions. "
2736 "This is free software; see the source for copying conditions. "
2737 "There is NO\nwarranty; "
2737 "There is NO\nwarranty; "
2738 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2738 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2739 ))
2739 ))
2740
2740
2741 # Command options and aliases are listed here, alphabetically
2741 # Command options and aliases are listed here, alphabetically
2742
2742
2743 globalopts = [
2743 globalopts = [
2744 ('R', 'repository', '',
2744 ('R', 'repository', '',
2745 _('repository root directory or symbolic path name')),
2745 _('repository root directory or symbolic path name')),
2746 ('', 'cwd', '', _('change working directory')),
2746 ('', 'cwd', '', _('change working directory')),
2747 ('y', 'noninteractive', None,
2747 ('y', 'noninteractive', None,
2748 _('do not prompt, assume \'yes\' for any required answers')),
2748 _('do not prompt, assume \'yes\' for any required answers')),
2749 ('q', 'quiet', None, _('suppress output')),
2749 ('q', 'quiet', None, _('suppress output')),
2750 ('v', 'verbose', None, _('enable additional output')),
2750 ('v', 'verbose', None, _('enable additional output')),
2751 ('', 'config', [], _('set/override config option')),
2751 ('', 'config', [], _('set/override config option')),
2752 ('', 'debug', None, _('enable debugging output')),
2752 ('', 'debug', None, _('enable debugging output')),
2753 ('', 'debugger', None, _('start debugger')),
2753 ('', 'debugger', None, _('start debugger')),
2754 ('', 'encoding', util._encoding, _('set the charset encoding')),
2754 ('', 'encoding', util._encoding, _('set the charset encoding')),
2755 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2755 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2756 ('', 'lsprof', None, _('print improved command execution profile')),
2756 ('', 'lsprof', None, _('print improved command execution profile')),
2757 ('', 'traceback', None, _('print traceback on exception')),
2757 ('', 'traceback', None, _('print traceback on exception')),
2758 ('', 'time', None, _('time how long the command takes')),
2758 ('', 'time', None, _('time how long the command takes')),
2759 ('', 'profile', None, _('print command execution profile')),
2759 ('', 'profile', None, _('print command execution profile')),
2760 ('', 'version', None, _('output version information and exit')),
2760 ('', 'version', None, _('output version information and exit')),
2761 ('h', 'help', None, _('display help and exit')),
2761 ('h', 'help', None, _('display help and exit')),
2762 ]
2762 ]
2763
2763
2764 dryrunopts = [('n', 'dry-run', None,
2764 dryrunopts = [('n', 'dry-run', None,
2765 _('do not perform actions, just print output'))]
2765 _('do not perform actions, just print output'))]
2766
2766
2767 remoteopts = [
2767 remoteopts = [
2768 ('e', 'ssh', '', _('specify ssh command to use')),
2768 ('e', 'ssh', '', _('specify ssh command to use')),
2769 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2769 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2770 ]
2770 ]
2771
2771
2772 walkopts = [
2772 walkopts = [
2773 ('I', 'include', [], _('include names matching the given patterns')),
2773 ('I', 'include', [], _('include names matching the given patterns')),
2774 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2774 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2775 ]
2775 ]
2776
2776
2777 commitopts = [
2777 commitopts = [
2778 ('m', 'message', '', _('use <text> as commit message')),
2778 ('m', 'message', '', _('use <text> as commit message')),
2779 ('l', 'logfile', '', _('read commit message from <file>')),
2779 ('l', 'logfile', '', _('read commit message from <file>')),
2780 ]
2780 ]
2781
2781
2782 commitopts2 = [
2782 commitopts2 = [
2783 ('d', 'date', '', _('record datecode as commit date')),
2783 ('d', 'date', '', _('record datecode as commit date')),
2784 ('u', 'user', '', _('record user as committer')),
2784 ('u', 'user', '', _('record user as committer')),
2785 ]
2785 ]
2786
2786
2787 templateopts = [
2787 templateopts = [
2788 ('', 'style', '', _('display using template map file')),
2788 ('', 'style', '', _('display using template map file')),
2789 ('', 'template', '', _('display with template')),
2789 ('', 'template', '', _('display with template')),
2790 ]
2790 ]
2791
2791
2792 logopts = [
2792 logopts = [
2793 ('p', 'patch', None, _('show patch')),
2793 ('p', 'patch', None, _('show patch')),
2794 ('l', 'limit', '', _('limit number of changes displayed')),
2794 ('l', 'limit', '', _('limit number of changes displayed')),
2795 ('M', 'no-merges', None, _('do not show merges')),
2795 ('M', 'no-merges', None, _('do not show merges')),
2796 ] + templateopts
2796 ] + templateopts
2797
2797
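# Editor's note: each option below is a (short flag, long name, default, help)
# tuple; shared groups such as walkopts or remoteopts are concatenated onto a
# command's own list.  Mercurial parses these with its own option parser; the
# sketch below only shows how such a table could be turned into a getopt-style
# spec (illustrative, hypothetical helper, not part of commands.py):
exampleopts = [
    ('n', 'dry-run', None, 'do not perform actions, just print output'),
    ('I', 'include', [], 'include names matching the given patterns'),
]

def _getoptspec(opttable):
    # options whose default is None or a boolean are flags; any other default
    # (string, int, list) means the option expects a value
    shorts, longs = '', []
    for short, name, default, helptext in opttable:
        hasarg = not (default is None or isinstance(default, bool))
        if short:
            shorts += short + (hasarg and ':' or '')
        longs.append(name + (hasarg and '=' or ''))
    return shorts, longs

# _getoptspec(exampleopts) -> ('nI:', ['dry-run', 'include='])
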
2798 table = {
2798 table = {
2799 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2799 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2800 "addremove":
2800 "addremove":
2801 (addremove,
2801 (addremove,
2802 [('s', 'similarity', '',
2802 [('s', 'similarity', '',
2803 _('guess renamed files by similarity (0<=s<=100)')),
2803 _('guess renamed files by similarity (0<=s<=100)')),
2804 ] + walkopts + dryrunopts,
2804 ] + walkopts + dryrunopts,
2805 _('hg addremove [OPTION]... [FILE]...')),
2805 _('hg addremove [OPTION]... [FILE]...')),
2806 "^annotate|blame":
2806 "^annotate|blame":
2807 (annotate,
2807 (annotate,
2808 [('r', 'rev', '', _('annotate the specified revision')),
2808 [('r', 'rev', '', _('annotate the specified revision')),
2809 ('f', 'follow', None, _('follow file copies and renames')),
2809 ('f', 'follow', None, _('follow file copies and renames')),
2810 ('a', 'text', None, _('treat all files as text')),
2810 ('a', 'text', None, _('treat all files as text')),
2811 ('u', 'user', None, _('list the author (long with -v)')),
2811 ('u', 'user', None, _('list the author (long with -v)')),
2812 ('d', 'date', None, _('list the date (short with -q)')),
2812 ('d', 'date', None, _('list the date (short with -q)')),
2813 ('n', 'number', None, _('list the revision number (default)')),
2813 ('n', 'number', None, _('list the revision number (default)')),
2814 ('c', 'changeset', None, _('list the changeset')),
2814 ('c', 'changeset', None, _('list the changeset')),
2815 ('l', 'line-number', None,
2815 ('l', 'line-number', None,
2816 _('show line number at the first appearance'))
2816 _('show line number at the first appearance'))
2817 ] + walkopts,
2817 ] + walkopts,
2818 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2818 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2819 "archive":
2819 "archive":
2820 (archive,
2820 (archive,
2821 [('', 'no-decode', None, _('do not pass files through decoders')),
2821 [('', 'no-decode', None, _('do not pass files through decoders')),
2822 ('p', 'prefix', '', _('directory prefix for files in archive')),
2822 ('p', 'prefix', '', _('directory prefix for files in archive')),
2823 ('r', 'rev', '', _('revision to distribute')),
2823 ('r', 'rev', '', _('revision to distribute')),
2824 ('t', 'type', '', _('type of distribution to create')),
2824 ('t', 'type', '', _('type of distribution to create')),
2825 ] + walkopts,
2825 ] + walkopts,
2826 _('hg archive [OPTION]... DEST')),
2826 _('hg archive [OPTION]... DEST')),
2827 "backout":
2827 "backout":
2828 (backout,
2828 (backout,
2829 [('', 'merge', None,
2829 [('', 'merge', None,
2830 _('merge with old dirstate parent after backout')),
2830 _('merge with old dirstate parent after backout')),
2831 ('', 'parent', '', _('parent to choose when backing out merge')),
2831 ('', 'parent', '', _('parent to choose when backing out merge')),
2832 ('r', 'rev', '', _('revision to backout')),
2832 ('r', 'rev', '', _('revision to backout')),
2833 ] + walkopts + commitopts + commitopts2,
2833 ] + walkopts + commitopts + commitopts2,
2834 _('hg backout [OPTION]... [-r] REV')),
2834 _('hg backout [OPTION]... [-r] REV')),
2835 "bisect":
2835 "bisect":
2836 (bisect,
2836 (bisect,
2837 [('r', 'reset', False, _('reset bisect state')),
2837 [('r', 'reset', False, _('reset bisect state')),
2838 ('g', 'good', False, _('mark changeset good')),
2838 ('g', 'good', False, _('mark changeset good')),
2839 ('b', 'bad', False, _('mark changeset bad')),
2839 ('b', 'bad', False, _('mark changeset bad')),
2840 ('s', 'skip', False, _('skip testing changeset')),
2840 ('s', 'skip', False, _('skip testing changeset')),
2841 ('U', 'noupdate', False, _('do not update to target'))],
2841 ('U', 'noupdate', False, _('do not update to target'))],
2842 _("hg bisect [-gbsr] [REV]")),
2842 _("hg bisect [-gbsr] [REV]")),
2843 "branch":
2843 "branch":
2844 (branch,
2844 (branch,
2845 [('f', 'force', None,
2845 [('f', 'force', None,
2846 _('set branch name even if it shadows an existing branch'))],
2846 _('set branch name even if it shadows an existing branch'))],
2847 _('hg branch [-f] [NAME]')),
2847 _('hg branch [-f] [NAME]')),
2848 "branches":
2848 "branches":
2849 (branches,
2849 (branches,
2850 [('a', 'active', False,
2850 [('a', 'active', False,
2851 _('show only branches that have unmerged heads'))],
2851 _('show only branches that have unmerged heads'))],
2852 _('hg branches [-a]')),
2852 _('hg branches [-a]')),
2853 "bundle":
2853 "bundle":
2854 (bundle,
2854 (bundle,
2855 [('f', 'force', None,
2855 [('f', 'force', None,
2856 _('run even when remote repository is unrelated')),
2856 _('run even when remote repository is unrelated')),
2857 ('r', 'rev', [],
2857 ('r', 'rev', [],
2858 _('a changeset you would like to bundle')),
2858 _('a changeset you would like to bundle')),
2859 ('', 'base', [],
2859 ('', 'base', [],
2860 _('a base changeset to specify instead of a destination')),
2860 _('a base changeset to specify instead of a destination')),
2861 ('a', 'all', None,
2861 ('a', 'all', None,
2862 _('bundle all changesets in the repository')),
2862 _('bundle all changesets in the repository')),
2863 ] + remoteopts,
2863 ] + remoteopts,
2864 _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
2864 _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
2865 "cat":
2865 "cat":
2866 (cat,
2866 (cat,
2867 [('o', 'output', '', _('print output to file with formatted name')),
2867 [('o', 'output', '', _('print output to file with formatted name')),
2868 ('r', 'rev', '', _('print the given revision')),
2868 ('r', 'rev', '', _('print the given revision')),
2869 ('', 'decode', None, _('apply any matching decode filter')),
2869 ('', 'decode', None, _('apply any matching decode filter')),
2870 ] + walkopts,
2870 ] + walkopts,
2871 _('hg cat [OPTION]... FILE...')),
2871 _('hg cat [OPTION]... FILE...')),
2872 "^clone":
2872 "^clone":
2873 (clone,
2873 (clone,
2874 [('U', 'noupdate', None, _('do not update the new working directory')),
2874 [('U', 'noupdate', None, _('do not update the new working directory')),
2875 ('r', 'rev', [],
2875 ('r', 'rev', [],
2876 _('a changeset you would like to have after cloning')),
2876 _('a changeset you would like to have after cloning')),
2877 ('', 'pull', None, _('use pull protocol to copy metadata')),
2877 ('', 'pull', None, _('use pull protocol to copy metadata')),
2878 ('', 'uncompressed', None,
2878 ('', 'uncompressed', None,
2879 _('use uncompressed transfer (fast over LAN)')),
2879 _('use uncompressed transfer (fast over LAN)')),
2880 ] + remoteopts,
2880 ] + remoteopts,
2881 _('hg clone [OPTION]... SOURCE [DEST]')),
2881 _('hg clone [OPTION]... SOURCE [DEST]')),
2882 "^commit|ci":
2882 "^commit|ci":
2883 (commit,
2883 (commit,
2884 [('A', 'addremove', None,
2884 [('A', 'addremove', None,
2885 _('mark new/missing files as added/removed before committing')),
2885 _('mark new/missing files as added/removed before committing')),
2886 ] + walkopts + commitopts + commitopts2,
2886 ] + walkopts + commitopts + commitopts2,
2887 _('hg commit [OPTION]... [FILE]...')),
2887 _('hg commit [OPTION]... [FILE]...')),
2888 "copy|cp":
2888 "copy|cp":
2889 (copy,
2889 (copy,
2890 [('A', 'after', None, _('record a copy that has already occurred')),
2890 [('A', 'after', None, _('record a copy that has already occurred')),
2891 ('f', 'force', None,
2891 ('f', 'force', None,
2892 _('forcibly copy over an existing managed file')),
2892 _('forcibly copy over an existing managed file')),
2893 ] + walkopts + dryrunopts,
2893 ] + walkopts + dryrunopts,
2894 _('hg copy [OPTION]... [SOURCE]... DEST')),
2894 _('hg copy [OPTION]... [SOURCE]... DEST')),
2895 "debugancestor": (debugancestor, [],
2895 "debugancestor": (debugancestor, [],
2896 _('hg debugancestor [INDEX] REV1 REV2')),
2896 _('hg debugancestor [INDEX] REV1 REV2')),
2897 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
2897 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
2898 "debugcomplete":
2898 "debugcomplete":
2899 (debugcomplete,
2899 (debugcomplete,
2900 [('o', 'options', None, _('show the command options'))],
2900 [('o', 'options', None, _('show the command options'))],
2901 _('hg debugcomplete [-o] CMD')),
2901 _('hg debugcomplete [-o] CMD')),
2902 "debugdate":
2902 "debugdate":
2903 (debugdate,
2903 (debugdate,
2904 [('e', 'extended', None, _('try extended date formats'))],
2904 [('e', 'extended', None, _('try extended date formats'))],
2905 _('hg debugdate [-e] DATE [RANGE]')),
2905 _('hg debugdate [-e] DATE [RANGE]')),
2906 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
2906 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
2907 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
2907 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
2908 "debugindex": (debugindex, [], _('hg debugindex FILE')),
2908 "debugindex": (debugindex, [], _('hg debugindex FILE')),
2909 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
2909 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
2910 "debuginstall": (debuginstall, [], _('hg debuginstall')),
2910 "debuginstall": (debuginstall, [], _('hg debuginstall')),
2911 "debugrawcommit|rawcommit":
2911 "debugrawcommit|rawcommit":
2912 (rawcommit,
2912 (rawcommit,
2913 [('p', 'parent', [], _('parent')),
2913 [('p', 'parent', [], _('parent')),
2914 ('F', 'files', '', _('file list'))
2914 ('F', 'files', '', _('file list'))
2915 ] + commitopts + commitopts2,
2915 ] + commitopts + commitopts2,
2916 _('hg debugrawcommit [OPTION]... [FILE]...')),
2916 _('hg debugrawcommit [OPTION]... [FILE]...')),
2917 "debugrebuildstate":
2917 "debugrebuildstate":
2918 (debugrebuildstate,
2918 (debugrebuildstate,
2919 [('r', 'rev', '', _('revision to rebuild to'))],
2919 [('r', 'rev', '', _('revision to rebuild to'))],
2920 _('hg debugrebuildstate [-r REV] [REV]')),
2920 _('hg debugrebuildstate [-r REV] [REV]')),
2921 "debugrename":
2921 "debugrename":
2922 (debugrename,
2922 (debugrename,
2923 [('r', 'rev', '', _('revision to debug'))],
2923 [('r', 'rev', '', _('revision to debug'))],
2924 _('hg debugrename [-r REV] FILE')),
2924 _('hg debugrename [-r REV] FILE')),
2925 "debugsetparents":
2925 "debugsetparents":
2926 (debugsetparents,
2926 (debugsetparents,
2927 [],
2927 [],
2928 _('hg debugsetparents REV1 [REV2]')),
2928 _('hg debugsetparents REV1 [REV2]')),
2929 "debugstate": (debugstate, [], _('hg debugstate')),
2929 "debugstate": (debugstate, [], _('hg debugstate')),
2930 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
2930 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
2931 "^diff":
2931 "^diff":
2932 (diff,
2932 (diff,
2933 [('r', 'rev', [], _('revision')),
2933 [('r', 'rev', [], _('revision')),
2934 ('a', 'text', None, _('treat all files as text')),
2934 ('a', 'text', None, _('treat all files as text')),
2935 ('p', 'show-function', None,
2935 ('p', 'show-function', None,
2936 _('show which function each change is in')),
2936 _('show which function each change is in')),
2937 ('g', 'git', None, _('use git extended diff format')),
2937 ('g', 'git', None, _('use git extended diff format')),
2938 ('', 'nodates', None, _("don't include dates in diff headers")),
2938 ('', 'nodates', None, _("don't include dates in diff headers")),
2939 ('w', 'ignore-all-space', None,
2939 ('w', 'ignore-all-space', None,
2940 _('ignore white space when comparing lines')),
2940 _('ignore white space when comparing lines')),
2941 ('b', 'ignore-space-change', None,
2941 ('b', 'ignore-space-change', None,
2942 _('ignore changes in the amount of white space')),
2942 _('ignore changes in the amount of white space')),
2943 ('B', 'ignore-blank-lines', None,
2943 ('B', 'ignore-blank-lines', None,
2944 _('ignore changes whose lines are all blank')),
2944 _('ignore changes whose lines are all blank')),
2945 ('U', 'unified', 3,
2945 ('U', 'unified', 3,
2946 _('number of lines of context to show'))
2946 _('number of lines of context to show'))
2947 ] + walkopts,
2947 ] + walkopts,
2948 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2948 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2949 "^export":
2949 "^export":
2950 (export,
2950 (export,
2951 [('o', 'output', '', _('print output to file with formatted name')),
2951 [('o', 'output', '', _('print output to file with formatted name')),
2952 ('a', 'text', None, _('treat all files as text')),
2952 ('a', 'text', None, _('treat all files as text')),
2953 ('g', 'git', None, _('use git extended diff format')),
2953 ('g', 'git', None, _('use git extended diff format')),
2954 ('', 'nodates', None, _("don't include dates in diff headers")),
2954 ('', 'nodates', None, _("don't include dates in diff headers")),
2955 ('', 'switch-parent', None, _('diff against the second parent'))],
2955 ('', 'switch-parent', None, _('diff against the second parent'))],
2956 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2956 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2957 "grep":
2957 "grep":
2958 (grep,
2958 (grep,
2959 [('0', 'print0', None, _('end fields with NUL')),
2959 [('0', 'print0', None, _('end fields with NUL')),
2960 ('', 'all', None, _('print all revisions that match')),
2960 ('', 'all', None, _('print all revisions that match')),
2961 ('f', 'follow', None,
2961 ('f', 'follow', None,
2962 _('follow changeset history, or file history across copies and renames')),
2962 _('follow changeset history, or file history across copies and renames')),
2963 ('i', 'ignore-case', None, _('ignore case when matching')),
2963 ('i', 'ignore-case', None, _('ignore case when matching')),
2964 ('l', 'files-with-matches', None,
2964 ('l', 'files-with-matches', None,
2965 _('print only filenames and revs that match')),
2965 _('print only filenames and revs that match')),
2966 ('n', 'line-number', None, _('print matching line numbers')),
2966 ('n', 'line-number', None, _('print matching line numbers')),
2967 ('r', 'rev', [], _('search in given revision range')),
2967 ('r', 'rev', [], _('search in given revision range')),
2968 ('u', 'user', None, _('list the author (long with -v)')),
2968 ('u', 'user', None, _('list the author (long with -v)')),
2969 ('d', 'date', None, _('list the date (short with -q)')),
2969 ('d', 'date', None, _('list the date (short with -q)')),
2970 ] + walkopts,
2970 ] + walkopts,
2971 _('hg grep [OPTION]... PATTERN [FILE]...')),
2971 _('hg grep [OPTION]... PATTERN [FILE]...')),
2972 "heads":
2972 "heads":
2973 (heads,
2973 (heads,
2974 [('r', 'rev', '', _('show only heads which are descendants of rev')),
2974 [('r', 'rev', '', _('show only heads which are descendants of rev')),
2975 ] + templateopts,
2975 ] + templateopts,
2976 _('hg heads [-r REV] [REV]...')),
2976 _('hg heads [-r REV] [REV]...')),
2977 "help": (help_, [], _('hg help [COMMAND]')),
2977 "help": (help_, [], _('hg help [COMMAND]')),
2978 "identify|id":
2978 "identify|id":
2979 (identify,
2979 (identify,
2980 [('r', 'rev', '', _('identify the specified rev')),
2980 [('r', 'rev', '', _('identify the specified rev')),
2981 ('n', 'num', None, _('show local revision number')),
2981 ('n', 'num', None, _('show local revision number')),
2982 ('i', 'id', None, _('show global revision id')),
2982 ('i', 'id', None, _('show global revision id')),
2983 ('b', 'branch', None, _('show branch')),
2983 ('b', 'branch', None, _('show branch')),
2984 ('t', 'tags', None, _('show tags'))],
2984 ('t', 'tags', None, _('show tags'))],
2985 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2985 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2986 "import|patch":
2986 "import|patch":
2987 (import_,
2987 (import_,
2988 [('p', 'strip', 1,
2988 [('p', 'strip', 1,
2989 _('directory strip option for patch. This has the same\n'
2989 _('directory strip option for patch. This has the same\n'
2990 'meaning as the corresponding patch option')),
2990 'meaning as the corresponding patch option')),
2991 ('b', 'base', '', _('base path')),
2991 ('b', 'base', '', _('base path')),
2992 ('f', 'force', None,
2992 ('f', 'force', None,
2993 _('skip check for outstanding uncommitted changes')),
2993 _('skip check for outstanding uncommitted changes')),
2994 ('', 'no-commit', None, _("don't commit, just update the working directory")),
2994 ('', 'no-commit', None, _("don't commit, just update the working directory")),
2995 ('', 'exact', None,
2995 ('', 'exact', None,
2996 _('apply patch to the nodes from which it was generated')),
2996 _('apply patch to the nodes from which it was generated')),
2997 ('', 'import-branch', None,
2997 ('', 'import-branch', None,
2998 _('Use any branch information in patch (implied by --exact)'))] +
2998 _('Use any branch information in patch (implied by --exact)'))] +
2999 commitopts + commitopts2,
2999 commitopts + commitopts2,
3000 _('hg import [OPTION]... PATCH...')),
3000 _('hg import [OPTION]... PATCH...')),
3001 "incoming|in":
3001 "incoming|in":
3002 (incoming,
3002 (incoming,
3003 [('f', 'force', None,
3003 [('f', 'force', None,
3004 _('run even when remote repository is unrelated')),
3004 _('run even when remote repository is unrelated')),
3005 ('n', 'newest-first', None, _('show newest record first')),
3005 ('n', 'newest-first', None, _('show newest record first')),
3006 ('', 'bundle', '', _('file to store the bundles into')),
3006 ('', 'bundle', '', _('file to store the bundles into')),
3007 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3007 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3008 ] + logopts + remoteopts,
3008 ] + logopts + remoteopts,
3009 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
3009 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
3010 ' [--bundle FILENAME] [SOURCE]')),
3010 ' [--bundle FILENAME] [SOURCE]')),
3011 "^init":
3011 "^init":
3012 (init,
3012 (init,
3013 remoteopts,
3013 remoteopts,
3014 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
3014 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
3015 "locate":
3015 "locate":
3016 (locate,
3016 (locate,
3017 [('r', 'rev', '', _('search the repository as it stood at rev')),
3017 [('r', 'rev', '', _('search the repository as it stood at rev')),
3018 ('0', 'print0', None,
3018 ('0', 'print0', None,
3019 _('end filenames with NUL, for use with xargs')),
3019 _('end filenames with NUL, for use with xargs')),
3020 ('f', 'fullpath', None,
3020 ('f', 'fullpath', None,
3021 _('print complete paths from the filesystem root')),
3021 _('print complete paths from the filesystem root')),
3022 ] + walkopts,
3022 ] + walkopts,
3023 _('hg locate [OPTION]... [PATTERN]...')),
3023 _('hg locate [OPTION]... [PATTERN]...')),
3024 "^log|history":
3024 "^log|history":
3025 (log,
3025 (log,
3026 [('f', 'follow', None,
3026 [('f', 'follow', None,
3027 _('follow changeset history, or file history across copies and renames')),
3027 _('follow changeset history, or file history across copies and renames')),
3028 ('', 'follow-first', None,
3028 ('', 'follow-first', None,
3029 _('only follow the first parent of merge changesets')),
3029 _('only follow the first parent of merge changesets')),
3030 ('d', 'date', '', _('show revs matching date spec')),
3030 ('d', 'date', '', _('show revs matching date spec')),
3031 ('C', 'copies', None, _('show copied files')),
3031 ('C', 'copies', None, _('show copied files')),
3032 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3032 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3033 ('r', 'rev', [], _('show the specified revision or range')),
3033 ('r', 'rev', [], _('show the specified revision or range')),
3034 ('', 'removed', None, _('include revs where files were removed')),
3034 ('', 'removed', None, _('include revs where files were removed')),
3035 ('m', 'only-merges', None, _('show only merges')),
3035 ('m', 'only-merges', None, _('show only merges')),
3036 ('b', 'only-branch', [],
3036 ('b', 'only-branch', [],
3037 _('show only changesets within the given named branch')),
3037 _('show only changesets within the given named branch')),
3038 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3038 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3039 ] + logopts + walkopts,
3039 ] + logopts + walkopts,
3040 _('hg log [OPTION]... [FILE]')),
3040 _('hg log [OPTION]... [FILE]')),
3041 "manifest":
3041 "manifest":
3042 (manifest,
3042 (manifest,
3043 [('r', 'rev', '', _('revision to display'))],
3043 [('r', 'rev', '', _('revision to display'))],
3044 _('hg manifest [-r REV]')),
3044 _('hg manifest [-r REV]')),
3045 "^merge":
3045 "^merge":
3046 (merge,
3046 (merge,
3047 [('f', 'force', None, _('force a merge with outstanding changes')),
3047 [('f', 'force', None, _('force a merge with outstanding changes')),
3048 ('r', 'rev', '', _('revision to merge')),
3048 ('r', 'rev', '', _('revision to merge')),
3049 ],
3049 ],
3050 _('hg merge [-f] [[-r] REV]')),
3050 _('hg merge [-f] [[-r] REV]')),
3051 "outgoing|out":
3051 "outgoing|out":
3052 (outgoing,
3052 (outgoing,
3053 [('f', 'force', None,
3053 [('f', 'force', None,
3054 _('run even when remote repository is unrelated')),
3054 _('run even when remote repository is unrelated')),
3055 ('r', 'rev', [], _('a specific revision you would like to push')),
3055 ('r', 'rev', [], _('a specific revision you would like to push')),
3056 ('n', 'newest-first', None, _('show newest record first')),
3056 ('n', 'newest-first', None, _('show newest record first')),
3057 ] + logopts + remoteopts,
3057 ] + logopts + remoteopts,
3058 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3058 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3059 "^parents":
3059 "^parents":
3060 (parents,
3060 (parents,
3061 [('r', 'rev', '', _('show parents from the specified rev')),
3061 [('r', 'rev', '', _('show parents from the specified rev')),
3062 ] + templateopts,
3062 ] + templateopts,
3063 _('hg parents [-r REV] [FILE]')),
3063 _('hg parents [-r REV] [FILE]')),
3064 "paths": (paths, [], _('hg paths [NAME]')),
3064 "paths": (paths, [], _('hg paths [NAME]')),
3065 "^pull":
3065 "^pull":
3066 (pull,
3066 (pull,
3067 [('u', 'update', None,
3067 [('u', 'update', None,
3068 _('update to new tip if changesets were pulled')),
3068 _('update to new tip if changesets were pulled')),
3069 ('f', 'force', None,
3069 ('f', 'force', None,
3070 _('run even when remote repository is unrelated')),
3070 _('run even when remote repository is unrelated')),
3071 ('r', 'rev', [],
3071 ('r', 'rev', [],
3072 _('a specific revision up to which you would like to pull')),
3072 _('a specific revision up to which you would like to pull')),
3073 ] + remoteopts,
3073 ] + remoteopts,
3074 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3074 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3075 "^push":
3075 "^push":
3076 (push,
3076 (push,
3077 [('f', 'force', None, _('force push')),
3077 [('f', 'force', None, _('force push')),
3078 ('r', 'rev', [], _('a specific revision you would like to push')),
3078 ('r', 'rev', [], _('a specific revision you would like to push')),
3079 ] + remoteopts,
3079 ] + remoteopts,
3080 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3080 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3081 "recover": (recover, [], _('hg recover')),
3081 "recover": (recover, [], _('hg recover')),
3082 "^remove|rm":
3082 "^remove|rm":
3083 (remove,
3083 (remove,
3084 [('A', 'after', None, _('record remove without deleting')),
3084 [('A', 'after', None, _('record remove without deleting')),
3085 ('f', 'force', None, _('remove file even if modified')),
3085 ('f', 'force', None, _('remove file even if modified')),
3086 ] + walkopts,
3086 ] + walkopts,
3087 _('hg remove [OPTION]... FILE...')),
3087 _('hg remove [OPTION]... FILE...')),
3088 "rename|mv":
3088 "rename|mv":
3089 (rename,
3089 (rename,
3090 [('A', 'after', None, _('record a rename that has already occurred')),
3090 [('A', 'after', None, _('record a rename that has already occurred')),
3091 ('f', 'force', None,
3091 ('f', 'force', None,
3092 _('forcibly copy over an existing managed file')),
3092 _('forcibly copy over an existing managed file')),
3093 ] + walkopts + dryrunopts,
3093 ] + walkopts + dryrunopts,
3094 _('hg rename [OPTION]... SOURCE... DEST')),
3094 _('hg rename [OPTION]... SOURCE... DEST')),
3095 "revert":
3095 "revert":
3096 (revert,
3096 (revert,
3097 [('a', 'all', None, _('revert all changes when no arguments given')),
3097 [('a', 'all', None, _('revert all changes when no arguments given')),
3098 ('d', 'date', '', _('tipmost revision matching date')),
3098 ('d', 'date', '', _('tipmost revision matching date')),
3099 ('r', 'rev', '', _('revision to revert to')),
3099 ('r', 'rev', '', _('revision to revert to')),
3100 ('', 'no-backup', None, _('do not save backup copies of files')),
3100 ('', 'no-backup', None, _('do not save backup copies of files')),
3101 ] + walkopts + dryrunopts,
3101 ] + walkopts + dryrunopts,
3102 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3102 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3103 "rollback": (rollback, [], _('hg rollback')),
3103 "rollback": (rollback, [], _('hg rollback')),
3104 "root": (root, [], _('hg root')),
3104 "root": (root, [], _('hg root')),
3105 "^serve":
3105 "^serve":
3106 (serve,
3106 (serve,
3107 [('A', 'accesslog', '', _('name of access log file to write to')),
3107 [('A', 'accesslog', '', _('name of access log file to write to')),
3108 ('d', 'daemon', None, _('run server in background')),
3108 ('d', 'daemon', None, _('run server in background')),
3109 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3109 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3110 ('E', 'errorlog', '', _('name of error log file to write to')),
3110 ('E', 'errorlog', '', _('name of error log file to write to')),
3111 ('p', 'port', 0, _('port to use (default: 8000)')),
3111 ('p', 'port', 0, _('port to use (default: 8000)')),
3112 ('a', 'address', '', _('address to use')),
3112 ('a', 'address', '', _('address to use')),
3113 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3113 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3114 ('n', 'name', '',
3114 ('n', 'name', '',
3115 _('name to show in web pages (default: working dir)')),
3115 _('name to show in web pages (default: working dir)')),
3116 ('', 'webdir-conf', '', _('name of the webdir config file'
3116 ('', 'webdir-conf', '', _('name of the webdir config file'
3117 ' (serve more than one repo)')),
3117 ' (serve more than one repo)')),
3118 ('', 'pid-file', '', _('name of file to write process ID to')),
3118 ('', 'pid-file', '', _('name of file to write process ID to')),
3119 ('', 'stdio', None, _('for remote clients')),
3119 ('', 'stdio', None, _('for remote clients')),
3120 ('t', 'templates', '', _('web templates to use')),
3120 ('t', 'templates', '', _('web templates to use')),
3121 ('', 'style', '', _('template style to use')),
3121 ('', 'style', '', _('template style to use')),
3122 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3122 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3123 ('', 'certificate', '', _('SSL certificate file'))],
3123 ('', 'certificate', '', _('SSL certificate file'))],
3124 _('hg serve [OPTION]...')),
3124 _('hg serve [OPTION]...')),
3125 "showconfig|debugconfig":
3125 "showconfig|debugconfig":
3126 (showconfig,
3126 (showconfig,
3127 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3127 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3128 _('hg showconfig [-u] [NAME]...')),
3128 _('hg showconfig [-u] [NAME]...')),
3129 "^status|st":
3129 "^status|st":
3130 (status,
3130 (status,
3131 [('A', 'all', None, _('show status of all files')),
3131 [('A', 'all', None, _('show status of all files')),
3132 ('m', 'modified', None, _('show only modified files')),
3132 ('m', 'modified', None, _('show only modified files')),
3133 ('a', 'added', None, _('show only added files')),
3133 ('a', 'added', None, _('show only added files')),
3134 ('r', 'removed', None, _('show only removed files')),
3134 ('r', 'removed', None, _('show only removed files')),
3135 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3135 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3136 ('c', 'clean', None, _('show only files without changes')),
3136 ('c', 'clean', None, _('show only files without changes')),
3137 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3137 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3138 ('i', 'ignored', None, _('show only ignored files')),
3138 ('i', 'ignored', None, _('show only ignored files')),
3139 ('n', 'no-status', None, _('hide status prefix')),
3139 ('n', 'no-status', None, _('hide status prefix')),
3140 ('C', 'copies', None, _('show source of copied files')),
3140 ('C', 'copies', None, _('show source of copied files')),
3141 ('0', 'print0', None,
3141 ('0', 'print0', None,
3142 _('end filenames with NUL, for use with xargs')),
3142 _('end filenames with NUL, for use with xargs')),
3143 ('', 'rev', [], _('show difference from revision')),
3143 ('', 'rev', [], _('show difference from revision')),
3144 ] + walkopts,
3144 ] + walkopts,
3145 _('hg status [OPTION]... [FILE]...')),
3145 _('hg status [OPTION]... [FILE]...')),
3146 "tag":
3146 "tag":
3147 (tag,
3147 (tag,
3148 [('f', 'force', None, _('replace existing tag')),
3148 [('f', 'force', None, _('replace existing tag')),
3149 ('l', 'local', None, _('make the tag local')),
3149 ('l', 'local', None, _('make the tag local')),
3150 ('r', 'rev', '', _('revision to tag')),
3150 ('r', 'rev', '', _('revision to tag')),
3151 ('', 'remove', None, _('remove a tag')),
3151 ('', 'remove', None, _('remove a tag')),
3152 # -l/--local is already there, commitopts cannot be used
3152 # -l/--local is already there, commitopts cannot be used
3153 ('m', 'message', '', _('use <text> as commit message')),
3153 ('m', 'message', '', _('use <text> as commit message')),
3154 ] + commitopts2,
3154 ] + commitopts2,
3155 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3155 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3156 "tags": (tags, [], _('hg tags')),
3156 "tags": (tags, [], _('hg tags')),
3157 "tip":
3157 "tip":
3158 (tip,
3158 (tip,
3159 [('p', 'patch', None, _('show patch')),
3159 [('p', 'patch', None, _('show patch')),
3160 ] + templateopts,
3160 ] + templateopts,
3161 _('hg tip [-p]')),
3161 _('hg tip [-p]')),
3162 "unbundle":
3162 "unbundle":
3163 (unbundle,
3163 (unbundle,
3164 [('u', 'update', None,
3164 [('u', 'update', None,
3165 _('update to new tip if changesets were unbundled'))],
3165 _('update to new tip if changesets were unbundled'))],
3166 _('hg unbundle [-u] FILE...')),
3166 _('hg unbundle [-u] FILE...')),
3167 "^update|up|checkout|co":
3167 "^update|up|checkout|co":
3168 (update,
3168 (update,
3169 [('C', 'clean', None, _('overwrite locally modified files')),
3169 [('C', 'clean', None, _('overwrite locally modified files')),
3170 ('d', 'date', '', _('tipmost revision matching date')),
3170 ('d', 'date', '', _('tipmost revision matching date')),
3171 ('r', 'rev', '', _('revision'))],
3171 ('r', 'rev', '', _('revision'))],
3172 _('hg update [-C] [-d DATE] [[-r] REV]')),
3172 _('hg update [-C] [-d DATE] [[-r] REV]')),
3173 "verify": (verify, [], _('hg verify')),
3173 "verify": (verify, [], _('hg verify')),
3174 "version": (version_, [], _('hg version')),
3174 "version": (version_, [], _('hg version')),
3175 }
3175 }
3176
3176
3177 norepo = ("clone init version help debugcomplete debugdata"
3177 norepo = ("clone init version help debugcomplete debugdata"
3178 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3178 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3179 optionalrepo = ("identify paths serve showconfig debugancestor")
3179 optionalrepo = ("identify paths serve showconfig debugancestor")
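
Each entry in the command table above maps a command name (aliases separated by "|", a leading "^" marking commands listed in the short help) to a tuple of the implementing function, its option list, and a usage synopsis; every option is itself a (short, long, default, help) tuple. The sketch below is illustrative only and is not part of commands.py: the shorthelp helper and the two-entry sample table are invented, but they assume exactly the table shape shown above.

def shorthelp(cmdtable):
    # cmdtable is assumed to have the same shape as the table above:
    #   {"^name|alias": (function, options, synopsis), ...}
    for names, (func, options, synopsis) in sorted(cmdtable.items()):
        if not names.startswith("^"):
            continue                      # only commands marked for the short list
        primary = names.lstrip("^").split("|")[0]
        print("%-10s %s" % (primary, synopsis))
        for short, long_, default, help_ in options:
            flag = short and ("-" + short) or ("--" + long_)
            print("    %-14s %s" % (flag, help_))

# a tiny invented table with the same shape as the real one above
sample = {
    "^status|st": (None,
                   [("A", "all", None, "show status of all files")],
                   "hg status [OPTION]... [FILE]..."),
    "paths": (None, [], "hg paths [NAME]"),
}
shorthelp(sample)
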
@@ -1,620 +1,620 @@
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import ancestor, bdiff, repo, revlog, util, os, errno
10 import ancestor, bdiff, repo, revlog, util, os, errno
11
11
12 class changectx(object):
12 class changectx(object):
13 """A changecontext object makes access to data related to a particular
13 """A changecontext object makes access to data related to a particular
14 changeset convenient."""
14 changeset convenient."""
15 def __init__(self, repo, changeid=None):
15 def __init__(self, repo, changeid=None):
16 """changeid is a revision number, node, or tag"""
16 """changeid is a revision number, node, or tag"""
17 self._repo = repo
17 self._repo = repo
18
18
19 if not changeid and changeid != 0:
19 if not changeid and changeid != 0:
20 p1, p2 = self._repo.dirstate.parents()
20 p1, p2 = self._repo.dirstate.parents()
21 self._rev = self._repo.changelog.rev(p1)
21 self._rev = self._repo.changelog.rev(p1)
22 if self._rev == -1:
22 if self._rev == -1:
23 changeid = 'tip'
23 changeid = 'tip'
24 else:
24 else:
25 self._node = p1
25 self._node = p1
26 return
26 return
27
27
28 self._node = self._repo.lookup(changeid)
28 self._node = self._repo.lookup(changeid)
29 self._rev = self._repo.changelog.rev(self._node)
29 self._rev = self._repo.changelog.rev(self._node)
30
30
31 def __str__(self):
31 def __str__(self):
32 return short(self.node())
32 return short(self.node())
33
33
34 def __repr__(self):
34 def __repr__(self):
35 return "<changectx %s>" % str(self)
35 return "<changectx %s>" % str(self)
36
36
37 def __eq__(self, other):
37 def __eq__(self, other):
38 try:
38 try:
39 return self._rev == other._rev
39 return self._rev == other._rev
40 except AttributeError:
40 except AttributeError:
41 return False
41 return False
42
42
43 def __ne__(self, other):
43 def __ne__(self, other):
44 return not (self == other)
44 return not (self == other)
45
45
46 def __nonzero__(self):
46 def __nonzero__(self):
47 return self._rev != nullrev
47 return self._rev != nullrev
48
48
49 def __getattr__(self, name):
49 def __getattr__(self, name):
50 if name == '_changeset':
50 if name == '_changeset':
51 self._changeset = self._repo.changelog.read(self.node())
51 self._changeset = self._repo.changelog.read(self.node())
52 return self._changeset
52 return self._changeset
53 elif name == '_manifest':
53 elif name == '_manifest':
54 self._manifest = self._repo.manifest.read(self._changeset[0])
54 self._manifest = self._repo.manifest.read(self._changeset[0])
55 return self._manifest
55 return self._manifest
56 elif name == '_manifestdelta':
56 elif name == '_manifestdelta':
57 md = self._repo.manifest.readdelta(self._changeset[0])
57 md = self._repo.manifest.readdelta(self._changeset[0])
58 self._manifestdelta = md
58 self._manifestdelta = md
59 return self._manifestdelta
59 return self._manifestdelta
60 else:
60 else:
61 raise AttributeError, name
61 raise AttributeError, name
62
62
63 def __contains__(self, key):
63 def __contains__(self, key):
64 return key in self._manifest
64 return key in self._manifest
65
65
66 def __getitem__(self, key):
66 def __getitem__(self, key):
67 return self.filectx(key)
67 return self.filectx(key)
68
68
69 def __iter__(self):
69 def __iter__(self):
70 a = self._manifest.keys()
70 a = self._manifest.keys()
71 a.sort()
71 a.sort()
72 for f in a:
72 for f in a:
73 yield f
73 yield f
74
74
75 def changeset(self): return self._changeset
75 def changeset(self): return self._changeset
76 def manifest(self): return self._manifest
76 def manifest(self): return self._manifest
77
77
78 def rev(self): return self._rev
78 def rev(self): return self._rev
79 def node(self): return self._node
79 def node(self): return self._node
80 def user(self): return self._changeset[1]
80 def user(self): return self._changeset[1]
81 def date(self): return self._changeset[2]
81 def date(self): return self._changeset[2]
82 def files(self): return self._changeset[3]
82 def files(self): return self._changeset[3]
83 def description(self): return self._changeset[4]
83 def description(self): return self._changeset[4]
84 def branch(self): return self._changeset[5].get("branch")
84 def branch(self): return self._changeset[5].get("branch")
85 def extra(self): return self._changeset[5]
85 def extra(self): return self._changeset[5]
86 def tags(self): return self._repo.nodetags(self._node)
86 def tags(self): return self._repo.nodetags(self._node)
87
87
88 def parents(self):
88 def parents(self):
89 """return contexts for each parent changeset"""
89 """return contexts for each parent changeset"""
90 p = self._repo.changelog.parents(self._node)
90 p = self._repo.changelog.parents(self._node)
91 return [changectx(self._repo, x) for x in p]
91 return [changectx(self._repo, x) for x in p]
92
92
93 def children(self):
93 def children(self):
94 """return contexts for each child changeset"""
94 """return contexts for each child changeset"""
95 c = self._repo.changelog.children(self._node)
95 c = self._repo.changelog.children(self._node)
96 return [changectx(self._repo, x) for x in c]
96 return [changectx(self._repo, x) for x in c]
97
97
98 def _fileinfo(self, path):
98 def _fileinfo(self, path):
99 if '_manifest' in self.__dict__:
99 if '_manifest' in self.__dict__:
100 try:
100 try:
101 return self._manifest[path], self._manifest.flags(path)
101 return self._manifest[path], self._manifest.flags(path)
102 except KeyError:
102 except KeyError:
103 raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
103 raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
104 if '_manifestdelta' in self.__dict__ or path in self.files():
104 if '_manifestdelta' in self.__dict__ or path in self.files():
105 if path in self._manifestdelta:
105 if path in self._manifestdelta:
106 return self._manifestdelta[path], self._manifestdelta.flags(path)
106 return self._manifestdelta[path], self._manifestdelta.flags(path)
107 node, flag = self._repo.manifest.find(self._changeset[0], path)
107 node, flag = self._repo.manifest.find(self._changeset[0], path)
108 if not node:
108 if not node:
109 raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
109 raise revlog.LookupError(path, _("'%s' not found in manifest") % path)
110
110
111 return node, flag
111 return node, flag
112
112
113 def filenode(self, path):
113 def filenode(self, path):
114 return self._fileinfo(path)[0]
114 return self._fileinfo(path)[0]
115
115
116 def fileflags(self, path):
116 def fileflags(self, path):
117 try:
117 try:
118 return self._fileinfo(path)[1]
118 return self._fileinfo(path)[1]
119 except revlog.LookupError:
119 except revlog.LookupError:
120 return ''
120 return ''
121
121
122 def filectx(self, path, fileid=None, filelog=None):
122 def filectx(self, path, fileid=None, filelog=None):
123 """get a file context from this changeset"""
123 """get a file context from this changeset"""
124 if fileid is None:
124 if fileid is None:
125 fileid = self.filenode(path)
125 fileid = self.filenode(path)
126 return filectx(self._repo, path, fileid=fileid,
126 return filectx(self._repo, path, fileid=fileid,
127 changectx=self, filelog=filelog)
127 changectx=self, filelog=filelog)
128
128
129 def filectxs(self):
129 def filectxs(self):
130 """generate a file context for each file in this changeset's
130 """generate a file context for each file in this changeset's
131 manifest"""
131 manifest"""
132 mf = self.manifest()
132 mf = self.manifest()
133 m = mf.keys()
133 m = mf.keys()
134 m.sort()
134 m.sort()
135 for f in m:
135 for f in m:
136 yield self.filectx(f, fileid=mf[f])
136 yield self.filectx(f, fileid=mf[f])
137
137
138 def ancestor(self, c2):
138 def ancestor(self, c2):
139 """
139 """
140 return the ancestor context of self and c2
140 return the ancestor context of self and c2
141 """
141 """
142 n = self._repo.changelog.ancestor(self._node, c2._node)
142 n = self._repo.changelog.ancestor(self._node, c2._node)
143 return changectx(self._repo, n)
143 return changectx(self._repo, n)
144
144
145 class filectx(object):
145 class filectx(object):
146 """A filecontext object makes access to data related to a particular
146 """A filecontext object makes access to data related to a particular
147 filerevision convenient."""
147 filerevision convenient."""
148 def __init__(self, repo, path, changeid=None, fileid=None,
148 def __init__(self, repo, path, changeid=None, fileid=None,
149 filelog=None, changectx=None):
149 filelog=None, changectx=None):
150 """changeid can be a changeset revision, node, or tag.
150 """changeid can be a changeset revision, node, or tag.
151 fileid can be a file revision or node."""
151 fileid can be a file revision or node."""
152 self._repo = repo
152 self._repo = repo
153 self._path = path
153 self._path = path
154
154
155 assert (changeid is not None
155 assert (changeid is not None
156 or fileid is not None
156 or fileid is not None
157 or changectx is not None)
157 or changectx is not None)
158
158
159 if filelog:
159 if filelog:
160 self._filelog = filelog
160 self._filelog = filelog
161
161
162 if changeid is not None:
162 if changeid is not None:
163 self._changeid = changeid
163 self._changeid = changeid
164 if changectx is not None:
164 if changectx is not None:
165 self._changectx = changectx
165 self._changectx = changectx
166 if fileid is not None:
166 if fileid is not None:
167 self._fileid = fileid
167 self._fileid = fileid
168
168
169 def __getattr__(self, name):
169 def __getattr__(self, name):
170 if name == '_changectx':
170 if name == '_changectx':
171 self._changectx = changectx(self._repo, self._changeid)
171 self._changectx = changectx(self._repo, self._changeid)
172 return self._changectx
172 return self._changectx
173 elif name == '_filelog':
173 elif name == '_filelog':
174 self._filelog = self._repo.file(self._path)
174 self._filelog = self._repo.file(self._path)
175 return self._filelog
175 return self._filelog
176 elif name == '_changeid':
176 elif name == '_changeid':
177 if '_changectx' in self.__dict__:
177 if '_changectx' in self.__dict__:
178 self._changeid = self._changectx.rev()
178 self._changeid = self._changectx.rev()
179 else:
179 else:
180 self._changeid = self._filelog.linkrev(self._filenode)
180 self._changeid = self._filelog.linkrev(self._filenode)
181 return self._changeid
181 return self._changeid
182 elif name == '_filenode':
182 elif name == '_filenode':
183 if '_fileid' in self.__dict__:
183 if '_fileid' in self.__dict__:
184 self._filenode = self._filelog.lookup(self._fileid)
184 self._filenode = self._filelog.lookup(self._fileid)
185 else:
185 else:
186 self._filenode = self._changectx.filenode(self._path)
186 self._filenode = self._changectx.filenode(self._path)
187 return self._filenode
187 return self._filenode
188 elif name == '_filerev':
188 elif name == '_filerev':
189 self._filerev = self._filelog.rev(self._filenode)
189 self._filerev = self._filelog.rev(self._filenode)
190 return self._filerev
190 return self._filerev
191 else:
191 else:
192 raise AttributeError, name
192 raise AttributeError, name
193
193
194 def __nonzero__(self):
194 def __nonzero__(self):
195 try:
195 try:
196 n = self._filenode
196 n = self._filenode
197 return True
197 return True
198 except revlog.LookupError:
198 except revlog.LookupError:
199 # file is missing
199 # file is missing
200 return False
200 return False
201
201
202 def __str__(self):
202 def __str__(self):
203 return "%s@%s" % (self.path(), short(self.node()))
203 return "%s@%s" % (self.path(), short(self.node()))
204
204
205 def __repr__(self):
205 def __repr__(self):
206 return "<filectx %s>" % str(self)
206 return "<filectx %s>" % str(self)
207
207
208 def __eq__(self, other):
208 def __eq__(self, other):
209 try:
209 try:
210 return (self._path == other._path
210 return (self._path == other._path
211 and self._fileid == other._fileid)
211 and self._fileid == other._fileid)
212 except AttributeError:
212 except AttributeError:
213 return False
213 return False
214
214
215 def __ne__(self, other):
215 def __ne__(self, other):
216 return not (self == other)
216 return not (self == other)
217
217
218 def filectx(self, fileid):
218 def filectx(self, fileid):
219 '''opens an arbitrary revision of the file without
219 '''opens an arbitrary revision of the file without
220 opening a new filelog'''
220 opening a new filelog'''
221 return filectx(self._repo, self._path, fileid=fileid,
221 return filectx(self._repo, self._path, fileid=fileid,
222 filelog=self._filelog)
222 filelog=self._filelog)
223
223
224 def filerev(self): return self._filerev
224 def filerev(self): return self._filerev
225 def filenode(self): return self._filenode
225 def filenode(self): return self._filenode
226 def fileflags(self): return self._changectx.fileflags(self._path)
226 def fileflags(self): return self._changectx.fileflags(self._path)
227 def isexec(self): return 'x' in self.fileflags()
227 def isexec(self): return 'x' in self.fileflags()
228 def islink(self): return 'l' in self.fileflags()
228 def islink(self): return 'l' in self.fileflags()
229 def filelog(self): return self._filelog
229 def filelog(self): return self._filelog
230
230
231 def rev(self):
231 def rev(self):
232 if '_changectx' in self.__dict__:
232 if '_changectx' in self.__dict__:
233 return self._changectx.rev()
233 return self._changectx.rev()
234 if '_changeid' in self.__dict__:
234 if '_changeid' in self.__dict__:
235 return self._changectx.rev()
235 return self._changectx.rev()
236 return self._filelog.linkrev(self._filenode)
236 return self._filelog.linkrev(self._filenode)
237
237
238 def linkrev(self): return self._filelog.linkrev(self._filenode)
238 def linkrev(self): return self._filelog.linkrev(self._filenode)
239 def node(self): return self._changectx.node()
239 def node(self): return self._changectx.node()
240 def user(self): return self._changectx.user()
240 def user(self): return self._changectx.user()
241 def date(self): return self._changectx.date()
241 def date(self): return self._changectx.date()
242 def files(self): return self._changectx.files()
242 def files(self): return self._changectx.files()
243 def description(self): return self._changectx.description()
243 def description(self): return self._changectx.description()
244 def branch(self): return self._changectx.branch()
244 def branch(self): return self._changectx.branch()
245 def manifest(self): return self._changectx.manifest()
245 def manifest(self): return self._changectx.manifest()
246 def changectx(self): return self._changectx
246 def changectx(self): return self._changectx
247
247
248 def data(self): return self._filelog.read(self._filenode)
248 def data(self): return self._filelog.read(self._filenode)
249 def path(self): return self._path
249 def path(self): return self._path
250 def size(self): return self._filelog.size(self._filerev)
250 def size(self): return self._filelog.size(self._filerev)
251
251
252 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
252 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
253
253
254 def renamed(self):
254 def renamed(self):
255 """check if file was actually renamed in this changeset revision
255 """check if file was actually renamed in this changeset revision
256
256
257 If rename logged in file revision, we report copy for changeset only
257 If rename logged in file revision, we report copy for changeset only
258 if file revisions linkrev points back to the changeset in question
258 if file revisions linkrev points back to the changeset in question
259 or both changeset parents contain different file revisions.
259 or both changeset parents contain different file revisions.
260 """
260 """
261
261
262 renamed = self._filelog.renamed(self._filenode)
262 renamed = self._filelog.renamed(self._filenode)
263 if not renamed:
263 if not renamed:
264 return renamed
264 return renamed
265
265
266 if self.rev() == self.linkrev():
266 if self.rev() == self.linkrev():
267 return renamed
267 return renamed
268
268
269 name = self.path()
269 name = self.path()
270 fnode = self._filenode
270 fnode = self._filenode
271 for p in self._changectx.parents():
271 for p in self._changectx.parents():
272 try:
272 try:
273 if fnode == p.filenode(name):
273 if fnode == p.filenode(name):
274 return None
274 return None
275 except revlog.LookupError:
275 except revlog.LookupError:
276 pass
276 pass
277 return renamed
277 return renamed
278
278
279 def parents(self):
279 def parents(self):
280 p = self._path
280 p = self._path
281 fl = self._filelog
281 fl = self._filelog
282 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
282 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
283
283
284 r = self._filelog.renamed(self._filenode)
284 r = self._filelog.renamed(self._filenode)
285 if r:
285 if r:
286 pl[0] = (r[0], r[1], None)
286 pl[0] = (r[0], r[1], None)
287
287
288 return [filectx(self._repo, p, fileid=n, filelog=l)
288 return [filectx(self._repo, p, fileid=n, filelog=l)
289 for p,n,l in pl if n != nullid]
289 for p,n,l in pl if n != nullid]
290
290
291 def children(self):
291 def children(self):
292 # hard for renames
292 # hard for renames
293 c = self._filelog.children(self._filenode)
293 c = self._filelog.children(self._filenode)
294 return [filectx(self._repo, self._path, fileid=x,
294 return [filectx(self._repo, self._path, fileid=x,
295 filelog=self._filelog) for x in c]
295 filelog=self._filelog) for x in c]
296
296
297 def annotate(self, follow=False, linenumber=None):
297 def annotate(self, follow=False, linenumber=None):
298 '''returns a list of tuples of (ctx, line) for each line
298 '''returns a list of tuples of (ctx, line) for each line
299 in the file, where ctx is the filectx of the node where
299 in the file, where ctx is the filectx of the node where
300 that line was last changed.
300 that line was last changed.
301 This returns tuples of ((ctx, linenumber), line) for each line,
301 This returns tuples of ((ctx, linenumber), line) for each line,
302 if "linenumber" parameter is NOT "None".
302 if "linenumber" parameter is NOT "None".
303 In such tuples, linenumber means one at the first appearance
303 In such tuples, linenumber means one at the first appearance
304 in the managed file.
304 in the managed file.
305 To reduce annotation cost,
305 To reduce annotation cost,
306 this returns fixed value(False is used) as linenumber,
306 this returns fixed value(False is used) as linenumber,
307 if "linenumber" parameter is "False".'''
307 if "linenumber" parameter is "False".'''
308
308
309 def decorate_compat(text, rev):
309 def decorate_compat(text, rev):
310 return ([rev] * len(text.splitlines()), text)
310 return ([rev] * len(text.splitlines()), text)
311
311
312 def without_linenumber(text, rev):
312 def without_linenumber(text, rev):
313 return ([(rev, False)] * len(text.splitlines()), text)
313 return ([(rev, False)] * len(text.splitlines()), text)
314
314
315 def with_linenumber(text, rev):
315 def with_linenumber(text, rev):
316 size = len(text.splitlines())
316 size = len(text.splitlines())
317 return ([(rev, i) for i in xrange(1, size + 1)], text)
317 return ([(rev, i) for i in xrange(1, size + 1)], text)
318
318
319 decorate = (((linenumber is None) and decorate_compat) or
319 decorate = (((linenumber is None) and decorate_compat) or
320 (linenumber and with_linenumber) or
320 (linenumber and with_linenumber) or
321 without_linenumber)
321 without_linenumber)
322
322
323 def pair(parent, child):
323 def pair(parent, child):
324 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
324 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
325 child[0][b1:b2] = parent[0][a1:a2]
325 child[0][b1:b2] = parent[0][a1:a2]
326 return child
326 return child
327
327
328 getlog = util.cachefunc(lambda x: self._repo.file(x))
328 getlog = util.cachefunc(lambda x: self._repo.file(x))
329 def getctx(path, fileid):
329 def getctx(path, fileid):
330 log = path == self._path and self._filelog or getlog(path)
330 log = path == self._path and self._filelog or getlog(path)
331 return filectx(self._repo, path, fileid=fileid, filelog=log)
331 return filectx(self._repo, path, fileid=fileid, filelog=log)
332 getctx = util.cachefunc(getctx)
332 getctx = util.cachefunc(getctx)
333
333
334 def parents(f):
334 def parents(f):
335 # we want to reuse filectx objects as much as possible
335 # we want to reuse filectx objects as much as possible
336 p = f._path
336 p = f._path
337 if f._filerev is None: # working dir
337 if f._filerev is None: # working dir
338 pl = [(n.path(), n.filerev()) for n in f.parents()]
338 pl = [(n.path(), n.filerev()) for n in f.parents()]
339 else:
339 else:
340 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
340 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
341
341
342 if follow:
342 if follow:
343 r = f.renamed()
343 r = f.renamed()
344 if r:
344 if r:
345 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
345 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
346
346
347 return [getctx(p, n) for p, n in pl if n != nullrev]
347 return [getctx(p, n) for p, n in pl if n != nullrev]
348
348
349 # use linkrev to find the first changeset where self appeared
349 # use linkrev to find the first changeset where self appeared
350 if self.rev() != self.linkrev():
350 if self.rev() != self.linkrev():
351 base = self.filectx(self.filerev())
351 base = self.filectx(self.filerev())
352 else:
352 else:
353 base = self
353 base = self
354
354
355 # find all ancestors
355 # find all ancestors
356 needed = {base: 1}
356 needed = {base: 1}
357 visit = [base]
357 visit = [base]
358 files = [base._path]
358 files = [base._path]
359 while visit:
359 while visit:
360 f = visit.pop(0)
360 f = visit.pop(0)
361 for p in parents(f):
361 for p in parents(f):
362 if p not in needed:
362 if p not in needed:
363 needed[p] = 1
363 needed[p] = 1
364 visit.append(p)
364 visit.append(p)
365 if p._path not in files:
365 if p._path not in files:
366 files.append(p._path)
366 files.append(p._path)
367 else:
367 else:
368 # count how many times we'll use this
368 # count how many times we'll use this
369 needed[p] += 1
369 needed[p] += 1
370
370
371 # sort by revision (per file) which is a topological order
371 # sort by revision (per file) which is a topological order
372 visit = []
372 visit = []
373 for f in files:
373 for f in files:
374 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
374 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
375 visit.extend(fn)
375 visit.extend(fn)
376 visit.sort()
376 visit.sort()
377 hist = {}
377 hist = {}
378
378
379 for r, f in visit:
379 for r, f in visit:
380 curr = decorate(f.data(), f)
380 curr = decorate(f.data(), f)
381 for p in parents(f):
381 for p in parents(f):
382 if p != nullid:
382 if p != nullid:
383 curr = pair(hist[p], curr)
383 curr = pair(hist[p], curr)
384 # trim the history of unneeded revs
384 # trim the history of unneeded revs
385 needed[p] -= 1
385 needed[p] -= 1
386 if not needed[p]:
386 if not needed[p]:
387 del hist[p]
387 del hist[p]
388 hist[f] = curr
388 hist[f] = curr
389
389
390 return zip(hist[f][0], hist[f][1].splitlines(1))
390 return zip(hist[f][0], hist[f][1].splitlines(1))
391
391
392 def ancestor(self, fc2):
392 def ancestor(self, fc2):
393 """
393 """
394 find the common ancestor file context, if any, of self, and fc2
394 find the common ancestor file context, if any, of self, and fc2
395 """
395 """
396
396
397 acache = {}
397 acache = {}
398
398
399 # prime the ancestor cache for the working directory
399 # prime the ancestor cache for the working directory
400 for c in (self, fc2):
400 for c in (self, fc2):
401 if c._filerev == None:
401 if c._filerev == None:
402 pl = [(n.path(), n.filenode()) for n in c.parents()]
402 pl = [(n.path(), n.filenode()) for n in c.parents()]
403 acache[(c._path, None)] = pl
403 acache[(c._path, None)] = pl
404
404
405 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
405 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
406 def parents(vertex):
406 def parents(vertex):
407 if vertex in acache:
407 if vertex in acache:
408 return acache[vertex]
408 return acache[vertex]
409 f, n = vertex
409 f, n = vertex
410 if f not in flcache:
410 if f not in flcache:
411 flcache[f] = self._repo.file(f)
411 flcache[f] = self._repo.file(f)
412 fl = flcache[f]
412 fl = flcache[f]
413 pl = [(f, p) for p in fl.parents(n) if p != nullid]
413 pl = [(f, p) for p in fl.parents(n) if p != nullid]
414 re = fl.renamed(n)
414 re = fl.renamed(n)
415 if re:
415 if re:
416 pl.append(re)
416 pl.append(re)
417 acache[vertex] = pl
417 acache[vertex] = pl
418 return pl
418 return pl
419
419
420 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
420 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
421 v = ancestor.ancestor(a, b, parents)
421 v = ancestor.ancestor(a, b, parents)
422 if v:
422 if v:
423 f, n = v
423 f, n = v
424 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
424 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
425
425
426 return None
426 return None
427
427
428 class workingctx(changectx):
428 class workingctx(changectx):
429 """A workingctx object makes access to data related to
429 """A workingctx object makes access to data related to
430 the current working directory convenient."""
430 the current working directory convenient."""
431 def __init__(self, repo):
431 def __init__(self, repo):
432 self._repo = repo
432 self._repo = repo
433 self._rev = None
433 self._rev = None
434 self._node = None
434 self._node = None
435
435
436 def __str__(self):
436 def __str__(self):
437 return str(self._parents[0]) + "+"
437 return str(self._parents[0]) + "+"
438
438
439 def __nonzero__(self):
439 def __nonzero__(self):
440 return True
440 return True
441
441
442 def __getattr__(self, name):
442 def __getattr__(self, name):
443 if name == '_parents':
443 if name == '_parents':
444 self._parents = self._repo.parents()
444 self._parents = self._repo.parents()
445 return self._parents
445 return self._parents
446 if name == '_status':
446 if name == '_status':
447 self._status = self._repo.status()
447 self._status = self._repo.status()
448 return self._status
448 return self._status
449 if name == '_manifest':
449 if name == '_manifest':
450 self._buildmanifest()
450 self._buildmanifest()
451 return self._manifest
451 return self._manifest
452 else:
452 else:
453 raise AttributeError, name
453 raise AttributeError, name
454
454
455 def _buildmanifest(self):
455 def _buildmanifest(self):
456 """generate a manifest corresponding to the working directory"""
456 """generate a manifest corresponding to the working directory"""
457
457
458 man = self._parents[0].manifest().copy()
458 man = self._parents[0].manifest().copy()
459 copied = self._repo.dirstate.copies()
459 copied = self._repo.dirstate.copies()
460 is_exec = util.execfunc(self._repo.root,
460 is_exec = util.execfunc(self._repo.root,
461 lambda p: man.execf(copied.get(p,p)))
461 lambda p: man.execf(copied.get(p,p)))
462 is_link = util.linkfunc(self._repo.root,
462 is_link = util.linkfunc(self._repo.root,
463 lambda p: man.linkf(copied.get(p,p)))
463 lambda p: man.linkf(copied.get(p,p)))
464 modified, added, removed, deleted, unknown = self._status[:5]
464 modified, added, removed, deleted, unknown = self._status[:5]
465 for i, l in (("a", added), ("m", modified), ("u", unknown)):
465 for i, l in (("a", added), ("m", modified), ("u", unknown)):
466 for f in l:
466 for f in l:
467 man[f] = man.get(copied.get(f, f), nullid) + i
467 man[f] = man.get(copied.get(f, f), nullid) + i
468 try:
468 try:
469 man.set(f, is_exec(f), is_link(f))
469 man.set(f, is_exec(f), is_link(f))
470 except OSError:
470 except OSError:
471 pass
471 pass
472
472
473 for f in deleted + removed:
473 for f in deleted + removed:
474 if f in man:
474 if f in man:
475 del man[f]
475 del man[f]
476
476
477 self._manifest = man
477 self._manifest = man
478
478
479 def manifest(self): return self._manifest
479 def manifest(self): return self._manifest
480
480
481 def user(self): return self._repo.ui.username()
481 def user(self): return self._repo.ui.username()
482 def date(self): return util.makedate()
482 def date(self): return util.makedate()
483 def description(self): return ""
483 def description(self): return ""
484 def files(self):
484 def files(self):
485 f = self.modified() + self.added() + self.removed()
485 f = self.modified() + self.added() + self.removed()
486 f.sort()
486 f.sort()
487 return f
487 return f
488
488
489 def modified(self): return self._status[0]
489 def modified(self): return self._status[0]
490 def added(self): return self._status[1]
490 def added(self): return self._status[1]
491 def removed(self): return self._status[2]
491 def removed(self): return self._status[2]
492 def deleted(self): return self._status[3]
492 def deleted(self): return self._status[3]
493 def unknown(self): return self._status[4]
493 def unknown(self): return self._status[4]
494 def clean(self): return self._status[5]
494 def clean(self): return self._status[5]
495 def branch(self): return self._repo.dirstate.branch()
495 def branch(self): return self._repo.dirstate.branch()
496
496
497 def tags(self):
497 def tags(self):
498 t = []
498 t = []
499 [t.extend(p.tags()) for p in self.parents()]
499 [t.extend(p.tags()) for p in self.parents()]
500 return t
500 return t
501
501
502 def parents(self):
502 def parents(self):
503 """return contexts for each parent changeset"""
503 """return contexts for each parent changeset"""
504 return self._parents
504 return self._parents
505
505
506 def children(self):
506 def children(self):
507 return []
507 return []
508
508
509 def fileflags(self, path):
509 def fileflags(self, path):
510 if '_manifest' in self.__dict__:
510 if '_manifest' in self.__dict__:
511 try:
511 try:
512 return self._manifest.flags(path)
512 return self._manifest.flags(path)
513 except KeyError:
513 except KeyError:
514 return ''
514 return ''
515
515
516 pnode = self._parents[0].changeset()[0]
516 pnode = self._parents[0].changeset()[0]
517 orig = self._repo.dirstate.copies().get(path, path)
517 orig = self._repo.dirstate.copies().get(path, path)
518 node, flag = self._repo.manifest.find(pnode, orig)
518 node, flag = self._repo.manifest.find(pnode, orig)
519 is_link = util.linkfunc(self._repo.root, lambda p: 'l' in flag)
519 is_link = util.linkfunc(self._repo.root, lambda p: 'l' in flag)
520 is_exec = util.execfunc(self._repo.root, lambda p: 'x' in flag)
520 is_exec = util.execfunc(self._repo.root, lambda p: 'x' in flag)
521 try:
521 try:
522 return (is_link(path) and 'l' or '') + (is_exec(path) and 'e' or '')
522 return (is_link(path) and 'l' or '') + (is_exec(path) and 'e' or '')
523 except OSError:
523 except OSError:
524 pass
524 pass
525
525
526 if not node or path in self.deleted() or path in self.removed():
526 if not node or path in self.deleted() or path in self.removed():
527 return ''
527 return ''
528 return flag
528 return flag
529
529
530 def filectx(self, path, filelog=None):
530 def filectx(self, path, filelog=None):
531 """get a file context from the working directory"""
531 """get a file context from the working directory"""
532 return workingfilectx(self._repo, path, workingctx=self,
532 return workingfilectx(self._repo, path, workingctx=self,
533 filelog=filelog)
533 filelog=filelog)
534
534
535 def ancestor(self, c2):
535 def ancestor(self, c2):
536 """return the ancestor context of self and c2"""
536 """return the ancestor context of self and c2"""
537 return self._parents[0].ancestor(c2) # punt on two parents for now
537 return self._parents[0].ancestor(c2) # punt on two parents for now
538
538
539 class workingfilectx(filectx):
539 class workingfilectx(filectx):
540 """A workingfilectx object makes access to data related to a particular
540 """A workingfilectx object makes access to data related to a particular
541 file in the working directory convenient."""
541 file in the working directory convenient."""
542 def __init__(self, repo, path, filelog=None, workingctx=None):
542 def __init__(self, repo, path, filelog=None, workingctx=None):
543 """changeid can be a changeset revision, node, or tag.
543 """changeid can be a changeset revision, node, or tag.
544 fileid can be a file revision or node."""
544 fileid can be a file revision or node."""
545 self._repo = repo
545 self._repo = repo
546 self._path = path
546 self._path = path
547 self._changeid = None
547 self._changeid = None
548 self._filerev = self._filenode = None
548 self._filerev = self._filenode = None
549
549
550 if filelog:
550 if filelog:
551 self._filelog = filelog
551 self._filelog = filelog
552 if workingctx:
552 if workingctx:
553 self._changectx = workingctx
553 self._changectx = workingctx
554
554
555 def __getattr__(self, name):
555 def __getattr__(self, name):
556 if name == '_changectx':
556 if name == '_changectx':
557 self._changectx = workingctx(self._repo)
557 self._changectx = workingctx(self._repo)
558 return self._changectx
558 return self._changectx
559 elif name == '_repopath':
559 elif name == '_repopath':
560 self._repopath = (self._repo.dirstate.copied(self._path)
560 self._repopath = (self._repo.dirstate.copied(self._path)
561 or self._path)
561 or self._path)
562 return self._repopath
562 return self._repopath
563 elif name == '_filelog':
563 elif name == '_filelog':
564 self._filelog = self._repo.file(self._repopath)
564 self._filelog = self._repo.file(self._repopath)
565 return self._filelog
565 return self._filelog
566 else:
566 else:
567 raise AttributeError, name
567 raise AttributeError, name
568
568
569 def __nonzero__(self):
569 def __nonzero__(self):
570 return True
570 return True
571
571
572 def __str__(self):
572 def __str__(self):
573 return "%s@%s" % (self.path(), self._changectx)
573 return "%s@%s" % (self.path(), self._changectx)
574
574
575 def filectx(self, fileid):
575 def filectx(self, fileid):
576 '''opens an arbitrary revision of the file without
576 '''opens an arbitrary revision of the file without
577 opening a new filelog'''
577 opening a new filelog'''
578 return filectx(self._repo, self._repopath, fileid=fileid,
578 return filectx(self._repo, self._repopath, fileid=fileid,
579 filelog=self._filelog)
579 filelog=self._filelog)
580
580
581 def rev(self):
581 def rev(self):
582 if '_changectx' in self.__dict__:
582 if '_changectx' in self.__dict__:
583 return self._changectx.rev()
583 return self._changectx.rev()
584 return self._filelog.linkrev(self._filenode)
584 return self._filelog.linkrev(self._filenode)
585
585
586 def data(self): return self._repo.wread(self._path)
586 def data(self): return self._repo.wread(self._path)
587 def renamed(self):
587 def renamed(self):
588 rp = self._repopath
588 rp = self._repopath
589 if rp == self._path:
589 if rp == self._path:
590 return None
590 return None
591 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
591 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
592
592
593 def parents(self):
593 def parents(self):
594 '''return parent filectxs, following copies if necessary'''
594 '''return parent filectxs, following copies if necessary'''
595 p = self._path
595 p = self._path
596 rp = self._repopath
596 rp = self._repopath
597 pcl = self._changectx._parents
597 pcl = self._changectx._parents
598 fl = self._filelog
598 fl = self._filelog
599 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
599 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
600 if len(pcl) > 1:
600 if len(pcl) > 1:
601 if rp != p:
601 if rp != p:
602 fl = None
602 fl = None
603 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
603 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
604
604
605 return [filectx(self._repo, p, fileid=n, filelog=l)
605 return [filectx(self._repo, p, fileid=n, filelog=l)
606 for p,n,l in pl if n != nullid]
606 for p,n,l in pl if n != nullid]
607
607
608 def children(self):
608 def children(self):
609 return []
609 return []
610
610
611 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
611 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
612 def date(self):
612 def date(self):
613 t, tz = self._changectx.date()
613 t, tz = self._changectx.date()
614 try:
614 try:
615 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
615 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
616 except OSError, err:
616 except OSError, err:
617 if err.errno != errno.ENOENT: raise
617 if err.errno != errno.ENOENT: raise
618 return (t, tz)
618 return (t, tz)
619
619
620 def cmp(self, text): return self._repo.wread(self._path) == text
620 def cmp(self, text): return self._repo.wread(self._path) == text
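
context.py above defines the two objects most scripts and extensions go through: changectx for a whole changeset and filectx for one file revision within it. The following sketch is a hypothetical illustration, not part of the Mercurial source: the show_tip helper is invented, 'repo' is assumed to be an already-open localrepository object, and the import path assumes the file lives at mercurial/context.py as in the upstream tree; everything it calls (changectx(repo, 'tip'), files(), __contains__, filectx(), size()) is defined in the file above.

from mercurial.context import changectx   # the class defined in the file above

def show_tip(repo):
    # 'repo' is assumed to be an already-open localrepository object
    ctx = changectx(repo, 'tip')           # changeid may be a rev number, node or tag
    print("changeset %s by %s" % (ctx, ctx.user()))
    print(ctx.description())
    for path in ctx.files():               # files touched by this changeset
        if path in ctx:                     # still present in the manifest?
            fctx = ctx.filectx(path)
            print("  %s (%d bytes)" % (path, fctx.size()))
        else:
            print("  %s (removed)" % path)
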
@@ -1,598 +1,598 @@
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 from node import *
10 from node import nullid
11 from i18n import _
11 from i18n import _
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 import cStringIO, osutil
13 import cStringIO, osutil
14
14
15 _unknown = ('?', 0, 0, 0)
15 _unknown = ('?', 0, 0, 0)
16 _format = ">cllll"
16 _format = ">cllll"
17
17
18 class dirstate(object):
18 class dirstate(object):
19
19
20 def __init__(self, opener, ui, root):
20 def __init__(self, opener, ui, root):
21 self._opener = opener
21 self._opener = opener
22 self._root = root
22 self._root = root
23 self._dirty = False
23 self._dirty = False
24 self._dirtypl = False
24 self._dirtypl = False
25 self._ui = ui
25 self._ui = ui
26
26
27 def __getattr__(self, name):
27 def __getattr__(self, name):
28 if name == '_map':
28 if name == '_map':
29 self._read()
29 self._read()
30 return self._map
30 return self._map
31 elif name == '_copymap':
31 elif name == '_copymap':
32 self._read()
32 self._read()
33 return self._copymap
33 return self._copymap
34 elif name == '_branch':
34 elif name == '_branch':
35 try:
35 try:
36 self._branch = (self._opener("branch").read().strip()
36 self._branch = (self._opener("branch").read().strip()
37 or "default")
37 or "default")
38 except IOError:
38 except IOError:
39 self._branch = "default"
39 self._branch = "default"
40 return self._branch
40 return self._branch
41 elif name == '_pl':
41 elif name == '_pl':
42 self._pl = [nullid, nullid]
42 self._pl = [nullid, nullid]
43 try:
43 try:
44 st = self._opener("dirstate").read(40)
44 st = self._opener("dirstate").read(40)
45 if len(st) == 40:
45 if len(st) == 40:
46 self._pl = st[:20], st[20:40]
46 self._pl = st[:20], st[20:40]
47 except IOError, err:
47 except IOError, err:
48 if err.errno != errno.ENOENT: raise
48 if err.errno != errno.ENOENT: raise
49 return self._pl
49 return self._pl
50 elif name == '_dirs':
50 elif name == '_dirs':
51 self._dirs = {}
51 self._dirs = {}
52 for f in self._map:
52 for f in self._map:
53 if self[f] != 'r':
53 if self[f] != 'r':
54 self._incpath(f)
54 self._incpath(f)
55 return self._dirs
55 return self._dirs
56 elif name == '_ignore':
56 elif name == '_ignore':
57 files = [self._join('.hgignore')]
57 files = [self._join('.hgignore')]
58 for name, path in self._ui.configitems("ui"):
58 for name, path in self._ui.configitems("ui"):
59 if name == 'ignore' or name.startswith('ignore.'):
59 if name == 'ignore' or name.startswith('ignore.'):
60 files.append(os.path.expanduser(path))
60 files.append(os.path.expanduser(path))
61 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
61 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
62 return self._ignore
62 return self._ignore
63 elif name == '_slash':
63 elif name == '_slash':
64 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
64 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
65 return self._slash
65 return self._slash
66 else:
66 else:
67 raise AttributeError, name
67 raise AttributeError, name
68
68
69 def _join(self, f):
69 def _join(self, f):
70 return os.path.join(self._root, f)
70 return os.path.join(self._root, f)
71
71
72 def getcwd(self):
72 def getcwd(self):
73 cwd = os.getcwd()
73 cwd = os.getcwd()
74 if cwd == self._root: return ''
74 if cwd == self._root: return ''
75 # self._root ends with a path separator if self._root is '/' or 'C:\'
75 # self._root ends with a path separator if self._root is '/' or 'C:\'
76 rootsep = self._root
76 rootsep = self._root
77 if not util.endswithsep(rootsep):
77 if not util.endswithsep(rootsep):
78 rootsep += os.sep
78 rootsep += os.sep
79 if cwd.startswith(rootsep):
79 if cwd.startswith(rootsep):
80 return cwd[len(rootsep):]
80 return cwd[len(rootsep):]
81 else:
81 else:
82 # we're outside the repo. return an absolute path.
82 # we're outside the repo. return an absolute path.
83 return cwd
83 return cwd
84
84
85 def pathto(self, f, cwd=None):
85 def pathto(self, f, cwd=None):
86 if cwd is None:
86 if cwd is None:
87 cwd = self.getcwd()
87 cwd = self.getcwd()
88 path = util.pathto(self._root, cwd, f)
88 path = util.pathto(self._root, cwd, f)
89 if self._slash:
89 if self._slash:
90 return util.normpath(path)
90 return util.normpath(path)
91 return path
91 return path
92
92
93 def __getitem__(self, key):
93 def __getitem__(self, key):
94 ''' current states:
94 ''' current states:
95 n normal
95 n normal
96 m needs merging
96 m needs merging
97 r marked for removal
97 r marked for removal
98 a marked for addition
98 a marked for addition
99 ? not tracked'''
99 ? not tracked'''
100 return self._map.get(key, ("?",))[0]
100 return self._map.get(key, ("?",))[0]
101
101
102 def __contains__(self, key):
102 def __contains__(self, key):
103 return key in self._map
103 return key in self._map
104
104
105 def __iter__(self):
105 def __iter__(self):
106 a = self._map.keys()
106 a = self._map.keys()
107 a.sort()
107 a.sort()
108 for x in a:
108 for x in a:
109 yield x
109 yield x
110
110
111 def parents(self):
111 def parents(self):
112 return self._pl
112 return self._pl
113
113
114 def branch(self):
114 def branch(self):
115 return self._branch
115 return self._branch
116
116
117 def setparents(self, p1, p2=nullid):
117 def setparents(self, p1, p2=nullid):
118 self._dirty = self._dirtypl = True
118 self._dirty = self._dirtypl = True
119 self._pl = p1, p2
119 self._pl = p1, p2
120
120
121 def setbranch(self, branch):
121 def setbranch(self, branch):
122 self._branch = branch
122 self._branch = branch
123 self._opener("branch", "w").write(branch + '\n')
123 self._opener("branch", "w").write(branch + '\n')
124
124
125 def _read(self):
125 def _read(self):
126 self._map = {}
126 self._map = {}
127 self._copymap = {}
127 self._copymap = {}
128 if not self._dirtypl:
128 if not self._dirtypl:
129 self._pl = [nullid, nullid]
129 self._pl = [nullid, nullid]
130 try:
130 try:
131 st = self._opener("dirstate").read()
131 st = self._opener("dirstate").read()
132 except IOError, err:
132 except IOError, err:
133 if err.errno != errno.ENOENT: raise
133 if err.errno != errno.ENOENT: raise
134 return
134 return
135 if not st:
135 if not st:
136 return
136 return
137
137
138 if not self._dirtypl:
138 if not self._dirtypl:
139 self._pl = [st[:20], st[20: 40]]
139 self._pl = [st[:20], st[20: 40]]
140
140
141 # deref fields so they will be local in loop
141 # deref fields so they will be local in loop
142 dmap = self._map
142 dmap = self._map
143 copymap = self._copymap
143 copymap = self._copymap
144 unpack = struct.unpack
144 unpack = struct.unpack
145 e_size = struct.calcsize(_format)
145 e_size = struct.calcsize(_format)
146 pos1 = 40
146 pos1 = 40
147 l = len(st)
147 l = len(st)
148
148
149 # the inner loop
149 # the inner loop
150 while pos1 < l:
150 while pos1 < l:
151 pos2 = pos1 + e_size
151 pos2 = pos1 + e_size
152 e = unpack(">cllll", st[pos1:pos2]) # a literal here is faster
152 e = unpack(">cllll", st[pos1:pos2]) # a literal here is faster
153 pos1 = pos2 + e[4]
153 pos1 = pos2 + e[4]
154 f = st[pos2:pos1]
154 f = st[pos2:pos1]
155 if '\0' in f:
155 if '\0' in f:
156 f, c = f.split('\0')
156 f, c = f.split('\0')
157 copymap[f] = c
157 copymap[f] = c
158 dmap[f] = e # we hold onto e[4] because making a subtuple is slow
158 dmap[f] = e # we hold onto e[4] because making a subtuple is slow
159
159
160 def invalidate(self):
160 def invalidate(self):
161 for a in "_map _copymap _branch _pl _dirs _ignore".split():
161 for a in "_map _copymap _branch _pl _dirs _ignore".split():
162 if a in self.__dict__:
162 if a in self.__dict__:
163 delattr(self, a)
163 delattr(self, a)
164 self._dirty = False
164 self._dirty = False
165
165
166 def copy(self, source, dest):
166 def copy(self, source, dest):
167 self._dirty = True
167 self._dirty = True
168 self._copymap[dest] = source
168 self._copymap[dest] = source
169
169
170 def copied(self, file):
170 def copied(self, file):
171 return self._copymap.get(file, None)
171 return self._copymap.get(file, None)
172
172
173 def copies(self):
173 def copies(self):
174 return self._copymap
174 return self._copymap
175
175
176 def _incpath(self, path):
176 def _incpath(self, path):
177 c = path.rfind('/')
177 c = path.rfind('/')
178 if c >= 0:
178 if c >= 0:
179 dirs = self._dirs
179 dirs = self._dirs
180 base = path[:c]
180 base = path[:c]
181 if base not in dirs:
181 if base not in dirs:
182 self._incpath(base)
182 self._incpath(base)
183 dirs[base] = 1
183 dirs[base] = 1
184 else:
184 else:
185 dirs[base] += 1
185 dirs[base] += 1
186
186
187 def _decpath(self, path):
187 def _decpath(self, path):
188 c = path.rfind('/')
188 c = path.rfind('/')
189 if c >= 0:
189 if c >= 0:
190 base = path[:c]
190 base = path[:c]
191 dirs = self._dirs
191 dirs = self._dirs
192 if dirs[base] == 1:
192 if dirs[base] == 1:
193 del dirs[base]
193 del dirs[base]
194 self._decpath(base)
194 self._decpath(base)
195 else:
195 else:
196 dirs[base] -= 1
196 dirs[base] -= 1
197
197
198 def _incpathcheck(self, f):
198 def _incpathcheck(self, f):
199 if '\r' in f or '\n' in f:
199 if '\r' in f or '\n' in f:
200 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
200 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
201 % f)
201 % f)
202 # shadows
202 # shadows
203 if f in self._dirs:
203 if f in self._dirs:
204 raise util.Abort(_('directory %r already in dirstate') % f)
204 raise util.Abort(_('directory %r already in dirstate') % f)
205 for c in strutil.rfindall(f, '/'):
205 for c in strutil.rfindall(f, '/'):
206 d = f[:c]
206 d = f[:c]
207 if d in self._dirs:
207 if d in self._dirs:
208 break
208 break
209 if d in self._map and self[d] != 'r':
209 if d in self._map and self[d] != 'r':
210 raise util.Abort(_('file %r in dirstate clashes with %r') %
210 raise util.Abort(_('file %r in dirstate clashes with %r') %
211 (d, f))
211 (d, f))
212 self._incpath(f)
212 self._incpath(f)
213
213
214 def _changepath(self, f, newstate, relaxed=False):
214 def _changepath(self, f, newstate, relaxed=False):
215 # handle upcoming path changes
215 # handle upcoming path changes
216 oldstate = self[f]
216 oldstate = self[f]
217 if oldstate not in "?r" and newstate in "?r":
217 if oldstate not in "?r" and newstate in "?r":
218 if "_dirs" in self.__dict__:
218 if "_dirs" in self.__dict__:
219 self._decpath(f)
219 self._decpath(f)
220 return
220 return
221 if oldstate in "?r" and newstate not in "?r":
221 if oldstate in "?r" and newstate not in "?r":
222 if relaxed and oldstate == '?':
222 if relaxed and oldstate == '?':
223 # XXX
223 # XXX
224 # in relaxed mode we assume the caller knows
224 # in relaxed mode we assume the caller knows
225 # what it is doing, workaround for updating
225 # what it is doing, workaround for updating
226 # dir-to-file revisions
226 # dir-to-file revisions
227 if "_dirs" in self.__dict__:
227 if "_dirs" in self.__dict__:
228 self._incpath(f)
228 self._incpath(f)
229 return
229 return
230 self._incpathcheck(f)
230 self._incpathcheck(f)
231 return
231 return
232
232
233 def normal(self, f):
233 def normal(self, f):
234 'mark a file normal and clean'
234 'mark a file normal and clean'
235 self._dirty = True
235 self._dirty = True
236 self._changepath(f, 'n', True)
236 self._changepath(f, 'n', True)
237 s = os.lstat(self._join(f))
237 s = os.lstat(self._join(f))
238 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
238 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
239 if f in self._copymap:
239 if f in self._copymap:
240 del self._copymap[f]
240 del self._copymap[f]
241
241
242 def normallookup(self, f):
242 def normallookup(self, f):
243 'mark a file normal, but possibly dirty'
243 'mark a file normal, but possibly dirty'
244 self._dirty = True
244 self._dirty = True
245 self._changepath(f, 'n', True)
245 self._changepath(f, 'n', True)
246 self._map[f] = ('n', 0, -1, -1, 0)
246 self._map[f] = ('n', 0, -1, -1, 0)
247 if f in self._copymap:
247 if f in self._copymap:
248 del self._copymap[f]
248 del self._copymap[f]
249
249
250 def normaldirty(self, f):
250 def normaldirty(self, f):
251 'mark a file normal, but dirty'
251 'mark a file normal, but dirty'
252 self._dirty = True
252 self._dirty = True
253 self._changepath(f, 'n', True)
253 self._changepath(f, 'n', True)
254 self._map[f] = ('n', 0, -2, -1, 0)
254 self._map[f] = ('n', 0, -2, -1, 0)
255 if f in self._copymap:
255 if f in self._copymap:
256 del self._copymap[f]
256 del self._copymap[f]
257
257
258 def add(self, f):
258 def add(self, f):
259 'mark a file added'
259 'mark a file added'
260 self._dirty = True
260 self._dirty = True
261 self._changepath(f, 'a')
261 self._changepath(f, 'a')
262 self._map[f] = ('a', 0, -1, -1, 0)
262 self._map[f] = ('a', 0, -1, -1, 0)
263 if f in self._copymap:
263 if f in self._copymap:
264 del self._copymap[f]
264 del self._copymap[f]
265
265
266 def remove(self, f):
266 def remove(self, f):
267 'mark a file removed'
267 'mark a file removed'
268 self._dirty = True
268 self._dirty = True
269 self._changepath(f, 'r')
269 self._changepath(f, 'r')
270 self._map[f] = ('r', 0, 0, 0, 0)
270 self._map[f] = ('r', 0, 0, 0, 0)
271 if f in self._copymap:
271 if f in self._copymap:
272 del self._copymap[f]
272 del self._copymap[f]
273
273
274 def merge(self, f):
274 def merge(self, f):
275 'mark a file merged'
275 'mark a file merged'
276 self._dirty = True
276 self._dirty = True
277 s = os.lstat(self._join(f))
277 s = os.lstat(self._join(f))
278 self._changepath(f, 'm', True)
278 self._changepath(f, 'm', True)
279 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0)
279 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0)
280 if f in self._copymap:
280 if f in self._copymap:
281 del self._copymap[f]
281 del self._copymap[f]
282
282
283 def forget(self, f):
283 def forget(self, f):
284 'forget a file'
284 'forget a file'
285 self._dirty = True
285 self._dirty = True
286 try:
286 try:
287 self._changepath(f, '?')
287 self._changepath(f, '?')
288 del self._map[f]
288 del self._map[f]
289 except KeyError:
289 except KeyError:
290 self._ui.warn(_("not in dirstate: %s\n") % f)
290 self._ui.warn(_("not in dirstate: %s\n") % f)
291
291
292 def clear(self):
292 def clear(self):
293 self._map = {}
293 self._map = {}
294 if "_dirs" in self.__dict__:
294 if "_dirs" in self.__dict__:
295 delattr(self, "_dirs");
295 delattr(self, "_dirs");
296 self._copymap = {}
296 self._copymap = {}
297 self._pl = [nullid, nullid]
297 self._pl = [nullid, nullid]
298 self._dirty = True
298 self._dirty = True
299
299
300 def rebuild(self, parent, files):
300 def rebuild(self, parent, files):
301 self.clear()
301 self.clear()
302 for f in files:
302 for f in files:
303 if files.execf(f):
303 if files.execf(f):
304 self._map[f] = ('n', 0777, -1, 0, 0)
304 self._map[f] = ('n', 0777, -1, 0, 0)
305 else:
305 else:
306 self._map[f] = ('n', 0666, -1, 0, 0)
306 self._map[f] = ('n', 0666, -1, 0, 0)
307 self._pl = (parent, nullid)
307 self._pl = (parent, nullid)
308 self._dirty = True
308 self._dirty = True
309
309
310 def write(self):
310 def write(self):
311 if not self._dirty:
311 if not self._dirty:
312 return
312 return
313 cs = cStringIO.StringIO()
313 cs = cStringIO.StringIO()
314 copymap = self._copymap
314 copymap = self._copymap
315 pack = struct.pack
315 pack = struct.pack
316 write = cs.write
316 write = cs.write
317 write("".join(self._pl))
317 write("".join(self._pl))
318 for f, e in self._map.iteritems():
318 for f, e in self._map.iteritems():
319 if f in copymap:
319 if f in copymap:
320 f = "%s\0%s" % (f, copymap[f])
320 f = "%s\0%s" % (f, copymap[f])
321 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
321 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
322 write(e)
322 write(e)
323 write(f)
323 write(f)
324 st = self._opener("dirstate", "w", atomictemp=True)
324 st = self._opener("dirstate", "w", atomictemp=True)
325 st.write(cs.getvalue())
325 st.write(cs.getvalue())
326 st.rename()
326 st.rename()
327 self._dirty = self._dirtypl = False
327 self._dirty = self._dirtypl = False
328
328
329 def _filter(self, files):
329 def _filter(self, files):
330 ret = {}
330 ret = {}
331 unknown = []
331 unknown = []
332
332
333 for x in files:
333 for x in files:
334 if x == '.':
334 if x == '.':
335 return self._map.copy()
335 return self._map.copy()
336 if x not in self._map:
336 if x not in self._map:
337 unknown.append(x)
337 unknown.append(x)
338 else:
338 else:
339 ret[x] = self._map[x]
339 ret[x] = self._map[x]
340
340
341 if not unknown:
341 if not unknown:
342 return ret
342 return ret
343
343
344 b = self._map.keys()
344 b = self._map.keys()
345 b.sort()
345 b.sort()
346 blen = len(b)
346 blen = len(b)
347
347
348 for x in unknown:
348 for x in unknown:
349 bs = bisect.bisect(b, "%s%s" % (x, '/'))
349 bs = bisect.bisect(b, "%s%s" % (x, '/'))
350 while bs < blen:
350 while bs < blen:
351 s = b[bs]
351 s = b[bs]
352 if len(s) > len(x) and s.startswith(x):
352 if len(s) > len(x) and s.startswith(x):
353 ret[s] = self._map[s]
353 ret[s] = self._map[s]
354 else:
354 else:
355 break
355 break
356 bs += 1
356 bs += 1
357 return ret
357 return ret
358
358
359 def _supported(self, f, mode, verbose=False):
359 def _supported(self, f, mode, verbose=False):
360 if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
360 if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
361 return True
361 return True
362 if verbose:
362 if verbose:
363 kind = 'unknown'
363 kind = 'unknown'
364 if stat.S_ISCHR(mode): kind = _('character device')
364 if stat.S_ISCHR(mode): kind = _('character device')
365 elif stat.S_ISBLK(mode): kind = _('block device')
365 elif stat.S_ISBLK(mode): kind = _('block device')
366 elif stat.S_ISFIFO(mode): kind = _('fifo')
366 elif stat.S_ISFIFO(mode): kind = _('fifo')
367 elif stat.S_ISSOCK(mode): kind = _('socket')
367 elif stat.S_ISSOCK(mode): kind = _('socket')
368 elif stat.S_ISDIR(mode): kind = _('directory')
368 elif stat.S_ISDIR(mode): kind = _('directory')
369 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
369 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
370 % (self.pathto(f), kind))
370 % (self.pathto(f), kind))
371 return False
371 return False
372
372
373 def _dirignore(self, f):
373 def _dirignore(self, f):
374 if self._ignore(f):
374 if self._ignore(f):
375 return True
375 return True
376 for c in strutil.findall(f, '/'):
376 for c in strutil.findall(f, '/'):
377 if self._ignore(f[:c]):
377 if self._ignore(f[:c]):
378 return True
378 return True
379 return False
379 return False
380
380
381 def walk(self, files=None, match=util.always, badmatch=None):
381 def walk(self, files=None, match=util.always, badmatch=None):
382 # filter out the stat
382 # filter out the stat
383 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
383 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
384 yield src, f
384 yield src, f
385
385
386 def statwalk(self, files=None, match=util.always, unknown=True,
386 def statwalk(self, files=None, match=util.always, unknown=True,
387 ignored=False, badmatch=None, directories=False):
387 ignored=False, badmatch=None, directories=False):
388 '''
388 '''
389 walk recursively through the directory tree, finding all files
389 walk recursively through the directory tree, finding all files
390 matched by the match function
390 matched by the match function
391
391
392 results are yielded in a tuple (src, filename, st), where src
392 results are yielded in a tuple (src, filename, st), where src
393 is one of:
393 is one of:
394 'f' the file was found in the directory tree
394 'f' the file was found in the directory tree
395 'd' the file is a directory of the tree
395 'd' the file is a directory of the tree
396 'm' the file was only in the dirstate and not in the tree
396 'm' the file was only in the dirstate and not in the tree
397 'b' file was not found and matched badmatch
397 'b' file was not found and matched badmatch
398
398
399 and st is the stat result if the file was found in the directory.
399 and st is the stat result if the file was found in the directory.
400 '''
400 '''
401
401
402 # walk all files by default
402 # walk all files by default
403 if not files:
403 if not files:
404 files = ['.']
404 files = ['.']
405 dc = self._map.copy()
405 dc = self._map.copy()
406 else:
406 else:
407 files = util.unique(files)
407 files = util.unique(files)
408 dc = self._filter(files)
408 dc = self._filter(files)
409
409
410 def imatch(file_):
410 def imatch(file_):
411 if file_ not in dc and self._ignore(file_):
411 if file_ not in dc and self._ignore(file_):
412 return False
412 return False
413 return match(file_)
413 return match(file_)
414
414
415 # TODO: don't walk unknown directories if unknown and ignored are False
415 # TODO: don't walk unknown directories if unknown and ignored are False
416 ignore = self._ignore
416 ignore = self._ignore
417 dirignore = self._dirignore
417 dirignore = self._dirignore
418 if ignored:
418 if ignored:
419 imatch = match
419 imatch = match
420 ignore = util.never
420 ignore = util.never
421 dirignore = util.never
421 dirignore = util.never
422
422
423 # self._root may end with a path separator when self._root == '/'
423 # self._root may end with a path separator when self._root == '/'
424 common_prefix_len = len(self._root)
424 common_prefix_len = len(self._root)
425 if not util.endswithsep(self._root):
425 if not util.endswithsep(self._root):
426 common_prefix_len += 1
426 common_prefix_len += 1
427
427
428 normpath = util.normpath
428 normpath = util.normpath
429 listdir = osutil.listdir
429 listdir = osutil.listdir
430 lstat = os.lstat
430 lstat = os.lstat
431 bisect_left = bisect.bisect_left
431 bisect_left = bisect.bisect_left
432 isdir = os.path.isdir
432 isdir = os.path.isdir
433 pconvert = util.pconvert
433 pconvert = util.pconvert
434 join = os.path.join
434 join = os.path.join
435 s_isdir = stat.S_ISDIR
435 s_isdir = stat.S_ISDIR
436 supported = self._supported
436 supported = self._supported
437 _join = self._join
437 _join = self._join
438 known = {'.hg': 1}
438 known = {'.hg': 1}
439
439
440 # recursion free walker, faster than os.walk.
440 # recursion free walker, faster than os.walk.
441 def findfiles(s):
441 def findfiles(s):
442 work = [s]
442 work = [s]
443 wadd = work.append
443 wadd = work.append
444 found = []
444 found = []
445 add = found.append
445 add = found.append
446 if directories:
446 if directories:
447 add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
447 add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
448 while work:
448 while work:
449 top = work.pop()
449 top = work.pop()
450 entries = listdir(top, stat=True)
450 entries = listdir(top, stat=True)
451 # nd is the top of the repository dir tree
451 # nd is the top of the repository dir tree
452 nd = normpath(top[common_prefix_len:])
452 nd = normpath(top[common_prefix_len:])
453 if nd == '.':
453 if nd == '.':
454 nd = ''
454 nd = ''
455 else:
455 else:
456 # do not recurse into a repo contained in this
456 # do not recurse into a repo contained in this
457 # one. use bisect to find the .hg directory so the
457 # one. use bisect to find the .hg directory so the
458 # lookup stays fast in big directories.
458 # lookup stays fast in big directories.
459 names = [e[0] for e in entries]
459 names = [e[0] for e in entries]
460 hg = bisect_left(names, '.hg')
460 hg = bisect_left(names, '.hg')
461 if hg < len(names) and names[hg] == '.hg':
461 if hg < len(names) and names[hg] == '.hg':
462 if isdir(join(top, '.hg')):
462 if isdir(join(top, '.hg')):
463 continue
463 continue
464 for f, kind, st in entries:
464 for f, kind, st in entries:
465 np = pconvert(join(nd, f))
465 np = pconvert(join(nd, f))
466 if np in known:
466 if np in known:
467 continue
467 continue
468 known[np] = 1
468 known[np] = 1
469 p = join(top, f)
469 p = join(top, f)
470 # don't trip over symlinks
470 # don't trip over symlinks
471 if kind == stat.S_IFDIR:
471 if kind == stat.S_IFDIR:
472 if not ignore(np):
472 if not ignore(np):
473 wadd(p)
473 wadd(p)
474 if directories:
474 if directories:
475 add((np, 'd', st))
475 add((np, 'd', st))
476 if np in dc and match(np):
476 if np in dc and match(np):
477 add((np, 'm', st))
477 add((np, 'm', st))
478 elif imatch(np):
478 elif imatch(np):
479 if supported(np, st.st_mode):
479 if supported(np, st.st_mode):
480 add((np, 'f', st))
480 add((np, 'f', st))
481 elif np in dc:
481 elif np in dc:
482 add((np, 'm', st))
482 add((np, 'm', st))
483 found.sort()
483 found.sort()
484 return found
484 return found
485
485
486 # step one, find all files that match our criteria
486 # step one, find all files that match our criteria
487 files.sort()
487 files.sort()
488 for ff in files:
488 for ff in files:
489 nf = normpath(ff)
489 nf = normpath(ff)
490 f = _join(ff)
490 f = _join(ff)
491 try:
491 try:
492 st = lstat(f)
492 st = lstat(f)
493 except OSError, inst:
493 except OSError, inst:
494 found = False
494 found = False
495 for fn in dc:
495 for fn in dc:
496 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
496 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
497 found = True
497 found = True
498 break
498 break
499 if not found:
499 if not found:
500 if inst.errno != errno.ENOENT or not badmatch:
500 if inst.errno != errno.ENOENT or not badmatch:
501 self._ui.warn('%s: %s\n' %
501 self._ui.warn('%s: %s\n' %
502 (self.pathto(ff), inst.strerror))
502 (self.pathto(ff), inst.strerror))
503 elif badmatch and badmatch(ff) and imatch(nf):
503 elif badmatch and badmatch(ff) and imatch(nf):
504 yield 'b', ff, None
504 yield 'b', ff, None
505 continue
505 continue
506 if s_isdir(st.st_mode):
506 if s_isdir(st.st_mode):
507 if not dirignore(nf):
507 if not dirignore(nf):
508 for f, src, st in findfiles(f):
508 for f, src, st in findfiles(f):
509 yield src, f, st
509 yield src, f, st
510 else:
510 else:
511 if nf in known:
511 if nf in known:
512 continue
512 continue
513 known[nf] = 1
513 known[nf] = 1
514 if match(nf):
514 if match(nf):
515 if supported(ff, st.st_mode, verbose=True):
515 if supported(ff, st.st_mode, verbose=True):
516 yield 'f', nf, st
516 yield 'f', nf, st
517 elif ff in dc:
517 elif ff in dc:
518 yield 'm', nf, st
518 yield 'm', nf, st
519
519
520 # step two run through anything left in the dc hash and yield
520 # step two run through anything left in the dc hash and yield
521 # if we haven't already seen it
521 # if we haven't already seen it
522 ks = dc.keys()
522 ks = dc.keys()
523 ks.sort()
523 ks.sort()
524 for k in ks:
524 for k in ks:
525 if k in known:
525 if k in known:
526 continue
526 continue
527 known[k] = 1
527 known[k] = 1
528 if imatch(k):
528 if imatch(k):
529 yield 'm', k, None
529 yield 'm', k, None
530
530
531 def status(self, files, match, list_ignored, list_clean, list_unknown=True):
531 def status(self, files, match, list_ignored, list_clean, list_unknown=True):
532 lookup, modified, added, unknown, ignored = [], [], [], [], []
532 lookup, modified, added, unknown, ignored = [], [], [], [], []
533 removed, deleted, clean = [], [], []
533 removed, deleted, clean = [], [], []
534
534
535 files = files or []
535 files = files or []
536 _join = self._join
536 _join = self._join
537 lstat = os.lstat
537 lstat = os.lstat
538 cmap = self._copymap
538 cmap = self._copymap
539 dmap = self._map
539 dmap = self._map
540 ladd = lookup.append
540 ladd = lookup.append
541 madd = modified.append
541 madd = modified.append
542 aadd = added.append
542 aadd = added.append
543 uadd = unknown.append
543 uadd = unknown.append
544 iadd = ignored.append
544 iadd = ignored.append
545 radd = removed.append
545 radd = removed.append
546 dadd = deleted.append
546 dadd = deleted.append
547 cadd = clean.append
547 cadd = clean.append
548
548
549 for src, fn, st in self.statwalk(files, match, unknown=list_unknown,
549 for src, fn, st in self.statwalk(files, match, unknown=list_unknown,
550 ignored=list_ignored):
550 ignored=list_ignored):
551 if fn in dmap:
551 if fn in dmap:
552 type_, mode, size, time, foo = dmap[fn]
552 type_, mode, size, time, foo = dmap[fn]
553 else:
553 else:
554 if (list_ignored or fn in files) and self._dirignore(fn):
554 if (list_ignored or fn in files) and self._dirignore(fn):
555 if list_ignored:
555 if list_ignored:
556 iadd(fn)
556 iadd(fn)
557 elif list_unknown:
557 elif list_unknown:
558 uadd(fn)
558 uadd(fn)
559 continue
559 continue
560 if src == 'm':
560 if src == 'm':
561 nonexistent = True
561 nonexistent = True
562 if not st:
562 if not st:
563 try:
563 try:
564 st = lstat(_join(fn))
564 st = lstat(_join(fn))
565 except OSError, inst:
565 except OSError, inst:
566 if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
566 if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
567 raise
567 raise
568 st = None
568 st = None
569 # We need to re-check that it is a valid file
569 # We need to re-check that it is a valid file
570 if st and self._supported(fn, st.st_mode):
570 if st and self._supported(fn, st.st_mode):
571 nonexistent = False
571 nonexistent = False
572 # XXX: what to do with files no longer present in the fs
572 # XXX: what to do with files no longer present in the fs
573 # that are not removed in the dirstate?
573 # that are not removed in the dirstate?
574 if nonexistent and type_ in "nma":
574 if nonexistent and type_ in "nma":
575 dadd(fn)
575 dadd(fn)
576 continue
576 continue
577 # check the common case first
577 # check the common case first
578 if type_ == 'n':
578 if type_ == 'n':
579 if not st:
579 if not st:
580 st = lstat(_join(fn))
580 st = lstat(_join(fn))
581 if (size >= 0 and (size != st.st_size
581 if (size >= 0 and (size != st.st_size
582 or (mode ^ st.st_mode) & 0100)
582 or (mode ^ st.st_mode) & 0100)
583 or size == -2
583 or size == -2
584 or fn in self._copymap):
584 or fn in self._copymap):
585 madd(fn)
585 madd(fn)
586 elif time != int(st.st_mtime):
586 elif time != int(st.st_mtime):
587 ladd(fn)
587 ladd(fn)
588 elif list_clean:
588 elif list_clean:
589 cadd(fn)
589 cadd(fn)
590 elif type_ == 'm':
590 elif type_ == 'm':
591 madd(fn)
591 madd(fn)
592 elif type_ == 'a':
592 elif type_ == 'a':
593 aadd(fn)
593 aadd(fn)
594 elif type_ == 'r':
594 elif type_ == 'r':
595 radd(fn)
595 radd(fn)
596
596
597 return (lookup, modified, added, removed, deleted, unknown, ignored,
597 return (lookup, modified, added, removed, deleted, unknown, ignored,
598 clean)
598 clean)
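
The status() method above buckets each tracked file by the state characters documented in __getitem__ ('n', 'm', 'r', 'a', '?') combined with the stat fields stored in the dirstate map. The standalone sketch below is not Mercurial's API: the function name and simplified entry layout are assumptions for illustration only, and it ignores mode bits, copies and ignore handling.

# toy_status: hypothetical helper illustrating how (state, mode, size, mtime)
# entries map to status buckets; not part of Mercurial.
def toy_status(dmap, on_disk):
    # dmap: {filename: (state, mode, size, mtime)}, state in 'nmra'
    # on_disk: {filename: (size, mtime)} for files currently on disk
    modified, added, removed, deleted, clean, lookup = [], [], [], [], [], []
    for fn, (state, mode, size, mtime) in sorted(dmap.items()):
        st = on_disk.get(fn)
        if st is None and state in "nma":
            deleted.append(fn)          # tracked or added, but gone from disk
        elif state == 'n':
            if size >= 0 and size != st[0]:
                modified.append(fn)     # recorded size disagrees with disk
            elif mtime != st[1]:
                lookup.append(fn)       # same size, new mtime: recheck contents
            else:
                clean.append(fn)
        elif state == 'm':
            modified.append(fn)         # needs merging
        elif state == 'a':
            added.append(fn)
        elif state == 'r':
            removed.append(fn)
    return modified, added, removed, deleted, clean, lookup

# example: a.txt clean, b.txt edited (size changed), c.txt freshly added
dmap = {'a.txt': ('n', 0, 3, 100),
        'b.txt': ('n', 0, 3, 100),
        'c.txt': ('a', 0, -1, -1)}
on_disk = {'a.txt': (3, 100), 'b.txt': (5, 200), 'c.txt': (1, 50)}
print(toy_status(dmap, on_disk))
# (['b.txt'], ['c.txt'], [], [], ['a.txt'], [])
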
@@ -1,417 +1,416 b''
1 # dispatch.py - command dispatching for mercurial
1 # dispatch.py - command dispatching for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
9 from i18n import _
8 from i18n import _
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex, time
9 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex, time
11 import util, commands, hg, lock, fancyopts, revlog, version, extensions, hook
10 import util, commands, hg, lock, fancyopts, revlog, version, extensions, hook
12 import cmdutil
11 import cmdutil
13 import ui as _ui
12 import ui as _ui
14
13
15 class ParseError(Exception):
14 class ParseError(Exception):
16 """Exception raised on errors in parsing the command line."""
15 """Exception raised on errors in parsing the command line."""
17
16
18 def run():
17 def run():
19 "run the command in sys.argv"
18 "run the command in sys.argv"
20 sys.exit(dispatch(sys.argv[1:]))
19 sys.exit(dispatch(sys.argv[1:]))
21
20
22 def dispatch(args):
21 def dispatch(args):
23 "run the command specified in args"
22 "run the command specified in args"
24 try:
23 try:
25 u = _ui.ui(traceback='--traceback' in args)
24 u = _ui.ui(traceback='--traceback' in args)
26 except util.Abort, inst:
25 except util.Abort, inst:
27 sys.stderr.write(_("abort: %s\n") % inst)
26 sys.stderr.write(_("abort: %s\n") % inst)
28 return -1
27 return -1
29 return _runcatch(u, args)
28 return _runcatch(u, args)
30
29
31 def _runcatch(ui, args):
30 def _runcatch(ui, args):
32 def catchterm(*args):
31 def catchterm(*args):
33 raise util.SignalInterrupt
32 raise util.SignalInterrupt
34
33
35 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
34 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
36 num = getattr(signal, name, None)
35 num = getattr(signal, name, None)
37 if num: signal.signal(num, catchterm)
36 if num: signal.signal(num, catchterm)
38
37
39 try:
38 try:
40 try:
39 try:
41 # enter the debugger before command execution
40 # enter the debugger before command execution
42 if '--debugger' in args:
41 if '--debugger' in args:
43 pdb.set_trace()
42 pdb.set_trace()
44 try:
43 try:
45 return _dispatch(ui, args)
44 return _dispatch(ui, args)
46 finally:
45 finally:
47 ui.flush()
46 ui.flush()
48 except:
47 except:
49 # enter the debugger when we hit an exception
48 # enter the debugger when we hit an exception
50 if '--debugger' in args:
49 if '--debugger' in args:
51 pdb.post_mortem(sys.exc_info()[2])
50 pdb.post_mortem(sys.exc_info()[2])
52 ui.print_exc()
51 ui.print_exc()
53 raise
52 raise
54
53
55 except ParseError, inst:
54 except ParseError, inst:
56 if inst.args[0]:
55 if inst.args[0]:
57 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
56 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
58 commands.help_(ui, inst.args[0])
57 commands.help_(ui, inst.args[0])
59 else:
58 else:
60 ui.warn(_("hg: %s\n") % inst.args[1])
59 ui.warn(_("hg: %s\n") % inst.args[1])
61 commands.help_(ui, 'shortlist')
60 commands.help_(ui, 'shortlist')
62 except cmdutil.AmbiguousCommand, inst:
61 except cmdutil.AmbiguousCommand, inst:
63 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
62 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
64 (inst.args[0], " ".join(inst.args[1])))
63 (inst.args[0], " ".join(inst.args[1])))
65 except cmdutil.UnknownCommand, inst:
64 except cmdutil.UnknownCommand, inst:
66 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
65 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
67 commands.help_(ui, 'shortlist')
66 commands.help_(ui, 'shortlist')
68 except hg.RepoError, inst:
67 except hg.RepoError, inst:
69 ui.warn(_("abort: %s!\n") % inst)
68 ui.warn(_("abort: %s!\n") % inst)
70 except lock.LockHeld, inst:
69 except lock.LockHeld, inst:
71 if inst.errno == errno.ETIMEDOUT:
70 if inst.errno == errno.ETIMEDOUT:
72 reason = _('timed out waiting for lock held by %s') % inst.locker
71 reason = _('timed out waiting for lock held by %s') % inst.locker
73 else:
72 else:
74 reason = _('lock held by %s') % inst.locker
73 reason = _('lock held by %s') % inst.locker
75 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
74 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
76 except lock.LockUnavailable, inst:
75 except lock.LockUnavailable, inst:
77 ui.warn(_("abort: could not lock %s: %s\n") %
76 ui.warn(_("abort: could not lock %s: %s\n") %
78 (inst.desc or inst.filename, inst.strerror))
77 (inst.desc or inst.filename, inst.strerror))
79 except revlog.RevlogError, inst:
78 except revlog.RevlogError, inst:
80 ui.warn(_("abort: %s!\n") % inst)
79 ui.warn(_("abort: %s!\n") % inst)
81 except util.SignalInterrupt:
80 except util.SignalInterrupt:
82 ui.warn(_("killed!\n"))
81 ui.warn(_("killed!\n"))
83 except KeyboardInterrupt:
82 except KeyboardInterrupt:
84 try:
83 try:
85 ui.warn(_("interrupted!\n"))
84 ui.warn(_("interrupted!\n"))
86 except IOError, inst:
85 except IOError, inst:
87 if inst.errno == errno.EPIPE:
86 if inst.errno == errno.EPIPE:
88 if ui.debugflag:
87 if ui.debugflag:
89 ui.warn(_("\nbroken pipe\n"))
88 ui.warn(_("\nbroken pipe\n"))
90 else:
89 else:
91 raise
90 raise
92 except socket.error, inst:
91 except socket.error, inst:
93 ui.warn(_("abort: %s\n") % inst[1])
92 ui.warn(_("abort: %s\n") % inst[1])
94 except IOError, inst:
93 except IOError, inst:
95 if hasattr(inst, "code"):
94 if hasattr(inst, "code"):
96 ui.warn(_("abort: %s\n") % inst)
95 ui.warn(_("abort: %s\n") % inst)
97 elif hasattr(inst, "reason"):
96 elif hasattr(inst, "reason"):
98 try: # usually it is in the form (errno, strerror)
97 try: # usually it is in the form (errno, strerror)
99 reason = inst.reason.args[1]
98 reason = inst.reason.args[1]
100 except: # it might be anything, for example a string
99 except: # it might be anything, for example a string
101 reason = inst.reason
100 reason = inst.reason
102 ui.warn(_("abort: error: %s\n") % reason)
101 ui.warn(_("abort: error: %s\n") % reason)
103 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
102 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
104 if ui.debugflag:
103 if ui.debugflag:
105 ui.warn(_("broken pipe\n"))
104 ui.warn(_("broken pipe\n"))
106 elif getattr(inst, "strerror", None):
105 elif getattr(inst, "strerror", None):
107 if getattr(inst, "filename", None):
106 if getattr(inst, "filename", None):
108 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
107 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
109 else:
108 else:
110 ui.warn(_("abort: %s\n") % inst.strerror)
109 ui.warn(_("abort: %s\n") % inst.strerror)
111 else:
110 else:
112 raise
111 raise
113 except OSError, inst:
112 except OSError, inst:
114 if getattr(inst, "filename", None):
113 if getattr(inst, "filename", None):
115 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
114 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
116 else:
115 else:
117 ui.warn(_("abort: %s\n") % inst.strerror)
116 ui.warn(_("abort: %s\n") % inst.strerror)
118 except util.UnexpectedOutput, inst:
117 except util.UnexpectedOutput, inst:
119 ui.warn(_("abort: %s") % inst[0])
118 ui.warn(_("abort: %s") % inst[0])
120 if not isinstance(inst[1], basestring):
119 if not isinstance(inst[1], basestring):
121 ui.warn(" %r\n" % (inst[1],))
120 ui.warn(" %r\n" % (inst[1],))
122 elif not inst[1]:
121 elif not inst[1]:
123 ui.warn(_(" empty string\n"))
122 ui.warn(_(" empty string\n"))
124 else:
123 else:
125 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
124 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
126 except ImportError, inst:
125 except ImportError, inst:
127 m = str(inst).split()[-1]
126 m = str(inst).split()[-1]
128 ui.warn(_("abort: could not import module %s!\n") % m)
127 ui.warn(_("abort: could not import module %s!\n") % m)
129 if m in "mpatch bdiff".split():
128 if m in "mpatch bdiff".split():
130 ui.warn(_("(did you forget to compile extensions?)\n"))
129 ui.warn(_("(did you forget to compile extensions?)\n"))
131 elif m in "zlib".split():
130 elif m in "zlib".split():
132 ui.warn(_("(is your Python install correct?)\n"))
131 ui.warn(_("(is your Python install correct?)\n"))
133
132
134 except util.Abort, inst:
133 except util.Abort, inst:
135 ui.warn(_("abort: %s\n") % inst)
134 ui.warn(_("abort: %s\n") % inst)
136 except MemoryError:
135 except MemoryError:
137 ui.warn(_("abort: out of memory\n"))
136 ui.warn(_("abort: out of memory\n"))
138 except SystemExit, inst:
137 except SystemExit, inst:
139 # Commands shouldn't sys.exit directly, but give a return code.
138 # Commands shouldn't sys.exit directly, but give a return code.
140 # Just in case, catch this and pass the exit code to the caller.
139 # Just in case, catch this and pass the exit code to the caller.
141 return inst.code
140 return inst.code
142 except:
141 except:
143 ui.warn(_("** unknown exception encountered, details follow\n"))
142 ui.warn(_("** unknown exception encountered, details follow\n"))
144 ui.warn(_("** report bug details to "
143 ui.warn(_("** report bug details to "
145 "http://www.selenic.com/mercurial/bts\n"))
144 "http://www.selenic.com/mercurial/bts\n"))
146 ui.warn(_("** or mercurial@selenic.com\n"))
145 ui.warn(_("** or mercurial@selenic.com\n"))
147 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
146 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
148 % version.get_version())
147 % version.get_version())
149 raise
148 raise
150
149
151 return -1
150 return -1
152
151
153 def _findrepo(p):
152 def _findrepo(p):
154 while not os.path.isdir(os.path.join(p, ".hg")):
153 while not os.path.isdir(os.path.join(p, ".hg")):
155 oldp, p = p, os.path.dirname(p)
154 oldp, p = p, os.path.dirname(p)
156 if p == oldp:
155 if p == oldp:
157 return None
156 return None
158
157
159 return p
158 return p
160
159
161 def _parse(ui, args):
160 def _parse(ui, args):
162 options = {}
161 options = {}
163 cmdoptions = {}
162 cmdoptions = {}
164
163
165 try:
164 try:
166 args = fancyopts.fancyopts(args, commands.globalopts, options)
165 args = fancyopts.fancyopts(args, commands.globalopts, options)
167 except fancyopts.getopt.GetoptError, inst:
166 except fancyopts.getopt.GetoptError, inst:
168 raise ParseError(None, inst)
167 raise ParseError(None, inst)
169
168
170 if args:
169 if args:
171 cmd, args = args[0], args[1:]
170 cmd, args = args[0], args[1:]
172 aliases, i = cmdutil.findcmd(ui, cmd, commands.table)
171 aliases, i = cmdutil.findcmd(ui, cmd, commands.table)
173 cmd = aliases[0]
172 cmd = aliases[0]
174 defaults = ui.config("defaults", cmd)
173 defaults = ui.config("defaults", cmd)
175 if defaults:
174 if defaults:
176 args = shlex.split(defaults) + args
175 args = shlex.split(defaults) + args
177 c = list(i[1])
176 c = list(i[1])
178 else:
177 else:
179 cmd = None
178 cmd = None
180 c = []
179 c = []
181
180
182 # combine global options into local
181 # combine global options into local
183 for o in commands.globalopts:
182 for o in commands.globalopts:
184 c.append((o[0], o[1], options[o[1]], o[3]))
183 c.append((o[0], o[1], options[o[1]], o[3]))
185
184
186 try:
185 try:
187 args = fancyopts.fancyopts(args, c, cmdoptions)
186 args = fancyopts.fancyopts(args, c, cmdoptions)
188 except fancyopts.getopt.GetoptError, inst:
187 except fancyopts.getopt.GetoptError, inst:
189 raise ParseError(cmd, inst)
188 raise ParseError(cmd, inst)
190
189
191 # separate global options back out
190 # separate global options back out
192 for o in commands.globalopts:
191 for o in commands.globalopts:
193 n = o[1]
192 n = o[1]
194 options[n] = cmdoptions[n]
193 options[n] = cmdoptions[n]
195 del cmdoptions[n]
194 del cmdoptions[n]
196
195
197 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
196 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
198
197
199 def _parseconfig(config):
198 def _parseconfig(config):
200 """parse the --config options from the command line"""
199 """parse the --config options from the command line"""
201 parsed = []
200 parsed = []
202 for cfg in config:
201 for cfg in config:
203 try:
202 try:
204 name, value = cfg.split('=', 1)
203 name, value = cfg.split('=', 1)
205 section, name = name.split('.', 1)
204 section, name = name.split('.', 1)
206 if not section or not name:
205 if not section or not name:
207 raise IndexError
206 raise IndexError
208 parsed.append((section, name, value))
207 parsed.append((section, name, value))
209 except (IndexError, ValueError):
208 except (IndexError, ValueError):
210 raise util.Abort(_('malformed --config option: %s') % cfg)
209 raise util.Abort(_('malformed --config option: %s') % cfg)
211 return parsed
210 return parsed
212
211
213 def _earlygetopt(aliases, args):
212 def _earlygetopt(aliases, args):
214 """Return list of values for an option (or aliases).
213 """Return list of values for an option (or aliases).
215
214
216 The values are listed in the order they appear in args.
215 The values are listed in the order they appear in args.
217 The options and values are removed from args.
216 The options and values are removed from args.
218 """
217 """
219 try:
218 try:
220 argcount = args.index("--")
219 argcount = args.index("--")
221 except ValueError:
220 except ValueError:
222 argcount = len(args)
221 argcount = len(args)
223 shortopts = [opt for opt in aliases if len(opt) == 2]
222 shortopts = [opt for opt in aliases if len(opt) == 2]
224 values = []
223 values = []
225 pos = 0
224 pos = 0
226 while pos < argcount:
225 while pos < argcount:
227 if args[pos] in aliases:
226 if args[pos] in aliases:
228 if pos + 1 >= argcount:
227 if pos + 1 >= argcount:
229 # ignore and let getopt report an error if there is no value
228 # ignore and let getopt report an error if there is no value
230 break
229 break
231 del args[pos]
230 del args[pos]
232 values.append(args.pop(pos))
231 values.append(args.pop(pos))
233 argcount -= 2
232 argcount -= 2
234 elif args[pos][:2] in shortopts:
233 elif args[pos][:2] in shortopts:
235 # short option can have no following space, e.g. hg log -Rfoo
234 # short option can have no following space, e.g. hg log -Rfoo
236 values.append(args.pop(pos)[2:])
235 values.append(args.pop(pos)[2:])
237 argcount -= 1
236 argcount -= 1
238 else:
237 else:
239 pos += 1
238 pos += 1
240 return values
239 return values
241
240
242 _loaded = {}
241 _loaded = {}
243 def _dispatch(ui, args):
242 def _dispatch(ui, args):
244 # read --config before doing anything else
243 # read --config before doing anything else
245 # (e.g. to change trust settings for reading .hg/hgrc)
244 # (e.g. to change trust settings for reading .hg/hgrc)
246 config = _earlygetopt(['--config'], args)
245 config = _earlygetopt(['--config'], args)
247 if config:
246 if config:
248 ui.updateopts(config=_parseconfig(config))
247 ui.updateopts(config=_parseconfig(config))
249
248
250 # check for cwd
249 # check for cwd
251 cwd = _earlygetopt(['--cwd'], args)
250 cwd = _earlygetopt(['--cwd'], args)
252 if cwd:
251 if cwd:
253 os.chdir(cwd[-1])
252 os.chdir(cwd[-1])
254
253
255 # read the local repository .hgrc into a local ui object
254 # read the local repository .hgrc into a local ui object
256 path = _findrepo(os.getcwd()) or ""
255 path = _findrepo(os.getcwd()) or ""
257 if not path:
256 if not path:
258 lui = ui
257 lui = ui
259 if path:
258 if path:
260 try:
259 try:
261 lui = _ui.ui(parentui=ui)
260 lui = _ui.ui(parentui=ui)
262 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
261 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
263 except IOError:
262 except IOError:
264 pass
263 pass
265
264
266 # now we can expand paths, even ones in .hg/hgrc
265 # now we can expand paths, even ones in .hg/hgrc
267 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
266 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
268 if rpath:
267 if rpath:
269 path = lui.expandpath(rpath[-1])
268 path = lui.expandpath(rpath[-1])
270 lui = _ui.ui(parentui=ui)
269 lui = _ui.ui(parentui=ui)
271 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
270 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
272
271
273 extensions.loadall(lui)
272 extensions.loadall(lui)
274 for name, module in extensions.extensions():
273 for name, module in extensions.extensions():
275 if name in _loaded:
274 if name in _loaded:
276 continue
275 continue
277
276
278 # setup extensions
277 # setup extensions
279 # TODO this should be generalized to scheme, where extensions can
278 # TODO this should be generalized to scheme, where extensions can
280 # redepend on other extensions. then we should toposort them, and
279 # redepend on other extensions. then we should toposort them, and
281 # do initialization in correct order
280 # do initialization in correct order
282 extsetup = getattr(module, 'extsetup', None)
281 extsetup = getattr(module, 'extsetup', None)
283 if extsetup:
282 if extsetup:
284 extsetup()
283 extsetup()
285
284
286 cmdtable = getattr(module, 'cmdtable', {})
285 cmdtable = getattr(module, 'cmdtable', {})
287 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
286 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
288 if overrides:
287 if overrides:
289 ui.warn(_("extension '%s' overrides commands: %s\n")
288 ui.warn(_("extension '%s' overrides commands: %s\n")
290 % (name, " ".join(overrides)))
289 % (name, " ".join(overrides)))
291 commands.table.update(cmdtable)
290 commands.table.update(cmdtable)
292 _loaded[name] = 1
291 _loaded[name] = 1
293 # check for fallback encoding
292 # check for fallback encoding
294 fallback = lui.config('ui', 'fallbackencoding')
293 fallback = lui.config('ui', 'fallbackencoding')
295 if fallback:
294 if fallback:
296 util._fallbackencoding = fallback
295 util._fallbackencoding = fallback
297
296
298 fullargs = args
297 fullargs = args
299 cmd, func, args, options, cmdoptions = _parse(lui, args)
298 cmd, func, args, options, cmdoptions = _parse(lui, args)
300
299
301 if options["config"]:
300 if options["config"]:
302 raise util.Abort(_("Option --config may not be abbreviated!"))
301 raise util.Abort(_("Option --config may not be abbreviated!"))
303 if options["cwd"]:
302 if options["cwd"]:
304 raise util.Abort(_("Option --cwd may not be abbreviated!"))
303 raise util.Abort(_("Option --cwd may not be abbreviated!"))
305 if options["repository"]:
304 if options["repository"]:
306 raise util.Abort(_(
305 raise util.Abort(_(
307 "Option -R has to be separated from other options (i.e. not -qR) "
306 "Option -R has to be separated from other options (i.e. not -qR) "
308 "and --repository may only be abbreviated as --repo!"))
307 "and --repository may only be abbreviated as --repo!"))
309
308
310 if options["encoding"]:
309 if options["encoding"]:
311 util._encoding = options["encoding"]
310 util._encoding = options["encoding"]
312 if options["encodingmode"]:
311 if options["encodingmode"]:
313 util._encodingmode = options["encodingmode"]
312 util._encodingmode = options["encodingmode"]
314 if options["time"]:
313 if options["time"]:
315 def get_times():
314 def get_times():
316 t = os.times()
315 t = os.times()
317 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
316 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
318 t = (t[0], t[1], t[2], t[3], time.clock())
317 t = (t[0], t[1], t[2], t[3], time.clock())
319 return t
318 return t
320 s = get_times()
319 s = get_times()
321 def print_time():
320 def print_time():
322 t = get_times()
321 t = get_times()
323 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
322 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
324 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
323 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
325 atexit.register(print_time)
324 atexit.register(print_time)
326
325
327 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
326 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
328 not options["noninteractive"], options["traceback"])
327 not options["noninteractive"], options["traceback"])
329
328
330 if options['help']:
329 if options['help']:
331 return commands.help_(ui, cmd, options['version'])
330 return commands.help_(ui, cmd, options['version'])
332 elif options['version']:
331 elif options['version']:
333 return commands.version_(ui)
332 return commands.version_(ui)
334 elif not cmd:
333 elif not cmd:
335 return commands.help_(ui, 'shortlist')
334 return commands.help_(ui, 'shortlist')
336
335
337 repo = None
336 repo = None
338 if cmd not in commands.norepo.split():
337 if cmd not in commands.norepo.split():
339 try:
338 try:
340 repo = hg.repository(ui, path=path)
339 repo = hg.repository(ui, path=path)
341 ui = repo.ui
340 ui = repo.ui
342 if not repo.local():
341 if not repo.local():
343 raise util.Abort(_("repository '%s' is not local") % path)
342 raise util.Abort(_("repository '%s' is not local") % path)
344 ui.setconfig("bundle", "mainreporoot", repo.root)
343 ui.setconfig("bundle", "mainreporoot", repo.root)
345 except hg.RepoError:
344 except hg.RepoError:
346 if cmd not in commands.optionalrepo.split():
345 if cmd not in commands.optionalrepo.split():
347 if args and not path: # try to infer -R from command args
346 if args and not path: # try to infer -R from command args
348 repos = map(_findrepo, args)
347 repos = map(_findrepo, args)
349 guess = repos[0]
348 guess = repos[0]
350 if guess and repos.count(guess) == len(repos):
349 if guess and repos.count(guess) == len(repos):
351 return _dispatch(ui, ['--repository', guess] + fullargs)
350 return _dispatch(ui, ['--repository', guess] + fullargs)
352 if not path:
351 if not path:
353 raise hg.RepoError(_("There is no Mercurial repository here"
352 raise hg.RepoError(_("There is no Mercurial repository here"
354 " (.hg not found)"))
353 " (.hg not found)"))
355 raise
354 raise
356 d = lambda: func(ui, repo, *args, **cmdoptions)
355 d = lambda: func(ui, repo, *args, **cmdoptions)
357 else:
356 else:
358 d = lambda: func(ui, *args, **cmdoptions)
357 d = lambda: func(ui, *args, **cmdoptions)
359
358
360 # run pre-hook, and abort if it fails
359 # run pre-hook, and abort if it fails
361 ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
360 ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
362 if ret:
361 if ret:
363 return ret
362 return ret
364 ret = _runcommand(ui, options, cmd, d)
363 ret = _runcommand(ui, options, cmd, d)
365 # run post-hook, passing command result
364 # run post-hook, passing command result
366 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
365 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
367 result = ret)
366 result = ret)
368 return ret
367 return ret
369
368
370 def _runcommand(ui, options, cmd, cmdfunc):
369 def _runcommand(ui, options, cmd, cmdfunc):
371 def checkargs():
370 def checkargs():
372 try:
371 try:
373 return cmdfunc()
372 return cmdfunc()
374 except TypeError, inst:
373 except TypeError, inst:
375 # was this an argument error?
374 # was this an argument error?
376 tb = traceback.extract_tb(sys.exc_info()[2])
375 tb = traceback.extract_tb(sys.exc_info()[2])
377 if len(tb) != 2: # no
376 if len(tb) != 2: # no
378 raise
377 raise
379 raise ParseError(cmd, _("invalid arguments"))
378 raise ParseError(cmd, _("invalid arguments"))
380
379
381 if options['profile']:
380 if options['profile']:
382 import hotshot, hotshot.stats
381 import hotshot, hotshot.stats
383 prof = hotshot.Profile("hg.prof")
382 prof = hotshot.Profile("hg.prof")
384 try:
383 try:
385 try:
384 try:
386 return prof.runcall(checkargs)
385 return prof.runcall(checkargs)
387 except:
386 except:
388 try:
387 try:
389 ui.warn(_('exception raised - generating '
388 ui.warn(_('exception raised - generating '
390 'profile anyway\n'))
389 'profile anyway\n'))
391 except:
390 except:
392 pass
391 pass
393 raise
392 raise
394 finally:
393 finally:
395 prof.close()
394 prof.close()
396 stats = hotshot.stats.load("hg.prof")
395 stats = hotshot.stats.load("hg.prof")
397 stats.strip_dirs()
396 stats.strip_dirs()
398 stats.sort_stats('time', 'calls')
397 stats.sort_stats('time', 'calls')
399 stats.print_stats(40)
398 stats.print_stats(40)
400 elif options['lsprof']:
399 elif options['lsprof']:
401 try:
400 try:
402 from mercurial import lsprof
401 from mercurial import lsprof
403 except ImportError:
402 except ImportError:
404 raise util.Abort(_(
403 raise util.Abort(_(
405 'lsprof not available - install from '
404 'lsprof not available - install from '
406 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
405 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
407 p = lsprof.Profiler()
406 p = lsprof.Profiler()
408 p.enable(subcalls=True)
407 p.enable(subcalls=True)
409 try:
408 try:
410 return checkargs()
409 return checkargs()
411 finally:
410 finally:
412 p.disable()
411 p.disable()
413 stats = lsprof.Stats(p.getstats())
412 stats = lsprof.Stats(p.getstats())
414 stats.sort()
413 stats.sort()
415 stats.pprint(top=10, file=sys.stderr, climit=5)
414 stats.pprint(top=10, file=sys.stderr, climit=5)
416 else:
415 else:
417 return checkargs()
416 return checkargs()
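
As a worked illustration of the "--config section.name=value" handling performed by _parseconfig above, the standalone function below splits each override into a (section, name, value) triple and rejects malformed entries. This is a minimal sketch under stated assumptions: parse_config_overrides is an illustrative name, not part of hg, and it exits directly instead of raising util.Abort.

# parse_config_overrides: hypothetical stand-in for _parseconfig, kept
# self-contained so it can run outside Mercurial.
def parse_config_overrides(values):
    parsed = []
    for cfg in values:
        try:
            name, value = cfg.split('=', 1)        # split off the value
            section, name = name.split('.', 1)     # split section from key
            if not section or not name:
                raise ValueError(cfg)
            parsed.append((section, name, value))
        except ValueError:
            raise SystemExit("malformed --config option: %s" % cfg)
    return parsed

print(parse_config_overrides(['ui.username=alice', 'extensions.mq=']))
# [('ui', 'username', 'alice'), ('extensions', 'mq', '')]
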
@@ -1,83 +1,84 b''
1 # filelog.py - file history class for mercurial
1 # filelog.py - file history class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from node import bin, nullid
9 from revlog import revlog
9 import os
10 import os
10
11
11 class filelog(revlog):
12 class filelog(revlog):
12 def __init__(self, opener, path):
13 def __init__(self, opener, path):
13 revlog.__init__(self, opener,
14 revlog.__init__(self, opener,
14 "/".join(("data", self.encodedir(path + ".i"))))
15 "/".join(("data", self.encodedir(path + ".i"))))
15
16
16 # This avoids a collision between a file named foo and a dir named
17 # This avoids a collision between a file named foo and a dir named
17 # foo.i or foo.d
18 # foo.i or foo.d
18 def encodedir(self, path):
19 def encodedir(self, path):
19 return (path
20 return (path
20 .replace(".hg/", ".hg.hg/")
21 .replace(".hg/", ".hg.hg/")
21 .replace(".i/", ".i.hg/")
22 .replace(".i/", ".i.hg/")
22 .replace(".d/", ".d.hg/"))
23 .replace(".d/", ".d.hg/"))
23
24
24 def decodedir(self, path):
25 def decodedir(self, path):
25 return (path
26 return (path
26 .replace(".d.hg/", ".d/")
27 .replace(".d.hg/", ".d/")
27 .replace(".i.hg/", ".i/")
28 .replace(".i.hg/", ".i/")
28 .replace(".hg.hg/", ".hg/"))
29 .replace(".hg.hg/", ".hg/"))
29
30
30 def read(self, node):
31 def read(self, node):
31 t = self.revision(node)
32 t = self.revision(node)
32 if not t.startswith('\1\n'):
33 if not t.startswith('\1\n'):
33 return t
34 return t
34 s = t.index('\1\n', 2)
35 s = t.index('\1\n', 2)
35 return t[s+2:]
36 return t[s+2:]
36
37
37 def _readmeta(self, node):
38 def _readmeta(self, node):
38 t = self.revision(node)
39 t = self.revision(node)
39 if not t.startswith('\1\n'):
40 if not t.startswith('\1\n'):
40 return {}
41 return {}
41 s = t.index('\1\n', 2)
42 s = t.index('\1\n', 2)
42 mt = t[2:s]
43 mt = t[2:s]
43 m = {}
44 m = {}
44 for l in mt.splitlines():
45 for l in mt.splitlines():
45 k, v = l.split(": ", 1)
46 k, v = l.split(": ", 1)
46 m[k] = v
47 m[k] = v
47 return m
48 return m
48
49
49 def add(self, text, meta, transaction, link, p1=None, p2=None):
50 def add(self, text, meta, transaction, link, p1=None, p2=None):
50 if meta or text.startswith('\1\n'):
51 if meta or text.startswith('\1\n'):
51 mt = ""
52 mt = ""
52 if meta:
53 if meta:
53 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
54 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
54 text = "\1\n%s\1\n%s" % ("".join(mt), text)
55 text = "\1\n%s\1\n%s" % ("".join(mt), text)
55 return self.addrevision(text, transaction, link, p1, p2)
56 return self.addrevision(text, transaction, link, p1, p2)
56
57
57 def renamed(self, node):
58 def renamed(self, node):
58 if self.parents(node)[0] != nullid:
59 if self.parents(node)[0] != nullid:
59 return False
60 return False
60 m = self._readmeta(node)
61 m = self._readmeta(node)
61 if m and "copy" in m:
62 if m and "copy" in m:
62 return (m["copy"], bin(m["copyrev"]))
63 return (m["copy"], bin(m["copyrev"]))
63 return False
64 return False
64
65
65 def size(self, rev):
66 def size(self, rev):
66 """return the size of a given revision"""
67 """return the size of a given revision"""
67
68
68 # for revisions with renames, we have to go the slow way
69 # for revisions with renames, we have to go the slow way
69 node = self.node(rev)
70 node = self.node(rev)
70 if self.renamed(node):
71 if self.renamed(node):
71 return len(self.read(node))
72 return len(self.read(node))
72
73
73 return revlog.size(self, rev)
74 return revlog.size(self, rev)
74
75
75 def cmp(self, node, text):
76 def cmp(self, node, text):
76 """compare text with a given file revision"""
77 """compare text with a given file revision"""
77
78
78 # for renames, we have to go the slow way
79 # for renames, we have to go the slow way
79 if self.renamed(node):
80 if self.renamed(node):
80 t2 = self.read(node)
81 t2 = self.read(node)
81 return t2 != text
82 return t2 != text
82
83
83 return revlog.cmp(self, node, text)
84 return revlog.cmp(self, node, text)
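
The read(), _readmeta() and add() methods above share one framing convention for copy metadata: when metadata is present, the stored revision text becomes "\1\n", then "key: value" lines, then "\1\n", then the file data. The helpers below are a self-contained sketch of that encoding; pack_meta and unpack_meta are illustrative names and are not Mercurial functions.

# pack_meta / unpack_meta: hypothetical helpers mirroring the framing used by
# filelog.add() and filelog._readmeta(); for illustration only.
def pack_meta(text, meta):
    if meta or text.startswith('\1\n'):
        header = "".join(["%s: %s\n" % (k, v) for k, v in sorted(meta.items())])
        return "\1\n%s\1\n%s" % (header, text)
    return text

def unpack_meta(stored):
    if not stored.startswith('\1\n'):
        return {}, stored
    end = stored.index('\1\n', 2)                  # find the closing delimiter
    meta = {}
    for line in stored[2:end].splitlines():
        k, v = line.split(": ", 1)
        meta[k] = v
    return meta, stored[end + 2:]

packed = pack_meta("hello\n", {"copy": "old.txt", "copyrev": 40 * "0"})
print(unpack_meta(packed))
# ({'copy': 'old.txt', 'copyrev': '000...0'}, 'hello\n')
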
@@ -1,217 +1,217 b''
1 # filemerge.py - file-level merge handling for Mercurial
1 # filemerge.py - file-level merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import nullrev
9 from i18n import _
9 from i18n import _
10 import util, os, tempfile, context, simplemerge, re, filecmp
10 import util, os, tempfile, context, simplemerge, re, filecmp
11
11
12 def _toolstr(ui, tool, part, default=""):
12 def _toolstr(ui, tool, part, default=""):
13 return ui.config("merge-tools", tool + "." + part, default)
13 return ui.config("merge-tools", tool + "." + part, default)
14
14
15 def _toolbool(ui, tool, part, default=False):
15 def _toolbool(ui, tool, part, default=False):
16 return ui.configbool("merge-tools", tool + "." + part, default)
16 return ui.configbool("merge-tools", tool + "." + part, default)
17
17
18 def _findtool(ui, tool):
18 def _findtool(ui, tool):
19 k = _toolstr(ui, tool, "regkey")
19 k = _toolstr(ui, tool, "regkey")
20 if k:
20 if k:
21 p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
21 p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
22 if p:
22 if p:
23 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
23 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
24 if p:
24 if p:
25 return p
25 return p
26 return util.find_exe(_toolstr(ui, tool, "executable", tool))
26 return util.find_exe(_toolstr(ui, tool, "executable", tool))
27
27
28 def _picktool(repo, ui, path, binary, symlink):
28 def _picktool(repo, ui, path, binary, symlink):
29 def check(tool, pat, symlink, binary):
29 def check(tool, pat, symlink, binary):
30 tmsg = tool
30 tmsg = tool
31 if pat:
31 if pat:
32 tmsg += " specified for " + pat
32 tmsg += " specified for " + pat
33 if pat and not _findtool(ui, tool): # skip search if not matching
33 if pat and not _findtool(ui, tool): # skip search if not matching
34 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
34 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
35 elif symlink and not _toolbool(ui, tool, "symlink"):
35 elif symlink and not _toolbool(ui, tool, "symlink"):
36 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
36 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
37 elif binary and not _toolbool(ui, tool, "binary"):
37 elif binary and not _toolbool(ui, tool, "binary"):
38 ui.warn(_("tool %s can't handle binary\n") % tmsg)
38 ui.warn(_("tool %s can't handle binary\n") % tmsg)
39 elif not util.gui() and _toolbool(ui, tool, "gui"):
39 elif not util.gui() and _toolbool(ui, tool, "gui"):
40 ui.warn(_("tool %s requires a GUI\n") % tmsg)
40 ui.warn(_("tool %s requires a GUI\n") % tmsg)
41 else:
41 else:
42 return True
42 return True
43 return False
43 return False
44
44
45 # HGMERGE takes precedence
45 # HGMERGE takes precedence
46 hgmerge = os.environ.get("HGMERGE")
46 hgmerge = os.environ.get("HGMERGE")
47 if hgmerge:
47 if hgmerge:
48 return (hgmerge, hgmerge)
48 return (hgmerge, hgmerge)
49
49
50 # then patterns
50 # then patterns
51 for pat, tool in ui.configitems("merge-patterns"):
51 for pat, tool in ui.configitems("merge-patterns"):
52 mf = util.matcher(repo.root, "", [pat], [], [])[1]
52 mf = util.matcher(repo.root, "", [pat], [], [])[1]
53 if mf(path) and check(tool, pat, symlink, False):
53 if mf(path) and check(tool, pat, symlink, False):
54 toolpath = _findtool(ui, tool)
54 toolpath = _findtool(ui, tool)
55 return (tool, '"' + toolpath + '"')
55 return (tool, '"' + toolpath + '"')
56
56
57 # then merge tools
57 # then merge tools
58 tools = {}
58 tools = {}
59 for k,v in ui.configitems("merge-tools"):
59 for k,v in ui.configitems("merge-tools"):
60 t = k.split('.')[0]
60 t = k.split('.')[0]
61 if t not in tools:
61 if t not in tools:
62 tools[t] = int(_toolstr(ui, t, "priority", "0"))
62 tools[t] = int(_toolstr(ui, t, "priority", "0"))
63 names = tools.keys()
63 names = tools.keys()
64 tools = [(-p,t) for t,p in tools.items()]
64 tools = [(-p,t) for t,p in tools.items()]
65 tools.sort()
65 tools.sort()
66 uimerge = ui.config("ui", "merge")
66 uimerge = ui.config("ui", "merge")
67 if uimerge:
67 if uimerge:
68 if uimerge not in names:
68 if uimerge not in names:
69 return (uimerge, uimerge)
69 return (uimerge, uimerge)
70 tools.insert(0, (None, uimerge)) # highest priority
70 tools.insert(0, (None, uimerge)) # highest priority
71 tools.append((None, "hgmerge")) # the old default, if found
71 tools.append((None, "hgmerge")) # the old default, if found
72 for p,t in tools:
72 for p,t in tools:
73 toolpath = _findtool(ui, t)
73 toolpath = _findtool(ui, t)
74 if toolpath and check(t, None, symlink, binary):
74 if toolpath and check(t, None, symlink, binary):
75 return (t, '"' + toolpath + '"')
75 return (t, '"' + toolpath + '"')
76 # internal merge as last resort
76 # internal merge as last resort
77 return (not (symlink or binary) and "internal:merge" or None, None)
77 return (not (symlink or binary) and "internal:merge" or None, None)
78
78
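# Editor's note: a minimal, hypothetical sketch of the selection order that
# _picktool() above implements -- HGMERGE wins outright, then the first
# matching [merge-patterns] entry, then the [merge-tools] entries by
# priority, and finally the internal merge for plain text files.  It skips
# the on-disk lookup and capability checks the real code performs.
import fnmatch, os

def pick_tool_sketch(path, binary, symlink, patterns, tools):
    """patterns: list of (glob, tool); tools: dict of tool name -> priority."""
    hgmerge = os.environ.get("HGMERGE")
    if hgmerge:
        return hgmerge                        # 1. environment override
    for pat, tool in patterns:
        if fnmatch.fnmatch(path, pat):
            return tool                       # 2. first matching pattern
    for tool in sorted(tools, key=tools.get, reverse=True):
        return tool                           # 3. highest-priority tool
    if not (symlink or binary):
        return "internal:merge"               # 4. last resort for text files
    return None

# pick_tool_sketch("docs/a.txt", False, False,
#                  [("**.txt", "kdiff3")], {"meld": 7})  ->  "kdiff3"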
79 def _eoltype(data):
79 def _eoltype(data):
80 "Guess the EOL type of a file"
80 "Guess the EOL type of a file"
81 if '\0' in data: # binary
81 if '\0' in data: # binary
82 return None
82 return None
83 if '\r\n' in data: # Windows
83 if '\r\n' in data: # Windows
84 return '\r\n'
84 return '\r\n'
85 if '\r' in data: # Old Mac
85 if '\r' in data: # Old Mac
86 return '\r'
86 return '\r'
87 if '\n' in data: # UNIX
87 if '\n' in data: # UNIX
88 return '\n'
88 return '\n'
89 return None # unknown
89 return None # unknown
90
90
91 def _matcheol(file, origfile):
91 def _matcheol(file, origfile):
92 "Convert EOL markers in a file to match origfile"
92 "Convert EOL markers in a file to match origfile"
93 tostyle = _eoltype(open(origfile, "rb").read())
93 tostyle = _eoltype(open(origfile, "rb").read())
94 if tostyle:
94 if tostyle:
95 data = open(file, "rb").read()
95 data = open(file, "rb").read()
96 style = _eoltype(data)
96 style = _eoltype(data)
97 if style:
97 if style:
98 newdata = data.replace(style, tostyle)
98 newdata = data.replace(style, tostyle)
99 if newdata != data:
99 if newdata != data:
100 open(file, "wb").write(newdata)
100 open(file, "wb").write(newdata)
101
101
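# Editor's note: illustrative checks (made-up strings) for the EOL helpers
# above.  _eoltype() reports the first style it recognises and _matcheol()
# uses it to rewrite a merge result so an external tool cannot silently
# convert every line ending in the file.
assert _eoltype("a\r\nb\r\n") == "\r\n"   # Windows
assert _eoltype("a\nb\n") == "\n"         # UNIX
assert _eoltype("a\x00b") is None         # binary data is left untouched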
102 def filemerge(repo, fw, fd, fo, wctx, mctx):
102 def filemerge(repo, fw, fd, fo, wctx, mctx):
103 """perform a 3-way merge in the working directory
103 """perform a 3-way merge in the working directory
104
104
105 fw = original filename in the working directory
105 fw = original filename in the working directory
106 fd = destination filename in the working directory
106 fd = destination filename in the working directory
107 fo = filename in other parent
107 fo = filename in other parent
108 wctx, mctx = working and merge changecontexts
108 wctx, mctx = working and merge changecontexts
109 """
109 """
110
110
111 def temp(prefix, ctx):
111 def temp(prefix, ctx):
112 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
112 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
113 (fd, name) = tempfile.mkstemp(prefix=pre)
113 (fd, name) = tempfile.mkstemp(prefix=pre)
114 data = repo.wwritedata(ctx.path(), ctx.data())
114 data = repo.wwritedata(ctx.path(), ctx.data())
115 f = os.fdopen(fd, "wb")
115 f = os.fdopen(fd, "wb")
116 f.write(data)
116 f.write(data)
117 f.close()
117 f.close()
118 return name
118 return name
119
119
120 def isbin(ctx):
120 def isbin(ctx):
121 try:
121 try:
122 return util.binary(ctx.data())
122 return util.binary(ctx.data())
123 except IOError:
123 except IOError:
124 return False
124 return False
125
125
126 fco = mctx.filectx(fo)
126 fco = mctx.filectx(fo)
127 if not fco.cmp(wctx.filectx(fd).data()): # files identical?
127 if not fco.cmp(wctx.filectx(fd).data()): # files identical?
128 return None
128 return None
129
129
130 ui = repo.ui
130 ui = repo.ui
131 fcm = wctx.filectx(fw)
131 fcm = wctx.filectx(fw)
132 fca = fcm.ancestor(fco) or repo.filectx(fw, fileid=nullrev)
132 fca = fcm.ancestor(fco) or repo.filectx(fw, fileid=nullrev)
133 binary = isbin(fcm) or isbin(fco) or isbin(fca)
133 binary = isbin(fcm) or isbin(fco) or isbin(fca)
134 symlink = fcm.islink() or fco.islink()
134 symlink = fcm.islink() or fco.islink()
135 tool, toolpath = _picktool(repo, ui, fw, binary, symlink)
135 tool, toolpath = _picktool(repo, ui, fw, binary, symlink)
136 ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
136 ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
137 (tool, fw, binary, symlink))
137 (tool, fw, binary, symlink))
138
138
139 if not tool:
139 if not tool:
140 tool = "internal:local"
140 tool = "internal:local"
141 if ui.prompt(_(" no tool found to merge %s\n"
141 if ui.prompt(_(" no tool found to merge %s\n"
142 "keep (l)ocal or take (o)ther?") % fw,
142 "keep (l)ocal or take (o)ther?") % fw,
143 _("[lo]"), _("l")) != _("l"):
143 _("[lo]"), _("l")) != _("l"):
144 tool = "internal:other"
144 tool = "internal:other"
145 if tool == "internal:local":
145 if tool == "internal:local":
146 return 0
146 return 0
147 if tool == "internal:other":
147 if tool == "internal:other":
148 repo.wwrite(fd, fco.data(), fco.fileflags())
148 repo.wwrite(fd, fco.data(), fco.fileflags())
149 return 0
149 return 0
150 if tool == "internal:fail":
150 if tool == "internal:fail":
151 return 1
151 return 1
152
152
153 # do the actual merge
153 # do the actual merge
154 a = repo.wjoin(fd)
154 a = repo.wjoin(fd)
155 b = temp("base", fca)
155 b = temp("base", fca)
156 c = temp("other", fco)
156 c = temp("other", fco)
157 out = ""
157 out = ""
158 back = a + ".orig"
158 back = a + ".orig"
159 util.copyfile(a, back)
159 util.copyfile(a, back)
160
160
161 if fw != fo:
161 if fw != fo:
162 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
162 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
163 else:
163 else:
164 repo.ui.status(_("merging %s\n") % fw)
164 repo.ui.status(_("merging %s\n") % fw)
165 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
165 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
166
166
167 # do we attempt to simplemerge first?
167 # do we attempt to simplemerge first?
168 if _toolbool(ui, tool, "premerge", not (binary or symlink)):
168 if _toolbool(ui, tool, "premerge", not (binary or symlink)):
169 r = simplemerge.simplemerge(a, b, c, quiet=True)
169 r = simplemerge.simplemerge(a, b, c, quiet=True)
170 if not r:
170 if not r:
171 ui.debug(_(" premerge successful\n"))
171 ui.debug(_(" premerge successful\n"))
172 os.unlink(back)
172 os.unlink(back)
173 os.unlink(b)
173 os.unlink(b)
174 os.unlink(c)
174 os.unlink(c)
175 return 0
175 return 0
176 util.copyfile(back, a) # restore from backup and try again
176 util.copyfile(back, a) # restore from backup and try again
177
177
178 env = dict(HG_FILE=fd,
178 env = dict(HG_FILE=fd,
179 HG_MY_NODE=str(wctx.parents()[0]),
179 HG_MY_NODE=str(wctx.parents()[0]),
180 HG_OTHER_NODE=str(mctx),
180 HG_OTHER_NODE=str(mctx),
181 HG_MY_ISLINK=fcm.islink(),
181 HG_MY_ISLINK=fcm.islink(),
182 HG_OTHER_ISLINK=fco.islink(),
182 HG_OTHER_ISLINK=fco.islink(),
183 HG_BASE_ISLINK=fca.islink())
183 HG_BASE_ISLINK=fca.islink())
184
184
185 if tool == "internal:merge":
185 if tool == "internal:merge":
186 r = simplemerge.simplemerge(a, b, c, label=['local', 'other'])
186 r = simplemerge.simplemerge(a, b, c, label=['local', 'other'])
187 else:
187 else:
188 args = _toolstr(ui, tool, "args", '$local $base $other')
188 args = _toolstr(ui, tool, "args", '$local $base $other')
189 if "$output" in args:
189 if "$output" in args:
190 out, a = a, back # read input from backup, write to original
190 out, a = a, back # read input from backup, write to original
191 replace = dict(local=a, base=b, other=c, output=out)
191 replace = dict(local=a, base=b, other=c, output=out)
192 args = re.sub("\$(local|base|other|output)",
192 args = re.sub("\$(local|base|other|output)",
193 lambda x: '"%s"' % replace[x.group()[1:]], args)
193 lambda x: '"%s"' % replace[x.group()[1:]], args)
194 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
194 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
195
195
196 if not r and _toolbool(ui, tool, "checkconflicts"):
196 if not r and _toolbool(ui, tool, "checkconflicts"):
197 if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcm.data()):
197 if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcm.data()):
198 r = 1
198 r = 1
199
199
200 if not r and _toolbool(ui, tool, "checkchanged"):
200 if not r and _toolbool(ui, tool, "checkchanged"):
201 if filecmp.cmp(repo.wjoin(fd), back):
201 if filecmp.cmp(repo.wjoin(fd), back):
202 if ui.prompt(_(" output file %s appears unchanged\n"
202 if ui.prompt(_(" output file %s appears unchanged\n"
203 "was merge successful (yn)?") % fd,
203 "was merge successful (yn)?") % fd,
204 _("[yn]"), _("n")) != _("y"):
204 _("[yn]"), _("n")) != _("y"):
205 r = 1
205 r = 1
206
206
207 if _toolbool(ui, tool, "fixeol"):
207 if _toolbool(ui, tool, "fixeol"):
208 _matcheol(repo.wjoin(fd), back)
208 _matcheol(repo.wjoin(fd), back)
209
209
210 if r:
210 if r:
211 repo.ui.warn(_("merging %s failed!\n") % fd)
211 repo.ui.warn(_("merging %s failed!\n") % fd)
212 else:
212 else:
213 os.unlink(back)
213 os.unlink(back)
214
214
215 os.unlink(b)
215 os.unlink(b)
216 os.unlink(c)
216 os.unlink(c)
217 return r
217 return r
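# Editor's note: a self-contained illustration of the argument substitution
# used for external tools in filemerge() above; the template and paths are
# hypothetical.  Each $local/$base/$other/$output placeholder is replaced by
# the corresponding quoted path before the command line is run.
import re

def expand_args(args, local, base, other, output=""):
    replace = dict(local=local, base=base, other=other, output=output)
    return re.sub(r"\$(local|base|other|output)",
                  lambda m: '"%s"' % replace[m.group(1)], args)

# expand_args("$local $base $other", "a", "a~base.txt", "a~other.txt")
#   -> '"a" "a~base.txt" "a~other.txt"'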
@@ -1,313 +1,313 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import bin, hex, nullid, nullrev, short
10 from repo import *
10 from repo import NoCapability, RepoError
11 from i18n import _
11 from i18n import _
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 import errno, lock, os, shutil, util, extensions
13 import errno, lock, os, shutil, util, extensions
14 import merge as _merge
14 import merge as _merge
15 import verify as _verify
15 import verify as _verify
16
16
17 def _local(path):
17 def _local(path):
18 return (os.path.isfile(util.drop_scheme('file', path)) and
18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 bundlerepo or localrepo)
19 bundlerepo or localrepo)
20
20
21 def parseurl(url, revs):
21 def parseurl(url, revs):
22 '''parse url#branch, returning url, branch + revs'''
22 '''parse url#branch, returning url, branch + revs'''
23
23
24 if '#' not in url:
24 if '#' not in url:
25 return url, (revs or None), None
25 return url, (revs or None), None
26
26
27 url, rev = url.split('#', 1)
27 url, rev = url.split('#', 1)
28 return url, revs + [rev], rev
28 return url, revs + [rev], rev
29
29
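# Editor's note: illustrative calls (hypothetical URLs) against parseurl()
# above.  A trailing '#name' fragment is split off, appended to the revision
# list and also returned separately as the checkout hint.
assert parseurl('http://example.com/repo', []) == \
       ('http://example.com/repo', None, None)
assert parseurl('http://example.com/repo#stable', []) == \
       ('http://example.com/repo', ['stable'], 'stable')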
30 schemes = {
30 schemes = {
31 'bundle': bundlerepo,
31 'bundle': bundlerepo,
32 'file': _local,
32 'file': _local,
33 'http': httprepo,
33 'http': httprepo,
34 'https': httprepo,
34 'https': httprepo,
35 'ssh': sshrepo,
35 'ssh': sshrepo,
36 'static-http': statichttprepo,
36 'static-http': statichttprepo,
37 }
37 }
38
38
39 def _lookup(path):
39 def _lookup(path):
40 scheme = 'file'
40 scheme = 'file'
41 if path:
41 if path:
42 c = path.find(':')
42 c = path.find(':')
43 if c > 0:
43 if c > 0:
44 scheme = path[:c]
44 scheme = path[:c]
45 thing = schemes.get(scheme) or schemes['file']
45 thing = schemes.get(scheme) or schemes['file']
46 try:
46 try:
47 return thing(path)
47 return thing(path)
48 except TypeError:
48 except TypeError:
49 return thing
49 return thing
50
50
51 def islocal(repo):
51 def islocal(repo):
52 '''return true if repo or path is local'''
52 '''return true if repo or path is local'''
53 if isinstance(repo, str):
53 if isinstance(repo, str):
54 try:
54 try:
55 return _lookup(repo).islocal(repo)
55 return _lookup(repo).islocal(repo)
56 except AttributeError:
56 except AttributeError:
57 return False
57 return False
58 return repo.local()
58 return repo.local()
59
59
60 def repository(ui, path='', create=False):
60 def repository(ui, path='', create=False):
61 """return a repository object for the specified path"""
61 """return a repository object for the specified path"""
62 repo = _lookup(path).instance(ui, path, create)
62 repo = _lookup(path).instance(ui, path, create)
63 ui = getattr(repo, "ui", ui)
63 ui = getattr(repo, "ui", ui)
64 for name, module in extensions.extensions():
64 for name, module in extensions.extensions():
65 hook = getattr(module, 'reposetup', None)
65 hook = getattr(module, 'reposetup', None)
66 if hook:
66 if hook:
67 hook(ui, repo)
67 hook(ui, repo)
68 return repo
68 return repo
69
69
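# Editor's note: a minimal sketch of the hook that repository() above gives
# every enabled extension.  The module below is hypothetical; any extension
# listed in [extensions] that defines reposetup(ui, repo) is called once per
# repository object, right after it is constructed.
def reposetup(ui, repo):
    # e.g. annotate or wrap the freshly created repository
    if repo.local():
        ui.note("demo extension: opened %s\n" % repo.root)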
70 def defaultdest(source):
70 def defaultdest(source):
71 '''return default destination of clone if none is given'''
71 '''return default destination of clone if none is given'''
72 return os.path.basename(os.path.normpath(source))
72 return os.path.basename(os.path.normpath(source))
73
73
74 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
74 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
75 stream=False):
75 stream=False):
76 """Make a copy of an existing repository.
76 """Make a copy of an existing repository.
77
77
78 Create a copy of an existing repository in a new directory. The
78 Create a copy of an existing repository in a new directory. The
79 source and destination are URLs, as passed to the repository
79 source and destination are URLs, as passed to the repository
80 function. Returns a pair of repository objects, the source and
80 function. Returns a pair of repository objects, the source and
81 newly created destination.
81 newly created destination.
82
82
83 The location of the source is added to the new repository's
83 The location of the source is added to the new repository's
84 .hg/hgrc file, as the default to be used for future pulls and
84 .hg/hgrc file, as the default to be used for future pulls and
85 pushes.
85 pushes.
86
86
87 If an exception is raised, the partly cloned/updated destination
87 If an exception is raised, the partly cloned/updated destination
88 repository will be deleted.
88 repository will be deleted.
89
89
90 Arguments:
90 Arguments:
91
91
92 source: repository object or URL
92 source: repository object or URL
93
93
94 dest: URL of destination repository to create (defaults to base
94 dest: URL of destination repository to create (defaults to base
95 name of source repository)
95 name of source repository)
96
96
97 pull: always pull from source repository, even in local case
97 pull: always pull from source repository, even in local case
98
98
99 stream: stream raw data uncompressed from repository (fast over
99 stream: stream raw data uncompressed from repository (fast over
100 LAN, slow over WAN)
100 LAN, slow over WAN)
101
101
102 rev: revision to clone up to (implies pull=True)
102 rev: revision to clone up to (implies pull=True)
103
103
104 update: update working directory after clone completes, if
104 update: update working directory after clone completes, if
105 destination is local repository
105 destination is local repository
106 """
106 """
107
107
108 if isinstance(source, str):
108 if isinstance(source, str):
109 origsource = ui.expandpath(source)
109 origsource = ui.expandpath(source)
110 source, rev, checkout = parseurl(origsource, rev)
110 source, rev, checkout = parseurl(origsource, rev)
111 src_repo = repository(ui, source)
111 src_repo = repository(ui, source)
112 else:
112 else:
113 src_repo = source
113 src_repo = source
114 origsource = source = src_repo.url()
114 origsource = source = src_repo.url()
115 checkout = None
115 checkout = None
116
116
117 if dest is None:
117 if dest is None:
118 dest = defaultdest(source)
118 dest = defaultdest(source)
119 ui.status(_("destination directory: %s\n") % dest)
119 ui.status(_("destination directory: %s\n") % dest)
120
120
121 def localpath(path):
121 def localpath(path):
122 if path.startswith('file://localhost/'):
122 if path.startswith('file://localhost/'):
123 return path[16:]
123 return path[16:]
124 if path.startswith('file://'):
124 if path.startswith('file://'):
125 return path[7:]
125 return path[7:]
126 if path.startswith('file:'):
126 if path.startswith('file:'):
127 return path[5:]
127 return path[5:]
128 return path
128 return path
129
129
130 dest = localpath(dest)
130 dest = localpath(dest)
131 source = localpath(source)
131 source = localpath(source)
132
132
133 if os.path.exists(dest):
133 if os.path.exists(dest):
134 raise util.Abort(_("destination '%s' already exists") % dest)
134 raise util.Abort(_("destination '%s' already exists") % dest)
135
135
136 class DirCleanup(object):
136 class DirCleanup(object):
137 def __init__(self, dir_):
137 def __init__(self, dir_):
138 self.rmtree = shutil.rmtree
138 self.rmtree = shutil.rmtree
139 self.dir_ = dir_
139 self.dir_ = dir_
140 def close(self):
140 def close(self):
141 self.dir_ = None
141 self.dir_ = None
142 def __del__(self):
142 def __del__(self):
143 if self.dir_:
143 if self.dir_:
144 self.rmtree(self.dir_, True)
144 self.rmtree(self.dir_, True)
145
145
146 src_lock = dest_lock = dir_cleanup = None
146 src_lock = dest_lock = dir_cleanup = None
147 try:
147 try:
148 if islocal(dest):
148 if islocal(dest):
149 dir_cleanup = DirCleanup(dest)
149 dir_cleanup = DirCleanup(dest)
150
150
151 abspath = origsource
151 abspath = origsource
152 copy = False
152 copy = False
153 if src_repo.local() and islocal(dest):
153 if src_repo.local() and islocal(dest):
154 abspath = os.path.abspath(util.drop_scheme('file', origsource))
154 abspath = os.path.abspath(util.drop_scheme('file', origsource))
155 copy = not pull and not rev
155 copy = not pull and not rev
156
156
157 if copy:
157 if copy:
158 try:
158 try:
159 # we use a lock here because if we race with commit, we
159 # we use a lock here because if we race with commit, we
160 # can end up with extra data in the cloned revlogs that's
160 # can end up with extra data in the cloned revlogs that's
161 # not pointed to by changesets, thus causing verify to
161 # not pointed to by changesets, thus causing verify to
162 # fail
162 # fail
163 src_lock = src_repo.lock()
163 src_lock = src_repo.lock()
164 except lock.LockException:
164 except lock.LockException:
165 copy = False
165 copy = False
166
166
167 if copy:
167 if copy:
168 def force_copy(src, dst):
168 def force_copy(src, dst):
169 if not os.path.exists(src):
169 if not os.path.exists(src):
170 # Tolerate empty source repository and optional files
170 # Tolerate empty source repository and optional files
171 return
171 return
172 util.copyfiles(src, dst)
172 util.copyfiles(src, dst)
173
173
174 src_store = os.path.realpath(src_repo.spath)
174 src_store = os.path.realpath(src_repo.spath)
175 if not os.path.exists(dest):
175 if not os.path.exists(dest):
176 os.mkdir(dest)
176 os.mkdir(dest)
177 try:
177 try:
178 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
178 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
179 os.mkdir(dest_path)
179 os.mkdir(dest_path)
180 except OSError, inst:
180 except OSError, inst:
181 if inst.errno == errno.EEXIST:
181 if inst.errno == errno.EEXIST:
182 dir_cleanup.close()
182 dir_cleanup.close()
183 raise util.Abort(_("destination '%s' already exists")
183 raise util.Abort(_("destination '%s' already exists")
184 % dest)
184 % dest)
185 raise
185 raise
186 if src_repo.spath != src_repo.path:
186 if src_repo.spath != src_repo.path:
187 # XXX racy
187 # XXX racy
188 dummy_changelog = os.path.join(dest_path, "00changelog.i")
188 dummy_changelog = os.path.join(dest_path, "00changelog.i")
189 # copy the dummy changelog
189 # copy the dummy changelog
190 force_copy(src_repo.join("00changelog.i"), dummy_changelog)
190 force_copy(src_repo.join("00changelog.i"), dummy_changelog)
191 dest_store = os.path.join(dest_path, "store")
191 dest_store = os.path.join(dest_path, "store")
192 os.mkdir(dest_store)
192 os.mkdir(dest_store)
193 else:
193 else:
194 dest_store = dest_path
194 dest_store = dest_path
195 # copy the requires file
195 # copy the requires file
196 force_copy(src_repo.join("requires"),
196 force_copy(src_repo.join("requires"),
197 os.path.join(dest_path, "requires"))
197 os.path.join(dest_path, "requires"))
198 # we lock here to avoid premature writing to the target
198 # we lock here to avoid premature writing to the target
199 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
199 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
200
200
201 files = ("data",
201 files = ("data",
202 "00manifest.d", "00manifest.i",
202 "00manifest.d", "00manifest.i",
203 "00changelog.d", "00changelog.i")
203 "00changelog.d", "00changelog.i")
204 for f in files:
204 for f in files:
205 src = os.path.join(src_store, f)
205 src = os.path.join(src_store, f)
206 dst = os.path.join(dest_store, f)
206 dst = os.path.join(dest_store, f)
207 force_copy(src, dst)
207 force_copy(src, dst)
208
208
209 # we need to re-init the repo after manually copying the data
209 # we need to re-init the repo after manually copying the data
210 # into it
210 # into it
211 dest_repo = repository(ui, dest)
211 dest_repo = repository(ui, dest)
212
212
213 else:
213 else:
214 try:
214 try:
215 dest_repo = repository(ui, dest, create=True)
215 dest_repo = repository(ui, dest, create=True)
216 except OSError, inst:
216 except OSError, inst:
217 if inst.errno == errno.EEXIST:
217 if inst.errno == errno.EEXIST:
218 dir_cleanup.close()
218 dir_cleanup.close()
219 raise util.Abort(_("destination '%s' already exists")
219 raise util.Abort(_("destination '%s' already exists")
220 % dest)
220 % dest)
221 raise
221 raise
222
222
223 revs = None
223 revs = None
224 if rev:
224 if rev:
225 if 'lookup' not in src_repo.capabilities:
225 if 'lookup' not in src_repo.capabilities:
226 raise util.Abort(_("src repository does not support revision "
226 raise util.Abort(_("src repository does not support revision "
227 "lookup and so doesn't support clone by "
227 "lookup and so doesn't support clone by "
228 "revision"))
228 "revision"))
229 revs = [src_repo.lookup(r) for r in rev]
229 revs = [src_repo.lookup(r) for r in rev]
230
230
231 if dest_repo.local():
231 if dest_repo.local():
232 dest_repo.clone(src_repo, heads=revs, stream=stream)
232 dest_repo.clone(src_repo, heads=revs, stream=stream)
233 elif src_repo.local():
233 elif src_repo.local():
234 src_repo.push(dest_repo, revs=revs)
234 src_repo.push(dest_repo, revs=revs)
235 else:
235 else:
236 raise util.Abort(_("clone from remote to remote not supported"))
236 raise util.Abort(_("clone from remote to remote not supported"))
237
237
238 if dir_cleanup:
238 if dir_cleanup:
239 dir_cleanup.close()
239 dir_cleanup.close()
240
240
241 if dest_repo.local():
241 if dest_repo.local():
242 fp = dest_repo.opener("hgrc", "w", text=True)
242 fp = dest_repo.opener("hgrc", "w", text=True)
243 fp.write("[paths]\n")
243 fp.write("[paths]\n")
244 fp.write("default = %s\n" % abspath)
244 fp.write("default = %s\n" % abspath)
245 fp.close()
245 fp.close()
246
246
247 if update:
247 if update:
248 if not checkout:
248 if not checkout:
249 try:
249 try:
250 checkout = dest_repo.lookup("default")
250 checkout = dest_repo.lookup("default")
251 except:
251 except:
252 checkout = dest_repo.changelog.tip()
252 checkout = dest_repo.changelog.tip()
253 _update(dest_repo, checkout)
253 _update(dest_repo, checkout)
254
254
255 return src_repo, dest_repo
255 return src_repo, dest_repo
256 finally:
256 finally:
257 del src_lock, dest_lock, dir_cleanup
257 del src_lock, dest_lock, dir_cleanup
258
258
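# Editor's note: a hedged usage sketch for clone() above; the URL and
# destination are hypothetical.  On success it returns the source and
# destination repository objects, and the new repo's .hg/hgrc gains a
# "default" path pointing back at the source for later pulls and pushes.
def _clone_example():
    from mercurial import ui as _ui
    src_repo, dest_repo = clone(_ui.ui(), 'http://example.com/repo',
                                'local-copy', pull=False, update=True)
    return dest_repo.root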
259 def _showstats(repo, stats):
259 def _showstats(repo, stats):
260 stats = ((stats[0], _("updated")),
260 stats = ((stats[0], _("updated")),
261 (stats[1], _("merged")),
261 (stats[1], _("merged")),
262 (stats[2], _("removed")),
262 (stats[2], _("removed")),
263 (stats[3], _("unresolved")))
263 (stats[3], _("unresolved")))
264 note = ", ".join([_("%d files %s") % s for s in stats])
264 note = ", ".join([_("%d files %s") % s for s in stats])
265 repo.ui.status("%s\n" % note)
265 repo.ui.status("%s\n" % note)
266
266
267 def _update(repo, node): return update(repo, node)
267 def _update(repo, node): return update(repo, node)
268
268
269 def update(repo, node):
269 def update(repo, node):
270 """update the working directory to node, merging linear changes"""
270 """update the working directory to node, merging linear changes"""
271 pl = repo.parents()
271 pl = repo.parents()
272 stats = _merge.update(repo, node, False, False, None)
272 stats = _merge.update(repo, node, False, False, None)
273 _showstats(repo, stats)
273 _showstats(repo, stats)
274 if stats[3]:
274 if stats[3]:
275 repo.ui.status(_("There are unresolved merges with"
275 repo.ui.status(_("There are unresolved merges with"
276 " locally modified files.\n"))
276 " locally modified files.\n"))
277 if stats[1]:
277 if stats[1]:
278 repo.ui.status(_("You can finish the partial merge using:\n"))
278 repo.ui.status(_("You can finish the partial merge using:\n"))
279 else:
279 else:
280 repo.ui.status(_("You can redo the full merge using:\n"))
280 repo.ui.status(_("You can redo the full merge using:\n"))
281 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
281 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
282 repo.ui.status(_(" hg update %s\n hg update %s\n")
282 repo.ui.status(_(" hg update %s\n hg update %s\n")
283 % (pl[0].rev(), repo.changectx(node).rev()))
283 % (pl[0].rev(), repo.changectx(node).rev()))
284 return stats[3] > 0
284 return stats[3] > 0
285
285
286 def clean(repo, node, show_stats=True):
286 def clean(repo, node, show_stats=True):
287 """forcibly switch the working directory to node, clobbering changes"""
287 """forcibly switch the working directory to node, clobbering changes"""
288 stats = _merge.update(repo, node, False, True, None)
288 stats = _merge.update(repo, node, False, True, None)
289 if show_stats: _showstats(repo, stats)
289 if show_stats: _showstats(repo, stats)
290 return stats[3] > 0
290 return stats[3] > 0
291
291
292 def merge(repo, node, force=None, remind=True):
292 def merge(repo, node, force=None, remind=True):
293 """branch merge with node, resolving changes"""
293 """branch merge with node, resolving changes"""
294 stats = _merge.update(repo, node, True, force, False)
294 stats = _merge.update(repo, node, True, force, False)
295 _showstats(repo, stats)
295 _showstats(repo, stats)
296 if stats[3]:
296 if stats[3]:
297 pl = repo.parents()
297 pl = repo.parents()
298 repo.ui.status(_("There are unresolved merges,"
298 repo.ui.status(_("There are unresolved merges,"
299 " you can redo the full merge using:\n"
299 " you can redo the full merge using:\n"
300 " hg update -C %s\n"
300 " hg update -C %s\n"
301 " hg merge %s\n")
301 " hg merge %s\n")
302 % (pl[0].rev(), pl[1].rev()))
302 % (pl[0].rev(), pl[1].rev()))
303 elif remind:
303 elif remind:
304 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
304 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
305 return stats[3] > 0
305 return stats[3] > 0
306
306
307 def revert(repo, node, choose):
307 def revert(repo, node, choose):
308 """revert changes to revision in node without updating dirstate"""
308 """revert changes to revision in node without updating dirstate"""
309 return _merge.update(repo, node, False, True, choose)[3] > 0
309 return _merge.update(repo, node, False, True, choose)[3] > 0
310
310
311 def verify(repo):
311 def verify(repo):
312 """verify the consistency of a repository"""
312 """verify the consistency of a repository"""
313 return _verify.verify(repo)
313 return _verify.verify(repo)
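# Editor's note: the wrappers above (update, clean, merge, revert) all
# return True when unresolved files remain and False otherwise, so a caller
# can map the result straight onto an exit code.  A small illustration:
def _update_example(repo):
    if update(repo, "tip"):          # True means stats[3] > 0 (unresolved)
        repo.ui.warn("update left unresolved files behind\n")
        return 1
    return 0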
@@ -1,951 +1,951 @@
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, mimetypes, re
9 import os, mimetypes, re
10 from mercurial.node import *
10 from mercurial.node import hex, nullid, short
11 from mercurial import mdiff, ui, hg, util, archival, patch, hook
11 from mercurial import mdiff, ui, hg, util, archival, patch, hook
12 from mercurial import revlog, templater, templatefilters, changegroup
12 from mercurial import revlog, templater, templatefilters, changegroup
13 from common import get_mtime, style_map, paritygen, countgen, get_contact
13 from common import get_mtime, style_map, paritygen, countgen, get_contact
14 from common import ErrorResponse
14 from common import ErrorResponse
15 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
15 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
16 from request import wsgirequest
16 from request import wsgirequest
17 import webcommands, protocol
17 import webcommands, protocol
18
18
19 shortcuts = {
19 shortcuts = {
20 'cl': [('cmd', ['changelog']), ('rev', None)],
20 'cl': [('cmd', ['changelog']), ('rev', None)],
21 'sl': [('cmd', ['shortlog']), ('rev', None)],
21 'sl': [('cmd', ['shortlog']), ('rev', None)],
22 'cs': [('cmd', ['changeset']), ('node', None)],
22 'cs': [('cmd', ['changeset']), ('node', None)],
23 'f': [('cmd', ['file']), ('filenode', None)],
23 'f': [('cmd', ['file']), ('filenode', None)],
24 'fl': [('cmd', ['filelog']), ('filenode', None)],
24 'fl': [('cmd', ['filelog']), ('filenode', None)],
25 'fd': [('cmd', ['filediff']), ('node', None)],
25 'fd': [('cmd', ['filediff']), ('node', None)],
26 'fa': [('cmd', ['annotate']), ('filenode', None)],
26 'fa': [('cmd', ['annotate']), ('filenode', None)],
27 'mf': [('cmd', ['manifest']), ('manifest', None)],
27 'mf': [('cmd', ['manifest']), ('manifest', None)],
28 'ca': [('cmd', ['archive']), ('node', None)],
28 'ca': [('cmd', ['archive']), ('node', None)],
29 'tags': [('cmd', ['tags'])],
29 'tags': [('cmd', ['tags'])],
30 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
30 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
31 'static': [('cmd', ['static']), ('file', None)]
31 'static': [('cmd', ['static']), ('file', None)]
32 }
32 }
33
33
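# Editor's note: a stand-alone mirror of the shortcut expansion performed at
# the top of hgweb.run_wsgi() below; the changeset hash is made up.  A None
# value in the table means "reuse the shortcut's own value".
def _expand_shortcuts_example(form):
    for k in list(form):
        if k in shortcuts:
            for name, value in shortcuts[k]:
                if value is None:
                    value = form[k]
                form[name] = value
            del form[k]
    return form

# _expand_shortcuts_example({'cs': ['0123abcd']})
#   -> {'cmd': ['changeset'], 'node': ['0123abcd']}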
34 def _up(p):
34 def _up(p):
35 if p[0] != "/":
35 if p[0] != "/":
36 p = "/" + p
36 p = "/" + p
37 if p[-1] == "/":
37 if p[-1] == "/":
38 p = p[:-1]
38 p = p[:-1]
39 up = os.path.dirname(p)
39 up = os.path.dirname(p)
40 if up == "/":
40 if up == "/":
41 return "/"
41 return "/"
42 return up + "/"
42 return up + "/"
43
43
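# Editor's note: illustrative checks (made-up paths) for the _up() helper
# above: it forces a leading slash, drops a trailing one, and returns the
# parent directory with a trailing slash.
assert _up("a/b/c") == "/a/b/"
assert _up("/a") == "/"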
44 def revnavgen(pos, pagelen, limit, nodefunc):
44 def revnavgen(pos, pagelen, limit, nodefunc):
45 def seq(factor, limit=None):
45 def seq(factor, limit=None):
46 if limit:
46 if limit:
47 yield limit
47 yield limit
48 if limit >= 20 and limit <= 40:
48 if limit >= 20 and limit <= 40:
49 yield 50
49 yield 50
50 else:
50 else:
51 yield 1 * factor
51 yield 1 * factor
52 yield 3 * factor
52 yield 3 * factor
53 for f in seq(factor * 10):
53 for f in seq(factor * 10):
54 yield f
54 yield f
55
55
56 def nav(**map):
56 def nav(**map):
57 l = []
57 l = []
58 last = 0
58 last = 0
59 for f in seq(1, pagelen):
59 for f in seq(1, pagelen):
60 if f < pagelen or f <= last:
60 if f < pagelen or f <= last:
61 continue
61 continue
62 if f > limit:
62 if f > limit:
63 break
63 break
64 last = f
64 last = f
65 if pos + f < limit:
65 if pos + f < limit:
66 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
66 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
67 if pos - f >= 0:
67 if pos - f >= 0:
68 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
68 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
69
69
70 try:
70 try:
71 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
71 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
72
72
73 for label, node in l:
73 for label, node in l:
74 yield {"label": label, "node": node}
74 yield {"label": label, "node": node}
75
75
76 yield {"label": "tip", "node": "tip"}
76 yield {"label": "tip", "node": "tip"}
77 except hg.RepoError:
77 except hg.RepoError:
78 pass
78 pass
79
79
80 return nav
80 return nav
81
81
82 class hgweb(object):
82 class hgweb(object):
83 def __init__(self, repo, name=None):
83 def __init__(self, repo, name=None):
84 if isinstance(repo, str):
84 if isinstance(repo, str):
85 parentui = ui.ui(report_untrusted=False, interactive=False)
85 parentui = ui.ui(report_untrusted=False, interactive=False)
86 self.repo = hg.repository(parentui, repo)
86 self.repo = hg.repository(parentui, repo)
87 else:
87 else:
88 self.repo = repo
88 self.repo = repo
89
89
90 hook.redirect(True)
90 hook.redirect(True)
91 self.mtime = -1
91 self.mtime = -1
92 self.reponame = name
92 self.reponame = name
93 self.archives = 'zip', 'gz', 'bz2'
93 self.archives = 'zip', 'gz', 'bz2'
94 self.stripecount = 1
94 self.stripecount = 1
95 self._capabilities = None
95 self._capabilities = None
96 # a repo owner may set web.templates in .hg/hgrc to get any file
96 # a repo owner may set web.templates in .hg/hgrc to get any file
97 # readable by the user running the CGI script
97 # readable by the user running the CGI script
98 self.templatepath = self.config("web", "templates",
98 self.templatepath = self.config("web", "templates",
99 templater.templatepath(),
99 templater.templatepath(),
100 untrusted=False)
100 untrusted=False)
101
101
102 # The CGI scripts are often run by a user different from the repo owner.
102 # The CGI scripts are often run by a user different from the repo owner.
103 # Trust the settings from the .hg/hgrc files by default.
103 # Trust the settings from the .hg/hgrc files by default.
104 def config(self, section, name, default=None, untrusted=True):
104 def config(self, section, name, default=None, untrusted=True):
105 return self.repo.ui.config(section, name, default,
105 return self.repo.ui.config(section, name, default,
106 untrusted=untrusted)
106 untrusted=untrusted)
107
107
108 def configbool(self, section, name, default=False, untrusted=True):
108 def configbool(self, section, name, default=False, untrusted=True):
109 return self.repo.ui.configbool(section, name, default,
109 return self.repo.ui.configbool(section, name, default,
110 untrusted=untrusted)
110 untrusted=untrusted)
111
111
112 def configlist(self, section, name, default=None, untrusted=True):
112 def configlist(self, section, name, default=None, untrusted=True):
113 return self.repo.ui.configlist(section, name, default,
113 return self.repo.ui.configlist(section, name, default,
114 untrusted=untrusted)
114 untrusted=untrusted)
115
115
116 def refresh(self):
116 def refresh(self):
117 mtime = get_mtime(self.repo.root)
117 mtime = get_mtime(self.repo.root)
118 if mtime != self.mtime:
118 if mtime != self.mtime:
119 self.mtime = mtime
119 self.mtime = mtime
120 self.repo = hg.repository(self.repo.ui, self.repo.root)
120 self.repo = hg.repository(self.repo.ui, self.repo.root)
121 self.maxchanges = int(self.config("web", "maxchanges", 10))
121 self.maxchanges = int(self.config("web", "maxchanges", 10))
122 self.stripecount = int(self.config("web", "stripes", 1))
122 self.stripecount = int(self.config("web", "stripes", 1))
123 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
123 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
124 self.maxfiles = int(self.config("web", "maxfiles", 10))
124 self.maxfiles = int(self.config("web", "maxfiles", 10))
125 self.allowpull = self.configbool("web", "allowpull", True)
125 self.allowpull = self.configbool("web", "allowpull", True)
126 self.encoding = self.config("web", "encoding", util._encoding)
126 self.encoding = self.config("web", "encoding", util._encoding)
127 self._capabilities = None
127 self._capabilities = None
128
128
129 def capabilities(self):
129 def capabilities(self):
130 if self._capabilities is not None:
130 if self._capabilities is not None:
131 return self._capabilities
131 return self._capabilities
132 caps = ['lookup', 'changegroupsubset']
132 caps = ['lookup', 'changegroupsubset']
133 if self.configbool('server', 'uncompressed'):
133 if self.configbool('server', 'uncompressed'):
134 caps.append('stream=%d' % self.repo.changelog.version)
134 caps.append('stream=%d' % self.repo.changelog.version)
135 if changegroup.bundlepriority:
135 if changegroup.bundlepriority:
136 caps.append('unbundle=%s' % ','.join(changegroup.bundlepriority))
136 caps.append('unbundle=%s' % ','.join(changegroup.bundlepriority))
137 self._capabilities = caps
137 self._capabilities = caps
138 return caps
138 return caps
139
139
140 def run(self):
140 def run(self):
141 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
141 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
142 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
142 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
143 import mercurial.hgweb.wsgicgi as wsgicgi
143 import mercurial.hgweb.wsgicgi as wsgicgi
144 wsgicgi.launch(self)
144 wsgicgi.launch(self)
145
145
146 def __call__(self, env, respond):
146 def __call__(self, env, respond):
147 req = wsgirequest(env, respond)
147 req = wsgirequest(env, respond)
148 self.run_wsgi(req)
148 self.run_wsgi(req)
149 return req
149 return req
150
150
151 def run_wsgi(self, req):
151 def run_wsgi(self, req):
152
152
153 self.refresh()
153 self.refresh()
154
154
155 # expand form shortcuts
155 # expand form shortcuts
156
156
157 for k in shortcuts.iterkeys():
157 for k in shortcuts.iterkeys():
158 if k in req.form:
158 if k in req.form:
159 for name, value in shortcuts[k]:
159 for name, value in shortcuts[k]:
160 if value is None:
160 if value is None:
161 value = req.form[k]
161 value = req.form[k]
162 req.form[name] = value
162 req.form[name] = value
163 del req.form[k]
163 del req.form[k]
164
164
165 # work with CGI variables to create coherent structure
165 # work with CGI variables to create coherent structure
166 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
166 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
167
167
168 req.url = req.env['SCRIPT_NAME']
168 req.url = req.env['SCRIPT_NAME']
169 if not req.url.endswith('/'):
169 if not req.url.endswith('/'):
170 req.url += '/'
170 req.url += '/'
171 if 'REPO_NAME' in req.env:
171 if 'REPO_NAME' in req.env:
172 req.url += req.env['REPO_NAME'] + '/'
172 req.url += req.env['REPO_NAME'] + '/'
173
173
174 if req.env.get('PATH_INFO'):
174 if req.env.get('PATH_INFO'):
175 parts = req.env.get('PATH_INFO').strip('/').split('/')
175 parts = req.env.get('PATH_INFO').strip('/').split('/')
176 repo_parts = req.env.get('REPO_NAME', '').split('/')
176 repo_parts = req.env.get('REPO_NAME', '').split('/')
177 if parts[:len(repo_parts)] == repo_parts:
177 if parts[:len(repo_parts)] == repo_parts:
178 parts = parts[len(repo_parts):]
178 parts = parts[len(repo_parts):]
179 query = '/'.join(parts)
179 query = '/'.join(parts)
180 else:
180 else:
181 query = req.env['QUERY_STRING'].split('&', 1)[0]
181 query = req.env['QUERY_STRING'].split('&', 1)[0]
182 query = query.split(';', 1)[0]
182 query = query.split(';', 1)[0]
183
183
184 # translate user-visible url structure to internal structure
184 # translate user-visible url structure to internal structure
185
185
186 args = query.split('/', 2)
186 args = query.split('/', 2)
187 if 'cmd' not in req.form and args and args[0]:
187 if 'cmd' not in req.form and args and args[0]:
188
188
189 cmd = args.pop(0)
189 cmd = args.pop(0)
190 style = cmd.rfind('-')
190 style = cmd.rfind('-')
191 if style != -1:
191 if style != -1:
192 req.form['style'] = [cmd[:style]]
192 req.form['style'] = [cmd[:style]]
193 cmd = cmd[style+1:]
193 cmd = cmd[style+1:]
194
194
195 # avoid accepting e.g. style parameter as command
195 # avoid accepting e.g. style parameter as command
196 if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
196 if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
197 req.form['cmd'] = [cmd]
197 req.form['cmd'] = [cmd]
198
198
199 if args and args[0]:
199 if args and args[0]:
200 node = args.pop(0)
200 node = args.pop(0)
201 req.form['node'] = [node]
201 req.form['node'] = [node]
202 if args:
202 if args:
203 req.form['file'] = args
203 req.form['file'] = args
204
204
205 if cmd == 'static':
205 if cmd == 'static':
206 req.form['file'] = req.form['node']
206 req.form['file'] = req.form['node']
207 elif cmd == 'archive':
207 elif cmd == 'archive':
208 fn = req.form['node'][0]
208 fn = req.form['node'][0]
209 for type_, spec in self.archive_specs.iteritems():
209 for type_, spec in self.archive_specs.iteritems():
210 ext = spec[2]
210 ext = spec[2]
211 if fn.endswith(ext):
211 if fn.endswith(ext):
212 req.form['node'] = [fn[:-len(ext)]]
212 req.form['node'] = [fn[:-len(ext)]]
213 req.form['type'] = [type_]
213 req.form['type'] = [type_]
214
214
215 # process this if it's a protocol request
215 # process this if it's a protocol request
216
216
217 cmd = req.form.get('cmd', [''])[0]
217 cmd = req.form.get('cmd', [''])[0]
218 if cmd in protocol.__all__:
218 if cmd in protocol.__all__:
219 method = getattr(protocol, cmd)
219 method = getattr(protocol, cmd)
220 method(self, req)
220 method(self, req)
221 return
221 return
222
222
223 # process the web interface request
223 # process the web interface request
224
224
225 try:
225 try:
226
226
227 tmpl = self.templater(req)
227 tmpl = self.templater(req)
228 ctype = tmpl('mimetype', encoding=self.encoding)
228 ctype = tmpl('mimetype', encoding=self.encoding)
229 ctype = templater.stringify(ctype)
229 ctype = templater.stringify(ctype)
230
230
231 if cmd == '':
231 if cmd == '':
232 req.form['cmd'] = [tmpl.cache['default']]
232 req.form['cmd'] = [tmpl.cache['default']]
233 cmd = req.form['cmd'][0]
233 cmd = req.form['cmd'][0]
234
234
235 if cmd not in webcommands.__all__:
235 if cmd not in webcommands.__all__:
236 msg = 'No such method: %s' % cmd
236 msg = 'No such method: %s' % cmd
237 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
237 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
238 elif cmd == 'file' and 'raw' in req.form.get('style', []):
238 elif cmd == 'file' and 'raw' in req.form.get('style', []):
239 self.ctype = ctype
239 self.ctype = ctype
240 content = webcommands.rawfile(self, req, tmpl)
240 content = webcommands.rawfile(self, req, tmpl)
241 else:
241 else:
242 content = getattr(webcommands, cmd)(self, req, tmpl)
242 content = getattr(webcommands, cmd)(self, req, tmpl)
243 req.respond(HTTP_OK, ctype)
243 req.respond(HTTP_OK, ctype)
244
244
245 req.write(content)
245 req.write(content)
246 del tmpl
246 del tmpl
247
247
248 except revlog.LookupError, err:
248 except revlog.LookupError, err:
249 req.respond(HTTP_NOT_FOUND, ctype)
249 req.respond(HTTP_NOT_FOUND, ctype)
250 req.write(tmpl('error', error='revision not found: %s' % err.name))
250 req.write(tmpl('error', error='revision not found: %s' % err.name))
251 except (hg.RepoError, revlog.RevlogError), inst:
251 except (hg.RepoError, revlog.RevlogError), inst:
252 req.respond(HTTP_SERVER_ERROR, ctype)
252 req.respond(HTTP_SERVER_ERROR, ctype)
253 req.write(tmpl('error', error=str(inst)))
253 req.write(tmpl('error', error=str(inst)))
254 except ErrorResponse, inst:
254 except ErrorResponse, inst:
255 req.respond(inst.code, ctype)
255 req.respond(inst.code, ctype)
256 req.write(tmpl('error', error=inst.message))
256 req.write(tmpl('error', error=inst.message))
257
257
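# Editor's note: a hedged trace (hypothetical path) of how run_wsgi() above
# maps the user-visible URL structure onto the internal form:
#   PATH_INFO = '/file/tip/README'
#     -> cmd  = 'file'      (first path component, if it names a web command)
#        node = ['tip']     (second component)
#        file = ['README']  (whatever remains)
# A 'raw-file/...' prefix additionally sets style = ['raw'], because the
# command name is split on its last '-'.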
258 def templater(self, req):
258 def templater(self, req):
259
259
260 # determine scheme, port and server name
260 # determine scheme, port and server name
261 # this is needed to create absolute urls
261 # this is needed to create absolute urls
262
262
263 proto = req.env.get('wsgi.url_scheme')
263 proto = req.env.get('wsgi.url_scheme')
264 if proto == 'https':
264 if proto == 'https':
265 proto = 'https'
265 proto = 'https'
266 default_port = "443"
266 default_port = "443"
267 else:
267 else:
268 proto = 'http'
268 proto = 'http'
269 default_port = "80"
269 default_port = "80"
270
270
271 port = req.env["SERVER_PORT"]
271 port = req.env["SERVER_PORT"]
272 port = port != default_port and (":" + port) or ""
272 port = port != default_port and (":" + port) or ""
273 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
273 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
274 staticurl = self.config("web", "staticurl") or req.url + 'static/'
274 staticurl = self.config("web", "staticurl") or req.url + 'static/'
275 if not staticurl.endswith('/'):
275 if not staticurl.endswith('/'):
276 staticurl += '/'
276 staticurl += '/'
277
277
278 # some functions for the templater
278 # some functions for the templater
279
279
280 def header(**map):
280 def header(**map):
281 yield tmpl('header', encoding=self.encoding, **map)
281 yield tmpl('header', encoding=self.encoding, **map)
282
282
283 def footer(**map):
283 def footer(**map):
284 yield tmpl("footer", **map)
284 yield tmpl("footer", **map)
285
285
286 def motd(**map):
286 def motd(**map):
287 yield self.config("web", "motd", "")
287 yield self.config("web", "motd", "")
288
288
289 def sessionvars(**map):
289 def sessionvars(**map):
290 fields = []
290 fields = []
291 if 'style' in req.form:
291 if 'style' in req.form:
292 style = req.form['style'][0]
292 style = req.form['style'][0]
293 if style != self.config('web', 'style', ''):
293 if style != self.config('web', 'style', ''):
294 fields.append(('style', style))
294 fields.append(('style', style))
295
295
296 separator = req.url[-1] == '?' and ';' or '?'
296 separator = req.url[-1] == '?' and ';' or '?'
297 for name, value in fields:
297 for name, value in fields:
298 yield dict(name=name, value=value, separator=separator)
298 yield dict(name=name, value=value, separator=separator)
299 separator = ';'
299 separator = ';'
300
300
301 # figure out which style to use
301 # figure out which style to use
302
302
303 style = self.config("web", "style", "")
303 style = self.config("web", "style", "")
304 if 'style' in req.form:
304 if 'style' in req.form:
305 style = req.form['style'][0]
305 style = req.form['style'][0]
306 mapfile = style_map(self.templatepath, style)
306 mapfile = style_map(self.templatepath, style)
307
307
308 if not self.reponame:
308 if not self.reponame:
309 self.reponame = (self.config("web", "name")
309 self.reponame = (self.config("web", "name")
310 or req.env.get('REPO_NAME')
310 or req.env.get('REPO_NAME')
311 or req.url.strip('/') or self.repo.root)
311 or req.url.strip('/') or self.repo.root)
312
312
313 # create the templater
313 # create the templater
314
314
315 tmpl = templater.templater(mapfile, templatefilters.filters,
315 tmpl = templater.templater(mapfile, templatefilters.filters,
316 defaults={"url": req.url,
316 defaults={"url": req.url,
317 "staticurl": staticurl,
317 "staticurl": staticurl,
318 "urlbase": urlbase,
318 "urlbase": urlbase,
319 "repo": self.reponame,
319 "repo": self.reponame,
320 "header": header,
320 "header": header,
321 "footer": footer,
321 "footer": footer,
322 "motd": motd,
322 "motd": motd,
323 "sessionvars": sessionvars
323 "sessionvars": sessionvars
324 })
324 })
325 return tmpl
325 return tmpl
326
326
327 def archivelist(self, nodeid):
327 def archivelist(self, nodeid):
328 allowed = self.configlist("web", "allow_archive")
328 allowed = self.configlist("web", "allow_archive")
329 for i, spec in self.archive_specs.iteritems():
329 for i, spec in self.archive_specs.iteritems():
330 if i in allowed or self.configbool("web", "allow" + i):
330 if i in allowed or self.configbool("web", "allow" + i):
331 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
331 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
332
332
333 def listfilediffs(self, tmpl, files, changeset):
333 def listfilediffs(self, tmpl, files, changeset):
334 for f in files[:self.maxfiles]:
334 for f in files[:self.maxfiles]:
335 yield tmpl("filedifflink", node=hex(changeset), file=f)
335 yield tmpl("filedifflink", node=hex(changeset), file=f)
336 if len(files) > self.maxfiles:
336 if len(files) > self.maxfiles:
337 yield tmpl("fileellipses")
337 yield tmpl("fileellipses")
338
338
339 def siblings(self, siblings=[], hiderev=None, **args):
339 def siblings(self, siblings=[], hiderev=None, **args):
340 siblings = [s for s in siblings if s.node() != nullid]
340 siblings = [s for s in siblings if s.node() != nullid]
341 if len(siblings) == 1 and siblings[0].rev() == hiderev:
341 if len(siblings) == 1 and siblings[0].rev() == hiderev:
342 return
342 return
343 for s in siblings:
343 for s in siblings:
344 d = {'node': hex(s.node()), 'rev': s.rev()}
344 d = {'node': hex(s.node()), 'rev': s.rev()}
345 if hasattr(s, 'path'):
345 if hasattr(s, 'path'):
346 d['file'] = s.path()
346 d['file'] = s.path()
347 d.update(args)
347 d.update(args)
348 yield d
348 yield d
349
349
350 def renamelink(self, fl, node):
350 def renamelink(self, fl, node):
351 r = fl.renamed(node)
351 r = fl.renamed(node)
352 if r:
352 if r:
353 return [dict(file=r[0], node=hex(r[1]))]
353 return [dict(file=r[0], node=hex(r[1]))]
354 return []
354 return []
355
355
356 def nodetagsdict(self, node):
356 def nodetagsdict(self, node):
357 return [{"name": i} for i in self.repo.nodetags(node)]
357 return [{"name": i} for i in self.repo.nodetags(node)]
358
358
359 def nodebranchdict(self, ctx):
359 def nodebranchdict(self, ctx):
360 branches = []
360 branches = []
361 branch = ctx.branch()
361 branch = ctx.branch()
362 # If this is an empty repo, ctx.node() == nullid,
362 # If this is an empty repo, ctx.node() == nullid,
363 # ctx.branch() == 'default', but branchtags() is
363 # ctx.branch() == 'default', but branchtags() is
364 # an empty dict. Using dict.get avoids a traceback.
364 # an empty dict. Using dict.get avoids a traceback.
365 if self.repo.branchtags().get(branch) == ctx.node():
365 if self.repo.branchtags().get(branch) == ctx.node():
366 branches.append({"name": branch})
366 branches.append({"name": branch})
367 return branches
367 return branches
368
368
369 def showtag(self, tmpl, t1, node=nullid, **args):
369 def showtag(self, tmpl, t1, node=nullid, **args):
370 for t in self.repo.nodetags(node):
370 for t in self.repo.nodetags(node):
371 yield tmpl(t1, tag=t, **args)
371 yield tmpl(t1, tag=t, **args)
372
372
373 def diff(self, tmpl, node1, node2, files):
373 def diff(self, tmpl, node1, node2, files):
374 def filterfiles(filters, files):
374 def filterfiles(filters, files):
375 l = [x for x in files if x in filters]
375 l = [x for x in files if x in filters]
376
376
377 for t in filters:
377 for t in filters:
378 if t and t[-1] != os.sep:
378 if t and t[-1] != os.sep:
379 t += os.sep
379 t += os.sep
380 l += [x for x in files if x.startswith(t)]
380 l += [x for x in files if x.startswith(t)]
381 return l
381 return l
382
382
383 parity = paritygen(self.stripecount)
383 parity = paritygen(self.stripecount)
384 def diffblock(diff, f, fn):
384 def diffblock(diff, f, fn):
385 yield tmpl("diffblock",
385 yield tmpl("diffblock",
386 lines=prettyprintlines(diff),
386 lines=prettyprintlines(diff),
387 parity=parity.next(),
387 parity=parity.next(),
388 file=f,
388 file=f,
389 filenode=hex(fn or nullid))
389 filenode=hex(fn or nullid))
390
390
391 blockcount = countgen()
391 blockcount = countgen()
392 def prettyprintlines(diff):
392 def prettyprintlines(diff):
393 blockno = blockcount.next()
393 blockno = blockcount.next()
394 for lineno, l in enumerate(diff.splitlines(1)):
394 for lineno, l in enumerate(diff.splitlines(1)):
395 if blockno == 0:
395 if blockno == 0:
396 lineno = lineno + 1
396 lineno = lineno + 1
397 else:
397 else:
398 lineno = "%d.%d" % (blockno, lineno + 1)
398 lineno = "%d.%d" % (blockno, lineno + 1)
399 if l.startswith('+'):
399 if l.startswith('+'):
400 ltype = "difflineplus"
400 ltype = "difflineplus"
401 elif l.startswith('-'):
401 elif l.startswith('-'):
402 ltype = "difflineminus"
402 ltype = "difflineminus"
403 elif l.startswith('@'):
403 elif l.startswith('@'):
404 ltype = "difflineat"
404 ltype = "difflineat"
405 else:
405 else:
406 ltype = "diffline"
406 ltype = "diffline"
407 yield tmpl(ltype,
407 yield tmpl(ltype,
408 line=l,
408 line=l,
409 lineid="l%s" % lineno,
409 lineid="l%s" % lineno,
410 linenumber="% 8s" % lineno)
410 linenumber="% 8s" % lineno)
411
411
412 r = self.repo
412 r = self.repo
413 c1 = r.changectx(node1)
413 c1 = r.changectx(node1)
414 c2 = r.changectx(node2)
414 c2 = r.changectx(node2)
415 date1 = util.datestr(c1.date())
415 date1 = util.datestr(c1.date())
416 date2 = util.datestr(c2.date())
416 date2 = util.datestr(c2.date())
417
417
418 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
418 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
419 if files:
419 if files:
420 modified, added, removed = map(lambda x: filterfiles(files, x),
420 modified, added, removed = map(lambda x: filterfiles(files, x),
421 (modified, added, removed))
421 (modified, added, removed))
422
422
423 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
423 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
424 for f in modified:
424 for f in modified:
425 to = c1.filectx(f).data()
425 to = c1.filectx(f).data()
426 tn = c2.filectx(f).data()
426 tn = c2.filectx(f).data()
427 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
427 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
428 opts=diffopts), f, tn)
428 opts=diffopts), f, tn)
429 for f in added:
429 for f in added:
430 to = None
430 to = None
431 tn = c2.filectx(f).data()
431 tn = c2.filectx(f).data()
432 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
432 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
433 opts=diffopts), f, tn)
433 opts=diffopts), f, tn)
434 for f in removed:
434 for f in removed:
435 to = c1.filectx(f).data()
435 to = c1.filectx(f).data()
436 tn = None
436 tn = None
437 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
437 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
438 opts=diffopts), f, tn)
438 opts=diffopts), f, tn)
439
439
440 def changelog(self, tmpl, ctx, shortlog=False):
440 def changelog(self, tmpl, ctx, shortlog=False):
441 def changelist(limit=0,**map):
441 def changelist(limit=0,**map):
442 cl = self.repo.changelog
442 cl = self.repo.changelog
443 l = [] # build a list in forward order for efficiency
443 l = [] # build a list in forward order for efficiency
444 for i in xrange(start, end):
444 for i in xrange(start, end):
445 ctx = self.repo.changectx(i)
445 ctx = self.repo.changectx(i)
446 n = ctx.node()
446 n = ctx.node()
447 showtags = self.showtag(tmpl, 'changelogtag', n)
447 showtags = self.showtag(tmpl, 'changelogtag', n)
448
448
449 l.insert(0, {"parity": parity.next(),
449 l.insert(0, {"parity": parity.next(),
450 "author": ctx.user(),
450 "author": ctx.user(),
451 "parent": self.siblings(ctx.parents(), i - 1),
451 "parent": self.siblings(ctx.parents(), i - 1),
452 "child": self.siblings(ctx.children(), i + 1),
452 "child": self.siblings(ctx.children(), i + 1),
453 "changelogtag": showtags,
453 "changelogtag": showtags,
454 "desc": ctx.description(),
454 "desc": ctx.description(),
455 "date": ctx.date(),
455 "date": ctx.date(),
456 "files": self.listfilediffs(tmpl, ctx.files(), n),
456 "files": self.listfilediffs(tmpl, ctx.files(), n),
457 "rev": i,
457 "rev": i,
458 "node": hex(n),
458 "node": hex(n),
459 "tags": self.nodetagsdict(n),
459 "tags": self.nodetagsdict(n),
460 "branches": self.nodebranchdict(ctx)})
460 "branches": self.nodebranchdict(ctx)})
461
461
462 if limit > 0:
462 if limit > 0:
463 l = l[:limit]
463 l = l[:limit]
464
464
465 for e in l:
465 for e in l:
466 yield e
466 yield e
467
467
468 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
468 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
469 cl = self.repo.changelog
469 cl = self.repo.changelog
470 count = cl.count()
470 count = cl.count()
471 pos = ctx.rev()
471 pos = ctx.rev()
472 start = max(0, pos - maxchanges + 1)
472 start = max(0, pos - maxchanges + 1)
473 end = min(count, start + maxchanges)
473 end = min(count, start + maxchanges)
474 pos = end - 1
474 pos = end - 1
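# illustrative example (hypothetical numbers): with maxchanges = 10 and a
# context revision of 25, the window above works out to start = 16,
# end = 26 and pos = 25, so changelist() renders revisions 16..25 with the
# newest entry first.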
475 parity = paritygen(self.stripecount, offset=start-end)
475 parity = paritygen(self.stripecount, offset=start-end)
476
476
477 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
477 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
478
478
479 return tmpl(shortlog and 'shortlog' or 'changelog',
479 return tmpl(shortlog and 'shortlog' or 'changelog',
480 changenav=changenav,
480 changenav=changenav,
481 node=hex(cl.tip()),
481 node=hex(cl.tip()),
482 rev=pos, changesets=count,
482 rev=pos, changesets=count,
483 entries=lambda **x: changelist(limit=0,**x),
483 entries=lambda **x: changelist(limit=0,**x),
484 latestentry=lambda **x: changelist(limit=1,**x),
484 latestentry=lambda **x: changelist(limit=1,**x),
485 archives=self.archivelist("tip"))
485 archives=self.archivelist("tip"))
486
486
487 def search(self, tmpl, query):
487 def search(self, tmpl, query):
488
488
489 def changelist(**map):
489 def changelist(**map):
490 cl = self.repo.changelog
490 cl = self.repo.changelog
491 count = 0
491 count = 0
492 qw = query.lower().split()
492 qw = query.lower().split()
493
493
494 def revgen():
494 def revgen():
495 for i in xrange(cl.count() - 1, 0, -100):
495 for i in xrange(cl.count() - 1, 0, -100):
496 l = []
496 l = []
497 for j in xrange(max(0, i - 100), i + 1):
497 for j in xrange(max(0, i - 100), i + 1):
498 ctx = self.repo.changectx(j)
498 ctx = self.repo.changectx(j)
499 l.append(ctx)
499 l.append(ctx)
500 l.reverse()
500 l.reverse()
501 for e in l:
501 for e in l:
502 yield e
502 yield e
503
503
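# revgen() above walks the changelog from the tip downwards in windows of
# roughly 100 revisions, reversing each window so that search hits are
# reported newest-first without loading the whole history at once.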
504 for ctx in revgen():
504 for ctx in revgen():
505 miss = 0
505 miss = 0
506 for q in qw:
506 for q in qw:
507 if not (q in ctx.user().lower() or
507 if not (q in ctx.user().lower() or
508 q in ctx.description().lower() or
508 q in ctx.description().lower() or
509 q in " ".join(ctx.files()).lower()):
509 q in " ".join(ctx.files()).lower()):
510 miss = 1
510 miss = 1
511 break
511 break
512 if miss:
512 if miss:
513 continue
513 continue
514
514
515 count += 1
515 count += 1
516 n = ctx.node()
516 n = ctx.node()
517 showtags = self.showtag(tmpl, 'changelogtag', n)
517 showtags = self.showtag(tmpl, 'changelogtag', n)
518
518
519 yield tmpl('searchentry',
519 yield tmpl('searchentry',
520 parity=parity.next(),
520 parity=parity.next(),
521 author=ctx.user(),
521 author=ctx.user(),
522 parent=self.siblings(ctx.parents()),
522 parent=self.siblings(ctx.parents()),
523 child=self.siblings(ctx.children()),
523 child=self.siblings(ctx.children()),
524 changelogtag=showtags,
524 changelogtag=showtags,
525 desc=ctx.description(),
525 desc=ctx.description(),
526 date=ctx.date(),
526 date=ctx.date(),
527 files=self.listfilediffs(tmpl, ctx.files(), n),
527 files=self.listfilediffs(tmpl, ctx.files(), n),
528 rev=ctx.rev(),
528 rev=ctx.rev(),
529 node=hex(n),
529 node=hex(n),
530 tags=self.nodetagsdict(n),
530 tags=self.nodetagsdict(n),
531 branches=self.nodebranchdict(ctx))
531 branches=self.nodebranchdict(ctx))
532
532
533 if count >= self.maxchanges:
533 if count >= self.maxchanges:
534 break
534 break
535
535
536 cl = self.repo.changelog
536 cl = self.repo.changelog
537 parity = paritygen(self.stripecount)
537 parity = paritygen(self.stripecount)
538
538
539 return tmpl('search',
539 return tmpl('search',
540 query=query,
540 query=query,
541 node=hex(cl.tip()),
541 node=hex(cl.tip()),
542 entries=changelist,
542 entries=changelist,
543 archives=self.archivelist("tip"))
543 archives=self.archivelist("tip"))
544
544
545 def changeset(self, tmpl, ctx):
545 def changeset(self, tmpl, ctx):
546 n = ctx.node()
546 n = ctx.node()
547 showtags = self.showtag(tmpl, 'changesettag', n)
547 showtags = self.showtag(tmpl, 'changesettag', n)
548 parents = ctx.parents()
548 parents = ctx.parents()
549 p1 = parents[0].node()
549 p1 = parents[0].node()
550
550
551 files = []
551 files = []
552 parity = paritygen(self.stripecount)
552 parity = paritygen(self.stripecount)
553 for f in ctx.files():
553 for f in ctx.files():
554 files.append(tmpl("filenodelink",
554 files.append(tmpl("filenodelink",
555 node=hex(n), file=f,
555 node=hex(n), file=f,
556 parity=parity.next()))
556 parity=parity.next()))
557
557
558 def diff(**map):
558 def diff(**map):
559 yield self.diff(tmpl, p1, n, None)
559 yield self.diff(tmpl, p1, n, None)
560
560
561 return tmpl('changeset',
561 return tmpl('changeset',
562 diff=diff,
562 diff=diff,
563 rev=ctx.rev(),
563 rev=ctx.rev(),
564 node=hex(n),
564 node=hex(n),
565 parent=self.siblings(parents),
565 parent=self.siblings(parents),
566 child=self.siblings(ctx.children()),
566 child=self.siblings(ctx.children()),
567 changesettag=showtags,
567 changesettag=showtags,
568 author=ctx.user(),
568 author=ctx.user(),
569 desc=ctx.description(),
569 desc=ctx.description(),
570 date=ctx.date(),
570 date=ctx.date(),
571 files=files,
571 files=files,
572 archives=self.archivelist(hex(n)),
572 archives=self.archivelist(hex(n)),
573 tags=self.nodetagsdict(n),
573 tags=self.nodetagsdict(n),
574 branches=self.nodebranchdict(ctx))
574 branches=self.nodebranchdict(ctx))
575
575
576 def filelog(self, tmpl, fctx):
576 def filelog(self, tmpl, fctx):
577 f = fctx.path()
577 f = fctx.path()
578 fl = fctx.filelog()
578 fl = fctx.filelog()
579 count = fl.count()
579 count = fl.count()
580 pagelen = self.maxshortchanges
580 pagelen = self.maxshortchanges
581 pos = fctx.filerev()
581 pos = fctx.filerev()
582 start = max(0, pos - pagelen + 1)
582 start = max(0, pos - pagelen + 1)
583 end = min(count, start + pagelen)
583 end = min(count, start + pagelen)
584 pos = end - 1
584 pos = end - 1
585 parity = paritygen(self.stripecount, offset=start-end)
585 parity = paritygen(self.stripecount, offset=start-end)
586
586
587 def entries(limit=0, **map):
587 def entries(limit=0, **map):
588 l = []
588 l = []
589
589
590 for i in xrange(start, end):
590 for i in xrange(start, end):
591 ctx = fctx.filectx(i)
591 ctx = fctx.filectx(i)
592 n = fl.node(i)
592 n = fl.node(i)
593
593
594 l.insert(0, {"parity": parity.next(),
594 l.insert(0, {"parity": parity.next(),
595 "filerev": i,
595 "filerev": i,
596 "file": f,
596 "file": f,
597 "node": hex(ctx.node()),
597 "node": hex(ctx.node()),
598 "author": ctx.user(),
598 "author": ctx.user(),
599 "date": ctx.date(),
599 "date": ctx.date(),
600 "rename": self.renamelink(fl, n),
600 "rename": self.renamelink(fl, n),
601 "parent": self.siblings(fctx.parents()),
601 "parent": self.siblings(fctx.parents()),
602 "child": self.siblings(fctx.children()),
602 "child": self.siblings(fctx.children()),
603 "desc": ctx.description()})
603 "desc": ctx.description()})
604
604
605 if limit > 0:
605 if limit > 0:
606 l = l[:limit]
606 l = l[:limit]
607
607
608 for e in l:
608 for e in l:
609 yield e
609 yield e
610
610
611 nodefunc = lambda x: fctx.filectx(fileid=x)
611 nodefunc = lambda x: fctx.filectx(fileid=x)
612 nav = revnavgen(pos, pagelen, count, nodefunc)
612 nav = revnavgen(pos, pagelen, count, nodefunc)
613 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
613 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
614 entries=lambda **x: entries(limit=0, **x),
614 entries=lambda **x: entries(limit=0, **x),
615 latestentry=lambda **x: entries(limit=1, **x))
615 latestentry=lambda **x: entries(limit=1, **x))
616
616
617 def filerevision(self, tmpl, fctx):
617 def filerevision(self, tmpl, fctx):
618 f = fctx.path()
618 f = fctx.path()
619 text = fctx.data()
619 text = fctx.data()
620 fl = fctx.filelog()
620 fl = fctx.filelog()
621 n = fctx.filenode()
621 n = fctx.filenode()
622 parity = paritygen(self.stripecount)
622 parity = paritygen(self.stripecount)
623
623
624 if util.binary(text):
624 if util.binary(text):
625 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
625 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
626 text = '(binary:%s)' % mt
626 text = '(binary:%s)' % mt
627
627
628 def lines():
628 def lines():
629 for lineno, t in enumerate(text.splitlines(1)):
629 for lineno, t in enumerate(text.splitlines(1)):
630 yield {"line": t,
630 yield {"line": t,
631 "lineid": "l%d" % (lineno + 1),
631 "lineid": "l%d" % (lineno + 1),
632 "linenumber": "% 6d" % (lineno + 1),
632 "linenumber": "% 6d" % (lineno + 1),
633 "parity": parity.next()}
633 "parity": parity.next()}
634
634
635 return tmpl("filerevision",
635 return tmpl("filerevision",
636 file=f,
636 file=f,
637 path=_up(f),
637 path=_up(f),
638 text=lines(),
638 text=lines(),
639 rev=fctx.rev(),
639 rev=fctx.rev(),
640 node=hex(fctx.node()),
640 node=hex(fctx.node()),
641 author=fctx.user(),
641 author=fctx.user(),
642 date=fctx.date(),
642 date=fctx.date(),
643 desc=fctx.description(),
643 desc=fctx.description(),
644 parent=self.siblings(fctx.parents()),
644 parent=self.siblings(fctx.parents()),
645 child=self.siblings(fctx.children()),
645 child=self.siblings(fctx.children()),
646 rename=self.renamelink(fl, n),
646 rename=self.renamelink(fl, n),
647 permissions=fctx.manifest().flags(f))
647 permissions=fctx.manifest().flags(f))
648
648
649 def fileannotate(self, tmpl, fctx):
649 def fileannotate(self, tmpl, fctx):
650 f = fctx.path()
650 f = fctx.path()
651 n = fctx.filenode()
651 n = fctx.filenode()
652 fl = fctx.filelog()
652 fl = fctx.filelog()
653 parity = paritygen(self.stripecount)
653 parity = paritygen(self.stripecount)
654
654
655 def annotate(**map):
655 def annotate(**map):
656 last = None
656 last = None
657 if util.binary(fctx.data()):
657 if util.binary(fctx.data()):
658 mt = (mimetypes.guess_type(fctx.path())[0]
658 mt = (mimetypes.guess_type(fctx.path())[0]
659 or 'application/octet-stream')
659 or 'application/octet-stream')
660 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
660 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
661 '(binary:%s)' % mt)])
661 '(binary:%s)' % mt)])
662 else:
662 else:
663 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
663 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
664 for lineno, ((f, targetline), l) in lines:
664 for lineno, ((f, targetline), l) in lines:
665 fnode = f.filenode()
665 fnode = f.filenode()
666 name = self.repo.ui.shortuser(f.user())
666 name = self.repo.ui.shortuser(f.user())
667
667
668 if last != fnode:
668 if last != fnode:
669 last = fnode
669 last = fnode
670
670
671 yield {"parity": parity.next(),
671 yield {"parity": parity.next(),
672 "node": hex(f.node()),
672 "node": hex(f.node()),
673 "rev": f.rev(),
673 "rev": f.rev(),
674 "author": name,
674 "author": name,
675 "file": f.path(),
675 "file": f.path(),
676 "targetline": targetline,
676 "targetline": targetline,
677 "line": l,
677 "line": l,
678 "lineid": "l%d" % (lineno + 1),
678 "lineid": "l%d" % (lineno + 1),
679 "linenumber": "% 6d" % (lineno + 1)}
679 "linenumber": "% 6d" % (lineno + 1)}
680
680
681 return tmpl("fileannotate",
681 return tmpl("fileannotate",
682 file=f,
682 file=f,
683 annotate=annotate,
683 annotate=annotate,
684 path=_up(f),
684 path=_up(f),
685 rev=fctx.rev(),
685 rev=fctx.rev(),
686 node=hex(fctx.node()),
686 node=hex(fctx.node()),
687 author=fctx.user(),
687 author=fctx.user(),
688 date=fctx.date(),
688 date=fctx.date(),
689 desc=fctx.description(),
689 desc=fctx.description(),
690 rename=self.renamelink(fl, n),
690 rename=self.renamelink(fl, n),
691 parent=self.siblings(fctx.parents()),
691 parent=self.siblings(fctx.parents()),
692 child=self.siblings(fctx.children()),
692 child=self.siblings(fctx.children()),
693 permissions=fctx.manifest().flags(f))
693 permissions=fctx.manifest().flags(f))
694
694
695 def manifest(self, tmpl, ctx, path):
695 def manifest(self, tmpl, ctx, path):
696 mf = ctx.manifest()
696 mf = ctx.manifest()
697 node = ctx.node()
697 node = ctx.node()
698
698
699 files = {}
699 files = {}
700 parity = paritygen(self.stripecount)
700 parity = paritygen(self.stripecount)
701
701
702 if path and path[-1] != "/":
702 if path and path[-1] != "/":
703 path += "/"
703 path += "/"
704 l = len(path)
704 l = len(path)
705 abspath = "/" + path
705 abspath = "/" + path
706
706
707 for f, n in mf.items():
707 for f, n in mf.items():
708 if f[:l] != path:
708 if f[:l] != path:
709 continue
709 continue
710 remain = f[l:]
710 remain = f[l:]
711 if "/" in remain:
711 if "/" in remain:
712 short = remain[:remain.index("/") + 1] # bleah
712 short = remain[:remain.index("/") + 1] # bleah
713 files[short] = (f, None)
713 files[short] = (f, None)
714 else:
714 else:
715 short = os.path.basename(remain)
715 short = os.path.basename(remain)
716 files[short] = (f, n)
716 files[short] = (f, n)
717
717
718 if not files:
718 if not files:
719 raise ErrorResponse(HTTP_NOT_FOUND, 'Path not found: ' + path)
719 raise ErrorResponse(HTTP_NOT_FOUND, 'Path not found: ' + path)
720
720
721 def filelist(**map):
721 def filelist(**map):
722 fl = files.keys()
722 fl = files.keys()
723 fl.sort()
723 fl.sort()
724 for f in fl:
724 for f in fl:
725 full, fnode = files[f]
725 full, fnode = files[f]
726 if not fnode:
726 if not fnode:
727 continue
727 continue
728
728
729 fctx = ctx.filectx(full)
729 fctx = ctx.filectx(full)
730 yield {"file": full,
730 yield {"file": full,
731 "parity": parity.next(),
731 "parity": parity.next(),
732 "basename": f,
732 "basename": f,
733 "date": fctx.changectx().date(),
733 "date": fctx.changectx().date(),
734 "size": fctx.size(),
734 "size": fctx.size(),
735 "permissions": mf.flags(full)}
735 "permissions": mf.flags(full)}
736
736
737 def dirlist(**map):
737 def dirlist(**map):
738 fl = files.keys()
738 fl = files.keys()
739 fl.sort()
739 fl.sort()
740 for f in fl:
740 for f in fl:
741 full, fnode = files[f]
741 full, fnode = files[f]
742 if fnode:
742 if fnode:
743 continue
743 continue
744
744
745 yield {"parity": parity.next(),
745 yield {"parity": parity.next(),
746 "path": "%s%s" % (abspath, f),
746 "path": "%s%s" % (abspath, f),
747 "basename": f[:-1]}
747 "basename": f[:-1]}
748
748
749 return tmpl("manifest",
749 return tmpl("manifest",
750 rev=ctx.rev(),
750 rev=ctx.rev(),
751 node=hex(node),
751 node=hex(node),
752 path=abspath,
752 path=abspath,
753 up=_up(abspath),
753 up=_up(abspath),
754 upparity=parity.next(),
754 upparity=parity.next(),
755 fentries=filelist,
755 fentries=filelist,
756 dentries=dirlist,
756 dentries=dirlist,
757 archives=self.archivelist(hex(node)),
757 archives=self.archivelist(hex(node)),
758 tags=self.nodetagsdict(node),
758 tags=self.nodetagsdict(node),
759 branches=self.nodebranchdict(ctx))
759 branches=self.nodebranchdict(ctx))
760
760
761 def tags(self, tmpl):
761 def tags(self, tmpl):
762 i = self.repo.tagslist()
762 i = self.repo.tagslist()
763 i.reverse()
763 i.reverse()
764 parity = paritygen(self.stripecount)
764 parity = paritygen(self.stripecount)
765
765
766 def entries(notip=False,limit=0, **map):
766 def entries(notip=False,limit=0, **map):
767 count = 0
767 count = 0
768 for k, n in i:
768 for k, n in i:
769 if notip and k == "tip":
769 if notip and k == "tip":
770 continue
770 continue
771 if limit > 0 and count >= limit:
771 if limit > 0 and count >= limit:
772 continue
772 continue
773 count = count + 1
773 count = count + 1
774 yield {"parity": parity.next(),
774 yield {"parity": parity.next(),
775 "tag": k,
775 "tag": k,
776 "date": self.repo.changectx(n).date(),
776 "date": self.repo.changectx(n).date(),
777 "node": hex(n)}
777 "node": hex(n)}
778
778
779 return tmpl("tags",
779 return tmpl("tags",
780 node=hex(self.repo.changelog.tip()),
780 node=hex(self.repo.changelog.tip()),
781 entries=lambda **x: entries(False,0, **x),
781 entries=lambda **x: entries(False,0, **x),
782 entriesnotip=lambda **x: entries(True,0, **x),
782 entriesnotip=lambda **x: entries(True,0, **x),
783 latestentry=lambda **x: entries(True,1, **x))
783 latestentry=lambda **x: entries(True,1, **x))
784
784
785 def summary(self, tmpl):
785 def summary(self, tmpl):
786 i = self.repo.tagslist()
786 i = self.repo.tagslist()
787 i.reverse()
787 i.reverse()
788
788
789 def tagentries(**map):
789 def tagentries(**map):
790 parity = paritygen(self.stripecount)
790 parity = paritygen(self.stripecount)
791 count = 0
791 count = 0
792 for k, n in i:
792 for k, n in i:
793 if k == "tip": # skip tip
793 if k == "tip": # skip tip
794 continue
794 continue
795
795
796 count += 1
796 count += 1
797 if count > 10: # limit to 10 tags
797 if count > 10: # limit to 10 tags
798 break
798 break
799
799
800 yield tmpl("tagentry",
800 yield tmpl("tagentry",
801 parity=parity.next(),
801 parity=parity.next(),
802 tag=k,
802 tag=k,
803 node=hex(n),
803 node=hex(n),
804 date=self.repo.changectx(n).date())
804 date=self.repo.changectx(n).date())
805
805
806
806
807 def branches(**map):
807 def branches(**map):
808 parity = paritygen(self.stripecount)
808 parity = paritygen(self.stripecount)
809
809
810 b = self.repo.branchtags()
810 b = self.repo.branchtags()
811 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
811 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
812 l.sort()
812 l.sort()
813
813
814 for r,n,t in l:
814 for r,n,t in l:
815 ctx = self.repo.changectx(n)
815 ctx = self.repo.changectx(n)
816
816
817 yield {'parity': parity.next(),
817 yield {'parity': parity.next(),
818 'branch': t,
818 'branch': t,
819 'node': hex(n),
819 'node': hex(n),
820 'date': ctx.date()}
820 'date': ctx.date()}
821
821
822 def changelist(**map):
822 def changelist(**map):
823 parity = paritygen(self.stripecount, offset=start-end)
823 parity = paritygen(self.stripecount, offset=start-end)
824 l = [] # build a list in forward order for efficiency
824 l = [] # build a list in forward order for efficiency
825 for i in xrange(start, end):
825 for i in xrange(start, end):
826 ctx = self.repo.changectx(i)
826 ctx = self.repo.changectx(i)
827 n = ctx.node()
827 n = ctx.node()
828 hn = hex(n)
828 hn = hex(n)
829
829
830 l.insert(0, tmpl(
830 l.insert(0, tmpl(
831 'shortlogentry',
831 'shortlogentry',
832 parity=parity.next(),
832 parity=parity.next(),
833 author=ctx.user(),
833 author=ctx.user(),
834 desc=ctx.description(),
834 desc=ctx.description(),
835 date=ctx.date(),
835 date=ctx.date(),
836 rev=i,
836 rev=i,
837 node=hn,
837 node=hn,
838 tags=self.nodetagsdict(n),
838 tags=self.nodetagsdict(n),
839 branches=self.nodebranchdict(ctx)))
839 branches=self.nodebranchdict(ctx)))
840
840
841 yield l
841 yield l
842
842
843 cl = self.repo.changelog
843 cl = self.repo.changelog
844 count = cl.count()
844 count = cl.count()
845 start = max(0, count - self.maxchanges)
845 start = max(0, count - self.maxchanges)
846 end = min(count, start + self.maxchanges)
846 end = min(count, start + self.maxchanges)
847
847
848 return tmpl("summary",
848 return tmpl("summary",
849 desc=self.config("web", "description", "unknown"),
849 desc=self.config("web", "description", "unknown"),
850 owner=get_contact(self.config) or "unknown",
850 owner=get_contact(self.config) or "unknown",
851 lastchange=cl.read(cl.tip())[2],
851 lastchange=cl.read(cl.tip())[2],
852 tags=tagentries,
852 tags=tagentries,
853 branches=branches,
853 branches=branches,
854 shortlog=changelist,
854 shortlog=changelist,
855 node=hex(cl.tip()),
855 node=hex(cl.tip()),
856 archives=self.archivelist("tip"))
856 archives=self.archivelist("tip"))
857
857
858 def filediff(self, tmpl, fctx):
858 def filediff(self, tmpl, fctx):
859 n = fctx.node()
859 n = fctx.node()
860 path = fctx.path()
860 path = fctx.path()
861 parents = fctx.parents()
861 parents = fctx.parents()
862 p1 = parents and parents[0].node() or nullid
862 p1 = parents and parents[0].node() or nullid
863
863
864 def diff(**map):
864 def diff(**map):
865 yield self.diff(tmpl, p1, n, [path])
865 yield self.diff(tmpl, p1, n, [path])
866
866
867 return tmpl("filediff",
867 return tmpl("filediff",
868 file=path,
868 file=path,
869 node=hex(n),
869 node=hex(n),
870 rev=fctx.rev(),
870 rev=fctx.rev(),
871 parent=self.siblings(parents),
871 parent=self.siblings(parents),
872 child=self.siblings(fctx.children()),
872 child=self.siblings(fctx.children()),
873 diff=diff)
873 diff=diff)
874
874
875 archive_specs = {
875 archive_specs = {
876 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
876 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
877 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
877 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
878 'zip': ('application/zip', 'zip', '.zip', None),
878 'zip': ('application/zip', 'zip', '.zip', None),
879 }
879 }
880
880
881 def archive(self, tmpl, req, key, type_):
881 def archive(self, tmpl, req, key, type_):
882 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
882 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
883 cnode = self.repo.lookup(key)
883 cnode = self.repo.lookup(key)
884 arch_version = key
884 arch_version = key
885 if cnode == key or key == 'tip':
885 if cnode == key or key == 'tip':
886 arch_version = short(cnode)
886 arch_version = short(cnode)
887 name = "%s-%s" % (reponame, arch_version)
887 name = "%s-%s" % (reponame, arch_version)
888 mimetype, artype, extension, encoding = self.archive_specs[type_]
888 mimetype, artype, extension, encoding = self.archive_specs[type_]
889 headers = [
889 headers = [
890 ('Content-Type', mimetype),
890 ('Content-Type', mimetype),
891 ('Content-Disposition', 'attachment; filename=%s%s' %
891 ('Content-Disposition', 'attachment; filename=%s%s' %
892 (name, extension))
892 (name, extension))
893 ]
893 ]
894 if encoding:
894 if encoding:
895 headers.append(('Content-Encoding', encoding))
895 headers.append(('Content-Encoding', encoding))
896 req.header(headers)
896 req.header(headers)
897 req.respond(HTTP_OK)
897 req.respond(HTTP_OK)
898 archival.archive(self.repo, req, cnode, artype, prefix=name)
898 archival.archive(self.repo, req, cnode, artype, prefix=name)
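# illustrative example (hypothetical request): type_ == 'gz' selects the
# ('application/x-tar', 'tgz', '.tar.gz', None) entry from archive_specs,
# so a repository named "hello" at short node 0123456789ab would be served
# as an attachment called hello-0123456789ab.tar.gz.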
899
899
900 # add tags to things
900 # add tags to things
901 # tags -> list of changesets corresponding to tags
901 # tags -> list of changesets corresponding to tags
902 # find tag, changeset, file
902 # find tag, changeset, file
903
903
904 def cleanpath(self, path):
904 def cleanpath(self, path):
905 path = path.lstrip('/')
905 path = path.lstrip('/')
906 return util.canonpath(self.repo.root, '', path)
906 return util.canonpath(self.repo.root, '', path)
907
907
908 def changectx(self, req):
908 def changectx(self, req):
909 if 'node' in req.form:
909 if 'node' in req.form:
910 changeid = req.form['node'][0]
910 changeid = req.form['node'][0]
911 elif 'manifest' in req.form:
911 elif 'manifest' in req.form:
912 changeid = req.form['manifest'][0]
912 changeid = req.form['manifest'][0]
913 else:
913 else:
914 changeid = self.repo.changelog.count() - 1
914 changeid = self.repo.changelog.count() - 1
915
915
916 try:
916 try:
917 ctx = self.repo.changectx(changeid)
917 ctx = self.repo.changectx(changeid)
918 except hg.RepoError:
918 except hg.RepoError:
919 man = self.repo.manifest
919 man = self.repo.manifest
920 mn = man.lookup(changeid)
920 mn = man.lookup(changeid)
921 ctx = self.repo.changectx(man.linkrev(mn))
921 ctx = self.repo.changectx(man.linkrev(mn))
922
922
923 return ctx
923 return ctx
924
924
925 def filectx(self, req):
925 def filectx(self, req):
926 path = self.cleanpath(req.form['file'][0])
926 path = self.cleanpath(req.form['file'][0])
927 if 'node' in req.form:
927 if 'node' in req.form:
928 changeid = req.form['node'][0]
928 changeid = req.form['node'][0]
929 else:
929 else:
930 changeid = req.form['filenode'][0]
930 changeid = req.form['filenode'][0]
931 try:
931 try:
932 ctx = self.repo.changectx(changeid)
932 ctx = self.repo.changectx(changeid)
933 fctx = ctx.filectx(path)
933 fctx = ctx.filectx(path)
934 except hg.RepoError:
934 except hg.RepoError:
935 fctx = self.repo.filectx(path, fileid=changeid)
935 fctx = self.repo.filectx(path, fileid=changeid)
936
936
937 return fctx
937 return fctx
938
938
939 def check_perm(self, req, op, default):
939 def check_perm(self, req, op, default):
940 '''check permission for operation based on user auth.
940 '''check permission for operation based on user auth.
941 return true if op allowed, else false.
941 return true if op allowed, else false.
942 default is policy to use if no config given.'''
942 default is policy to use if no config given.'''
943
943
944 user = req.env.get('REMOTE_USER')
944 user = req.env.get('REMOTE_USER')
945
945
946 deny = self.configlist('web', 'deny_' + op)
946 deny = self.configlist('web', 'deny_' + op)
947 if deny and (not user or deny == ['*'] or user in deny):
947 if deny and (not user or deny == ['*'] or user in deny):
948 return False
948 return False
949
949
950 allow = self.configlist('web', 'allow_' + op)
950 allow = self.configlist('web', 'allow_' + op)
951 return (allow and (allow == ['*'] or user in allow)) or default
951 return (allow and (allow == ['*'] or user in allow)) or default
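# illustrative hgrc for the check above (hypothetical names): deny_push is
# consulted first, then allow_push; a user in neither list falls back to
# the caller's default, which is False for push.
#
# [web]
# allow_push = alice, bob
# deny_push = mallory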
@@ -1,221 +1,221 b''
1 #
1 #
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import cStringIO, zlib, bz2, tempfile, errno, os, sys
8 import cStringIO, zlib, bz2, tempfile, errno, os, sys
9 from mercurial import util, streamclone
9 from mercurial import util, streamclone
10 from mercurial.i18n import gettext as _
10 from mercurial.i18n import gettext as _
11 from mercurial.node import *
11 from mercurial.node import bin, hex
12 from mercurial import changegroup as changegroupmod
12 from mercurial import changegroup as changegroupmod
13 from common import HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
13 from common import HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14
14
15 # __all__ is populated with the allowed commands. Be sure to add to it if
15 # __all__ is populated with the allowed commands. Be sure to add to it if
16 # you're adding a new command, or the new command won't work.
16 # you're adding a new command, or the new command won't work.
17
17
18 __all__ = [
18 __all__ = [
19 'lookup', 'heads', 'branches', 'between', 'changegroup',
19 'lookup', 'heads', 'branches', 'between', 'changegroup',
20 'changegroupsubset', 'capabilities', 'unbundle', 'stream_out',
20 'changegroupsubset', 'capabilities', 'unbundle', 'stream_out',
21 ]
21 ]
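# for instance, a '?cmd=heads' request only reaches the heads() function
# below because 'heads' appears in __all__; a name missing from the list
# is treated as an unknown command.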
22
22
23 HGTYPE = 'application/mercurial-0.1'
23 HGTYPE = 'application/mercurial-0.1'
24
24
25 def lookup(web, req):
25 def lookup(web, req):
26 try:
26 try:
27 r = hex(web.repo.lookup(req.form['key'][0]))
27 r = hex(web.repo.lookup(req.form['key'][0]))
28 success = 1
28 success = 1
29 except Exception,inst:
29 except Exception,inst:
30 r = str(inst)
30 r = str(inst)
31 success = 0
31 success = 0
32 resp = "%s %s\n" % (success, r)
32 resp = "%s %s\n" % (success, r)
33 req.respond(HTTP_OK, HGTYPE, length=len(resp))
33 req.respond(HTTP_OK, HGTYPE, length=len(resp))
34 req.write(resp)
34 req.write(resp)
35
35
36 def heads(web, req):
36 def heads(web, req):
37 resp = " ".join(map(hex, web.repo.heads())) + "\n"
37 resp = " ".join(map(hex, web.repo.heads())) + "\n"
38 req.respond(HTTP_OK, HGTYPE, length=len(resp))
38 req.respond(HTTP_OK, HGTYPE, length=len(resp))
39 req.write(resp)
39 req.write(resp)
40
40
41 def branches(web, req):
41 def branches(web, req):
42 nodes = []
42 nodes = []
43 if 'nodes' in req.form:
43 if 'nodes' in req.form:
44 nodes = map(bin, req.form['nodes'][0].split(" "))
44 nodes = map(bin, req.form['nodes'][0].split(" "))
45 resp = cStringIO.StringIO()
45 resp = cStringIO.StringIO()
46 for b in web.repo.branches(nodes):
46 for b in web.repo.branches(nodes):
47 resp.write(" ".join(map(hex, b)) + "\n")
47 resp.write(" ".join(map(hex, b)) + "\n")
48 resp = resp.getvalue()
48 resp = resp.getvalue()
49 req.respond(HTTP_OK, HGTYPE, length=len(resp))
49 req.respond(HTTP_OK, HGTYPE, length=len(resp))
50 req.write(resp)
50 req.write(resp)
51
51
52 def between(web, req):
52 def between(web, req):
53 if 'pairs' in req.form:
53 if 'pairs' in req.form:
54 pairs = [map(bin, p.split("-"))
54 pairs = [map(bin, p.split("-"))
55 for p in req.form['pairs'][0].split(" ")]
55 for p in req.form['pairs'][0].split(" ")]
56 resp = cStringIO.StringIO()
56 resp = cStringIO.StringIO()
57 for b in web.repo.between(pairs):
57 for b in web.repo.between(pairs):
58 resp.write(" ".join(map(hex, b)) + "\n")
58 resp.write(" ".join(map(hex, b)) + "\n")
59 resp = resp.getvalue()
59 resp = resp.getvalue()
60 req.respond(HTTP_OK, HGTYPE, length=len(resp))
60 req.respond(HTTP_OK, HGTYPE, length=len(resp))
61 req.write(resp)
61 req.write(resp)
62
62
63 def changegroup(web, req):
63 def changegroup(web, req):
64 req.respond(HTTP_OK, HGTYPE)
64 req.respond(HTTP_OK, HGTYPE)
65 nodes = []
65 nodes = []
66 if not web.allowpull:
66 if not web.allowpull:
67 return
67 return
68
68
69 if 'roots' in req.form:
69 if 'roots' in req.form:
70 nodes = map(bin, req.form['roots'][0].split(" "))
70 nodes = map(bin, req.form['roots'][0].split(" "))
71
71
72 z = zlib.compressobj()
72 z = zlib.compressobj()
73 f = web.repo.changegroup(nodes, 'serve')
73 f = web.repo.changegroup(nodes, 'serve')
74 while 1:
74 while 1:
75 chunk = f.read(4096)
75 chunk = f.read(4096)
76 if not chunk:
76 if not chunk:
77 break
77 break
78 req.write(z.compress(chunk))
78 req.write(z.compress(chunk))
79
79
80 req.write(z.flush())
80 req.write(z.flush())
81
81
82 def changegroupsubset(web, req):
82 def changegroupsubset(web, req):
83 req.respond(HTTP_OK, HGTYPE)
83 req.respond(HTTP_OK, HGTYPE)
84 bases = []
84 bases = []
85 heads = []
85 heads = []
86 if not web.allowpull:
86 if not web.allowpull:
87 return
87 return
88
88
89 if 'bases' in req.form:
89 if 'bases' in req.form:
90 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
90 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
91 if 'heads' in req.form:
91 if 'heads' in req.form:
92 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
92 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
93
93
94 z = zlib.compressobj()
94 z = zlib.compressobj()
95 f = web.repo.changegroupsubset(bases, heads, 'serve')
95 f = web.repo.changegroupsubset(bases, heads, 'serve')
96 while 1:
96 while 1:
97 chunk = f.read(4096)
97 chunk = f.read(4096)
98 if not chunk:
98 if not chunk:
99 break
99 break
100 req.write(z.compress(chunk))
100 req.write(z.compress(chunk))
101
101
102 req.write(z.flush())
102 req.write(z.flush())
103
103
104 def capabilities(web, req):
104 def capabilities(web, req):
105 resp = ' '.join(web.capabilities())
105 resp = ' '.join(web.capabilities())
106 req.respond(HTTP_OK, HGTYPE, length=len(resp))
106 req.respond(HTTP_OK, HGTYPE, length=len(resp))
107 req.write(resp)
107 req.write(resp)
108
108
109 def unbundle(web, req):
109 def unbundle(web, req):
110 def bail(response, headers={}):
110 def bail(response, headers={}):
111 length = int(req.env['CONTENT_LENGTH'])
111 length = int(req.env['CONTENT_LENGTH'])
112 for s in util.filechunkiter(req, limit=length):
112 for s in util.filechunkiter(req, limit=length):
113 # drain incoming bundle, else client will not see
113 # drain incoming bundle, else client will not see
114 # response when run outside cgi script
114 # response when run outside cgi script
115 pass
115 pass
116 req.header(headers.items())
116 req.header(headers.items())
117 req.respond(HTTP_OK, HGTYPE)
117 req.respond(HTTP_OK, HGTYPE)
118 req.write('0\n')
118 req.write('0\n')
119 req.write(response)
119 req.write(response)
120
120
121 # require ssl by default, auth info cannot be sniffed and
121 # require ssl by default, auth info cannot be sniffed and
122 # replayed
122 # replayed
123 ssl_req = web.configbool('web', 'push_ssl', True)
123 ssl_req = web.configbool('web', 'push_ssl', True)
124 if ssl_req:
124 if ssl_req:
125 if req.env.get('wsgi.url_scheme') != 'https':
125 if req.env.get('wsgi.url_scheme') != 'https':
126 bail('ssl required\n')
126 bail('ssl required\n')
127 return
127 return
128 proto = 'https'
128 proto = 'https'
129 else:
129 else:
130 proto = 'http'
130 proto = 'http'
131
131
132 # do not allow push unless explicitly allowed
132 # do not allow push unless explicitly allowed
133 if not web.check_perm(req, 'push', False):
133 if not web.check_perm(req, 'push', False):
134 bail('push not authorized\n',
134 bail('push not authorized\n',
135 headers={'status': '401 Unauthorized'})
135 headers={'status': '401 Unauthorized'})
136 return
136 return
137
137
138 their_heads = req.form['heads'][0].split(' ')
138 their_heads = req.form['heads'][0].split(' ')
139
139
140 def check_heads():
140 def check_heads():
141 heads = map(hex, web.repo.heads())
141 heads = map(hex, web.repo.heads())
142 return their_heads == [hex('force')] or their_heads == heads
142 return their_heads == [hex('force')] or their_heads == heads
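# a client pushing unconditionally sends the single value hex('force') as
# its head list; otherwise its notion of our heads must match exactly,
# which is what triggers the 'unsynced changes' rejection below.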
143
143
144 # fail early if possible
144 # fail early if possible
145 if not check_heads():
145 if not check_heads():
146 bail('unsynced changes\n')
146 bail('unsynced changes\n')
147 return
147 return
148
148
149 req.respond(HTTP_OK, HGTYPE)
149 req.respond(HTTP_OK, HGTYPE)
150
150
151 # do not lock repo until all changegroup data is
151 # do not lock repo until all changegroup data is
152 # streamed. save to temporary file.
152 # streamed. save to temporary file.
153
153
154 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
154 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
155 fp = os.fdopen(fd, 'wb+')
155 fp = os.fdopen(fd, 'wb+')
156 try:
156 try:
157 length = int(req.env['CONTENT_LENGTH'])
157 length = int(req.env['CONTENT_LENGTH'])
158 for s in util.filechunkiter(req, limit=length):
158 for s in util.filechunkiter(req, limit=length):
159 fp.write(s)
159 fp.write(s)
160
160
161 try:
161 try:
162 lock = web.repo.lock()
162 lock = web.repo.lock()
163 try:
163 try:
164 if not check_heads():
164 if not check_heads():
165 req.write('0\n')
165 req.write('0\n')
166 req.write('unsynced changes\n')
166 req.write('unsynced changes\n')
167 return
167 return
168
168
169 fp.seek(0)
169 fp.seek(0)
170 header = fp.read(6)
170 header = fp.read(6)
171 if header.startswith('HG') and not header.startswith('HG10'):
171 if header.startswith('HG') and not header.startswith('HG10'):
172 raise ValueError('unknown bundle version')
172 raise ValueError('unknown bundle version')
173 elif header not in changegroupmod.bundletypes:
173 elif header not in changegroupmod.bundletypes:
174 raise ValueError('unknown bundle compression type')
174 raise ValueError('unknown bundle compression type')
175 gen = changegroupmod.unbundle(header, fp)
175 gen = changegroupmod.unbundle(header, fp)
176
176
177 # send addchangegroup output to client
177 # send addchangegroup output to client
178
178
179 old_stdout = sys.stdout
179 old_stdout = sys.stdout
180 sys.stdout = cStringIO.StringIO()
180 sys.stdout = cStringIO.StringIO()
181
181
182 try:
182 try:
183 url = 'remote:%s:%s' % (proto,
183 url = 'remote:%s:%s' % (proto,
184 req.env.get('REMOTE_HOST', ''))
184 req.env.get('REMOTE_HOST', ''))
185 try:
185 try:
186 ret = web.repo.addchangegroup(gen, 'serve', url)
186 ret = web.repo.addchangegroup(gen, 'serve', url)
187 except util.Abort, inst:
187 except util.Abort, inst:
188 sys.stdout.write("abort: %s\n" % inst)
188 sys.stdout.write("abort: %s\n" % inst)
189 ret = 0
189 ret = 0
190 finally:
190 finally:
191 val = sys.stdout.getvalue()
191 val = sys.stdout.getvalue()
192 sys.stdout = old_stdout
192 sys.stdout = old_stdout
193 req.write('%d\n' % ret)
193 req.write('%d\n' % ret)
194 req.write(val)
194 req.write(val)
195 finally:
195 finally:
196 del lock
196 del lock
197 except ValueError, inst:
197 except ValueError, inst:
198 req.write('0\n')
198 req.write('0\n')
199 req.write(str(inst) + '\n')
199 req.write(str(inst) + '\n')
200 except (OSError, IOError), inst:
200 except (OSError, IOError), inst:
201 req.write('0\n')
201 req.write('0\n')
202 filename = getattr(inst, 'filename', '')
202 filename = getattr(inst, 'filename', '')
203 # Don't send our filesystem layout to the client
203 # Don't send our filesystem layout to the client
204 if filename.startswith(web.repo.root):
204 if filename.startswith(web.repo.root):
205 filename = filename[len(web.repo.root)+1:]
205 filename = filename[len(web.repo.root)+1:]
206 else:
206 else:
207 filename = ''
207 filename = ''
208 error = getattr(inst, 'strerror', 'Unknown error')
208 error = getattr(inst, 'strerror', 'Unknown error')
209 if inst.errno == errno.ENOENT:
209 if inst.errno == errno.ENOENT:
210 code = HTTP_NOT_FOUND
210 code = HTTP_NOT_FOUND
211 else:
211 else:
212 code = HTTP_SERVER_ERROR
212 code = HTTP_SERVER_ERROR
213 req.respond(code)
213 req.respond(code)
214 req.write('%s: %s\n' % (error, filename))
214 req.write('%s: %s\n' % (error, filename))
215 finally:
215 finally:
216 fp.close()
216 fp.close()
217 os.unlink(tempname)
217 os.unlink(tempname)
218
218
219 def stream_out(web, req):
219 def stream_out(web, req):
220 req.respond(HTTP_OK, HGTYPE)
220 req.respond(HTTP_OK, HGTYPE)
221 streamclone.stream_out(web.repo, req, untrusted=True)
221 streamclone.stream_out(web.repo, req, untrusted=True)
@@ -1,458 +1,458 b''
1 # httprepo.py - HTTP repository proxy classes for mercurial
1 # httprepo.py - HTTP repository proxy classes for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import bin, hex
10 from remoterepo import *
10 from remoterepo import remoterepository
11 from i18n import _
11 from i18n import _
12 import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
12 import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
13 import errno, keepalive, tempfile, socket, changegroup
13 import errno, keepalive, tempfile, socket, changegroup
14
14
15 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
15 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
16 def __init__(self, ui):
16 def __init__(self, ui):
17 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
17 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
18 self.ui = ui
18 self.ui = ui
19
19
20 def find_user_password(self, realm, authuri):
20 def find_user_password(self, realm, authuri):
21 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
21 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
22 self, realm, authuri)
22 self, realm, authuri)
23 user, passwd = authinfo
23 user, passwd = authinfo
24 if user and passwd:
24 if user and passwd:
25 return (user, passwd)
25 return (user, passwd)
26
26
27 if not self.ui.interactive:
27 if not self.ui.interactive:
28 raise util.Abort(_('http authorization required'))
28 raise util.Abort(_('http authorization required'))
29
29
30 self.ui.write(_("http authorization required\n"))
30 self.ui.write(_("http authorization required\n"))
31 self.ui.status(_("realm: %s\n") % realm)
31 self.ui.status(_("realm: %s\n") % realm)
32 if user:
32 if user:
33 self.ui.status(_("user: %s\n") % user)
33 self.ui.status(_("user: %s\n") % user)
34 else:
34 else:
35 user = self.ui.prompt(_("user:"), default=None)
35 user = self.ui.prompt(_("user:"), default=None)
36
36
37 if not passwd:
37 if not passwd:
38 passwd = self.ui.getpass()
38 passwd = self.ui.getpass()
39
39
40 self.add_password(realm, authuri, user, passwd)
40 self.add_password(realm, authuri, user, passwd)
41 return (user, passwd)
41 return (user, passwd)
42
42
43 def netlocsplit(netloc):
43 def netlocsplit(netloc):
44 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
44 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
45
45
46 a = netloc.find('@')
46 a = netloc.find('@')
47 if a == -1:
47 if a == -1:
48 user, passwd = None, None
48 user, passwd = None, None
49 else:
49 else:
50 userpass, netloc = netloc[:a], netloc[a+1:]
50 userpass, netloc = netloc[:a], netloc[a+1:]
51 c = userpass.find(':')
51 c = userpass.find(':')
52 if c == -1:
52 if c == -1:
53 user, passwd = urllib.unquote(userpass), None
53 user, passwd = urllib.unquote(userpass), None
54 else:
54 else:
55 user = urllib.unquote(userpass[:c])
55 user = urllib.unquote(userpass[:c])
56 passwd = urllib.unquote(userpass[c+1:])
56 passwd = urllib.unquote(userpass[c+1:])
57 c = netloc.find(':')
57 c = netloc.find(':')
58 if c == -1:
58 if c == -1:
59 host, port = netloc, None
59 host, port = netloc, None
60 else:
60 else:
61 host, port = netloc[:c], netloc[c+1:]
61 host, port = netloc[:c], netloc[c+1:]
62 return host, port, user, passwd
62 return host, port, user, passwd
63
63
64 def netlocunsplit(host, port, user=None, passwd=None):
64 def netlocunsplit(host, port, user=None, passwd=None):
65 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
65 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
66 if port:
66 if port:
67 hostport = host + ':' + port
67 hostport = host + ':' + port
68 else:
68 else:
69 hostport = host
69 hostport = host
70 if user:
70 if user:
71 if passwd:
71 if passwd:
72 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
72 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
73 else:
73 else:
74 userpass = urllib.quote(user)
74 userpass = urllib.quote(user)
75 return userpass + '@' + hostport
75 return userpass + '@' + hostport
76 return hostport
76 return hostport
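# illustrative example (hypothetical values):
#   netlocsplit('alice:secret@hg.example.com:8080')
#     -> ('hg.example.com', '8080', 'alice', 'secret')
#   netlocunsplit('hg.example.com', '8080', 'alice', 'secret')
#     -> 'alice:secret@hg.example.com:8080'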
77
77
78 # work around a bug in Python < 2.4.2
78 # work around a bug in Python < 2.4.2
79 # (it leaves a "\n" at the end of Proxy-authorization headers)
79 # (it leaves a "\n" at the end of Proxy-authorization headers)
80 class request(urllib2.Request):
80 class request(urllib2.Request):
81 def add_header(self, key, val):
81 def add_header(self, key, val):
82 if key.lower() == 'proxy-authorization':
82 if key.lower() == 'proxy-authorization':
83 val = val.strip()
83 val = val.strip()
84 return urllib2.Request.add_header(self, key, val)
84 return urllib2.Request.add_header(self, key, val)
85
85
86 class httpsendfile(file):
86 class httpsendfile(file):
87 def __len__(self):
87 def __len__(self):
88 return os.fstat(self.fileno()).st_size
88 return os.fstat(self.fileno()).st_size
89
89
90 def _gen_sendfile(connection):
90 def _gen_sendfile(connection):
91 def _sendfile(self, data):
91 def _sendfile(self, data):
92 # send a file
92 # send a file
93 if isinstance(data, httpsendfile):
93 if isinstance(data, httpsendfile):
94 # if auth required, some data sent twice, so rewind here
94 # if auth required, some data sent twice, so rewind here
95 data.seek(0)
95 data.seek(0)
96 for chunk in util.filechunkiter(data):
96 for chunk in util.filechunkiter(data):
97 connection.send(self, chunk)
97 connection.send(self, chunk)
98 else:
98 else:
99 connection.send(self, data)
99 connection.send(self, data)
100 return _sendfile
100 return _sendfile
101
101
102 class httpconnection(keepalive.HTTPConnection):
102 class httpconnection(keepalive.HTTPConnection):
103 # must be able to send big bundle as stream.
103 # must be able to send big bundle as stream.
104 send = _gen_sendfile(keepalive.HTTPConnection)
104 send = _gen_sendfile(keepalive.HTTPConnection)
105
105
106 class httphandler(keepalive.HTTPHandler):
106 class httphandler(keepalive.HTTPHandler):
107 def http_open(self, req):
107 def http_open(self, req):
108 return self.do_open(httpconnection, req)
108 return self.do_open(httpconnection, req)
109
109
110 def __del__(self):
110 def __del__(self):
111 self.close_all()
111 self.close_all()
112
112
113 has_https = hasattr(urllib2, 'HTTPSHandler')
113 has_https = hasattr(urllib2, 'HTTPSHandler')
114 if has_https:
114 if has_https:
115 class httpsconnection(httplib.HTTPSConnection):
115 class httpsconnection(httplib.HTTPSConnection):
116 response_class = keepalive.HTTPResponse
116 response_class = keepalive.HTTPResponse
117 # must be able to send big bundle as stream.
117 # must be able to send big bundle as stream.
118 send = _gen_sendfile(httplib.HTTPSConnection)
118 send = _gen_sendfile(httplib.HTTPSConnection)
119
119
120 class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
120 class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
121 def https_open(self, req):
121 def https_open(self, req):
122 return self.do_open(httpsconnection, req)
122 return self.do_open(httpsconnection, req)
123
123
124 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
124 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
125 # it doesn't know about the auth type requested. This can happen if
125 # it doesn't know about the auth type requested. This can happen if
126 # somebody is using BasicAuth and types a bad password.
126 # somebody is using BasicAuth and types a bad password.
127 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
127 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
128 def http_error_auth_reqed(self, auth_header, host, req, headers):
128 def http_error_auth_reqed(self, auth_header, host, req, headers):
129 try:
129 try:
130 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
130 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
131 self, auth_header, host, req, headers)
131 self, auth_header, host, req, headers)
132 except ValueError, inst:
132 except ValueError, inst:
133 arg = inst.args[0]
133 arg = inst.args[0]
134 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
134 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
135 return
135 return
136 raise
136 raise
137
137
138 def zgenerator(f):
138 def zgenerator(f):
139 zd = zlib.decompressobj()
139 zd = zlib.decompressobj()
140 try:
140 try:
141 for chunk in util.filechunkiter(f):
141 for chunk in util.filechunkiter(f):
142 yield zd.decompress(chunk)
142 yield zd.decompress(chunk)
143 except httplib.HTTPException, inst:
143 except httplib.HTTPException, inst:
144 raise IOError(None, _('connection ended unexpectedly'))
144 raise IOError(None, _('connection ended unexpectedly'))
145 yield zd.flush()
145 yield zd.flush()
146
146
147 _safe = ('abcdefghijklmnopqrstuvwxyz'
147 _safe = ('abcdefghijklmnopqrstuvwxyz'
148 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
148 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
149 '0123456789' '_.-/')
149 '0123456789' '_.-/')
150 _safeset = None
150 _safeset = None
151 _hex = None
151 _hex = None
152 def quotepath(path):
152 def quotepath(path):
153 '''quote the path part of a URL
153 '''quote the path part of a URL
154
154
155 This is similar to urllib.quote, but it also tries to avoid
155 This is similar to urllib.quote, but it also tries to avoid
156 quoting things twice (inspired by wget):
156 quoting things twice (inspired by wget):
157
157
158 >>> quotepath('abc def')
158 >>> quotepath('abc def')
159 'abc%20def'
159 'abc%20def'
160 >>> quotepath('abc%20def')
160 >>> quotepath('abc%20def')
161 'abc%20def'
161 'abc%20def'
162 >>> quotepath('abc%20 def')
162 >>> quotepath('abc%20 def')
163 'abc%20%20def'
163 'abc%20%20def'
164 >>> quotepath('abc def%20')
164 >>> quotepath('abc def%20')
165 'abc%20def%20'
165 'abc%20def%20'
166 >>> quotepath('abc def%2')
166 >>> quotepath('abc def%2')
167 'abc%20def%252'
167 'abc%20def%252'
168 >>> quotepath('abc def%')
168 >>> quotepath('abc def%')
169 'abc%20def%25'
169 'abc%20def%25'
170 '''
170 '''
171 global _safeset, _hex
171 global _safeset, _hex
172 if _safeset is None:
172 if _safeset is None:
173 _safeset = util.set(_safe)
173 _safeset = util.set(_safe)
174 _hex = util.set('abcdefABCDEF0123456789')
174 _hex = util.set('abcdefABCDEF0123456789')
175 l = list(path)
175 l = list(path)
176 for i in xrange(len(l)):
176 for i in xrange(len(l)):
177 c = l[i]
177 c = l[i]
178 if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
178 if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
179 pass
179 pass
180 elif c not in _safeset:
180 elif c not in _safeset:
181 l[i] = '%%%02X' % ord(c)
181 l[i] = '%%%02X' % ord(c)
182 return ''.join(l)
182 return ''.join(l)
183
183
184 class httprepository(remoterepository):
184 class httprepository(remoterepository):
185 def __init__(self, ui, path):
185 def __init__(self, ui, path):
186 self.path = path
186 self.path = path
187 self.caps = None
187 self.caps = None
188 self.handler = None
188 self.handler = None
189 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
189 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
190 if query or frag:
190 if query or frag:
191 raise util.Abort(_('unsupported URL component: "%s"') %
191 raise util.Abort(_('unsupported URL component: "%s"') %
192 (query or frag))
192 (query or frag))
193 if not urlpath:
193 if not urlpath:
194 urlpath = '/'
194 urlpath = '/'
195 urlpath = quotepath(urlpath)
195 urlpath = quotepath(urlpath)
196 host, port, user, passwd = netlocsplit(netloc)
196 host, port, user, passwd = netlocsplit(netloc)
197
197
198 # urllib cannot handle URLs with embedded user or passwd
198 # urllib cannot handle URLs with embedded user or passwd
199 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
199 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
200 urlpath, '', ''))
200 urlpath, '', ''))
201 self.ui = ui
201 self.ui = ui
202 self.ui.debug(_('using %s\n') % self._url)
202 self.ui.debug(_('using %s\n') % self._url)
203
203
204 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
204 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
205 # XXX proxyauthinfo = None
205 # XXX proxyauthinfo = None
206 handlers = [httphandler()]
206 handlers = [httphandler()]
207 if has_https:
207 if has_https:
208 handlers.append(httpshandler())
208 handlers.append(httpshandler())
209
209
210 if proxyurl:
210 if proxyurl:
211 # proxy can be proper url or host[:port]
211 # proxy can be proper url or host[:port]
212 if not (proxyurl.startswith('http:') or
212 if not (proxyurl.startswith('http:') or
213 proxyurl.startswith('https:')):
213 proxyurl.startswith('https:')):
214 proxyurl = 'http://' + proxyurl + '/'
214 proxyurl = 'http://' + proxyurl + '/'
215 snpqf = urlparse.urlsplit(proxyurl)
215 snpqf = urlparse.urlsplit(proxyurl)
216 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
216 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
217 hpup = netlocsplit(proxynetloc)
217 hpup = netlocsplit(proxynetloc)
218
218
219 proxyhost, proxyport, proxyuser, proxypasswd = hpup
219 proxyhost, proxyport, proxyuser, proxypasswd = hpup
220 if not proxyuser:
220 if not proxyuser:
221 proxyuser = ui.config("http_proxy", "user")
221 proxyuser = ui.config("http_proxy", "user")
222 proxypasswd = ui.config("http_proxy", "passwd")
222 proxypasswd = ui.config("http_proxy", "passwd")
223
223
224 # see if we should use a proxy for this url
224 # see if we should use a proxy for this url
225 no_list = [ "localhost", "127.0.0.1" ]
225 no_list = [ "localhost", "127.0.0.1" ]
226 no_list.extend([p.lower() for
226 no_list.extend([p.lower() for
227 p in ui.configlist("http_proxy", "no")])
227 p in ui.configlist("http_proxy", "no")])
228 no_list.extend([p.strip().lower() for
228 no_list.extend([p.strip().lower() for
229 p in os.getenv("no_proxy", '').split(',')
229 p in os.getenv("no_proxy", '').split(',')
230 if p.strip()])
230 if p.strip()])
231 # "http_proxy.always" config is for running tests on localhost
231 # "http_proxy.always" config is for running tests on localhost
232 if (not ui.configbool("http_proxy", "always") and
232 if (not ui.configbool("http_proxy", "always") and
233 host.lower() in no_list):
233 host.lower() in no_list):
234 # avoid auto-detection of proxy settings by appending
234 # avoid auto-detection of proxy settings by appending
235 # a ProxyHandler with no proxies defined.
235 # a ProxyHandler with no proxies defined.
236 handlers.append(urllib2.ProxyHandler({}))
236 handlers.append(urllib2.ProxyHandler({}))
237 ui.debug(_('disabling proxy for %s\n') % host)
237 ui.debug(_('disabling proxy for %s\n') % host)
238 else:
238 else:
239 proxyurl = urlparse.urlunsplit((
239 proxyurl = urlparse.urlunsplit((
240 proxyscheme, netlocunsplit(proxyhost, proxyport,
240 proxyscheme, netlocunsplit(proxyhost, proxyport,
241 proxyuser, proxypasswd or ''),
241 proxyuser, proxypasswd or ''),
242 proxypath, proxyquery, proxyfrag))
242 proxypath, proxyquery, proxyfrag))
243 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
243 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
244 ui.debug(_('proxying through http://%s:%s\n') %
244 ui.debug(_('proxying through http://%s:%s\n') %
245 (proxyhost, proxyport))
245 (proxyhost, proxyport))
246
246
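# illustrative hgrc for the proxy handling above (hypothetical hosts):
#
# [http_proxy]
# host = proxy.example.com:3128
# user = proxyuser
# passwd = proxypass
# no = localhost, hg.internal.example.com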
247 # urllib2 takes proxy values from the environment and those
247 # urllib2 takes proxy values from the environment and those
248 # will take precedence if found, so drop them
248 # will take precedence if found, so drop them
249 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
249 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
250 try:
250 try:
251 if env in os.environ:
251 if env in os.environ:
252 del os.environ[env]
252 del os.environ[env]
253 except OSError:
253 except OSError:
254 pass
254 pass
255
255
256 passmgr = passwordmgr(ui)
256 passmgr = passwordmgr(ui)
257 if user:
257 if user:
258 ui.debug(_('http auth: user %s, password %s\n') %
258 ui.debug(_('http auth: user %s, password %s\n') %
259 (user, passwd and '*' * len(passwd) or 'not set'))
259 (user, passwd and '*' * len(passwd) or 'not set'))
260 netloc = host
260 netloc = host
261 if port:
261 if port:
262 netloc += ':' + port
262 netloc += ':' + port
263 # Python < 2.4.3 uses only the netloc to search for a password
263 # Python < 2.4.3 uses only the netloc to search for a password
264 passmgr.add_password(None, (self._url, netloc), user, passwd or '')
264 passmgr.add_password(None, (self._url, netloc), user, passwd or '')
265
265
266 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
266 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
267 httpdigestauthhandler(passmgr)))
267 httpdigestauthhandler(passmgr)))
268 opener = urllib2.build_opener(*handlers)
268 opener = urllib2.build_opener(*handlers)
269
269
270 # 1.0 here is the _protocol_ version
270 # 1.0 here is the _protocol_ version
271 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
271 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
272 urllib2.install_opener(opener)
272 urllib2.install_opener(opener)
273
273
274 def url(self):
274 def url(self):
275 return self.path
275 return self.path
276
276
277 # look up capabilities only when needed
277 # look up capabilities only when needed
278
278
279 def get_caps(self):
279 def get_caps(self):
280 if self.caps is None:
280 if self.caps is None:
281 try:
281 try:
282 self.caps = util.set(self.do_read('capabilities').split())
282 self.caps = util.set(self.do_read('capabilities').split())
283 except repo.RepoError:
283 except repo.RepoError:
284 self.caps = util.set()
284 self.caps = util.set()
285 self.ui.debug(_('capabilities: %s\n') %
285 self.ui.debug(_('capabilities: %s\n') %
286 (' '.join(self.caps or ['none'])))
286 (' '.join(self.caps or ['none'])))
287 return self.caps
287 return self.caps
288
288
289 capabilities = property(get_caps)
289 capabilities = property(get_caps)
290
290
    def lock(self):
        raise util.Abort(_('operation not supported over http'))

    def do_cmd(self, cmd, **args):
        data = args.pop('data', None)
        headers = args.pop('headers', {})
        self.ui.debug(_("sending %s command\n") % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (self._url, qs)
        try:
            if data:
                self.ui.debug(_("sending %s bytes\n") % len(data))
            resp = urllib2.urlopen(request(cu, data, headers))
        except urllib2.HTTPError, inst:
            if inst.code == 401:
                raise util.Abort(_('authorization failed'))
            raise
        except httplib.HTTPException, inst:
            self.ui.debug(_('http error while sending %s command\n') % cmd)
            self.ui.print_exc()
            raise IOError(None, inst)
        except IndexError:
            # this only happens with Python 2.3, later versions raise URLError
            raise util.Abort(_('http error, possibly caused by proxy setting'))
        # record the url we got redirected to
        resp_url = resp.geturl()
        if resp_url.endswith(qs):
            resp_url = resp_url[:-len(qs)]
        if self._url != resp_url:
            self.ui.status(_('real URL is %s\n') % resp_url)
            self._url = resp_url
        try:
            proto = resp.getheader('content-type')
        except AttributeError:
            proto = resp.headers['content-type']

        # accept old "text/plain" and "application/hg-changegroup" for now
        if not (proto.startswith('application/mercurial-') or
                proto.startswith('text/plain') or
                proto.startswith('application/hg-changegroup')):
            self.ui.debug(_("Requested URL: '%s'\n") % cu)
            raise repo.RepoError(_("'%s' does not appear to be an hg repository")
                                 % self._url)

        if proto.startswith('application/mercurial-'):
            try:
                version = proto.split('-', 1)[1]
                version_info = tuple([int(n) for n in version.split('.')])
            except ValueError:
                raise repo.RepoError(_("'%s' sent a broken Content-Type "
                                       "header (%s)") % (self._url, proto))
            if version_info > (0, 1):
                raise repo.RepoError(_("'%s' uses newer protocol %s") %
                                     (self._url, version))

        return resp

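Besides issuing the request, do_cmd is where the client decides whether the far end is really a Mercurial server: it follows redirects (remembering the real URL), accepts only a small set of content types, and refuses protocol versions newer than it understands. The version handling can be illustrated with a small hypothetical helper:

def check_proto(proto):
    """Return (major, minor) for an 'application/mercurial-X.Y' content type."""
    if not proto.startswith('application/mercurial-'):
        raise ValueError('not a mercurial response: %r' % proto)
    version = proto.split('-', 1)[1]                 # e.g. '0.1'
    version_info = tuple([int(n) for n in version.split('.')])
    if version_info > (0, 1):
        raise ValueError('server speaks newer protocol %s' % version)
    return version_info

# check_proto('application/mercurial-0.1')  ->  (0, 1)
# check_proto('application/mercurial-0.2')  ->  ValueError: newer protocol
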
    def do_read(self, cmd, **args):
        fp = self.do_cmd(cmd, **args)
        try:
            return fp.read()
        finally:
            # if using keepalive, allow connection to be reused
            fp.close()

    def lookup(self, key):
        self.requirecap('lookup', _('look up remote revision'))
        d = self.do_cmd("lookup", key = key).read()
        success, data = d[:-1].split(' ', 1)
        if int(success):
            return bin(data)
        raise repo.RepoError(data)

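The lookup reply is a single newline-terminated line of the form '<success> <payload>': on success the payload is the 40-digit hex node, otherwise it is the server's error message. A hedged worked example with a made-up node (bin in this module behaves like binascii.unhexlify):

from binascii import unhexlify

reply = '1 0123456789abcdef0123456789abcdef01234567\n'   # hypothetical reply
success, data = reply[:-1].split(' ', 1)
if int(success):
    node = unhexlify(data)        # 20-byte binary node id
else:
    raise Exception(data)         # the payload is an error message instead
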
    def heads(self):
        d = self.do_read("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.do_read("branches", nodes=n)
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_read("between", pairs=n)
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            raise util.UnexpectedOutput(_("unexpected response:"), d)

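heads, branches and between all parse small plain-text replies into binary node ids with bin(); the bare except turns any malformed reply into UnexpectedOutput. A hedged sketch of the reply shapes the three parsers expect, using shortened fake hashes for readability (real ones are 40 hex digits, and the real code also converts each with bin()):

heads_reply = 'aa11 bb22\n'                # one line of space-separated heads
branches_reply = 'aa11 bb22 cc33 dd44\n'   # one space-separated group of nodes per line
between_reply = 'aa11 bb22\n\ncc33\n'      # one (possibly empty) line per requested pair

heads = heads_reply[:-1].split(' ')
branches = [tuple(b.split(' ')) for b in branches_reply.splitlines()]
between = [l and l.split(' ') or [] for l in between_reply.splitlines()]
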
    def changegroup(self, nodes, kind):
        n = " ".join(map(hex, nodes))
        f = self.do_cmd("changegroup", roots=n)
        return util.chunkbuffer(zgenerator(f))

    def changegroupsubset(self, bases, heads, source):
        self.requirecap('changegroupsubset', _('look up remote changes'))
        baselst = " ".join([hex(n) for n in bases])
        headlst = " ".join([hex(n) for n in heads])
        f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
        return util.chunkbuffer(zgenerator(f))

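Both changegroup methods hand the HTTP response to util.chunkbuffer(zgenerator(f)), so the changegroup is decompressed and consumed as a stream rather than read into memory. zgenerator is defined earlier in this module (not shown here); as an assumption about its behaviour, the idea is roughly:

import zlib

def zgenerator_sketch(f, chunksize=32768):
    # yield zlib-decompressed pieces of a file-like HTTP response
    zd = zlib.decompressobj()
    for chunk in iter(lambda: f.read(chunksize), ''):
        yield zd.decompress(chunk)
    yield zd.flush()
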
    def unbundle(self, cg, heads, source):
        # have to stream bundle to a temp file because we do not have
        # http 1.1 chunked transfer.

        type = ""
        types = self.capable('unbundle')
        # servers older than d1b16a746db6 will send 'unbundle' as a
        # boolean capability
        try:
            types = types.split(',')
        except AttributeError:
            types = [""]
        if types:
            for x in types:
                if x in changegroup.bundletypes:
                    type = x
                    break

        tempname = changegroup.writebundle(cg, None, type)
        fp = httpsendfile(tempname, "rb")
        try:
            try:
                rfp = self.do_cmd(
                    'unbundle', data=fp,
                    headers={'Content-Type': 'application/octet-stream'},
                    heads=' '.join(map(hex, heads)))
                try:
                    ret = int(rfp.readline())
                    self.ui.write(rfp.read())
                    return ret
                finally:
                    rfp.close()
            except socket.error, err:
                if err[0] in (errno.ECONNRESET, errno.EPIPE):
                    raise util.Abort(_('push failed: %s') % err[1])
                raise util.Abort(err[1])
        finally:
            fp.close()
            os.unlink(tempname)

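As the opening comment says, the client cannot stream the bundle with HTTP/1.1 chunked transfer, so it first writes the whole bundle to a temporary file; that gives urllib2 a seekable body with a known Content-Length (httpsendfile, defined elsewhere in this module, appears to be a file object that also reports its own length). A hedged sketch of the spool-then-send idea with hypothetical names:

import os, shutil, tempfile

def spool_to_tempfile(stream):
    # buffer a stream of unknown length to disk so its size, and therefore
    # the Content-Length header, is known before the POST starts
    fd, tempname = tempfile.mkstemp(prefix='hg-bundle-')
    out = os.fdopen(fd, 'wb')
    try:
        shutil.copyfileobj(stream, out)
    finally:
        out.close()
    return tempname, os.path.getsize(tempname)
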
    def stream_out(self):
        return self.do_cmd('stream_out')

class httpsrepository(httprepository):
    def __init__(self, ui, path):
        if not has_https:
            raise util.Abort(_('Python support for SSL and HTTPS '
                               'is not installed'))
        httprepository.__init__(self, ui, path)

def instance(ui, path, create):
    if create:
        raise util.Abort(_('cannot create new http repository'))
    if path.startswith('https:'):
        return httpsrepository(ui, path)
    return httprepository(ui, path)
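instance() is the entry point hg core uses to open http:// and https:// paths; it refuses repository creation and picks the SSL-capable class by URL scheme. A hedged usage sketch (the ui object and URL are placeholders):

remote = instance(ui, 'https://hg.example.com/repo', create=False)
caps = remote.capabilities        # first access triggers the 'capabilities' call
tip = remote.lookup('tip')        # 20-byte node id; requires the 'lookup' capability
heads = remote.heads()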