##// END OF EJS Templates
merge with stable
Matt Mackall -
r23244:18cc87e4 merge default
parent child Browse files
Show More
@@ -1,385 +1,385 b''
1 1 # git.py - git support for the convert extension
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import os
9 9 import subprocess
10 10 from mercurial import util, config
11 11 from mercurial.node import hex, nullid
12 12 from mercurial.i18n import _
13 13
14 14 from common import NoRepo, commit, converter_source, checktool
15 15
class submodule(object):
    """A git submodule reference: repository path, pinned commit and URL."""

    def __init__(self, path, node, url):
        self.path = path
        self.node = node
        self.url = url

    def hgsub(self):
        """Return this submodule's .hgsub line."""
        return self.path + " = [git]" + self.url

    def hgsubstate(self):
        """Return this submodule's .hgsubstate line."""
        return " ".join((self.node, self.path))
class convert_git(converter_source):
    # Windows does not support GIT_DIR= construct while other systems
    # cannot remove environment variable. Just assume none have
    # both issues.
    if util.safehasattr(os, 'unsetenv'):
        def gitopen(self, s, err=None):
            # Run git command 's' with GIT_DIR set in the environment and
            # return a file object reading its stdout.
            # err=subprocess.PIPE: stderr is captured on a separate pipe
            # and discarded; err=subprocess.STDOUT: stderr is merged into
            # stdout; otherwise stderr is inherited.
            prevgitdir = os.environ.get('GIT_DIR')
            os.environ['GIT_DIR'] = self.path
            try:
                if err == subprocess.PIPE:
                    (stdin, stdout, stderr) = util.popen3(s)
                    return stdout
                elif err == subprocess.STDOUT:
                    return self.popen_with_stderr(s)
                else:
                    return util.popen(s, 'rb')
            finally:
                # restore the caller's GIT_DIR (or remove ours)
                if prevgitdir is None:
                    del os.environ['GIT_DIR']
                else:
                    os.environ['GIT_DIR'] = prevgitdir

        def gitpipe(self, s):
            # Like gitopen, but returns the whole (stdin, stdout, stderr)
            # triple for bidirectional communication with the child.
            prevgitdir = os.environ.get('GIT_DIR')
            os.environ['GIT_DIR'] = self.path
            try:
                return util.popen3(s)
            finally:
                if prevgitdir is None:
                    del os.environ['GIT_DIR']
                else:
                    os.environ['GIT_DIR'] = prevgitdir

    else:
        def gitopen(self, s, err=None):
            # No unsetenv: pass GIT_DIR on the shell command line instead.
            if err == subprocess.PIPE:
                (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
                return so
            elif err == subprocess.STDOUT:
                # NOTE(review): unlike the other branches, GIT_DIR is not
                # prepended to the command here — verify this is intended
                return self.popen_with_stderr(s)
            else:
                return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')

        def gitpipe(self, s):
            return util.popen3('GIT_DIR=%s %s' % (self.path, s))
    def popen_with_stderr(self, s):
        # Spawn 's' through the shell with stderr redirected into stdout
        # and return the combined-output pipe.  env=None inherits the
        # parent environment (including any GIT_DIR set by gitopen).
        p = subprocess.Popen(s, shell=True, bufsize=-1,
                             close_fds=util.closefds,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             universal_newlines=False,
                             env=None)
        return p.stdout
83 83
84 84 def gitread(self, s):
85 85 fh = self.gitopen(s)
86 86 data = fh.read()
87 87 return data, fh.close()
88 88
89 89 def __init__(self, ui, path, rev=None):
90 90 super(convert_git, self).__init__(ui, path, rev=rev)
91 91
92 92 if os.path.isdir(path + "/.git"):
93 93 path += "/.git"
94 94 if not os.path.exists(path + "/objects"):
95 95 raise NoRepo(_("%s does not look like a Git repository") % path)
96 96
97 97 # The default value (50) is based on the default for 'git diff'.
98 98 similarity = ui.configint('convert', 'git.similarity', default=50)
99 99 if similarity < 0 or similarity > 100:
100 100 raise util.Abort(_('similarity must be between 0 and 100'))
101 101 if similarity > 0:
102 self.simopt = '--find-copies=%d%%' % similarity
102 self.simopt = '-C%d%%' % similarity
103 103 findcopiesharder = ui.configbool('convert', 'git.findcopiesharder',
104 104 False)
105 105 if findcopiesharder:
106 106 self.simopt += ' --find-copies-harder'
107 107 else:
108 108 self.simopt = ''
109 109
110 110 checktool('git', 'git')
111 111
112 112 self.path = path
113 113 self.submodules = []
114 114
115 115 self.catfilepipe = self.gitpipe('git cat-file --batch')
116 116
117 117 def after(self):
118 118 for f in self.catfilepipe:
119 119 f.close()
120 120
121 121 def getheads(self):
122 122 if not self.rev:
123 123 heads, ret = self.gitread('git rev-parse --branches --remotes')
124 124 heads = heads.splitlines()
125 125 else:
126 126 heads, ret = self.gitread("git rev-parse --verify %s" % self.rev)
127 127 heads = [heads[:-1]]
128 128 if ret:
129 129 raise util.Abort(_('cannot retrieve git heads'))
130 130 return heads
131 131
    def catfile(self, rev, type):
        # Fetch object 'rev' of the given type ("blob", "commit", ...)
        # through the persistent 'git cat-file --batch' pipe.
        if rev == hex(nullid):
            # null revision has no content; signal like a failed read
            raise IOError
        self.catfilepipe[0].write(rev+'\n')
        self.catfilepipe[0].flush()
        # batch response header: "<sha> <type> <size>"
        info = self.catfilepipe[1].readline().split()
        if info[1] != type:
            raise util.Abort(_('cannot read %r object at %s') % (type, rev))
        size = int(info[2])
        data = self.catfilepipe[1].read(size)
        if len(data) < size:
            raise util.Abort(_('cannot read %r object at %s: unexpected size')
                             % (type, rev))
        # read the trailing newline
        self.catfilepipe[1].read(1)
        return data
148 148
149 149 def getfile(self, name, rev):
150 150 if rev == hex(nullid):
151 151 return None, None
152 152 if name == '.hgsub':
153 153 data = '\n'.join([m.hgsub() for m in self.submoditer()])
154 154 mode = ''
155 155 elif name == '.hgsubstate':
156 156 data = '\n'.join([m.hgsubstate() for m in self.submoditer()])
157 157 mode = ''
158 158 else:
159 159 data = self.catfile(rev, "blob")
160 160 mode = self.modecache[(name, rev)]
161 161 return data, mode
162 162
163 163 def submoditer(self):
164 164 null = hex(nullid)
165 165 for m in sorted(self.submodules, key=lambda p: p.path):
166 166 if m.node != null:
167 167 yield m
168 168
    def parsegitmodules(self, content):
        """Parse the formatted .gitmodules file, example file format:
        [submodule "sub"]\n
        \tpath = sub\n
        \turl = git://giturl\n
        """
        self.submodules = []
        c = config.config()
        # Each item in .gitmodules starts with \t that can't be parsed
        c.parse('.gitmodules', content.replace('\t',''))
        for sec in c.sections():
            s = c[sec]
            if 'url' in s and 'path' in s:
                # node is filled in later by retrievegitmodules()
                self.submodules.append(submodule(s['path'], '', s['url']))
183 183
184 184 def retrievegitmodules(self, version):
185 185 modules, ret = self.gitread("git show %s:%s" % (version, '.gitmodules'))
186 186 if ret:
187 187 raise util.Abort(_('cannot read submodules config file in %s') %
188 188 version)
189 189 self.parsegitmodules(modules)
190 190 for m in self.submodules:
191 191 node, ret = self.gitread("git rev-parse %s:%s" % (version, m.path))
192 192 if ret:
193 193 continue
194 194 m.node = node.strip()
195 195
    def getchanges(self, version, full):
        # Return ([(file, rev)], {copydest: copysource}) for 'version',
        # parsed from NUL-separated 'git diff-tree' output.
        if full:
            raise util.Abort(_("convert from git do not support --full"))
        self.modecache = {}
        # -z: NUL-separated; -m: diff a merge against each parent;
        # --root: include the initial commit; simopt enables copy/rename
        # detection when configured
        fh = self.gitopen("git diff-tree -z --root -m -r %s %s" % (
            self.simopt, version))
        changes = []
        copies = {}
        seen = set()
        entry = None
        # single-element lists so the nested add() can mutate them
        # (Python 2 has no 'nonlocal')
        subexists = [False]
        subdeleted = [False]
        difftree = fh.read().split('\x00')
        lcount = len(difftree)
        i = 0

        def add(entry, f, isdest):
            # record one changed file; entry is the split ":<oldmode>
            # <newmode> <oldsha> <newsha> <status>" record
            seen.add(f)
            h = entry[3]
            p = (entry[1] == "100755")
            s = (entry[1] == "120000")
            renamesource = (not isdest and entry[4][0] == 'R')

            if f == '.gitmodules':
                # .gitmodules itself is never converted; changes to it are
                # reflected in the synthetic .hgsub file instead
                subexists[0] = True
                if entry[4] == 'D' or renamesource:
                    subdeleted[0] = True
                    changes.append(('.hgsub', hex(nullid)))
                else:
                    changes.append(('.hgsub', ''))
            elif entry[1] == '160000' or entry[0] == ':160000':
                # gitlink (submodule pointer) change
                subexists[0] = True
            else:
                if renamesource:
                    # the source of a rename is a deletion on the hg side
                    h = hex(nullid)
                self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
                changes.append((f, h))

        while i < lcount:
            l = difftree[i]
            i += 1
            if not entry:
                if not l.startswith(':'):
                    continue
                entry = l.split()
                continue
            f = l
            if f not in seen:
                add(entry, f, False)
            # A file can be copied multiple times, or modified and copied
            # simultaneously. So f can be repeated even if fdest isn't.
            if entry[4][0] in 'RC':
                # rename or copy: next line is the destination
                fdest = difftree[i]
                i += 1
                if fdest not in seen:
                    add(entry, fdest, True)
                # .gitmodules isn't imported at all, so it being copied to
                # and fro doesn't really make sense
                if f != '.gitmodules' and fdest != '.gitmodules':
                    copies[fdest] = f
            entry = None
        if fh.close():
            raise util.Abort(_('cannot read changes in %s') % version)

        if subexists[0]:
            if subdeleted[0]:
                changes.append(('.hgsubstate', hex(nullid)))
            else:
                self.retrievegitmodules(version)
                changes.append(('.hgsubstate', ''))
        return (changes, copies)
268 268
    def getcommit(self, version):
        # Build a common.commit object from the raw git commit object.
        c = self.catfile(version, "commit") # read the commit hash
        end = c.find("\n\n")
        message = c[end + 2:]
        message = self.recode(message)
        l = c[:end].splitlines()
        parents = []
        author = committer = None
        # header lines look like "author Name <mail> <time> <tz>" etc.;
        # skip the first line ("tree ...")
        for e in l[1:]:
            n, v = e.split(" ", 1)
            if n == "author":
                p = v.split()
                tm, tz = p[-2:]
                author = " ".join(p[:-2])
                if author[0] == "<": author = author[1:-1]
                author = self.recode(author)
            if n == "committer":
                p = v.split()
                tm, tz = p[-2:]
                committer = " ".join(p[:-2])
                if committer[0] == "<": committer = committer[1:-1]
                committer = self.recode(committer)
            if n == "parent":
                parents.append(v)

        if committer and committer != author:
            message += "\ncommitter: %s\n" % committer
        # convert git's "+hhmm"/"-hhmm" offset to seconds west of UTC;
        # tm/tz come from the last author/committer header seen
        tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
        tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
        date = tm + " " + str(tz)

        c = commit(parents=parents, date=date, author=author, desc=message,
                   rev=version)
        return c
303 303
304 304 def numcommits(self):
305 305 return len([None for _ in self.gitopen('git rev-list --all')])
306 306
    def gettags(self):
        # Return {tagname: node}.  Annotated tags are resolved to the
        # commit they point at via ls-remote's peeled '^{}' entries.
        tags = {}
        alltags = {}
        fh = self.gitopen('git ls-remote --tags "%s"' % self.path,
                          err=subprocess.STDOUT)
        prefix = 'refs/tags/'

        # Build complete list of tags, both annotated and bare ones
        for line in fh:
            line = line.strip()
            # with stderr merged into stdout, git errors show up here
            if line.startswith("error:") or line.startswith("fatal:"):
                raise util.Abort(_('cannot read tags from %s') % self.path)
            node, tag = line.split(None, 1)
            if not tag.startswith(prefix):
                continue
            alltags[tag[len(prefix):]] = node
        if fh.close():
            raise util.Abort(_('cannot read tags from %s') % self.path)

        # Filter out tag objects for annotated tag refs
        for tag in alltags:
            if tag.endswith('^{}'):
                # peeled entry: prefer the tagged commit over the tag object
                tags[tag[:-3]] = alltags[tag]
            else:
                if tag + '^{}' in alltags:
                    continue
                else:
                    tags[tag] = alltags[tag]

        return tags
337 337
338 338 def getchangedfiles(self, version, i):
339 339 changes = []
340 340 if i is None:
341 341 fh = self.gitopen("git diff-tree --root -m -r %s" % version)
342 342 for l in fh:
343 343 if "\t" not in l:
344 344 continue
345 345 m, f = l[:-1].split("\t")
346 346 changes.append(f)
347 347 else:
348 348 fh = self.gitopen('git diff-tree --name-only --root -r %s '
349 349 '"%s^%s" --' % (version, version, i + 1))
350 350 changes = [f.rstrip('\n') for f in fh]
351 351 if fh.close():
352 352 raise util.Abort(_('cannot read changes in %s') % version)
353 353
354 354 return changes
355 355
    def getbookmarks(self):
        # Convert local and origin branch heads into bookmarks; origin
        # heads are prefixed with 'remote/'.
        bookmarks = {}

        # Interesting references in git are prefixed
        prefix = 'refs/heads/'
        prefixlen = len(prefix)

        # factor two commands
        gitcmd = { 'remote/': 'git ls-remote --heads origin',
                   '': 'git show-ref'}

        # Origin heads
        for reftype in gitcmd:
            try:
                fh = self.gitopen(gitcmd[reftype], err=subprocess.PIPE)
                for line in fh:
                    line = line.strip()
                    rev, name = line.split(None, 1)
                    if not name.startswith(prefix):
                        continue
                    name = '%s%s' % (reftype, name[prefixlen:])
                    bookmarks[name] = rev
            except Exception:
                # deliberately best-effort: a missing 'origin' remote or
                # a failing git command just yields no bookmarks here
                pass

        return bookmarks
382 382
    def checkrevformat(self, revstr, mapname='splicemap'):
        """ git revision string is a 40 byte hex """
        # delegate hex validation to the common converter_source helper
        self.checkhexformat(revstr, mapname)
@@ -1,831 +1,829 b''
1 1 # changegroup.py - Mercurial changegroup manipulation functions
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import weakref
9 9 from i18n import _
10 10 from node import nullrev, nullid, hex, short
11 11 import mdiff, util, dagutil
12 12 import struct, os, bz2, zlib, tempfile
13 13 import discovery, error, phases, branchmap
14 14
# struct format of a v1 delta chunk header: node, p1, p2, linknode
_CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s"
# v2 adds an explicit delta base node field
_CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
17 17
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    data = stream.read(n)
    if len(data) < n:
        raise util.Abort(_("stream ended unexpectedly"
                           " (got %d bytes, expected %d)")
                         % (len(data), n))
    return data
26 26
def getchunk(stream):
    """return the next chunk from stream as a string"""
    # a chunk is a big-endian 32-bit length (which counts these 4 bytes)
    # followed by the payload; a length <= 4 marks a terminator chunk
    header = readexactly(stream, 4)
    length = struct.unpack(">l", header)[0]
    if length <= 4:
        if length:
            raise util.Abort(_("invalid chunk length %d") % length)
        return ""
    return readexactly(stream, length - 4)
36 36
def chunkheader(length):
    """return a changegroup chunk header (string)"""
    # the on-wire length counts the 4 header bytes themselves
    return struct.pack(">l", 4 + length)
40 40
def closechunk():
    """return a changegroup chunk header (string) for a zero-length chunk"""
    # a zero length terminates the current chunk group
    return struct.pack(">l", 0)
44 44
class nocompress(object):
    """Pass-through 'compressor' used for uncompressed bundles."""

    def compress(self, x):
        # identity: data is emitted unchanged
        return x

    def flush(self):
        # nothing is ever buffered, so nothing to flush
        return ""
50 50
# Map of bundle type name -> (on-disk header, compressor factory).
# Note the "HG10BZ" header is just "HG10": the bz2 stream itself begins
# with the 'BZ' magic (see the matching re-feed in decompressor()).
bundletypes = {
    "": ("", nocompress), # only when using unbundle on ssh and old http servers
                          # since the unification ssh accepts a header but there
                          # is no capability signaling it.
    "HG10UN": ("HG10UN", nocompress),
    "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
    "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
}

# hgweb uses this list to communicate its preferred type
bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
62 62
def writebundle(cg, filename, bundletype, vfs=None):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """

    fh = None
    cleanup = None
    try:
        if filename:
            if vfs:
                fh = vfs.open(filename, "wb")
            else:
                fh = open(filename, "wb")
        else:
            # no name given: create a temporary bundle file
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        cleanup = filename

        header, compressor = bundletypes[bundletype]
        fh.write(header)
        z = compressor()

        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkgroup is the end of the changegroup
        # a changegroup has at least 2 chunkgroups (changelog and manifest).
        # after that, an empty chunkgroup is the end of the changegroup
        for chunk in cg.getchunks():
            fh.write(z.compress(chunk))
        fh.write(z.flush())
        # success: disarm the error-cleanup path below
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        # on any error, remove the partially written bundle
        if cleanup is not None:
            if filename and vfs:
                vfs.unlink(cleanup)
            else:
                os.unlink(cleanup)
108 108
def decompressor(fh, alg):
    # Wrap stream 'fh' in a decompressing reader for algorithm 'alg'
    # ('UN' = none, 'GZ' = zlib, 'BZ' = bz2).
    if alg == 'UN':
        return fh
    elif alg == 'GZ':
        def generator(f):
            zd = zlib.decompressobj()
            for chunk in util.filechunkiter(f):
                yield zd.decompress(chunk)
    elif alg == 'BZ':
        def generator(f):
            zd = bz2.BZ2Decompressor()
            # the 'BZ' magic was consumed as part of the bundle header;
            # re-feed it so the decompressor sees a valid stream
            zd.decompress("BZ")
            for chunk in util.filechunkiter(f, 4096):
                yield zd.decompress(chunk)
    else:
        raise util.Abort("unknown bundle compression '%s'" % alg)
    return util.chunkbuffer(generator(fh))
126 126
class cg1unpacker(object):
    """Reader for a version '01' changegroup stream."""
    deltaheader = _CHANGEGROUPV1_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)
    def __init__(self, fh, alg):
        # wrap the raw stream in the matching decompressor
        self._stream = decompressor(fh, alg)
        self._type = alg
        self.callback = None
    def compressed(self):
        return self._type != 'UN'
    def read(self, l):
        return self._stream.read(l)
    def seek(self, pos):
        return self._stream.seek(pos)
    def tell(self):
        return self._stream.tell()
    def close(self):
        return self._stream.close()

    def chunklength(self):
        # Read a 4-byte big-endian chunk length; returns 0 at a group
        # terminator.  The stored length includes the 4 header bytes.
        d = readexactly(self._stream, 4)
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            if l:
                raise util.Abort(_("invalid chunk length %d") % l)
            return 0
        if self.callback:
            # progress notification hook, set by callers
            self.callback()
        return l - 4

    def changelogheader(self):
        """v10 does not have a changelog header chunk"""
        return {}

    def manifestheader(self):
        """v10 does not have a manifest header chunk"""
        return {}

    def filelogheader(self):
        """return the header of the filelogs chunk, v10 only has the filename"""
        l = self.chunklength()
        if not l:
            return {}
        fname = readexactly(self._stream, l)
        return {'filename': fname}

    def _deltaheader(self, headertuple, prevnode):
        # v1 has no explicit delta base: it is the previous chunk's node,
        # or p1 for the first chunk of a group
        node, p1, p2, cs = headertuple
        if prevnode is None:
            deltabase = p1
        else:
            deltabase = prevnode
        return node, p1, p2, deltabase, cs

    def deltachunk(self, prevnode):
        # Read one delta chunk; returns {} at the end of a group.
        l = self.chunklength()
        if not l:
            return {}
        headerdata = readexactly(self._stream, self.deltaheadersize)
        header = struct.unpack(self.deltaheader, headerdata)
        delta = readexactly(self._stream, l - self.deltaheadersize)
        node, p1, p2, deltabase, cs = self._deltaheader(header, prevnode)
        return {'node': node, 'p1': p1, 'p2': p2, 'cs': cs,
                'deltabase': deltabase, 'delta': delta}

    def getchunks(self):
        """returns all the chunks contains in the bundle

        Used when you need to forward the binary stream to a file or another
        network API. To do so, it parse the changegroup data, otherwise it will
        block in case of sshrepo because it don't know the end of the stream.
        """
        # an empty chunkgroup is the end of the changegroup
        # a changegroup has at least 2 chunkgroups (changelog and manifest).
        # after that, an empty chunkgroup is the end of the changegroup
        empty = False
        count = 0
        while not empty or count <= 2:
            empty = True
            count += 1
            while True:
                chunk = getchunk(self)
                if not chunk:
                    break
                empty = False
                yield chunkheader(len(chunk))
                pos = 0
                while pos < len(chunk):
                    # re-emit the payload in at most 1MB slices
                    next = pos + 2**20
                    yield chunk[pos:next]
                    pos = next
        yield closechunk()
218 218
class cg2unpacker(cg1unpacker):
    """Unpacker for version '02' changegroups (explicit delta bases)."""
    deltaheader = _CHANGEGROUPV2_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)

    def _deltaheader(self, headertuple, prevnode):
        # the v2 header already carries the delta base; prevnode is unused
        return headertuple
226 226
class headerlessfixup(object):
    """Present a stream whose first bytes 'h' were already consumed.

    read() serves bytes from the saved header first, then falls through
    to the underlying stream.
    """
    def __init__(self, fh, h):
        self._h = h
        self._fh = fh
    def read(self, n):
        if not self._h:
            return readexactly(self._fh, n)
        d, self._h = self._h[:n], self._h[n:]
        if len(d) < n:
            # header exhausted mid-read: top up from the real stream
            d += readexactly(self._fh, n - len(d))
        return d
238 238
class cg1packer(object):
    """Builder for version '01' changegroup streams."""
    deltaheader = _CHANGEGROUPV1_DELTA_HEADER
    def __init__(self, repo, bundlecaps=None):
        """Given a source repo, construct a bundler.

        bundlecaps is optional and can be used to specify the set of
        capabilities which can be used to build the bundle.
        """
        # Set of capabilities we can use to build the bundle.
        if bundlecaps is None:
            bundlecaps = set()
        self._bundlecaps = bundlecaps
        self._changelog = repo.changelog
        self._manifest = repo.manifest
        # bundle.reorder config: 'auto' becomes None, letting group()
        # decide per-revlog; anything else is parsed as a boolean
        reorder = repo.ui.config('bundle', 'reorder', 'auto')
        if reorder == 'auto':
            reorder = None
        else:
            reorder = util.parsebool(reorder)
        self._repo = repo
        self._reorder = reorder
        self._progress = repo.ui.progress
    def close(self):
        # terminator chunk for the current chunk group
        return closechunk()

    def fileheader(self, fname):
        # filename announcement chunk preceding a filelog group
        return chunkheader(len(fname)) + fname
266 266
    def group(self, nodelist, revlog, lookup, units=None, reorder=None):
        """Calculate a delta group, yielding a sequence of changegroup chunks
        (strings).

        Given a list of changeset revs, return a set of deltas and
        metadata corresponding to nodes. The first delta is
        first parent(nodelist[0]) -> nodelist[0], the receiver is
        guaranteed to have this parent as it has all history before
        these changesets. In the case firstparent is nullrev the
        changegroup starts with a full revision.

        If units is not None, progress detail will be generated, units specifies
        the type of revlog that is touched (changelog, manifest, etc.).
        """
        # if we don't have any revisions touched by these changesets, bail
        if len(nodelist) == 0:
            yield self.close()
            return

        # for generaldelta revlogs, we linearize the revs; this will both be
        # much quicker and generate a much smaller bundle
        if (revlog._generaldelta and reorder is not False) or reorder:
            dag = dagutil.revlogdag(revlog)
            revs = set(revlog.rev(n) for n in nodelist)
            revs = dag.linearize(revs)
        else:
            revs = sorted([revlog.rev(n) for n in nodelist])

        # add the parent of the first rev
        p = revlog.parentrevs(revs[0])[0]
        revs.insert(0, p)

        # build deltas, each one against the previous rev in the list
        total = len(revs) - 1
        msgbundling = _('bundling')
        for r in xrange(len(revs) - 1):
            if units is not None:
                self._progress(msgbundling, r + 1, unit=units, total=total)
            prev, curr = revs[r], revs[r + 1]
            linknode = lookup(revlog.node(curr))
            for c in self.revchunk(revlog, curr, prev, linknode):
                yield c

        yield self.close()
311 311
312 312 # filter any nodes that claim to be part of the known set
313 313 def prune(self, revlog, missing, commonrevs, source):
314 314 rr, rl = revlog.rev, revlog.linkrev
315 315 return [n for n in missing if rl(rr(n)) not in commonrevs]
316 316
317 317 def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
318 318 '''yield a sequence of changegroup chunks (strings)'''
319 319 repo = self._repo
320 320 cl = self._changelog
321 321 mf = self._manifest
322 322 reorder = self._reorder
323 323 progress = self._progress
324 324
325 325 # for progress output
326 326 msgbundling = _('bundling')
327 327
328 328 mfs = {} # needed manifests
329 329 fnodes = {} # needed file nodes
330 330 changedfiles = set()
331 331
332 332 # Callback for the changelog, used to collect changed files and manifest
333 333 # nodes.
334 334 # Returns the linkrev node (identity in the changelog case).
335 335 def lookupcl(x):
336 336 c = cl.read(x)
337 337 changedfiles.update(c[3])
338 338 # record the first changeset introducing this manifest version
339 339 mfs.setdefault(c[0], x)
340 340 return x
341 341
342 for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets'),
343 reorder=reorder):
344 yield chunk
345 progress(msgbundling, None)
346
342 347 # Callback for the manifest, used to collect linkrevs for filelog
343 348 # revisions.
344 349 # Returns the linkrev node (collected in lookupcl).
345 350 def lookupmf(x):
346 351 clnode = mfs[x]
347 352 if not fastpathlinkrev:
348 353 mdata = mf.readfast(x)
349 354 for f, n in mdata.iteritems():
350 355 if f in changedfiles:
351 356 # record the first changeset introducing this filelog
352 357 # version
353 fnodes[f].setdefault(n, clnode)
358 fnodes.setdefault(f, {}).setdefault(n, clnode)
354 359 return clnode
355 360
356 for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets'),
357 reorder=reorder):
358 yield chunk
359 progress(msgbundling, None)
360
361 for f in changedfiles:
362 fnodes[f] = {}
363 361 mfnodes = self.prune(mf, mfs, commonrevs, source)
364 362 for chunk in self.group(mfnodes, mf, lookupmf, units=_('manifests'),
365 363 reorder=reorder):
366 364 yield chunk
367 365 progress(msgbundling, None)
368 366
369 367 mfs.clear()
370 368 needed = set(cl.rev(x) for x in clnodes)
371 369
372 370 def linknodes(filerevlog, fname):
373 371 if fastpathlinkrev:
374 372 llr = filerevlog.linkrev
375 373 def genfilenodes():
376 374 for r in filerevlog:
377 375 linkrev = llr(r)
378 376 if linkrev in needed:
379 377 yield filerevlog.node(r), cl.node(linkrev)
380 fnodes[fname] = dict(genfilenodes())
378 return dict(genfilenodes())
381 379 return fnodes.get(fname, {})
382 380
383 381 for chunk in self.generatefiles(changedfiles, linknodes, commonrevs,
384 382 source):
385 383 yield chunk
386 384
387 385 yield self.close()
388 386 progress(msgbundling, None)
389 387
390 388 if clnodes:
391 389 repo.hook('outgoing', node=hex(clnodes[0]), source=source)
392 390
    def generatefiles(self, changedfiles, linknodes, commonrevs, source):
        # Emit one chunk group per changed file, each preceded by its
        # filename header chunk.
        repo = self._repo
        progress = self._progress
        reorder = self._reorder
        msgbundling = _('bundling')

        total = len(changedfiles)
        # for progress output
        msgfiles = _('files')
        for i, fname in enumerate(sorted(changedfiles)):
            filerevlog = repo.file(fname)
            if not filerevlog:
                raise util.Abort(_("empty or missing revlog for %s") % fname)

            linkrevnodes = linknodes(filerevlog, fname)
            # Lookup for filenodes, we collected the linkrev nodes above in the
            # fastpath case and with lookupmf in the slowpath case.
            def lookupfilelog(x):
                return linkrevnodes[x]

            filenodes = self.prune(filerevlog, linkrevnodes, commonrevs, source)
            if filenodes:
                progress(msgbundling, i + 1, item=fname, unit=msgfiles,
                         total=total)
                yield self.fileheader(fname)
                for chunk in self.group(filenodes, filerevlog, lookupfilelog,
                                        reorder=reorder):
                    yield chunk
421 419
422 420 def deltaparent(self, revlog, rev, p1, p2, prev):
423 421 return prev
424 422
    def revchunk(self, revlog, rev, prev, linknode):
        # Emit one delta chunk for 'rev': length header, delta header
        # (node, parents, linknode and, in v2, base), then the delta.
        node = revlog.node(rev)
        p1, p2 = revlog.parentrevs(rev)
        base = self.deltaparent(revlog, rev, p1, p2, prev)

        prefix = ''
        if base == nullrev:
            # no usable base: ship the full text behind a trivial diff header
            delta = revlog.revision(node)
            prefix = mdiff.trivialdiffheader(len(delta))
        else:
            delta = revlog.revdiff(base, rev)
        p1n, p2n = revlog.parents(node)
        basenode = revlog.node(base)
        meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode)
        meta += prefix
        l = len(meta) + len(delta)
        yield chunkheader(l)
        yield meta
        yield delta
    def builddeltaheader(self, node, p1n, p2n, basenode, linknode):
        # do nothing with basenode, it is implicitly the previous one in HG10
        return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
447 445
class cg2packer(cg1packer):
    """Builder for version '02' changegroups (explicit delta bases)."""

    deltaheader = _CHANGEGROUPV2_DELTA_HEADER

    def group(self, nodelist, revlog, lookup, units=None, reorder=None):
        # reordering is not beneficial when deltas can name any base
        if (revlog._generaldelta and reorder is not True):
            reorder = False
        return super(cg2packer, self).group(nodelist, revlog, lookup,
                                            units=units, reorder=reorder)

    def deltaparent(self, revlog, rev, p1, p2, prev):
        dp = revlog.deltaparent(rev)
        # avoid storing full revisions; pick prev in those cases
        # also pick prev when we can't be sure remote has dp
        if dp == nullrev or (dp != p1 and dp != p2 and dp != prev):
            return prev
        return dp

    def builddeltaheader(self, node, p1n, p2n, basenode, linknode):
        # v2 transmits the delta base node explicitly
        return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode)
468 466
# changegroup version -> (packer class, unpacker class)
packermap = {'01': (cg1packer, cg1unpacker),
             '02': (cg2packer, cg2unpacker)}
471 469
def _changegroupinfo(repo, nodes, source):
    # report how many changesets the changegroup carries; list them in
    # debug mode
    ui = repo.ui
    if ui.verbose or source == 'bundle':
        ui.status(_("%d changesets found\n") % len(nodes))
    if ui.debugflag:
        ui.debug("list of changesets:\n")
        for node in nodes:
            ui.debug("%s\n" % hex(node))
479 477
def getsubsetraw(repo, outgoing, bundler, source, fastpath=False):
    """Yield raw changegroup chunks for 'outgoing' using 'bundler'."""
    repo = repo.unfiltered()
    commonrevs = outgoing.common
    csets = outgoing.missing
    heads = outgoing.missingheads
    # We go through the fast path if we get told to, or if all (unfiltered
    # heads have been requested (since we then know there all linkrevs will
    # be pulled by the client).
    heads.sort()
    fastpathlinkrev = fastpath or (
        repo.filtername is None and heads == sorted(repo.heads()))

    repo.hook('preoutgoing', throw=True, source=source)
    _changegroupinfo(repo, csets, source)
    return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
495 493
def getsubset(repo, outgoing, bundler, source, fastpath=False):
    # wrap the raw chunk generator in an uncompressed unpacker object
    raw = getsubsetraw(repo, outgoing, bundler, source, fastpath)
    return cg1unpacker(util.chunkbuffer(raw), 'UN')
499 497
def changegroupsubset(repo, roots, heads, source):
    """Compute a changegroup consisting of all the nodes that are
    descendants of any of the roots and ancestors of any of the heads.
    Return a chunkbuffer object whose read() method will return
    successive changegroup chunks.

    It is fairly complex as determining which filenodes and which
    manifest nodes need to be included for the changeset to be complete
    is non-trivial.

    Another wrinkle is doing the reverse, figuring out which changeset in
    the changegroup a particular filenode or manifestnode belongs to.
    """
    cl = repo.changelog
    if not roots:
        roots = [nullid]
    # TODO: remove call to nodesbetween.
    csets, roots, heads = cl.nodesbetween(roots, heads)
    # discovery bases are the parents of the roots that we already have
    discbases = []
    for n in roots:
        discbases.extend([p for p in cl.parents(n) if p != nullid])
    outgoing = discovery.outgoing(cl, discbases, heads)
    bundler = cg1packer(repo)
    return getsubset(repo, outgoing, bundler, source)
524 522
def getlocalchangegroupraw(repo, source, outgoing, bundlecaps=None,
                           version='01'):
    """Like getbundle, but taking a discovery.outgoing as an argument.

    This is only implemented for local repos and reuses potentially
    precomputed sets in outgoing. Returns a raw changegroup generator."""
    if not outgoing.missing:
        # nothing to send
        return None
    packerclass = packermap[version][0]
    bundler = packerclass(repo, bundlecaps)
    return getsubsetraw(repo, outgoing, bundler, source)
535 533
def getlocalchangegroup(repo, source, outgoing, bundlecaps=None,
                        version='01'):
    """Like getbundle, but taking a discovery.outgoing as an argument.

    This is only implemented for local repos and reuses potentially
    precomputed sets in outgoing. Returns None when there is nothing
    to bundle.

    ``version`` selects the changegroup packer via ``packermap`` and
    defaults to '01', which matches the previously hard-coded
    cg1packer, so existing callers are unaffected. This mirrors the
    signature of getlocalchangegroupraw for consistency.
    """
    if not outgoing.missing:
        return None
    # look the packer up in packermap instead of hard-coding cg1packer,
    # keeping this in sync with the raw variant above
    bundler = packermap[version][0](repo, bundlecaps)
    return getsubset(repo, outgoing, bundler, source)
545 543
def _computeoutgoing(repo, heads, common):
    """Compute which revisions are outgoing given a set of common
    nodes and a set of heads.

    Kept as a separate function so extensions can reach the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if not common:
        common = [nullid]
    else:
        # drop any advertised common nodes we do not actually have
        known = cl.hasnode
        common = [node for node in common if known(node)]
    if not heads:
        heads = cl.heads()
    return discovery.outgoing(cl, common, heads)
564 562
def getchangegroupraw(repo, source, heads=None, common=None, bundlecaps=None,
                      version='01'):
    """Like changegroupsubset, but returns the set difference between the
    ancestors of heads and the ancestors common.

    If heads is None, use the local heads. If common is None, use [nullid].

    If version is None, use a version '1' changegroup.

    The nodes in common might not all be known locally due to the way the
    current discovery protocol works. Returns a raw changegroup generator.
    """
    out = _computeoutgoing(repo, heads, common)
    return getlocalchangegroupraw(repo, source, out,
                                  bundlecaps=bundlecaps, version=version)
580 578
def getchangegroup(repo, source, heads=None, common=None, bundlecaps=None):
    """Like changegroupsubset, but returns the set difference between the
    ancestors of heads and the ancestors common.

    If heads is None, use the local heads. If common is None, use [nullid].

    The nodes in common might not all be known locally due to the way the
    current discovery protocol works.
    """
    out = _computeoutgoing(repo, heads, common)
    return getlocalchangegroup(repo, source, out, bundlecaps=bundlecaps)
592 590
def changegroup(repo, basenodes, source):
    """Build a changegroup of everything above ``basenodes``."""
    # to avoid a race we use changegroupsubset() (issue1320)
    currentheads = repo.heads()
    return changegroupsubset(repo, basenodes, currentheads, source)
596 594
def addchangegroupfiles(repo, source, revmap, trp, pr, needfiles):
    """Consume the filelog section of a changegroup stream.

    source is the changegroup unpacker, revmap maps nodes to local revs,
    trp is a (weak) transaction proxy, pr is a progress callback and
    needfiles maps filename -> set of filenodes that the server-side
    validation pass determined must arrive (may be empty).

    Returns (revisions, files): how many file revisions and how many
    distinct files were added.

    Raises util.Abort on an empty or spurious file revlog group, or on
    missing file data for a node listed in needfiles.
    """
    revisions = 0
    files = 0
    while True:
        # one header per file; an empty header terminates the section
        chunkdata = source.filelogheader()
        if not chunkdata:
            break
        f = chunkdata["filename"]
        repo.ui.debug("adding %s revisions\n" % f)
        pr()
        fl = repo.file(f)
        o = len(fl)
        if not fl.addgroup(source, revmap, trp):
            raise util.Abort(_("received file revlog group is empty"))
        revisions += len(fl) - o
        files += 1
        if f in needfiles:
            # tick off every expected node we just received; anything
            # extra is a protocol violation
            needs = needfiles[f]
            for new in xrange(o, len(fl)):
                n = fl.node(new)
                if n in needs:
                    needs.remove(n)
                else:
                    raise util.Abort(
                        _("received spurious file revlog entry"))
            if not needs:
                del needfiles[f]
    repo.ui.progress(_('files'), None)

    # whatever is still listed in needfiles must already exist locally,
    # otherwise the incoming changesets reference data we do not have
    for f, needs in needfiles.iteritems():
        fl = repo.file(f)
        for n in needs:
            try:
                fl.rev(n)
            except error.LookupError:
                raise util.Abort(
                    _('missing file data for %s:%s - run hg verify') %
                    (f, hex(n)))

    return revisions, files
637 635
def addchangegroup(repo, source, srctype, url, emptyok=False,
                   targetphase=phases.draft):
    """Add the changegroup returned by source.read() to this repo.
    srctype is a string like 'push', 'pull', or 'unbundle'. url is
    the URL of the repo where this changegroup is coming from.

    Return an integer summarizing the change to this repo:
    - nothing changed or no source: 0
    - more heads than before: 1+added heads (2..n)
    - fewer heads than before: -1-removed heads (-2..-n)
    - number of heads stays the same: 1
    """
    # work on the unfiltered repo: incoming nodes may be hidden otherwise
    repo = repo.unfiltered()
    def csmap(x):
        repo.ui.debug("add changeset %s\n" % short(x))
        return len(cl)

    def revmap(x):
        return cl.rev(x)

    if not source:
        return 0

    changesets = files = revisions = 0
    efiles = set()

    tr = repo.transaction("\n".join([srctype, util.hidepassword(url)]))
    # The transaction could have been created before and already carries source
    # information. In this case we use the top level data. We overwrite the
    # argument because we need to use the top level value (if they exist) in
    # this function.
    srctype = tr.hookargs.setdefault('source', srctype)
    url = tr.hookargs.setdefault('url', url)

    # write changelog data to temp files so concurrent readers will not see
    # inconsistent view
    cl = repo.changelog
    cl.delayupdate(tr)
    oldheads = cl.heads()
    try:
        repo.hook('prechangegroup', throw=True, **tr.hookargs)

        # weak proxy so closures below do not keep the transaction alive
        trp = weakref.proxy(tr)
        # pull off the changeset group
        repo.ui.status(_("adding changesets\n"))
        clstart = len(cl)
        class prog(object):
            # callable progress reporter shared by the changelog,
            # manifest and file phases (step/count/total are rebound
            # between phases)
            step = _('changesets')
            count = 1
            ui = repo.ui
            total = None
            def __call__(repo):
                repo.ui.progress(repo.step, repo.count, unit=_('chunks'),
                                 total=repo.total)
                repo.count += 1
        pr = prog()
        source.callback = pr

        source.changelogheader()
        srccontent = cl.addgroup(source, csmap, trp)
        if not (srccontent or emptyok):
            raise util.Abort(_("received changelog group is empty"))
        clend = len(cl)
        changesets = clend - clstart
        # efiles: number of distinct files touched, used as the
        # progress total for the file phase
        for c in xrange(clstart, clend):
            efiles.update(repo[c].files())
        efiles = len(efiles)
        repo.ui.progress(_('changesets'), None)

        # pull off the manifest group
        repo.ui.status(_("adding manifests\n"))
        pr.step = _('manifests')
        pr.count = 1
        pr.total = changesets # manifests <= changesets
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        source.manifestheader()
        repo.manifest.addgroup(source, revmap, trp)
        repo.ui.progress(_('manifests'), None)

        needfiles = {}
        if repo.ui.configbool('server', 'validate', default=False):
            # validate incoming csets have their manifests
            for cset in xrange(clstart, clend):
                mfest = repo.changelog.read(repo.changelog.node(cset))[0]
                mfest = repo.manifest.readdelta(mfest)
                # store file nodes we must see
                for f, n in mfest.iteritems():
                    needfiles.setdefault(f, set()).add(n)

        # process the files
        repo.ui.status(_("adding file changes\n"))
        pr.step = _('files')
        pr.count = 1
        pr.total = efiles
        source.callback = None

        newrevs, newfiles = addchangegroupfiles(repo, source, revmap, trp, pr,
                                                needfiles)
        revisions += newrevs
        files += newfiles

        # dh: net change in head count, discounting closed branch heads
        dh = 0
        if oldheads:
            heads = cl.heads()
            dh = len(heads) - len(oldheads)
            for h in heads:
                if h not in oldheads and repo[h].closesbranch():
                    dh -= 1
        htext = ""
        if dh:
            htext = _(" (%+d heads)") % dh

        repo.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                         % (changesets, revisions, files, htext))
        repo.invalidatevolatilesets()

        if changesets > 0:
            p = lambda: tr.writepending() and repo.root or ""
            if 'node' not in tr.hookargs:
                tr.hookargs['node'] = hex(cl.node(clstart))
                hookargs = dict(tr.hookargs)
            else:
                # 'node' was set by an earlier changegroup in the same
                # transaction; do not clobber the transaction-level value
                hookargs = dict(tr.hookargs)
                hookargs['node'] = hex(cl.node(clstart))
            repo.hook('pretxnchangegroup', throw=True, pending=p, **hookargs)

        added = [cl.node(r) for r in xrange(clstart, clend)]
        publishing = repo.ui.configbool('phases', 'publish', True)
        if srctype in ('push', 'serve'):
            # Old servers can not push the boundary themselves.
            # New servers won't push the boundary if changeset already
            # exists locally as secret
            #
            # We should not use added here but the list of all change in
            # the bundle
            if publishing:
                phases.advanceboundary(repo, tr, phases.public, srccontent)
            else:
                # Those changesets have been pushed from the outside, their
                # phases are going to be pushed alongside. Therefor
                # `targetphase` is ignored.
                phases.advanceboundary(repo, tr, phases.draft, srccontent)
                phases.retractboundary(repo, tr, phases.draft, added)
        elif srctype != 'strip':
            # publishing only alter behavior during push
            #
            # strip should not touch boundary at all
            phases.retractboundary(repo, tr, targetphase, added)

        if changesets > 0:
            if srctype != 'strip':
                # During strip, branchcache is invalid but coming call to
                # `destroyed` will repair it.
                # In other case we can safely update cache on disk.
                branchmap.updatecache(repo.filtered('served'))

            def runhooks():
                # These hooks run when the lock releases, not when the
                # transaction closes. So it's possible for the changelog
                # to have changed since we last saw it.
                if clstart >= len(repo):
                    return

                # forcefully update the on-disk branch cache
                repo.ui.debug("updating the branch cache\n")
                repo.hook("changegroup", **hookargs)

                for n in added:
                    args = hookargs.copy()
                    args['node'] = hex(n)
                    repo.hook("incoming", **args)

                newheads = [h for h in repo.heads() if h not in oldheads]
                repo.ui.log("incoming",
                            "%s incoming changes - new heads: %s\n",
                            len(added),
                            ', '.join([hex(c[:6]) for c in newheads]))

            tr.addpostclose('changegroup-runhooks-%020i' % clstart,
                            lambda: repo._afterlock(runhooks))

        tr.close()

    finally:
        tr.release()
    # never return 0 here:
    if dh < 0:
        return dh - 1
    else:
        return dh + 1
@@ -1,1107 +1,1107 b''
1 1 #
2 2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import os, mimetypes, re, cgi, copy
9 9 import webutil
10 10 from mercurial import error, encoding, archival, templater, templatefilters
11 11 from mercurial.node import short, hex
12 12 from mercurial import util
13 13 from common import paritygen, staticfile, get_contact, ErrorResponse
14 14 from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
15 15 from mercurial import graphmod, patch
16 16 from mercurial import help as helpmod
17 17 from mercurial import scmutil
18 18 from mercurial.i18n import _
19 19 from mercurial.error import ParseError, RepoLookupError, Abort
20 20 from mercurial import revset
21 21
# __all__ is populated with the allowed commands. Be sure to add to it if
# you're adding a new command, or the new command won't work.

__all__ = [
 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
 'manifest', 'tags', 'bookmarks', 'branches', 'summary', 'filediff', 'diff',
 'comparison', 'annotate', 'filelog', 'archive', 'static', 'graph', 'help',
]
30 30
def log(web, req, tmpl):
    """Dispatch /log: per-file history when a file is given in the
    request form, otherwise the repository changelog."""
    if req.form.get('file', [''])[0]:
        return filelog(web, req, tmpl)
    return changelog(web, req, tmpl)
36 36
def rawfile(web, req, tmpl):
    """Serve a file's raw contents at a given revision.

    Falls back to the manifest (directory) view when no path is given
    or the path does not resolve to a file. The Content-Type defaults
    to application/binary; with web.guessmime enabled it is guessed
    from the filename, then from the data.
    """
    guessmime = web.configbool('web', 'guessmime', False)

    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
    if not path:
        content = manifest(web, req, tmpl)
        req.respond(HTTP_OK, web.ctype)
        return content

    try:
        fctx = webutil.filectx(web.repo, req)
    except error.LookupError, inst:
        # not a file: maybe a directory -- try the manifest view, and
        # re-raise the original lookup error if that fails too
        try:
            content = manifest(web, req, tmpl)
            req.respond(HTTP_OK, web.ctype)
            return content
        except ErrorResponse:
            raise inst

    path = fctx.path()
    text = fctx.data()
    mt = 'application/binary'
    if guessmime:
        mt = mimetypes.guess_type(path)[0]
        if mt is None:
            # no extension match: sniff the payload itself
            mt = util.binary(text) and 'application/binary' or 'text/plain'
    if mt.startswith('text/'):
        mt += '; charset="%s"' % encoding.encoding

    req.respond(HTTP_OK, mt, path, body=text)
    return []
68 68
def _filerevision(web, tmpl, fctx):
    """Render the 'filerevision' template for the given file context.

    Binary content is replaced by a '(binary:<mimetype>)' placeholder
    rather than rendered line by line.
    """
    f = fctx.path()
    text = fctx.data()
    parity = paritygen(web.stripecount)

    if util.binary(text):
        mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
        text = '(binary:%s)' % mt

    def lines():
        # one mapping per source line; ids/numbers are 1-based
        for lineno, t in enumerate(text.splitlines(True)):
            yield {"line": t,
                   "lineid": "l%d" % (lineno + 1),
                   "linenumber": "% 6d" % (lineno + 1),
                   "parity": parity.next()}

    return tmpl("filerevision",
                file=f,
                path=webutil.up(f),
                text=lines(),
                rev=fctx.rev(),
                node=fctx.hex(),
                author=fctx.user(),
                date=fctx.date(),
                desc=fctx.description(),
                extra=fctx.extra(),
                branch=webutil.nodebranchnodefault(fctx),
                parent=webutil.parents(fctx),
                child=webutil.children(fctx),
                rename=webutil.renamelink(fctx),
                permissions=fctx.manifest().flags(f))
100 100
def file(web, req, tmpl):
    """Show a file's annotated revision view, or the manifest view when
    no path is given or the path turns out to be a directory.

    NOTE: the name shadows the Python 2 builtin ``file``; it is kept
    because web command names must match their URL path.
    """
    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
    if not path:
        return manifest(web, req, tmpl)
    try:
        return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
    except error.LookupError, inst:
        # path is not a file: fall back to the directory listing and
        # re-raise the original error if that also fails
        try:
            return manifest(web, req, tmpl)
        except ErrorResponse:
            raise inst
112 112
def _search(web, req, tmpl):
    """Handle the changelog search box.

    The query is classified into one of three modes: an exact revision
    pointer, a revset expression, or plain keywords; 'forcekw' in the
    request form forces keyword mode. Renders the 'search' template.
    """
    MODE_REVISION = 'rev'
    MODE_KEYWORD = 'keyword'
    MODE_REVSET = 'revset'

    def revsearch(ctx):
        # exact match: the single resolved changectx
        yield ctx

    def keywordsearch(query):
        lower = encoding.lower
        qw = lower(query).split()

        def revgen():
            # walk revisions newest-first in batches of 100
            cl = web.repo.changelog
            for i in xrange(len(web.repo) - 1, 0, -100):
                l = []
                for j in cl.revs(max(0, i - 99), i):
                    ctx = web.repo[j]
                    l.append(ctx)
                l.reverse()
                for e in l:
                    yield e

        for ctx in revgen():
            # every keyword must appear in user, description or files
            miss = 0
            for q in qw:
                if not (q in lower(ctx.user()) or
                        q in lower(ctx.description()) or
                        q in lower(" ".join(ctx.files()))):
                    miss = 1
                    break
            if miss:
                continue

            yield ctx

    def revsetsearch(revs):
        for r in revs:
            yield web.repo[r]

    searchfuncs = {
        MODE_REVISION: (revsearch, 'exact revision search'),
        MODE_KEYWORD: (keywordsearch, 'literal keyword search'),
        MODE_REVSET: (revsetsearch, 'revset expression search'),
    }

    def getsearchmode(query):
        """Classify query; returns (mode, argument-for-that-mode)."""
        try:
            ctx = web.repo[query]
        except (error.RepoError, error.LookupError):
            # query is not an exact revision pointer, need to
            # decide if it's a revset expression or keywords
            pass
        else:
            return MODE_REVISION, ctx

        revdef = 'reverse(%s)' % query
        try:
            tree, pos = revset.parse(revdef)
        except ParseError:
            # can't parse to a revset tree
            return MODE_KEYWORD, query

        if revset.depth(tree) <= 2:
            # no revset syntax used
            return MODE_KEYWORD, query

        # 're:' patterns could be slow/surprising; treat as keywords
        if util.any((token, (value or '')[:3]) == ('string', 're:')
                    for token, value, pos in revset.tokenize(revdef)):
            return MODE_KEYWORD, query

        funcsused = revset.funcsused(tree)
        if not funcsused.issubset(revset.safesymbols):
            return MODE_KEYWORD, query

        mfunc = revset.match(web.repo.ui, revdef)
        try:
            revs = mfunc(web.repo, revset.baseset(web.repo))
            return MODE_REVSET, revs
        # ParseError: wrongly placed tokens, wrongs arguments, etc
        # RepoLookupError: no such revision, e.g. in 'revision:'
        # Abort: bookmark/tag not exists
        # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo
        except (ParseError, RepoLookupError, Abort, LookupError):
            return MODE_KEYWORD, query

    def changelist(**map):
        # NOTE: reads searchfunc/funcarg/parity/revcount bound later in
        # this function body -- they exist by the time tmpl iterates us
        count = 0

        for ctx in searchfunc[0](funcarg):
            count += 1
            n = ctx.node()
            showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
            files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)

            yield tmpl('searchentry',
                       parity=parity.next(),
                       author=ctx.user(),
                       parent=webutil.parents(ctx),
                       child=webutil.children(ctx),
                       changelogtag=showtags,
                       desc=ctx.description(),
                       extra=ctx.extra(),
                       date=ctx.date(),
                       files=files,
                       rev=ctx.rev(),
                       node=hex(n),
                       tags=webutil.nodetagsdict(web.repo, n),
                       bookmarks=webutil.nodebookmarksdict(web.repo, n),
                       inbranch=webutil.nodeinbranch(web.repo, ctx),
                       branches=webutil.nodebranchdict(web.repo, ctx))

            if count >= revcount:
                break

    query = req.form['rev'][0]
    revcount = web.maxchanges
    if 'revcount' in req.form:
        try:
            revcount = int(req.form.get('revcount', [revcount])[0])
            revcount = max(revcount, 1)
            tmpl.defaults['sessionvars']['revcount'] = revcount
        except ValueError:
            pass

    lessvars = copy.copy(tmpl.defaults['sessionvars'])
    lessvars['revcount'] = max(revcount / 2, 1)
    lessvars['rev'] = query
    morevars = copy.copy(tmpl.defaults['sessionvars'])
    morevars['revcount'] = revcount * 2
    morevars['rev'] = query

    mode, funcarg = getsearchmode(query)

    if 'forcekw' in req.form:
        showforcekw = ''
        showunforcekw = searchfuncs[mode][1]
        mode = MODE_KEYWORD
        funcarg = query
    else:
        if mode != MODE_KEYWORD:
            showforcekw = searchfuncs[MODE_KEYWORD][1]
        else:
            showforcekw = ''
        showunforcekw = ''

    searchfunc = searchfuncs[mode]

    tip = web.repo['tip']
    parity = paritygen(web.stripecount)

    return tmpl('search', query=query, node=tip.hex(),
                entries=changelist, archives=web.archivelist("tip"),
                morevars=morevars, lessvars=lessvars,
                modedesc=searchfunc[1],
                showforcekw=showforcekw, showunforcekw=showunforcekw)
269 269
def changelog(web, req, tmpl, shortlog=False):
    """Render the changelog (or shortlog) page.

    A 'rev' form parameter redirects to _search; a 'node' parameter
    selects the starting changeset, defaulting to tip. One extra entry
    beyond revcount is generated so the template can show a 'next'
    link.
    """
    query = ''
    if 'node' in req.form:
        ctx = webutil.changectx(web.repo, req)
    elif 'rev' in req.form:
        return _search(web, req, tmpl)
    else:
        ctx = web.repo['tip']

    def changelist():
        # walks from pos down to revision 0; pos/revcount/parity are
        # bound below, before this generator is consumed
        revs = []
        if pos != -1:
            revs = web.repo.changelog.revs(pos, 0)
        curcount = 0
        for i in revs:
            ctx = web.repo[i]
            n = ctx.node()
            showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
            files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)

            curcount += 1
            if curcount > revcount + 1:
                break
            yield {"parity": parity.next(),
                   "author": ctx.user(),
                   "parent": webutil.parents(ctx, i - 1),
                   "child": webutil.children(ctx, i + 1),
                   "changelogtag": showtags,
                   "desc": ctx.description(),
                   "extra": ctx.extra(),
                   "date": ctx.date(),
                   "files": files,
                   "rev": i,
                   "node": hex(n),
                   "tags": webutil.nodetagsdict(web.repo, n),
                   "bookmarks": webutil.nodebookmarksdict(web.repo, n),
                   "inbranch": webutil.nodeinbranch(web.repo, ctx),
                   "branches": webutil.nodebranchdict(web.repo, ctx)
                   }

    revcount = shortlog and web.maxshortchanges or web.maxchanges
    if 'revcount' in req.form:
        try:
            revcount = int(req.form.get('revcount', [revcount])[0])
            revcount = max(revcount, 1)
            tmpl.defaults['sessionvars']['revcount'] = revcount
        except ValueError:
            pass

    lessvars = copy.copy(tmpl.defaults['sessionvars'])
    lessvars['revcount'] = max(revcount / 2, 1)
    morevars = copy.copy(tmpl.defaults['sessionvars'])
    morevars['revcount'] = revcount * 2

    count = len(web.repo)
    pos = ctx.rev()
    parity = paritygen(web.stripecount)

    changenav = webutil.revnav(web.repo).gen(pos, revcount, count)

    entries = list(changelist())
    latestentry = entries[:1]
    if len(entries) > revcount:
        # the extra entry only feeds the 'next page' navigation link
        nextentry = entries[-1:]
        entries = entries[:-1]
    else:
        nextentry = []

    return tmpl(shortlog and 'shortlog' or 'changelog', changenav=changenav,
                node=ctx.hex(), rev=pos, changesets=count,
                entries=entries,
                latestentry=latestentry, nextentry=nextentry,
                archives=web.archivelist("tip"), revcount=revcount,
                morevars=morevars, lessvars=lessvars, query=query)
345 345
def shortlog(web, req, tmpl):
    """Render the abbreviated changelog (shortlog template)."""
    return changelog(web, req, tmpl, shortlog=True)
348 348
def changeset(web, req, tmpl):
    """Render a single changeset page, including its diff and diffstat.

    The diff base defaults to the first parent but can be overridden by
    the request (webutil.basechangectx).
    """
    ctx = webutil.changectx(web.repo, req)
    basectx = webutil.basechangectx(web.repo, req)
    if basectx is None:
        basectx = ctx.p1()
    showtags = webutil.showtag(web.repo, tmpl, 'changesettag', ctx.node())
    showbookmarks = webutil.showbookmark(web.repo, tmpl, 'changesetbookmark',
                                         ctx.node())
    showbranch = webutil.nodebranchnodefault(ctx)

    files = []
    parity = paritygen(web.stripecount)
    for blockno, f in enumerate(ctx.files()):
        # files listed in the changeset but absent from its manifest
        # (removed files) get the link-less template
        template = f in ctx and 'filenodelink' or 'filenolink'
        files.append(tmpl(template,
                          node=ctx.hex(), file=f, blockno=blockno + 1,
                          parity=parity.next()))

    style = web.config('web', 'style', 'paper')
    if 'style' in req.form:
        style = req.form['style'][0]

    # fresh parity generators: each rendered section alternates rows
    # independently
    parity = paritygen(web.stripecount)
    diffs = webutil.diffs(web.repo, tmpl, ctx, basectx, None, parity, style)

    parity = paritygen(web.stripecount)
    diffstatgen = webutil.diffstatgen(ctx, basectx)
    diffstat = webutil.diffstat(tmpl, ctx, diffstatgen, parity)

    return tmpl('changeset',
                diff=diffs,
                rev=ctx.rev(),
                node=ctx.hex(),
                parent=webutil.parents(ctx),
                child=webutil.children(ctx),
                basenode=basectx.hex(),
                changesettag=showtags,
                changesetbookmark=showbookmarks,
                changesetbranch=showbranch,
                author=ctx.user(),
                desc=ctx.description(),
                extra=ctx.extra(),
                date=ctx.date(),
                files=files,
                diffsummary=lambda **x: webutil.diffsummary(diffstatgen),
                diffstat=diffstat,
                archives=web.archivelist(ctx.hex()),
                tags=webutil.nodetagsdict(web.repo, ctx.node()),
                bookmarks=webutil.nodebookmarksdict(web.repo, ctx.node()),
                branch=webutil.nodebranchnodefault(ctx),
                inbranch=webutil.nodeinbranch(web.repo, ctx),
                branches=webutil.nodebranchdict(web.repo, ctx))

# 'rev' is served by the same handler as 'changeset'
rev = changeset
403 403
def decodepath(path):
    """Hook for mapping a path in the repository to a path in the
    working copy.

    Extensions (e.g., largefiles) can override this to remap files in
    the virtual file system presented by the manifest command below.
    The default implementation is the identity mapping.
    """
    return path
411 411
def manifest(web, req, tmpl):
    """Render the directory listing for a path at a given changeset.

    Splits the manifest into direct files and (possibly collapsed)
    subdirectories of the requested path. Raises ErrorResponse 404 when
    the path matches nothing in a non-empty manifest.
    """
    ctx = webutil.changectx(web.repo, req)
    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
    mf = ctx.manifest()
    node = ctx.node()

    files = {}
    dirs = {}
    parity = paritygen(web.stripecount)

    if path and path[-1] != "/":
        path += "/"
    l = len(path)
    abspath = "/" + path

    for full, n in mf.iteritems():
        # the virtual path (working copy path) used for the full
        # (repository) path
        f = decodepath(full)

        if f[:l] != path:
            continue
        remain = f[l:]
        elements = remain.split('/')
        if len(elements) == 1:
            files[remain] = full
        else:
            h = dirs # need to retain ref to dirs (root)
            for elem in elements[0:-1]:
                if elem not in h:
                    h[elem] = {}
                h = h[elem]
                if len(h) > 1:
                    break
            h[None] = None # denotes files present

    if mf and not files and not dirs:
        raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)

    def filelist(**map):
        for f in sorted(files):
            full = files[f]

            fctx = ctx.filectx(full)
            yield {"file": full,
                   "parity": parity.next(),
                   "basename": f,
                   "date": fctx.date(),
                   "size": fctx.size(),
                   "permissions": mf.flags(full)}

    def dirlist(**map):
        for d in sorted(dirs):

            # collapse chains of single-entry directories into one
            # "a/b/c" display entry
            emptydirs = []
            h = dirs[d]
            while isinstance(h, dict) and len(h) == 1:
                k, v = h.items()[0]
                if v:
                    emptydirs.append(k)
                h = v

            path = "%s%s" % (abspath, d)
            yield {"parity": parity.next(),
                   "path": path,
                   "emptydirs": "/".join(emptydirs),
                   "basename": d}

    return tmpl("manifest",
                rev=ctx.rev(),
                node=hex(node),
                path=abspath,
                up=webutil.up(abspath),
                upparity=parity.next(),
                fentries=filelist,
                dentries=dirlist,
                archives=web.archivelist(hex(node)),
                tags=webutil.nodetagsdict(web.repo, node),
                bookmarks=webutil.nodebookmarksdict(web.repo, node),
                inbranch=webutil.nodeinbranch(web.repo, ctx),
                branches=webutil.nodebranchdict(web.repo, ctx))
493 493
def tags(web, req, tmpl):
    """Render the tag list page (newest tag first)."""
    taglist = list(reversed(web.repo.tagslist()))
    parity = paritygen(web.stripecount)

    def entries(notip, latestonly, **map):
        candidates = taglist
        if notip:
            candidates = [(name, node) for name, node in taglist
                          if name != "tip"]
        if latestonly:
            candidates = candidates[:1]
        for name, node in candidates:
            yield {"parity": parity.next(),
                   "tag": name,
                   "date": web.repo[node].date(),
                   "node": hex(node)}

    return tmpl("tags",
                node=hex(web.repo.changelog.tip()),
                entries=lambda **x: entries(False, False, **x),
                entriesnotip=lambda **x: entries(True, False, **x),
                latestentry=lambda **x: entries(True, True, **x))
515 515
def bookmarks(web, req, tmpl):
    """Render the bookmark list page.

    Only bookmarks whose target changeset exists in the (possibly
    filtered) repo are shown. latestentry yields at most one entry.
    """
    i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo]
    parity = paritygen(web.stripecount)

    def entries(latestonly, **map):
        if latestonly:
            # guard the empty case: min() raises ValueError on an empty
            # sequence, which used to crash this page for repositories
            # without bookmarks
            # NOTE(review): min(i) picks the alphabetically first
            # (name, node) pair, not the most recently updated one --
            # kept as-is for behavioral compatibility
            t = i and [min(i)] or []
        else:
            t = sorted(i)
        for k, n in t:
            yield {"parity": parity.next(),
                   "bookmark": k,
                   "date": web.repo[n].date(),
                   "node": hex(n)}

    return tmpl("bookmarks",
                node=hex(web.repo.changelog.tip()),
                entries=lambda **x: entries(latestonly=False, **x),
                latestentry=lambda **x: entries(latestonly=True, **x))
535 535
def branches(web, req, tmpl):
    """Render the branch list page.

    Branches are sorted open-before-closed, then by tip revision
    (newest first); each is tagged open/inactive/closed.
    """
    tips = []
    heads = web.repo.heads()
    parity = paritygen(web.stripecount)
    # sort key: open branches first (not closed), then highest rev
    sortkey = lambda item: (not item[1], item[0].rev())

    def entries(limit, **map):
        count = 0
        if not tips:
            # lazily populate once; shared between entries/latestentry
            for tag, hs, tip, closed in web.repo.branchmap().iterbranches():
                tips.append((web.repo[tip], closed))
        for ctx, closed in sorted(tips, key=sortkey, reverse=True):
            if limit > 0 and count >= limit:
                return
            count += 1
            if closed:
                status = 'closed'
            elif ctx.node() not in heads:
                status = 'inactive'
            else:
                status = 'open'
            yield {'parity': parity.next(),
                   'branch': ctx.branch(),
                   'status': status,
                   'node': ctx.hex(),
                   'date': ctx.date()}

    return tmpl('branches', node=hex(web.repo.changelog.tip()),
                entries=lambda **x: entries(0, **x),
                latestentry=lambda **x: entries(1, **x))
566 566
def summary(web, req, tmpl):
    """Render the repository summary page: recent tags, bookmarks,
    branches and the last few changesets."""
    i = reversed(web.repo.tagslist())

    def tagentries(**map):
        parity = paritygen(web.stripecount)
        count = 0
        for k, n in i:
            if k == "tip": # skip tip
                continue

            count += 1
            if count > 10: # limit to 10 tags
                break

            yield tmpl("tagentry",
                       parity=parity.next(),
                       tag=k,
                       node=hex(n),
                       date=web.repo[n].date())

    def bookmarks(**map):
        parity = paritygen(web.stripecount)
        marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo]
        for k, n in sorted(marks)[:10]: # limit to 10 bookmarks
            yield {'parity': parity.next(),
                   'bookmark': k,
                   'date': web.repo[n].date(),
                   'node': hex(n)}

    def branches(**map):
        parity = paritygen(web.stripecount)

        # negated rev sorts branches newest-tip-first
        b = web.repo.branchmap()
        l = [(-web.repo.changelog.rev(tip), tip, tag)
             for tag, heads, tip, closed in b.iterbranches()]
        for r, n, t in sorted(l):
            yield {'parity': parity.next(),
                   'branch': t,
                   'node': hex(n),
                   'date': web.repo[n].date()}

    def changelist(**map):
        parity = paritygen(web.stripecount, offset=start - end)
        l = [] # build a list in forward order for efficiency
        revs = []
        if start < end:
            revs = web.repo.changelog.revs(start, end - 1)
        for i in revs:
            ctx = web.repo[i]
            n = ctx.node()
            hn = hex(n)

            l.append(tmpl(
                'shortlogentry',
                parity=parity.next(),
                author=ctx.user(),
                desc=ctx.description(),
                extra=ctx.extra(),
                date=ctx.date(),
                rev=i,
                node=hn,
                tags=webutil.nodetagsdict(web.repo, n),
                bookmarks=webutil.nodebookmarksdict(web.repo, n),
                inbranch=webutil.nodeinbranch(web.repo, ctx),
                branches=webutil.nodebranchdict(web.repo, ctx)))

        l.reverse()
        yield l

    tip = web.repo['tip']
    count = len(web.repo)
    # [start, end) window of the most recent maxchanges revisions
    start = max(0, count - web.maxchanges)
    end = min(count, start + web.maxchanges)

    return tmpl("summary",
                desc=web.config("web", "description", "unknown"),
                owner=get_contact(web.config) or "unknown",
                lastchange=tip.date(),
                tags=tagentries,
                bookmarks=bookmarks,
                branches=branches,
                shortlog=changelist,
                node=tip.hex(),
                archives=web.archivelist("tip"))
651 651
652 652 def filediff(web, req, tmpl):
653 653 fctx, ctx = None, None
654 654 try:
655 655 fctx = webutil.filectx(web.repo, req)
656 656 except LookupError:
657 657 ctx = webutil.changectx(web.repo, req)
658 658 path = webutil.cleanpath(web.repo, req.form['file'][0])
659 659 if path not in ctx.files():
660 660 raise
661 661
662 662 if fctx is not None:
663 663 n = fctx.node()
664 664 path = fctx.path()
665 665 ctx = fctx.changectx()
666 666 else:
667 667 n = ctx.node()
668 668 # path already defined in except clause
669 669
670 670 parity = paritygen(web.stripecount)
671 671 style = web.config('web', 'style', 'paper')
672 672 if 'style' in req.form:
673 673 style = req.form['style'][0]
674 674
675 675 diffs = webutil.diffs(web.repo, tmpl, ctx, None, [path], parity, style)
676 676 rename = fctx and webutil.renamelink(fctx) or []
677 677 ctx = fctx and fctx or ctx
678 678 return tmpl("filediff",
679 679 file=path,
680 680 node=hex(n),
681 681 rev=ctx.rev(),
682 682 date=ctx.date(),
683 683 desc=ctx.description(),
684 684 extra=ctx.extra(),
685 685 author=ctx.user(),
686 686 rename=rename,
687 687 branch=webutil.nodebranchnodefault(ctx),
688 688 parent=webutil.parents(ctx),
689 689 child=webutil.children(ctx),
690 690 diff=diffs)
691 691
692 692 diff = filediff
693 693
694 694 def comparison(web, req, tmpl):
695 695 ctx = webutil.changectx(web.repo, req)
696 696 if 'file' not in req.form:
697 697 raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
698 698 path = webutil.cleanpath(web.repo, req.form['file'][0])
699 699 rename = path in ctx and webutil.renamelink(ctx[path]) or []
700 700
701 701 parsecontext = lambda v: v == 'full' and -1 or int(v)
702 702 if 'context' in req.form:
703 703 context = parsecontext(req.form['context'][0])
704 704 else:
705 705 context = parsecontext(web.config('web', 'comparisoncontext', '5'))
706 706
707 707 def filelines(f):
708 708 if util.binary(f.data()):
709 709 mt = mimetypes.guess_type(f.path())[0]
710 710 if not mt:
711 711 mt = 'application/octet-stream'
712 712 return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))]
713 713 return f.data().splitlines()
714 714
715 715 parent = ctx.p1()
716 716 leftrev = parent.rev()
717 717 leftnode = parent.node()
718 718 rightrev = ctx.rev()
719 719 rightnode = ctx.node()
720 720 if path in ctx:
721 721 fctx = ctx[path]
722 722 rightlines = filelines(fctx)
723 723 if path not in parent:
724 724 leftlines = ()
725 725 else:
726 726 pfctx = parent[path]
727 727 leftlines = filelines(pfctx)
728 728 else:
729 729 rightlines = ()
730 730 fctx = ctx.parents()[0][path]
731 731 leftlines = filelines(fctx)
732 732
733 733 comparison = webutil.compare(tmpl, context, leftlines, rightlines)
734 734 return tmpl('filecomparison',
735 735 file=path,
736 736 node=hex(ctx.node()),
737 737 rev=ctx.rev(),
738 738 date=ctx.date(),
739 739 desc=ctx.description(),
740 740 extra=ctx.extra(),
741 741 author=ctx.user(),
742 742 rename=rename,
743 743 branch=webutil.nodebranchnodefault(ctx),
744 744 parent=webutil.parents(fctx),
745 745 child=webutil.children(fctx),
746 746 leftrev=leftrev,
747 747 leftnode=hex(leftnode),
748 748 rightrev=rightrev,
749 749 rightnode=hex(rightnode),
750 750 comparison=comparison)
751 751
752 752 def annotate(web, req, tmpl):
753 753 fctx = webutil.filectx(web.repo, req)
754 754 f = fctx.path()
755 755 parity = paritygen(web.stripecount)
756 756 diffopts = patch.diffopts(web.repo.ui, untrusted=True, section='annotate')
757 757
758 758 def annotate(**map):
759 759 last = None
760 760 if util.binary(fctx.data()):
761 761 mt = (mimetypes.guess_type(fctx.path())[0]
762 762 or 'application/octet-stream')
763 763 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
764 764 '(binary:%s)' % mt)])
765 765 else:
766 766 lines = enumerate(fctx.annotate(follow=True, linenumber=True,
767 767 diffopts=diffopts))
768 768 for lineno, ((f, targetline), l) in lines:
769 769 fnode = f.filenode()
770 770
771 771 if last != fnode:
772 772 last = fnode
773 773
774 774 yield {"parity": parity.next(),
775 775 "node": f.hex(),
776 776 "rev": f.rev(),
777 777 "author": f.user(),
778 778 "desc": f.description(),
779 779 "extra": f.extra(),
780 780 "file": f.path(),
781 781 "targetline": targetline,
782 782 "line": l,
783 783 "lineid": "l%d" % (lineno + 1),
784 784 "linenumber": "% 6d" % (lineno + 1),
785 785 "revdate": f.date()}
786 786
787 787 return tmpl("fileannotate",
788 788 file=f,
789 789 annotate=annotate,
790 790 path=webutil.up(f),
791 791 rev=fctx.rev(),
792 792 node=fctx.hex(),
793 793 author=fctx.user(),
794 794 date=fctx.date(),
795 795 desc=fctx.description(),
796 796 extra=fctx.extra(),
797 797 rename=webutil.renamelink(fctx),
798 798 branch=webutil.nodebranchnodefault(fctx),
799 799 parent=webutil.parents(fctx),
800 800 child=webutil.children(fctx),
801 801 permissions=fctx.manifest().flags(f))
802 802
803 803 def filelog(web, req, tmpl):
804 804
805 805 try:
806 806 fctx = webutil.filectx(web.repo, req)
807 807 f = fctx.path()
808 808 fl = fctx.filelog()
809 809 except error.LookupError:
810 810 f = webutil.cleanpath(web.repo, req.form['file'][0])
811 811 fl = web.repo.file(f)
812 812 numrevs = len(fl)
813 813 if not numrevs: # file doesn't exist at all
814 814 raise
815 815 rev = webutil.changectx(web.repo, req).rev()
816 816 first = fl.linkrev(0)
817 817 if rev < first: # current rev is from before file existed
818 818 raise
819 819 frev = numrevs - 1
820 820 while fl.linkrev(frev) > rev:
821 821 frev -= 1
822 822 fctx = web.repo.filectx(f, fl.linkrev(frev))
823 823
824 824 revcount = web.maxshortchanges
825 825 if 'revcount' in req.form:
826 826 try:
827 827 revcount = int(req.form.get('revcount', [revcount])[0])
828 828 revcount = max(revcount, 1)
829 829 tmpl.defaults['sessionvars']['revcount'] = revcount
830 830 except ValueError:
831 831 pass
832 832
833 833 lessvars = copy.copy(tmpl.defaults['sessionvars'])
834 834 lessvars['revcount'] = max(revcount / 2, 1)
835 835 morevars = copy.copy(tmpl.defaults['sessionvars'])
836 836 morevars['revcount'] = revcount * 2
837 837
838 838 count = fctx.filerev() + 1
839 839 start = max(0, fctx.filerev() - revcount + 1) # first rev on this page
840 840 end = min(count, start + revcount) # last rev on this page
841 841 parity = paritygen(web.stripecount, offset=start - end)
842 842
843 843 def entries():
844 844 l = []
845 845
846 846 repo = web.repo
847 847 revs = fctx.filelog().revs(start, end - 1)
848 848 for i in revs:
849 849 iterfctx = fctx.filectx(i)
850 850
851 851 l.append({"parity": parity.next(),
852 852 "filerev": i,
853 853 "file": f,
854 854 "node": iterfctx.hex(),
855 855 "author": iterfctx.user(),
856 856 "date": iterfctx.date(),
857 857 "rename": webutil.renamelink(iterfctx),
858 858 "parent": webutil.parents(iterfctx),
859 859 "child": webutil.children(iterfctx),
860 860 "desc": iterfctx.description(),
861 861 "extra": iterfctx.extra(),
862 862 "tags": webutil.nodetagsdict(repo, iterfctx.node()),
863 863 "bookmarks": webutil.nodebookmarksdict(
864 864 repo, iterfctx.node()),
865 865 "branch": webutil.nodebranchnodefault(iterfctx),
866 866 "inbranch": webutil.nodeinbranch(repo, iterfctx),
867 867 "branches": webutil.nodebranchdict(repo, iterfctx)})
868 868 for e in reversed(l):
869 869 yield e
870 870
871 871 entries = list(entries())
872 872 latestentry = entries[:1]
873 873
874 874 revnav = webutil.filerevnav(web.repo, fctx.path())
875 875 nav = revnav.gen(end - 1, revcount, count)
876 876 return tmpl("filelog", file=f, node=fctx.hex(), nav=nav,
877 877 entries=entries,
878 878 latestentry=latestentry,
879 879 revcount=revcount, morevars=morevars, lessvars=lessvars)
880 880
881 881 def archive(web, req, tmpl):
882 882 type_ = req.form.get('type', [None])[0]
883 883 allowed = web.configlist("web", "allow_archive")
884 884 key = req.form['node'][0]
885 885
886 886 if type_ not in web.archives:
887 887 msg = 'Unsupported archive type: %s' % type_
888 888 raise ErrorResponse(HTTP_NOT_FOUND, msg)
889 889
890 890 if not ((type_ in allowed or
891 891 web.configbool("web", "allow" + type_, False))):
892 892 msg = 'Archive type not allowed: %s' % type_
893 893 raise ErrorResponse(HTTP_FORBIDDEN, msg)
894 894
895 895 reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
896 896 cnode = web.repo.lookup(key)
897 897 arch_version = key
898 898 if cnode == key or key == 'tip':
899 899 arch_version = short(cnode)
900 900 name = "%s-%s" % (reponame, arch_version)
901 901
902 902 ctx = webutil.changectx(web.repo, req)
903 903 pats = []
904 matchfn = None
904 matchfn = scmutil.match(ctx, [])
905 905 file = req.form.get('file', None)
906 906 if file:
907 907 pats = ['path:' + file[0]]
908 908 matchfn = scmutil.match(ctx, pats, default='path')
909 909 if pats:
910 910 files = [f for f in ctx.manifest().keys() if matchfn(f)]
911 911 if not files:
912 912 raise ErrorResponse(HTTP_NOT_FOUND,
913 913 'file(s) not found: %s' % file[0])
914 914
915 915 mimetype, artype, extension, encoding = web.archive_specs[type_]
916 916 headers = [
917 917 ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
918 918 ]
919 919 if encoding:
920 920 headers.append(('Content-Encoding', encoding))
921 921 req.headers.extend(headers)
922 922 req.respond(HTTP_OK, mimetype)
923 923
924 924 archival.archive(web.repo, req, cnode, artype, prefix=name,
925 925 matchfn=matchfn,
926 926 subrepos=web.configbool("web", "archivesubrepos"))
927 927 return []
928 928
929 929
930 930 def static(web, req, tmpl):
931 931 fname = req.form['file'][0]
932 932 # a repo owner may set web.static in .hg/hgrc to get any file
933 933 # readable by the user running the CGI script
934 934 static = web.config("web", "static", None, untrusted=False)
935 935 if not static:
936 936 tp = web.templatepath or templater.templatepaths()
937 937 if isinstance(tp, str):
938 938 tp = [tp]
939 939 static = [os.path.join(p, 'static') for p in tp]
940 940 staticfile(static, fname, req)
941 941 return []
942 942
943 943 def graph(web, req, tmpl):
944 944
945 945 ctx = webutil.changectx(web.repo, req)
946 946 rev = ctx.rev()
947 947
948 948 bg_height = 39
949 949 revcount = web.maxshortchanges
950 950 if 'revcount' in req.form:
951 951 try:
952 952 revcount = int(req.form.get('revcount', [revcount])[0])
953 953 revcount = max(revcount, 1)
954 954 tmpl.defaults['sessionvars']['revcount'] = revcount
955 955 except ValueError:
956 956 pass
957 957
958 958 lessvars = copy.copy(tmpl.defaults['sessionvars'])
959 959 lessvars['revcount'] = max(revcount / 2, 1)
960 960 morevars = copy.copy(tmpl.defaults['sessionvars'])
961 961 morevars['revcount'] = revcount * 2
962 962
963 963 count = len(web.repo)
964 964 pos = rev
965 965
966 966 uprev = min(max(0, count - 1), rev + revcount)
967 967 downrev = max(0, rev - revcount)
968 968 changenav = webutil.revnav(web.repo).gen(pos, revcount, count)
969 969
970 970 tree = []
971 971 if pos != -1:
972 972 allrevs = web.repo.changelog.revs(pos, 0)
973 973 revs = []
974 974 for i in allrevs:
975 975 revs.append(i)
976 976 if len(revs) >= revcount:
977 977 break
978 978
979 979 # We have to feed a baseset to dagwalker as it is expecting smartset
980 980 # object. This does not have a big impact on hgweb performance itself
981 981 # since hgweb graphing code is not itself lazy yet.
982 982 dag = graphmod.dagwalker(web.repo, revset.baseset(revs))
983 983 # As we said one line above... not lazy.
984 984 tree = list(graphmod.colored(dag, web.repo))
985 985
986 986 def getcolumns(tree):
987 987 cols = 0
988 988 for (id, type, ctx, vtx, edges) in tree:
989 989 if type != graphmod.CHANGESET:
990 990 continue
991 991 cols = max(cols, max([edge[0] for edge in edges] or [0]),
992 992 max([edge[1] for edge in edges] or [0]))
993 993 return cols
994 994
995 995 def graphdata(usetuples, **map):
996 996 data = []
997 997
998 998 row = 0
999 999 for (id, type, ctx, vtx, edges) in tree:
1000 1000 if type != graphmod.CHANGESET:
1001 1001 continue
1002 1002 node = str(ctx)
1003 1003 age = templatefilters.age(ctx.date())
1004 1004 desc = templatefilters.firstline(ctx.description())
1005 1005 desc = cgi.escape(templatefilters.nonempty(desc))
1006 1006 user = cgi.escape(templatefilters.person(ctx.user()))
1007 1007 branch = cgi.escape(ctx.branch())
1008 1008 try:
1009 1009 branchnode = web.repo.branchtip(branch)
1010 1010 except error.RepoLookupError:
1011 1011 branchnode = None
1012 1012 branch = branch, branchnode == ctx.node()
1013 1013
1014 1014 if usetuples:
1015 1015 data.append((node, vtx, edges, desc, user, age, branch,
1016 1016 [cgi.escape(x) for x in ctx.tags()],
1017 1017 [cgi.escape(x) for x in ctx.bookmarks()]))
1018 1018 else:
1019 1019 edgedata = [{'col': edge[0], 'nextcol': edge[1],
1020 1020 'color': (edge[2] - 1) % 6 + 1,
1021 1021 'width': edge[3], 'bcolor': edge[4]}
1022 1022 for edge in edges]
1023 1023
1024 1024 data.append(
1025 1025 {'node': node,
1026 1026 'col': vtx[0],
1027 1027 'color': (vtx[1] - 1) % 6 + 1,
1028 1028 'edges': edgedata,
1029 1029 'row': row,
1030 1030 'nextrow': row + 1,
1031 1031 'desc': desc,
1032 1032 'user': user,
1033 1033 'age': age,
1034 1034 'bookmarks': webutil.nodebookmarksdict(
1035 1035 web.repo, ctx.node()),
1036 1036 'branches': webutil.nodebranchdict(web.repo, ctx),
1037 1037 'inbranch': webutil.nodeinbranch(web.repo, ctx),
1038 1038 'tags': webutil.nodetagsdict(web.repo, ctx.node())})
1039 1039
1040 1040 row += 1
1041 1041
1042 1042 return data
1043 1043
1044 1044 cols = getcolumns(tree)
1045 1045 rows = len(tree)
1046 1046 canvasheight = (rows + 1) * bg_height - 27
1047 1047
1048 1048 return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
1049 1049 lessvars=lessvars, morevars=morevars, downrev=downrev,
1050 1050 cols=cols, rows=rows,
1051 1051 canvaswidth=(cols + 1) * bg_height,
1052 1052 truecanvasheight=rows * bg_height,
1053 1053 canvasheight=canvasheight, bg_height=bg_height,
1054 1054 jsdata=lambda **x: graphdata(True, **x),
1055 1055 nodes=lambda **x: graphdata(False, **x),
1056 1056 node=ctx.hex(), changenav=changenav)
1057 1057
1058 1058 def _getdoc(e):
1059 1059 doc = e[0].__doc__
1060 1060 if doc:
1061 1061 doc = _(doc).split('\n')[0]
1062 1062 else:
1063 1063 doc = _('(no help text available)')
1064 1064 return doc
1065 1065
1066 1066 def help(web, req, tmpl):
1067 1067 from mercurial import commands # avoid cycle
1068 1068
1069 1069 topicname = req.form.get('node', [None])[0]
1070 1070 if not topicname:
1071 1071 def topics(**map):
1072 1072 for entries, summary, _doc in helpmod.helptable:
1073 1073 yield {'topic': entries[0], 'summary': summary}
1074 1074
1075 1075 early, other = [], []
1076 1076 primary = lambda s: s.split('|')[0]
1077 1077 for c, e in commands.table.iteritems():
1078 1078 doc = _getdoc(e)
1079 1079 if 'DEPRECATED' in doc or c.startswith('debug'):
1080 1080 continue
1081 1081 cmd = primary(c)
1082 1082 if cmd.startswith('^'):
1083 1083 early.append((cmd[1:], doc))
1084 1084 else:
1085 1085 other.append((cmd, doc))
1086 1086
1087 1087 early.sort()
1088 1088 other.sort()
1089 1089
1090 1090 def earlycommands(**map):
1091 1091 for c, doc in early:
1092 1092 yield {'topic': c, 'summary': doc}
1093 1093
1094 1094 def othercommands(**map):
1095 1095 for c, doc in other:
1096 1096 yield {'topic': c, 'summary': doc}
1097 1097
1098 1098 return tmpl('helptopics', topics=topics, earlycommands=earlycommands,
1099 1099 othercommands=othercommands, title='Index')
1100 1100
1101 1101 u = webutil.wsgiui()
1102 1102 u.verbose = True
1103 1103 try:
1104 1104 doc = helpmod.help_(u, topicname)
1105 1105 except error.UnknownCommand:
1106 1106 raise ErrorResponse(HTTP_NOT_FOUND)
1107 1107 return tmpl('help', topic=topicname, doc=doc)
@@ -1,330 +1,331 b''
1 1 # mail.py - mail sending bits for mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 import util, encoding, sslutil
10 10 import os, smtplib, socket, quopri, time, sys
11 11 import email
12 12 # On python2.4 you have to import these by name or they fail to
13 13 # load. This was not a problem on Python 2.7.
14 14 import email.Header
15 15 import email.MIMEText
16 16
17 17 _oldheaderinit = email.Header.Header.__init__
18 18 def _unifiedheaderinit(self, *args, **kw):
19 19 """
20 20 Python 2.7 introduces a backwards incompatible change
21 21 (Python issue1974, r70772) in email.Generator.Generator code:
22 22 pre-2.7 code passed "continuation_ws='\t'" to the Header
23 23 constructor, and 2.7 removed this parameter.
24 24
25 25 Default argument is continuation_ws=' ', which means that the
26 26 behaviour is different in <2.7 and 2.7
27 27
28 28 We consider the 2.7 behaviour to be preferable, but need
29 29 to have an unified behaviour for versions 2.4 to 2.7
30 30 """
31 31 # override continuation_ws
32 32 kw['continuation_ws'] = ' '
33 33 _oldheaderinit(self, *args, **kw)
34 34
35 35 email.Header.Header.__dict__['__init__'] = _unifiedheaderinit
36 36
37 37 class STARTTLS(smtplib.SMTP):
38 38 '''Derived class to verify the peer certificate for STARTTLS.
39 39
40 40 This class allows to pass any keyword arguments to SSL socket creation.
41 41 '''
42 42 def __init__(self, sslkwargs, **kwargs):
43 43 smtplib.SMTP.__init__(self, **kwargs)
44 44 self._sslkwargs = sslkwargs
45 45
46 46 def starttls(self, keyfile=None, certfile=None):
47 47 if not self.has_extn("starttls"):
48 48 msg = "STARTTLS extension not supported by server"
49 49 raise smtplib.SMTPException(msg)
50 50 (resp, reply) = self.docmd("STARTTLS")
51 51 if resp == 220:
52 52 self.sock = sslutil.ssl_wrap_socket(self.sock, keyfile, certfile,
53 53 **self._sslkwargs)
54 54 if not util.safehasattr(self.sock, "read"):
55 55 # using httplib.FakeSocket with Python 2.5.x or earlier
56 56 self.sock.read = self.sock.recv
57 57 self.file = smtplib.SSLFakeFile(self.sock)
58 58 self.helo_resp = None
59 59 self.ehlo_resp = None
60 60 self.esmtp_features = {}
61 61 self.does_esmtp = 0
62 62 return (resp, reply)
63 63
64 64 if util.safehasattr(smtplib.SMTP, '_get_socket'):
65 65 class SMTPS(smtplib.SMTP):
66 66 '''Derived class to verify the peer certificate for SMTPS.
67 67
68 68 This class allows to pass any keyword arguments to SSL socket creation.
69 69 '''
70 70 def __init__(self, sslkwargs, keyfile=None, certfile=None, **kwargs):
71 71 self.keyfile = keyfile
72 72 self.certfile = certfile
73 73 smtplib.SMTP.__init__(self, **kwargs)
74 74 self.default_port = smtplib.SMTP_SSL_PORT
75 75 self._sslkwargs = sslkwargs
76 76
77 77 def _get_socket(self, host, port, timeout):
78 78 if self.debuglevel > 0:
79 79 print >> sys.stderr, 'connect:', (host, port)
80 80 new_socket = socket.create_connection((host, port), timeout)
81 81 new_socket = sslutil.ssl_wrap_socket(new_socket,
82 82 self.keyfile, self.certfile,
83 83 **self._sslkwargs)
84 84 self.file = smtplib.SSLFakeFile(new_socket)
85 85 return new_socket
86 86 else:
87 87 def SMTPS(sslkwargs, keyfile=None, certfile=None, **kwargs):
88 88 raise util.Abort(_('SMTPS requires Python 2.6 or later'))
89 89
90 90 def _smtp(ui):
91 91 '''build an smtp connection and return a function to send mail'''
92 92 local_hostname = ui.config('smtp', 'local_hostname')
93 93 tls = ui.config('smtp', 'tls', 'none')
94 94 # backward compatible: when tls = true, we use starttls.
95 95 starttls = tls == 'starttls' or util.parsebool(tls)
96 96 smtps = tls == 'smtps'
97 97 if (starttls or smtps) and not util.safehasattr(socket, 'ssl'):
98 98 raise util.Abort(_("can't use TLS: Python SSL support not installed"))
99 99 mailhost = ui.config('smtp', 'host')
100 100 if not mailhost:
101 101 raise util.Abort(_('smtp.host not configured - cannot send mail'))
102 102 verifycert = ui.config('smtp', 'verifycert', 'strict')
103 103 if verifycert not in ['strict', 'loose']:
104 104 if util.parsebool(verifycert) is not False:
105 105 raise util.Abort(_('invalid smtp.verifycert configuration: %s')
106 106 % (verifycert))
107 verifycert = False
107 108 if (starttls or smtps) and verifycert:
108 109 sslkwargs = sslutil.sslkwargs(ui, mailhost)
109 110 else:
110 111 sslkwargs = {}
111 112 if smtps:
112 113 ui.note(_('(using smtps)\n'))
113 114 s = SMTPS(sslkwargs, local_hostname=local_hostname)
114 115 elif starttls:
115 116 s = STARTTLS(sslkwargs, local_hostname=local_hostname)
116 117 else:
117 118 s = smtplib.SMTP(local_hostname=local_hostname)
118 119 if smtps:
119 120 defaultport = 465
120 121 else:
121 122 defaultport = 25
122 123 mailport = util.getport(ui.config('smtp', 'port', defaultport))
123 124 ui.note(_('sending mail: smtp host %s, port %s\n') %
124 125 (mailhost, mailport))
125 126 s.connect(host=mailhost, port=mailport)
126 127 if starttls:
127 128 ui.note(_('(using starttls)\n'))
128 129 s.ehlo()
129 130 s.starttls()
130 131 s.ehlo()
131 132 if (starttls or smtps) and verifycert:
132 133 ui.note(_('(verifying remote certificate)\n'))
133 134 sslutil.validator(ui, mailhost)(s.sock, verifycert == 'strict')
134 135 username = ui.config('smtp', 'username')
135 136 password = ui.config('smtp', 'password')
136 137 if username and not password:
137 138 password = ui.getpass()
138 139 if username and password:
139 140 ui.note(_('(authenticating to mail server as %s)\n') %
140 141 (username))
141 142 try:
142 143 s.login(username, password)
143 144 except smtplib.SMTPException, inst:
144 145 raise util.Abort(inst)
145 146
146 147 def send(sender, recipients, msg):
147 148 try:
148 149 return s.sendmail(sender, recipients, msg)
149 150 except smtplib.SMTPRecipientsRefused, inst:
150 151 recipients = [r[1] for r in inst.recipients.values()]
151 152 raise util.Abort('\n' + '\n'.join(recipients))
152 153 except smtplib.SMTPException, inst:
153 154 raise util.Abort(inst)
154 155
155 156 return send
156 157
157 158 def _sendmail(ui, sender, recipients, msg):
158 159 '''send mail using sendmail.'''
159 160 program = ui.config('email', 'method')
160 161 cmdline = '%s -f %s %s' % (program, util.email(sender),
161 162 ' '.join(map(util.email, recipients)))
162 163 ui.note(_('sending mail: %s\n') % cmdline)
163 164 fp = util.popen(cmdline, 'w')
164 165 fp.write(msg)
165 166 ret = fp.close()
166 167 if ret:
167 168 raise util.Abort('%s %s' % (
168 169 os.path.basename(program.split(None, 1)[0]),
169 170 util.explainexit(ret)[0]))
170 171
171 172 def _mbox(mbox, sender, recipients, msg):
172 173 '''write mails to mbox'''
173 174 fp = open(mbox, 'ab+')
174 175 # Should be time.asctime(), but Windows prints 2-characters day
175 176 # of month instead of one. Make them print the same thing.
176 177 date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime())
177 178 fp.write('From %s %s\n' % (sender, date))
178 179 fp.write(msg)
179 180 fp.write('\n\n')
180 181 fp.close()
181 182
182 183 def connect(ui, mbox=None):
183 184 '''make a mail connection. return a function to send mail.
184 185 call as sendmail(sender, list-of-recipients, msg).'''
185 186 if mbox:
186 187 open(mbox, 'wb').close()
187 188 return lambda s, r, m: _mbox(mbox, s, r, m)
188 189 if ui.config('email', 'method', 'smtp') == 'smtp':
189 190 return _smtp(ui)
190 191 return lambda s, r, m: _sendmail(ui, s, r, m)
191 192
192 193 def sendmail(ui, sender, recipients, msg, mbox=None):
193 194 send = connect(ui, mbox=mbox)
194 195 return send(sender, recipients, msg)
195 196
196 197 def validateconfig(ui):
197 198 '''determine if we have enough config data to try sending email.'''
198 199 method = ui.config('email', 'method', 'smtp')
199 200 if method == 'smtp':
200 201 if not ui.config('smtp', 'host'):
201 202 raise util.Abort(_('smtp specified as email transport, '
202 203 'but no smtp host configured'))
203 204 else:
204 205 if not util.findexe(method):
205 206 raise util.Abort(_('%r specified as email transport, '
206 207 'but not in PATH') % method)
207 208
208 209 def mimetextpatch(s, subtype='plain', display=False):
209 210 '''Return MIME message suitable for a patch.
210 211 Charset will be detected as utf-8 or (possibly fake) us-ascii.
211 212 Transfer encodings will be used if necessary.'''
212 213
213 214 cs = 'us-ascii'
214 215 if not display:
215 216 try:
216 217 s.decode('us-ascii')
217 218 except UnicodeDecodeError:
218 219 try:
219 220 s.decode('utf-8')
220 221 cs = 'utf-8'
221 222 except UnicodeDecodeError:
222 223 # We'll go with us-ascii as a fallback.
223 224 pass
224 225
225 226 return mimetextqp(s, subtype, cs)
226 227
227 228 def mimetextqp(body, subtype, charset):
228 229 '''Return MIME message.
229 230 Quoted-printable transfer encoding will be used if necessary.
230 231 '''
231 232 enc = None
232 233 for line in body.splitlines():
233 234 if len(line) > 950:
234 235 body = quopri.encodestring(body)
235 236 enc = "quoted-printable"
236 237 break
237 238
238 239 msg = email.MIMEText.MIMEText(body, subtype, charset)
239 240 if enc:
240 241 del msg['Content-Transfer-Encoding']
241 242 msg['Content-Transfer-Encoding'] = enc
242 243 return msg
243 244
244 245 def _charsets(ui):
245 246 '''Obtains charsets to send mail parts not containing patches.'''
246 247 charsets = [cs.lower() for cs in ui.configlist('email', 'charsets')]
247 248 fallbacks = [encoding.fallbackencoding.lower(),
248 249 encoding.encoding.lower(), 'utf-8']
249 250 for cs in fallbacks: # find unique charsets while keeping order
250 251 if cs not in charsets:
251 252 charsets.append(cs)
252 253 return [cs for cs in charsets if not cs.endswith('ascii')]
253 254
254 255 def _encode(ui, s, charsets):
255 256 '''Returns (converted) string, charset tuple.
256 257 Finds out best charset by cycling through sendcharsets in descending
257 258 order. Tries both encoding and fallbackencoding for input. Only as
258 259 last resort send as is in fake ascii.
259 260 Caveat: Do not use for mail parts containing patches!'''
260 261 try:
261 262 s.decode('ascii')
262 263 except UnicodeDecodeError:
263 264 sendcharsets = charsets or _charsets(ui)
264 265 for ics in (encoding.encoding, encoding.fallbackencoding):
265 266 try:
266 267 u = s.decode(ics)
267 268 except UnicodeDecodeError:
268 269 continue
269 270 for ocs in sendcharsets:
270 271 try:
271 272 return u.encode(ocs), ocs
272 273 except UnicodeEncodeError:
273 274 pass
274 275 except LookupError:
275 276 ui.warn(_('ignoring invalid sendcharset: %s\n') % ocs)
276 277 # if ascii, or all conversion attempts fail, send (broken) ascii
277 278 return s, 'us-ascii'
278 279
279 280 def headencode(ui, s, charsets=None, display=False):
280 281 '''Returns RFC-2047 compliant header from given string.'''
281 282 if not display:
282 283 # split into words?
283 284 s, cs = _encode(ui, s, charsets)
284 285 return str(email.Header.Header(s, cs))
285 286 return s
286 287
287 288 def _addressencode(ui, name, addr, charsets=None):
288 289 name = headencode(ui, name, charsets)
289 290 try:
290 291 acc, dom = addr.split('@')
291 292 acc = acc.encode('ascii')
292 293 dom = dom.decode(encoding.encoding).encode('idna')
293 294 addr = '%s@%s' % (acc, dom)
294 295 except UnicodeDecodeError:
295 296 raise util.Abort(_('invalid email address: %s') % addr)
296 297 except ValueError:
297 298 try:
298 299 # too strict?
299 300 addr = addr.encode('ascii')
300 301 except UnicodeDecodeError:
301 302 raise util.Abort(_('invalid local address: %s') % addr)
302 303 return email.Utils.formataddr((name, addr))
303 304
304 305 def addressencode(ui, address, charsets=None, display=False):
305 306 '''Turns address into RFC-2047 compliant header.'''
306 307 if display or not address:
307 308 return address or ''
308 309 name, addr = email.Utils.parseaddr(address)
309 310 return _addressencode(ui, name, addr, charsets)
310 311
311 312 def addrlistencode(ui, addrs, charsets=None, display=False):
312 313 '''Turns a list of addresses into a list of RFC-2047 compliant headers.
313 314 A single element of input list may contain multiple addresses, but output
314 315 always has one address per item'''
315 316 if display:
316 317 return [a.strip() for a in addrs if a.strip()]
317 318
318 319 result = []
319 320 for name, addr in email.Utils.getaddresses(addrs):
320 321 if name or addr:
321 322 result.append(_addressencode(ui, name, addr, charsets))
322 323 return result
323 324
324 325 def mimeencode(ui, s, charsets=None, display=False):
325 326 '''creates mime text object, encodes it if needed, and sets
326 327 charset and transfer-encoding accordingly.'''
327 328 cs = 'us-ascii'
328 329 if not display:
329 330 s, cs = _encode(ui, s, charsets)
330 331 return mimetextqp(s, 'plain', cs)
@@ -1,334 +1,368 b''
1 1 #require serve
2 2
3 3 $ hg init test
4 4 $ cd test
5 5 $ echo foo>foo
6 6 $ hg commit -Am 1 -d '1 0'
7 7 adding foo
8 8 $ echo bar>bar
9 9 $ hg commit -Am 2 -d '2 0'
10 10 adding bar
11 11 $ mkdir baz
12 12 $ echo bletch>baz/bletch
13 13 $ hg commit -Am 3 -d '1000000000 0'
14 14 adding baz/bletch
15 $ hg init subrepo
16 $ touch subrepo/sub
17 $ hg -q -R subrepo ci -Am "init subrepo"
18 $ echo "subrepo = subrepo" > .hgsub
19 $ hg add .hgsub
20 $ hg ci -m "add subrepo"
15 21 $ echo "[web]" >> .hg/hgrc
16 22 $ echo "name = test-archive" >> .hg/hgrc
23 $ echo "archivesubrepos = True" >> .hg/hgrc
17 24 $ cp .hg/hgrc .hg/hgrc-base
18 25 > test_archtype() {
19 26 > echo "allow_archive = $1" >> .hg/hgrc
20 27 > hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
21 28 > cat hg.pid >> $DAEMON_PIDS
22 29 > echo % $1 allowed should give 200
23 30 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$2" | head -n 1
24 31 > echo % $3 and $4 disallowed should both give 403
25 32 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$3" | head -n 1
26 33 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$4" | head -n 1
27 34 > "$TESTDIR/killdaemons.py" $DAEMON_PIDS
28 35 > cat errors.log
29 36 > cp .hg/hgrc-base .hg/hgrc
30 37 > }
31 38
32 39 check http return codes
33 40
34 41 $ test_archtype gz tar.gz tar.bz2 zip
35 42 % gz allowed should give 200
36 43 200 Script output follows
37 44 % tar.bz2 and zip disallowed should both give 403
38 45 403 Archive type not allowed: bz2
39 46 403 Archive type not allowed: zip
40 47 $ test_archtype bz2 tar.bz2 zip tar.gz
41 48 % bz2 allowed should give 200
42 49 200 Script output follows
43 50 % zip and tar.gz disallowed should both give 403
44 51 403 Archive type not allowed: zip
45 52 403 Archive type not allowed: gz
46 53 $ test_archtype zip zip tar.gz tar.bz2
47 54 % zip allowed should give 200
48 55 200 Script output follows
49 56 % tar.gz and tar.bz2 disallowed should both give 403
50 57 403 Archive type not allowed: gz
51 58 403 Archive type not allowed: bz2
52 59
53 60 $ echo "allow_archive = gz bz2 zip" >> .hg/hgrc
54 61 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
55 62 $ cat hg.pid >> $DAEMON_PIDS
56 63
57 64 invalid arch type should give 404
58 65
59 66 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.invalid" | head -n 1
60 67 404 Unsupported archive type: None
61 68
62 69 $ TIP=`hg id -v | cut -f1 -d' '`
63 70 $ QTIP=`hg id -q`
64 71 $ cat > getarchive.py <<EOF
65 72 > import os, sys, urllib2
66 73 > try:
67 74 > # Set stdout to binary mode for win32 platforms
68 75 > import msvcrt
69 76 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
70 77 > except ImportError:
71 78 > pass
72 79 > if len(sys.argv) <= 3:
73 80 > node, archive = sys.argv[1:]
74 81 > requeststr = 'cmd=archive;node=%s;type=%s' % (node, archive)
75 82 > else:
76 83 > node, archive, file = sys.argv[1:]
77 84 > requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file)
78 85 > try:
79 86 > f = urllib2.urlopen('http://127.0.0.1:%s/?%s'
80 87 > % (os.environ['HGPORT'], requeststr))
81 88 > sys.stdout.write(f.read())
82 89 > except urllib2.HTTPError, e:
83 90 > sys.stderr.write(str(e) + '\n')
84 91 > EOF
85 92 $ python getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
86 test-archive-2c0277f05ed4/.hg_archival.txt
87 test-archive-2c0277f05ed4/bar
88 test-archive-2c0277f05ed4/baz/bletch
89 test-archive-2c0277f05ed4/foo
93 test-archive-1701ef1f1510/.hg_archival.txt
94 test-archive-1701ef1f1510/.hgsub
95 test-archive-1701ef1f1510/.hgsubstate
96 test-archive-1701ef1f1510/bar
97 test-archive-1701ef1f1510/baz/bletch
98 test-archive-1701ef1f1510/foo
99 test-archive-1701ef1f1510/subrepo/sub
90 100 $ python getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null
91 test-archive-2c0277f05ed4/.hg_archival.txt
92 test-archive-2c0277f05ed4/bar
93 test-archive-2c0277f05ed4/baz/bletch
94 test-archive-2c0277f05ed4/foo
101 test-archive-1701ef1f1510/.hg_archival.txt
102 test-archive-1701ef1f1510/.hgsub
103 test-archive-1701ef1f1510/.hgsubstate
104 test-archive-1701ef1f1510/bar
105 test-archive-1701ef1f1510/baz/bletch
106 test-archive-1701ef1f1510/foo
107 test-archive-1701ef1f1510/subrepo/sub
95 108 $ python getarchive.py "$TIP" zip > archive.zip
96 109 $ unzip -t archive.zip
97 110 Archive: archive.zip
98 testing: test-archive-2c0277f05ed4/.hg_archival.txt OK
99 testing: test-archive-2c0277f05ed4/bar OK
100 testing: test-archive-2c0277f05ed4/baz/bletch OK
101 testing: test-archive-2c0277f05ed4/foo OK
111 testing: test-archive-1701ef1f1510/.hg_archival.txt OK
112 testing: test-archive-1701ef1f1510/.hgsub OK
113 testing: test-archive-1701ef1f1510/.hgsubstate OK
114 testing: test-archive-1701ef1f1510/bar OK
115 testing: test-archive-1701ef1f1510/baz/bletch OK
116 testing: test-archive-1701ef1f1510/foo OK
117 testing: test-archive-1701ef1f1510/subrepo/sub OK
102 118 No errors detected in compressed data of archive.zip.
103 119
104 120 test that we can download single directories and files
105 121
106 122 $ python getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null
107 test-archive-2c0277f05ed4/baz/bletch
123 test-archive-1701ef1f1510/baz/bletch
108 124 $ python getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null
109 test-archive-2c0277f05ed4/foo
125 test-archive-1701ef1f1510/foo
110 126
111 127 test that we detect file patterns that match no files
112 128
113 129 $ python getarchive.py "$TIP" gz foobar
114 130 HTTP Error 404: file(s) not found: foobar
115 131
116 132 test that we reject unsafe patterns
117 133
118 134 $ python getarchive.py "$TIP" gz relre:baz
119 135 HTTP Error 404: file(s) not found: relre:baz
120 136
121 137 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
122 138
123 139 $ hg archive -t tar test.tar
124 140 $ tar tf test.tar
125 141 test/.hg_archival.txt
142 test/.hgsub
143 test/.hgsubstate
126 144 test/bar
127 145 test/baz/bletch
128 146 test/foo
129 147
130 148 $ hg archive --debug -t tbz2 -X baz test.tar.bz2
131 archiving: 0/2 files (0.00%)
132 archiving: bar 1/2 files (50.00%)
133 archiving: foo 2/2 files (100.00%)
149 archiving: 0/4 files (0.00%)
150 archiving: .hgsub 1/4 files (25.00%)
151 archiving: .hgsubstate 2/4 files (50.00%)
152 archiving: bar 3/4 files (75.00%)
153 archiving: foo 4/4 files (100.00%)
134 154 $ bunzip2 -dc test.tar.bz2 | tar tf - 2>/dev/null
135 155 test/.hg_archival.txt
156 test/.hgsub
157 test/.hgsubstate
136 158 test/bar
137 159 test/foo
138 160
139 161 $ hg archive -t tgz -p %b-%h test-%h.tar.gz
140 162 $ gzip -dc test-$QTIP.tar.gz | tar tf - 2>/dev/null
141 test-2c0277f05ed4/.hg_archival.txt
142 test-2c0277f05ed4/bar
143 test-2c0277f05ed4/baz/bletch
144 test-2c0277f05ed4/foo
163 test-1701ef1f1510/.hg_archival.txt
164 test-1701ef1f1510/.hgsub
165 test-1701ef1f1510/.hgsubstate
166 test-1701ef1f1510/bar
167 test-1701ef1f1510/baz/bletch
168 test-1701ef1f1510/foo
145 169
146 170 $ hg archive autodetected_test.tar
147 171 $ tar tf autodetected_test.tar
148 172 autodetected_test/.hg_archival.txt
173 autodetected_test/.hgsub
174 autodetected_test/.hgsubstate
149 175 autodetected_test/bar
150 176 autodetected_test/baz/bletch
151 177 autodetected_test/foo
152 178
153 179 The '-t' should override autodetection
154 180
155 181 $ hg archive -t tar autodetect_override_test.zip
156 182 $ tar tf autodetect_override_test.zip
157 183 autodetect_override_test.zip/.hg_archival.txt
184 autodetect_override_test.zip/.hgsub
185 autodetect_override_test.zip/.hgsubstate
158 186 autodetect_override_test.zip/bar
159 187 autodetect_override_test.zip/baz/bletch
160 188 autodetect_override_test.zip/foo
161 189
162 190 $ for ext in tar tar.gz tgz tar.bz2 tbz2 zip; do
163 191 > hg archive auto_test.$ext
164 192 > if [ -d auto_test.$ext ]; then
165 193 > echo "extension $ext was not autodetected."
166 194 > fi
167 195 > done
168 196
169 197 $ cat > md5comp.py <<EOF
170 198 > try:
171 199 > from hashlib import md5
172 200 > except ImportError:
173 201 > from md5 import md5
174 202 > import sys
175 203 > f1, f2 = sys.argv[1:3]
176 204 > h1 = md5(file(f1, 'rb').read()).hexdigest()
177 205 > h2 = md5(file(f2, 'rb').read()).hexdigest()
178 206 > print h1 == h2 or "md5 differ: " + repr((h1, h2))
179 207 > EOF
180 208
181 209 archive name is stored in the archive, so create similar archives and
182 210 rename them afterwards.
183 211
184 212 $ hg archive -t tgz tip.tar.gz
185 213 $ mv tip.tar.gz tip1.tar.gz
186 214 $ sleep 1
187 215 $ hg archive -t tgz tip.tar.gz
188 216 $ mv tip.tar.gz tip2.tar.gz
189 217 $ python md5comp.py tip1.tar.gz tip2.tar.gz
190 218 True
191 219
192 220 $ hg archive -t zip -p /illegal test.zip
193 221 abort: archive prefix contains illegal components
194 222 [255]
195 223 $ hg archive -t zip -p very/../bad test.zip
196 224
197 225 $ hg archive --config ui.archivemeta=false -t zip -r 2 test.zip
198 226 $ unzip -t test.zip
199 227 Archive: test.zip
200 228 testing: test/bar OK
201 229 testing: test/baz/bletch OK
202 230 testing: test/foo OK
203 231 No errors detected in compressed data of test.zip.
204 232
205 233 $ hg archive -t tar - | tar tf - 2>/dev/null
206 test-2c0277f05ed4/.hg_archival.txt
207 test-2c0277f05ed4/bar
208 test-2c0277f05ed4/baz/bletch
209 test-2c0277f05ed4/foo
234 test-1701ef1f1510/.hg_archival.txt
235 test-1701ef1f1510/.hgsub
236 test-1701ef1f1510/.hgsubstate
237 test-1701ef1f1510/bar
238 test-1701ef1f1510/baz/bletch
239 test-1701ef1f1510/foo
210 240
211 241 $ hg archive -r 0 -t tar rev-%r.tar
212 242 $ [ -f rev-0.tar ]
213 243
214 244 test .hg_archival.txt
215 245
216 246 $ hg archive ../test-tags
217 247 $ cat ../test-tags/.hg_archival.txt
218 248 repo: daa7f7c60e0a224faa4ff77ca41b2760562af264
219 node: 2c0277f05ed49d1c8328fb9ba92fba7a5ebcb33e
249 node: 1701ef1f151069b8747038e93b5186bb43a47504
220 250 branch: default
221 251 latesttag: null
222 latesttagdistance: 3
252 latesttagdistance: 4
223 253 $ hg tag -r 2 mytag
224 254 $ hg tag -r 2 anothertag
225 255 $ hg archive -r 2 ../test-lasttag
226 256 $ cat ../test-lasttag/.hg_archival.txt
227 257 repo: daa7f7c60e0a224faa4ff77ca41b2760562af264
228 258 node: 2c0277f05ed49d1c8328fb9ba92fba7a5ebcb33e
229 259 branch: default
230 260 tag: anothertag
231 261 tag: mytag
232 262
233 263 $ hg archive -t bogus test.bogus
234 264 abort: unknown archive type 'bogus'
235 265 [255]
236 266
237 267 enable progress extension:
238 268
239 269 $ cp $HGRCPATH $HGRCPATH.no-progress
240 270 $ cat >> $HGRCPATH <<EOF
241 271 > [extensions]
242 272 > progress =
243 273 > [progress]
244 274 > assume-tty = 1
245 275 > format = topic bar number
246 276 > delay = 0
247 277 > refresh = 0
248 278 > width = 60
249 279 > EOF
250 280
251 281 $ hg archive ../with-progress
252 282 \r (no-eol) (esc)
253 archiving [ ] 0/4\r (no-eol) (esc)
254 archiving [ ] 0/4\r (no-eol) (esc)
255 archiving [=========> ] 1/4\r (no-eol) (esc)
256 archiving [=========> ] 1/4\r (no-eol) (esc)
257 archiving [====================> ] 2/4\r (no-eol) (esc)
258 archiving [====================> ] 2/4\r (no-eol) (esc)
259 archiving [===============================> ] 3/4\r (no-eol) (esc)
260 archiving [===============================> ] 3/4\r (no-eol) (esc)
261 archiving [==========================================>] 4/4\r (no-eol) (esc)
262 archiving [==========================================>] 4/4\r (no-eol) (esc)
283 archiving [ ] 0/6\r (no-eol) (esc)
284 archiving [ ] 0/6\r (no-eol) (esc)
285 archiving [======> ] 1/6\r (no-eol) (esc)
286 archiving [======> ] 1/6\r (no-eol) (esc)
287 archiving [=============> ] 2/6\r (no-eol) (esc)
288 archiving [=============> ] 2/6\r (no-eol) (esc)
289 archiving [====================> ] 3/6\r (no-eol) (esc)
290 archiving [====================> ] 3/6\r (no-eol) (esc)
291 archiving [===========================> ] 4/6\r (no-eol) (esc)
292 archiving [===========================> ] 4/6\r (no-eol) (esc)
293 archiving [==================================> ] 5/6\r (no-eol) (esc)
294 archiving [==================================> ] 5/6\r (no-eol) (esc)
295 archiving [==========================================>] 6/6\r (no-eol) (esc)
296 archiving [==========================================>] 6/6\r (no-eol) (esc)
263 297 \r (no-eol) (esc)
264 298
265 299 cleanup after progress extension test:
266 300
267 301 $ cp $HGRCPATH.no-progress $HGRCPATH
268 302
269 303 server errors
270 304
271 305 $ cat errors.log
272 306
273 307 empty repo
274 308
275 309 $ hg init ../empty
276 310 $ cd ../empty
277 311 $ hg archive ../test-empty
278 312 abort: no working directory: please specify a revision
279 313 [255]
280 314
281 315 old file -- date clamped to 1980
282 316
283 317 $ touch -t 197501010000 old
284 318 $ hg add old
285 319 $ hg commit -m old
286 320 $ hg archive ../old.zip
287 321 $ unzip -l ../old.zip
288 322 Archive: ../old.zip
289 323 \s*Length.* (re)
290 324 *-----* (glob)
291 325 *147*80*00:00*old/.hg_archival.txt (glob)
292 326 *0*80*00:00*old/old (glob)
293 327 *-----* (glob)
294 328 \s*147\s+2 files (re)
295 329
296 330 show an error when a provided pattern matches no files
297 331
298 332 $ hg archive -I file_that_does_not_exist.foo ../empty.zip
299 333 abort: no files match the archive pattern
300 334 [255]
301 335
302 336 $ hg archive -X * ../empty.zip
303 337 abort: no files match the archive pattern
304 338 [255]
305 339
306 340 $ cd ..
307 341
308 342 issue3600: check whether "hg archive" can create archive files which
309 343 are extracted with expected timestamp, even though TZ is not
310 344 configured as GMT.
311 345
312 346 $ mkdir issue3600
313 347 $ cd issue3600
314 348
315 349 $ hg init repo
316 350 $ echo a > repo/a
317 351 $ hg -R repo add repo/a
318 352 $ hg -R repo commit -m '#0' -d '456789012 21600'
319 353 $ cat > show_mtime.py <<EOF
320 354 > import sys, os
321 355 > print int(os.stat(sys.argv[1]).st_mtime)
322 356 > EOF
323 357
324 358 $ hg -R repo archive --prefix tar-extracted archive.tar
325 359 $ (TZ=UTC-3; export TZ; tar xf archive.tar)
326 360 $ python show_mtime.py tar-extracted/a
327 361 456789012
328 362
329 363 $ hg -R repo archive --prefix zip-extracted archive.zip
330 364 $ (TZ=UTC-3; export TZ; unzip -q archive.zip)
331 365 $ python show_mtime.py zip-extracted/a
332 366 456789012
333 367
334 368 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now