branchmap: log events related to branch cache...
Gregory Szorc
r21031:05cfcecb default
@@ -1,274 +1,285 @@
 # branchmap.py - logic to computes, maintain and stores branchmap for local repo
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from node import bin, hex, nullid, nullrev
 import encoding
 import util
+import time
 
 def _filename(repo):
     """name of a branchcache file for a given repo or repoview"""
     filename = "cache/branch2"
     if repo.filtername:
         filename = '%s-%s' % (filename, repo.filtername)
     return filename
 
 def read(repo):
     try:
         f = repo.opener(_filename(repo))
         lines = f.read().split('\n')
         f.close()
     except (IOError, OSError):
         return None
 
     try:
         cachekey = lines.pop(0).split(" ", 2)
         last, lrev = cachekey[:2]
         last, lrev = bin(last), int(lrev)
         filteredhash = None
         if len(cachekey) > 2:
             filteredhash = bin(cachekey[2])
         partial = branchcache(tipnode=last, tiprev=lrev,
                               filteredhash=filteredhash)
         if not partial.validfor(repo):
             # invalidate the cache
             raise ValueError('tip differs')
         for l in lines:
             if not l:
                 continue
             node, state, label = l.split(" ", 2)
             if state not in 'oc':
                 raise ValueError('invalid branch state')
             label = encoding.tolocal(label.strip())
             if not node in repo:
                 raise ValueError('node %s does not exist' % node)
             node = bin(node)
             partial.setdefault(label, []).append(node)
             if state == 'c':
                 partial._closednodes.add(node)
     except KeyboardInterrupt:
         raise
     except Exception, inst:
         if repo.ui.debugflag:
             msg = 'invalid branchheads cache'
             if repo.filtername is not None:
                 msg += ' (%s)' % repo.filtername
             msg += ': %s\n'
             repo.ui.warn(msg % inst)
         partial = None
     return partial
 
 
 
 ### Nearest subset relation
 # Nearest subset of filter X is a filter Y so that:
 # * Y is included in X,
 # * X - Y is as small as possible.
 # This create and ordering used for branchmap purpose.
 # the ordering may be partial
 subsettable = {None: 'visible',
                'visible': 'served',
                'served': 'immutable',
                'immutable': 'base'}
 
 def updatecache(repo):
     cl = repo.changelog
     filtername = repo.filtername
     partial = repo._branchcaches.get(filtername)
 
     revs = []
     if partial is None or not partial.validfor(repo):
         partial = read(repo)
         if partial is None:
             subsetname = subsettable.get(filtername)
             if subsetname is None:
                 partial = branchcache()
             else:
                 subset = repo.filtered(subsetname)
                 partial = subset.branchmap().copy()
                 extrarevs = subset.changelog.filteredrevs - cl.filteredrevs
                 revs.extend(r for r in extrarevs if r <= partial.tiprev)
     revs.extend(cl.revs(start=partial.tiprev + 1))
     if revs:
         partial.update(repo, revs)
         partial.write(repo)
     assert partial.validfor(repo), filtername
     repo._branchcaches[repo.filtername] = partial
 
 class branchcache(dict):
     """A dict like object that hold branches heads cache.
 
     This cache is used to avoid costly computations to determine all the
     branch heads of a repo.
 
     The cache is serialized on disk in the following format:
 
     <tip hex node> <tip rev number> [optional filtered repo hex hash]
     <branch head hex node> <open/closed state> <branch name>
     <branch head hex node> <open/closed state> <branch name>
     ...
 
     The first line is used to check if the cache is still valid. If the
     branch cache is for a filtered repo view, an optional third hash is
     included that hashes the hashes of all filtered revisions.
 
     The open/closed state is represented by a single letter 'o' or 'c'.
     This field can be used to avoid changelog reads when determining if a
     branch head closes a branch or not.
     """
 
     def __init__(self, entries=(), tipnode=nullid, tiprev=nullrev,
                  filteredhash=None, closednodes=None):
         super(branchcache, self).__init__(entries)
         self.tipnode = tipnode
         self.tiprev = tiprev
         self.filteredhash = filteredhash
         # closednodes is a set of nodes that close their branch. If the branch
         # cache has been updated, it may contain nodes that are no longer
         # heads.
         if closednodes is None:
             self._closednodes = set()
         else:
             self._closednodes = closednodes
 
     def _hashfiltered(self, repo):
         """build hash of revision filtered in the current cache
 
         Tracking tipnode and tiprev is not enough to ensure validity of the
         cache as they do not help to distinct cache that ignored various
         revision bellow tiprev.
 
         To detect such difference, we build a cache of all ignored revisions.
         """
         cl = repo.changelog
         if not cl.filteredrevs:
             return None
         key = None
         revs = sorted(r for r in cl.filteredrevs if r <= self.tiprev)
         if revs:
             s = util.sha1()
             for rev in revs:
                 s.update('%s;' % rev)
             key = s.digest()
         return key
 
     def validfor(self, repo):
         """Is the cache content valid regarding a repo
 
         - False when cached tipnode is unknown or if we detect a strip.
         - True when cache is up to date or a subset of current repo."""
         try:
             return ((self.tipnode == repo.changelog.node(self.tiprev))
                     and (self.filteredhash == self._hashfiltered(repo)))
         except IndexError:
             return False
 
     def _branchtip(self, heads):
         '''Return tuple with last open head in heads and false,
         otherwise return last closed head and true.'''
         tip = heads[-1]
         closed = True
         for h in reversed(heads):
             if h not in self._closednodes:
                 tip = h
                 closed = False
                 break
         return tip, closed
 
     def branchtip(self, branch):
         '''Return the tipmost open head on branch head, otherwise return the
         tipmost closed head on branch.
         Raise KeyError for unknown branch.'''
         return self._branchtip(self[branch])[0]
 
     def branchheads(self, branch, closed=False):
         heads = self[branch]
         if not closed:
             heads = [h for h in heads if h not in self._closednodes]
         return heads
 
     def iterbranches(self):
         for bn, heads in self.iteritems():
             yield (bn, heads) + self._branchtip(heads)
 
     def copy(self):
         """return an deep copy of the branchcache object"""
         return branchcache(self, self.tipnode, self.tiprev, self.filteredhash,
                            self._closednodes)
 
     def write(self, repo):
         try:
             f = repo.opener(_filename(repo), "w", atomictemp=True)
             cachekey = [hex(self.tipnode), str(self.tiprev)]
             if self.filteredhash is not None:
                 cachekey.append(hex(self.filteredhash))
             f.write(" ".join(cachekey) + '\n')
+            nodecount = 0
             for label, nodes in sorted(self.iteritems()):
                 for node in nodes:
+                    nodecount += 1
                     if node in self._closednodes:
                         state = 'c'
                     else:
                         state = 'o'
                     f.write("%s %s %s\n" % (hex(node), state,
                                             encoding.fromlocal(label)))
             f.close()
+            repo.ui.log('branchcache',
+                        'wrote %s branch cache with %d labels and %d nodes\n',
+                        repo.filtername, len(self), nodecount)
         except (IOError, OSError, util.Abort):
             # Abort may be raise by read only opener
             pass
 
     def update(self, repo, revgen):
         """Given a branchhead cache, self, that may have extra nodes or be
         missing heads, and a generator of nodes that are strictly a superset of
         heads missing, this function updates self to be correct.
         """
+        starttime = time.time()
         cl = repo.changelog
         # collect new branch entries
         newbranches = {}
         getbranchinfo = cl.branchinfo
         for r in revgen:
             branch, closesbranch = getbranchinfo(r)
             newbranches.setdefault(branch, []).append(r)
             if closesbranch:
                 self._closednodes.add(cl.node(r))
         # if older branchheads are reachable from new ones, they aren't
         # really branchheads. Note checking parents is insufficient:
         # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
         for branch, newheadrevs in newbranches.iteritems():
             bheads = self.setdefault(branch, [])
             bheadset = set(cl.rev(node) for node in bheads)
 
             # This have been tested True on all internal usage of this function.
             # run it again in case of doubt
             # assert not (set(bheadrevs) & set(newheadrevs))
             newheadrevs.sort()
             bheadset.update(newheadrevs)
 
             # This loop prunes out two kinds of heads - heads that are
             # superseded by a head in newheadrevs, and newheadrevs that are not
             # heads because an existing head is their descendant.
             while newheadrevs:
                 latest = newheadrevs.pop()
                 if latest not in bheadset:
                     continue
                 ancestors = set(cl.ancestors([latest], min(bheadset)))
                 bheadset -= ancestors
             bheadrevs = sorted(bheadset)
             self[branch] = [cl.node(rev) for rev in bheadrevs]
             tiprev = bheadrevs[-1]
             if tiprev > self.tiprev:
                 self.tipnode = cl.node(tiprev)
                 self.tiprev = tiprev
 
         if not self.validfor(repo):
             # cache key are not valid anymore
             self.tipnode = nullid
             self.tiprev = nullrev
             for heads in self.values():
                 tiprev = max(cl.rev(node) for node in heads)
                 if tiprev > self.tiprev:
                     self.tipnode = cl.node(tiprev)
                     self.tiprev = tiprev
         self.filteredhash = self._hashfiltered(repo)
+
+        duration = time.time() - starttime
+        repo.ui.log('branchcache', 'updated %s branch cache in %.4f seconds\n',
+                    repo.filtername, duration)
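The two repo.ui.log('branchcache', ...) calls added above only emit events into Mercurial's ui.log() hook; some logger has to pick them up (in the test below that logger is the blackbox extension). As a rough, untested sketch of such a consumer, the snippet that follows wraps ui.__class__ the way blackbox does and echoes only 'branchcache' events to stderr; the extension and class names are made up for illustration, not part of the patch.

    # hypothetical logbranchcache.py extension: forward 'branchcache' events
    # emitted via ui.log() to stderr so they are visible without blackbox
    def uisetup(ui):
        class branchcachelogui(ui.__class__):
            def log(self, service, *msg, **opts):
                if service == 'branchcache' and msg:
                    # msg is a printf-style format string plus its arguments
                    self.warn('branchcache: ' + (msg[0] % msg[1:]))
                return super(branchcachelogui, self).log(service, *msg, **opts)
        ui.__class__ = branchcachelogui

Enabled through [extensions], this would surface the same 'updated ... branch cache' and 'wrote ... branch cache' messages that the blackbox test expectations below check for.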
@@ -1,166 +1,170 @@
 setup
   $ cat > mock.py <<EOF
   > from mercurial import util
   >
   > def makedate():
   >     return 0, 0
   > def getuser():
   >     return 'bob'
   > # mock the date and user apis so the output is always the same
   > def uisetup(ui):
   >     util.makedate = makedate
   >     util.getuser = getuser
   > EOF
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > blackbox=
   > mock=`pwd`/mock.py
   > mq=
   > EOF
   $ hg init blackboxtest
   $ cd blackboxtest
 
 command, exit codes, and duration
 
   $ echo a > a
   $ hg add a
   $ hg blackbox
   1970/01/01 00:00:00 bob> add a
   1970/01/01 00:00:00 bob> add a exited 0 after * seconds (glob)
 
 incoming change tracking
 
 create two heads to verify that we only see one change in the log later
   $ hg commit -ma
   $ hg up null
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ echo b > b
   $ hg commit -Amb
   adding b
   created new head
 
 clone, commit, pull
   $ hg clone . ../blackboxtest2
   updating to branch default
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ echo c > c
   $ hg commit -Amc
   adding c
   $ cd ../blackboxtest2
   $ hg pull
   pulling from $TESTTMP/blackboxtest (glob)
   searching for changes
   adding changesets
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
   (run 'hg update' to get a working copy)
-  $ hg blackbox -l 3
+  $ hg blackbox -l 5
   1970/01/01 00:00:00 bob> pull
+  1970/01/01 00:00:00 bob> updated served branch cache in ?.???? seconds (glob)
+  1970/01/01 00:00:00 bob> wrote served branch cache with 1 labels and 2 nodes
   1970/01/01 00:00:00 bob> 1 incoming changes - new heads: d02f48003e62
   1970/01/01 00:00:00 bob> pull exited 0 after * seconds (glob)
 
 we must not cause a failure if we cannot write to the log
 
   $ hg rollback
   repository tip rolled back to revision 1 (undo pull)
 
 #if unix-permissions no-root
   $ chmod 000 .hg/blackbox.log
   $ hg --debug incoming
   warning: cannot write to blackbox.log: Permission denied
   comparing with $TESTTMP/blackboxtest (glob)
   query 1; heads
   searching for changes
   all local heads known remotely
   changeset: 2:d02f48003e62c24e2659d97d30f2a83abe5d5d51
   tag: tip
   phase: draft
   parent: 1:6563da9dcf87b1949716e38ff3e3dfaa3198eb06
   parent: -1:0000000000000000000000000000000000000000
   manifest: 2:ab9d46b053ebf45b7996f2922b9893ff4b63d892
   user: test
   date: Thu Jan 01 00:00:00 1970 +0000
   files+: c
   extra: branch=default
   description:
   c
 
 
 #endif
   $ hg pull
   pulling from $TESTTMP/blackboxtest (glob)
   searching for changes
   adding changesets
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
   (run 'hg update' to get a working copy)
 
 a failure reading from the log is fine
 #if unix-permissions no-root
   $ hg blackbox -l 3
   abort: Permission denied: $TESTTMP/blackboxtest2/.hg/blackbox.log
   [255]
 
   $ chmod 600 .hg/blackbox.log
 #endif
 
 backup bundles get logged
 
   $ touch d
   $ hg commit -Amd
   adding d
   created new head
   $ hg strip tip
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob)
-  $ hg blackbox -l 3
+  $ hg blackbox -l 5
   1970/01/01 00:00:00 bob> strip tip
   1970/01/01 00:00:00 bob> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob)
+  1970/01/01 00:00:00 bob> updated base branch cache in ?.???? seconds (glob)
+  1970/01/01 00:00:00 bob> wrote base branch cache with 1 labels and 2 nodes
   1970/01/01 00:00:00 bob> strip tip exited 0 after * seconds (glob)
 
 tags cache gets logged
   $ hg up tip
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg tag -m 'create test tag' test-tag
   $ hg tags
   tip 3:5b5562c08298
   test-tag 2:d02f48003e62
   $ hg blackbox -l 3
   1970/01/01 00:00:00 bob> resolved 1 tags cache entries from 1 manifests in ?.???? seconds (glob)
   1970/01/01 00:00:00 bob> writing tags cache file with 2 heads and 1 tags
   1970/01/01 00:00:00 bob> tags exited 0 after ?.?? seconds (glob)
 
 extension and python hooks - use the eol extension for a pythonhook
 
   $ echo '[extensions]' >> .hg/hgrc
   $ echo 'eol=' >> .hg/hgrc
   $ echo '[hooks]' >> .hg/hgrc
   $ echo 'update = echo hooked' >> .hg/hgrc
   $ hg update
   hooked
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg blackbox -l 4
   1970/01/01 00:00:00 bob> update
   1970/01/01 00:00:00 bob> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob)
   1970/01/01 00:00:00 bob> exthook-update: echo hooked finished in * seconds (glob)
   1970/01/01 00:00:00 bob> update exited 0 after * seconds (glob)
 
 log rotation
 
   $ echo '[blackbox]' >> .hg/hgrc
   $ echo 'maxsize = 20 b' >> .hg/hgrc
   $ echo 'maxfiles = 3' >> .hg/hgrc
   $ hg status
   $ hg status
   $ hg status
   $ hg tip -q
   3:5b5562c08298
   $ ls .hg/blackbox.log*
   .hg/blackbox.log
   .hg/blackbox.log.1
   .hg/blackbox.log.2
 
 cleanup
   $ cd ..
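For readers who want to sanity-check the 'wrote ... branch cache with %d labels and %d nodes' summary against what actually lands on disk, here is a small, untested helper that tallies a cache/branch2 file using the layout documented in the branchcache docstring above (first line is the cache key; every following line is '<node> <o|c> <branch>'). The function name and default path are illustrative only, not part of Mercurial's API.

    # hypothetical helper: count branch labels and head nodes in a branch2
    # cache file, mirroring the numbers reported by the new log line
    def summarize_branchcache(path='.hg/cache/branch2'):
        labels = set()
        nodecount = 0
        f = open(path)
        try:
            lines = f.read().split('\n')
        finally:
            f.close()
        lines.pop(0)  # cache key: tip node, tip rev, optional filtered hash
        for line in lines:
            if not line:
                continue
            node, state, label = line.split(' ', 2)  # state is 'o' or 'c'
            labels.add(label)
            nodecount += 1
        return len(labels), nodecount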