##// END OF EJS Templates
metadata: move computation related to files touched in a dedicated module...
marmoute -
r45466:4c1d3921 default
parent child Browse files
Show More
@@ -0,0 +1,268 b''
1 # metadata.py -- code related to various metadata computation and access.
2 #
3 # Copyright 2019 Google, Inc <martinvonz@google.com>
4 # Copyright 2020 Pierre-Yves David <pierre-yves.david@octobus.net>
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8 from __future__ import absolute_import, print_function
9
10 import multiprocessing
11
12 from . import (
13 error,
14 pycompat,
15 util,
16 )
17
18 from .revlogutils import (
19 flagutil as sidedataflag,
20 sidedata as sidedatamod,
21 )
22
23
def computechangesetfilesadded(ctx):
    """return the list of files added in a changeset

    A file counts as added when it is touched by the changeset and
    present in none of its parents.
    """
    return [
        f for f in ctx.files() if not any(f in p for p in ctx.parents())
    ]
32
33
def computechangesetfilesremoved(ctx):
    """return the list of files removed in a changeset

    A file counts as removed when it is touched by the changeset but no
    longer present in it.
    """
    return [f for f in ctx.files() if f not in ctx]
42
43
def computechangesetcopies(ctx):
    """return the copies data for a changeset

    The copies data are returned as a pair of dictionaries
    ``(p1copies, p2copies)``, each in the form ``{newname: oldname}``.

    A copy is attributed to a parent only when that parent contains the
    source file with the exact node the rename points at.
    """
    p1copies = {}
    p2copies = {}
    firstparent = ctx.p1()
    secondparent = ctx.p2()
    narrowmatch = ctx._repo.narrowmatch()
    for dst in ctx.files():
        # skip files outside the narrowspec and files absent from ctx
        if not narrowmatch(dst):
            continue
        if dst not in ctx:
            continue
        copysource = ctx[dst].renamed()
        if not copysource:
            continue
        src, srcnode = copysource
        if src in firstparent and firstparent[src].filenode() == srcnode:
            p1copies[dst] = src
        elif src in secondparent and secondparent[src].filenode() == srcnode:
            p2copies[dst] = src
    return p1copies, p2copies
68
69
def encodecopies(files, copies):
    """encode a ``{dst: src}`` copy mapping against a file list

    Each entry becomes ``<index-of-dst>\\0<src>``; entries are joined
    with newlines. Raises ProgrammingError when ``copies`` mentions a
    destination that is not in ``files``.
    """
    items = [
        b'%d\0%s' % (idx, copies[dst])
        for idx, dst in enumerate(files)
        if dst in copies
    ]
    if len(items) != len(copies):
        raise error.ProgrammingError(
            b'some copy targets missing from file list'
        )
    return b"\n".join(items)
80
81
def decodecopies(files, data):
    """decode the payload produced by ``encodecopies``

    Returns a ``{dst: src}`` dict, or ``None`` when the payload is
    malformed (e.g. written by another tool under the same extra key).
    """
    if not data:
        return {}
    try:
        copies = {}
        for entry in data.split(b'\n'):
            rawindex, src = entry.split(b'\0')
            copies[files[int(rawindex)]] = src
        return copies
    except (ValueError, IndexError):
        # Perhaps someone had chosen the same key name (e.g. "p1copies")
        # and used different syntax for the value.
        return None
97
98
def encodefileindices(files, subset):
    """encode ``subset`` as newline-separated decimal indices into ``files``"""
    wanted = set(subset)
    return b'\n'.join(
        b'%d' % idx for idx, f in enumerate(files) if f in wanted
    )
106
107
def decodefileindices(files, data):
    """decode the payload produced by ``encodefileindices``

    Returns the list of referenced files, or ``None`` when the payload
    is malformed or an index falls outside ``files``.
    """
    if not data:
        return []
    try:
        subset = []
        for rawindex in data.split(b'\n'):
            idx = int(rawindex)
            if not (0 <= idx < len(files)):
                return None
            subset.append(files[idx])
        return subset
    except (ValueError, IndexError):
        # Perhaps someone had chosen the same key name (e.g. "added") and
        # used different syntax for the value.
        return None
123
124
def _getsidedata(srcrepo, rev):
    """compute the copy-tracing sidedata map for ``rev`` of ``srcrepo``

    Returns a (possibly empty) dict mapping sidedata category keys to
    their encoded payloads.
    """
    ctx = srcrepo[rev]
    copies_info = computechangesetcopies(ctx)
    added = computechangesetfilesadded(ctx)
    removed = computechangesetfilesremoved(ctx)
    sidedata = {}
    # NOTE: ``copies_info`` is a (dict, dict) tuple and therefore always
    # truthy, so this branch is always taken; the per-payload guards
    # below keep the result empty when there is nothing to record.
    if any([copies_info, added, removed]):
        sortedfiles = sorted(ctx.files())
        p1copies, p2copies = copies_info
        encoded = [
            (sidedatamod.SD_P1COPIES, encodecopies(sortedfiles, p1copies)),
            (sidedatamod.SD_P2COPIES, encodecopies(sortedfiles, p2copies)),
            (sidedatamod.SD_FILESADDED, encodefileindices(sortedfiles, added)),
            (
                sidedatamod.SD_FILESREMOVED,
                encodefileindices(sortedfiles, removed),
            ),
        ]
        for key, value in encoded:
            if value:
                sidedata[key] = value
    return sidedata
147
148
def getsidedataadder(srcrepo, destrepo):
    """return a sidedata companion that adds copy-tracing sidedata

    The worker-pool implementation is used unless disabled by the
    ``experimental.worker.repository-upgrade`` config knob, or on
    Windows where this worker model is not supported.
    """
    use_workers = srcrepo.ui.configbool(
        b'experimental', b'worker.repository-upgrade'
    )
    if use_workers and not pycompat.iswindows:
        return _get_worker_sidedata_adder(srcrepo, destrepo)
    return _get_simple_sidedata_adder(srcrepo, destrepo)
155
156
def _sidedata_worker(srcrepo, revs_queue, sidedata_queue, tokens):
    """The function used by worker precomputing sidedata

    It reads an input queue containing revision numbers and writes
    ``(rev, <sidedata-map>)`` pairs to an output queue.

    The `None` input value is used as a stop signal.

    The `tokens` semaphore is used to avoid having too many unprocessed
    entries. The worker needs to acquire one token before fetching a
    task; tokens are released by the consumer of the produced data.
    """
    tokens.acquire()
    while True:
        rev = revs_queue.get()
        if rev is None:
            break
        sidedata_queue.put((rev, _getsidedata(srcrepo, rev)))
        tokens.acquire()
    # processing of `None` is completed, release the token.
    tokens.release()
178
179
180 BUFF_PER_WORKER = 50
181
182
def _get_worker_sidedata_adder(srcrepo, destrepo):
    """The parallel version of the sidedata computation

    This code spawns a pool of workers that precompute a buffer of
    sidedata before we actually need them.

    Returns a ``sidedata_companion(revlog, rev)`` callable suitable for
    the upgrade machinery.
    """
    # avoid circular import copies -> scmutil -> worker -> copies
    from . import worker

    nbworkers = worker._numworkers(srcrepo.ui)

    tokens = multiprocessing.BoundedSemaphore(nbworkers * BUFF_PER_WORKER)
    revsq = multiprocessing.Queue()
    sidedataq = multiprocessing.Queue()

    assert srcrepo.filtername is None
    # queue all tasks beforehand, revision numbers are small and it makes
    # synchronisation simpler
    #
    # Since the computation for each node can be quite expensive, the overhead
    # of using a single queue is not relevant. In practice, most computations
    # are fast but some are very expensive and dominate all the other smaller
    # costs.
    for r in srcrepo.changelog.revs():
        revsq.put(r)
    # queue the "no more tasks" markers
    for i in range(nbworkers):
        revsq.put(None)

    allworkers = []
    for i in range(nbworkers):
        args = (srcrepo, revsq, sidedataq, tokens)
        w = multiprocessing.Process(target=_sidedata_worker, args=args)
        allworkers.append(w)
        w.start()

    # dictionary to store results for revisions higher than the one we are
    # looking for. For example, if we need the sidedatamap for 42, and 43 is
    # received, we shelve 43 for later use.
    staging = {}

    def sidedata_companion(revlog, rev):
        sidedata = {}
        # attribute names must be str: getattr() rejects bytes on
        # Python 3, and the simple adder/remover use 'filteredrevs' too
        if util.safehasattr(revlog, 'filteredrevs'):  # this is a changelog
            # Is the data previously shelved ?
            sidedata = staging.pop(rev, None)
            if sidedata is None:
                # look at the queued results until we find the one we are
                # looking for (shelve the other ones)
                r, sidedata = sidedataq.get()
                while r != rev:
                    staging[r] = sidedata
                    r, sidedata = sidedataq.get()
            tokens.release()
        return False, (), sidedata

    return sidedata_companion
239
240
def _get_simple_sidedata_adder(srcrepo, destrepo):
    """The simple version of the sidedata computation

    It just computes the sidedata in the calling thread, on request.
    """

    def sidedatacompanion(revlog, rev):
        sidedata = {}
        # only the changelog carries this sidedata
        if util.safehasattr(revlog, 'filteredrevs'):  # this is a changelog
            sidedata = _getsidedata(srcrepo, rev)
        return False, (), sidedata

    return sidedatacompanion
253
254
def getsidedataremover(srcrepo, destrepo):
    """return a companion that strips all copy-tracing sidedata categories"""

    def sidedatacompanion(revlog, rev):
        removed = ()
        if util.safehasattr(revlog, 'filteredrevs'):  # this is a changelog
            if revlog.flags(rev) & sidedataflag.REVIDX_SIDEDATA:
                removed = (
                    sidedatamod.SD_P1COPIES,
                    sidedatamod.SD_P2COPIES,
                    sidedatamod.SD_FILESADDED,
                    sidedatamod.SD_FILESREMOVED,
                )
        return False, removed, {}

    return sidedatacompanion
@@ -16,9 +16,9 b' from .node import ('
16 from .thirdparty import attr
16 from .thirdparty import attr
17
17
18 from . import (
18 from . import (
19 copies,
20 encoding,
19 encoding,
21 error,
20 error,
21 metadata,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 )
24 )
@@ -318,7 +318,7 b' class changelogrevision(object):'
318 rawindices = self.extra.get(b'filesadded')
318 rawindices = self.extra.get(b'filesadded')
319 if rawindices is None:
319 if rawindices is None:
320 return None
320 return None
321 return copies.decodefileindices(self.files, rawindices)
321 return metadata.decodefileindices(self.files, rawindices)
322
322
323 @property
323 @property
324 def filesremoved(self):
324 def filesremoved(self):
@@ -330,7 +330,7 b' class changelogrevision(object):'
330 rawindices = self.extra.get(b'filesremoved')
330 rawindices = self.extra.get(b'filesremoved')
331 if rawindices is None:
331 if rawindices is None:
332 return None
332 return None
333 return copies.decodefileindices(self.files, rawindices)
333 return metadata.decodefileindices(self.files, rawindices)
334
334
335 @property
335 @property
336 def p1copies(self):
336 def p1copies(self):
@@ -342,7 +342,7 b' class changelogrevision(object):'
342 rawcopies = self.extra.get(b'p1copies')
342 rawcopies = self.extra.get(b'p1copies')
343 if rawcopies is None:
343 if rawcopies is None:
344 return None
344 return None
345 return copies.decodecopies(self.files, rawcopies)
345 return metadata.decodecopies(self.files, rawcopies)
346
346
347 @property
347 @property
348 def p2copies(self):
348 def p2copies(self):
@@ -354,7 +354,7 b' class changelogrevision(object):'
354 rawcopies = self.extra.get(b'p2copies')
354 rawcopies = self.extra.get(b'p2copies')
355 if rawcopies is None:
355 if rawcopies is None:
356 return None
356 return None
357 return copies.decodecopies(self.files, rawcopies)
357 return metadata.decodecopies(self.files, rawcopies)
358
358
359 @property
359 @property
360 def description(self):
360 def description(self):
@@ -570,13 +570,13 b' class changelog(revlog.revlog):'
570 ):
570 ):
571 extra.pop(name, None)
571 extra.pop(name, None)
572 if p1copies is not None:
572 if p1copies is not None:
573 p1copies = copies.encodecopies(sortedfiles, p1copies)
573 p1copies = metadata.encodecopies(sortedfiles, p1copies)
574 if p2copies is not None:
574 if p2copies is not None:
575 p2copies = copies.encodecopies(sortedfiles, p2copies)
575 p2copies = metadata.encodecopies(sortedfiles, p2copies)
576 if filesadded is not None:
576 if filesadded is not None:
577 filesadded = copies.encodefileindices(sortedfiles, filesadded)
577 filesadded = metadata.encodefileindices(sortedfiles, filesadded)
578 if filesremoved is not None:
578 if filesremoved is not None:
579 filesremoved = copies.encodefileindices(sortedfiles, filesremoved)
579 filesremoved = metadata.encodefileindices(sortedfiles, filesremoved)
580 if self._copiesstorage == b'extra':
580 if self._copiesstorage == b'extra':
581 extrasentries = p1copies, p2copies, filesadded, filesremoved
581 extrasentries = p1copies, p2copies, filesadded, filesremoved
582 if extra is None and any(x is not None for x in extrasentries):
582 if extra is None and any(x is not None for x in extrasentries):
@@ -28,13 +28,13 b' from .pycompat import ('
28 open,
28 open,
29 )
29 )
30 from . import (
30 from . import (
31 copies,
32 dagop,
31 dagop,
33 encoding,
32 encoding,
34 error,
33 error,
35 fileset,
34 fileset,
36 match as matchmod,
35 match as matchmod,
37 mergestate as mergestatemod,
36 mergestate as mergestatemod,
37 metadata,
38 obsolete as obsmod,
38 obsolete as obsmod,
39 patch,
39 patch,
40 pathutil,
40 pathutil,
@@ -300,7 +300,7 b' class basectx(object):'
300
300
301 @propertycache
301 @propertycache
302 def _copies(self):
302 def _copies(self):
303 return copies.computechangesetcopies(self)
303 return metadata.computechangesetcopies(self)
304
304
305 def p1copies(self):
305 def p1copies(self):
306 return self._copies[0]
306 return self._copies[0]
@@ -589,7 +589,7 b' class changectx(basectx):'
589 filesadded = None
589 filesadded = None
590 if filesadded is None:
590 if filesadded is None:
591 if compute_on_none:
591 if compute_on_none:
592 filesadded = copies.computechangesetfilesadded(self)
592 filesadded = metadata.computechangesetfilesadded(self)
593 else:
593 else:
594 filesadded = []
594 filesadded = []
595 return filesadded
595 return filesadded
@@ -608,7 +608,7 b' class changectx(basectx):'
608 filesremoved = None
608 filesremoved = None
609 if filesremoved is None:
609 if filesremoved is None:
610 if compute_on_none:
610 if compute_on_none:
611 filesremoved = copies.computechangesetfilesremoved(self)
611 filesremoved = metadata.computechangesetfilesremoved(self)
612 else:
612 else:
613 filesremoved = []
613 filesremoved = []
614 return filesremoved
614 return filesremoved
@@ -8,7 +8,6 b''
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import multiprocessing
12 import os
11 import os
13
12
14 from .i18n import _
13 from .i18n import _
@@ -17,7 +16,6 b' from .i18n import _'
17 from .revlogutils.flagutil import REVIDX_SIDEDATA
16 from .revlogutils.flagutil import REVIDX_SIDEDATA
18
17
19 from . import (
18 from . import (
20 error,
21 match as matchmod,
19 match as matchmod,
22 node,
20 node,
23 pathutil,
21 pathutil,
@@ -25,7 +23,6 b' from . import ('
25 util,
23 util,
26 )
24 )
27
25
28 from .revlogutils import sidedata as sidedatamod
29
26
30 from .utils import stringutil
27 from .utils import stringutil
31
28
@@ -992,250 +989,3 b' def graftcopies(wctx, ctx, base):'
992 _filter(wctx.p1(), wctx, new_copies)
989 _filter(wctx.p1(), wctx, new_copies)
993 for dst, src in pycompat.iteritems(new_copies):
990 for dst, src in pycompat.iteritems(new_copies):
994 wctx[dst].markcopied(src)
991 wctx[dst].markcopied(src)
995
996
997 def computechangesetfilesadded(ctx):
998 """return the list of files added in a changeset
999 """
1000 added = []
1001 for f in ctx.files():
1002 if not any(f in p for p in ctx.parents()):
1003 added.append(f)
1004 return added
1005
1006
1007 def computechangesetfilesremoved(ctx):
1008 """return the list of files removed in a changeset
1009 """
1010 removed = []
1011 for f in ctx.files():
1012 if f not in ctx:
1013 removed.append(f)
1014 return removed
1015
1016
1017 def computechangesetcopies(ctx):
1018 """return the copies data for a changeset
1019
1020 The copies data are returned as a pair of dictionnary (p1copies, p2copies).
1021
1022 Each dictionnary are in the form: `{newname: oldname}`
1023 """
1024 p1copies = {}
1025 p2copies = {}
1026 p1 = ctx.p1()
1027 p2 = ctx.p2()
1028 narrowmatch = ctx._repo.narrowmatch()
1029 for dst in ctx.files():
1030 if not narrowmatch(dst) or dst not in ctx:
1031 continue
1032 copied = ctx[dst].renamed()
1033 if not copied:
1034 continue
1035 src, srcnode = copied
1036 if src in p1 and p1[src].filenode() == srcnode:
1037 p1copies[dst] = src
1038 elif src in p2 and p2[src].filenode() == srcnode:
1039 p2copies[dst] = src
1040 return p1copies, p2copies
1041
1042
1043 def encodecopies(files, copies):
1044 items = []
1045 for i, dst in enumerate(files):
1046 if dst in copies:
1047 items.append(b'%d\0%s' % (i, copies[dst]))
1048 if len(items) != len(copies):
1049 raise error.ProgrammingError(
1050 b'some copy targets missing from file list'
1051 )
1052 return b"\n".join(items)
1053
1054
1055 def decodecopies(files, data):
1056 try:
1057 copies = {}
1058 if not data:
1059 return copies
1060 for l in data.split(b'\n'):
1061 strindex, src = l.split(b'\0')
1062 i = int(strindex)
1063 dst = files[i]
1064 copies[dst] = src
1065 return copies
1066 except (ValueError, IndexError):
1067 # Perhaps someone had chosen the same key name (e.g. "p1copies") and
1068 # used different syntax for the value.
1069 return None
1070
1071
1072 def encodefileindices(files, subset):
1073 subset = set(subset)
1074 indices = []
1075 for i, f in enumerate(files):
1076 if f in subset:
1077 indices.append(b'%d' % i)
1078 return b'\n'.join(indices)
1079
1080
1081 def decodefileindices(files, data):
1082 try:
1083 subset = []
1084 if not data:
1085 return subset
1086 for strindex in data.split(b'\n'):
1087 i = int(strindex)
1088 if i < 0 or i >= len(files):
1089 return None
1090 subset.append(files[i])
1091 return subset
1092 except (ValueError, IndexError):
1093 # Perhaps someone had chosen the same key name (e.g. "added") and
1094 # used different syntax for the value.
1095 return None
1096
1097
1098 def _getsidedata(srcrepo, rev):
1099 ctx = srcrepo[rev]
1100 filescopies = computechangesetcopies(ctx)
1101 filesadded = computechangesetfilesadded(ctx)
1102 filesremoved = computechangesetfilesremoved(ctx)
1103 sidedata = {}
1104 if any([filescopies, filesadded, filesremoved]):
1105 sortedfiles = sorted(ctx.files())
1106 p1copies, p2copies = filescopies
1107 p1copies = encodecopies(sortedfiles, p1copies)
1108 p2copies = encodecopies(sortedfiles, p2copies)
1109 filesadded = encodefileindices(sortedfiles, filesadded)
1110 filesremoved = encodefileindices(sortedfiles, filesremoved)
1111 if p1copies:
1112 sidedata[sidedatamod.SD_P1COPIES] = p1copies
1113 if p2copies:
1114 sidedata[sidedatamod.SD_P2COPIES] = p2copies
1115 if filesadded:
1116 sidedata[sidedatamod.SD_FILESADDED] = filesadded
1117 if filesremoved:
1118 sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
1119 return sidedata
1120
1121
1122 def getsidedataadder(srcrepo, destrepo):
1123 use_w = srcrepo.ui.configbool(b'experimental', b'worker.repository-upgrade')
1124 if pycompat.iswindows or not use_w:
1125 return _get_simple_sidedata_adder(srcrepo, destrepo)
1126 else:
1127 return _get_worker_sidedata_adder(srcrepo, destrepo)
1128
1129
1130 def _sidedata_worker(srcrepo, revs_queue, sidedata_queue, tokens):
1131 """The function used by worker precomputing sidedata
1132
1133 It read an input queue containing revision numbers
1134 It write in an output queue containing (rev, <sidedata-map>)
1135
1136 The `None` input value is used as a stop signal.
1137
1138 The `tokens` semaphore is user to avoid having too many unprocessed
1139 entries. The workers needs to acquire one token before fetching a task.
1140 They will be released by the consumer of the produced data.
1141 """
1142 tokens.acquire()
1143 rev = revs_queue.get()
1144 while rev is not None:
1145 data = _getsidedata(srcrepo, rev)
1146 sidedata_queue.put((rev, data))
1147 tokens.acquire()
1148 rev = revs_queue.get()
1149 # processing of `None` is completed, release the token.
1150 tokens.release()
1151
1152
1153 BUFF_PER_WORKER = 50
1154
1155
1156 def _get_worker_sidedata_adder(srcrepo, destrepo):
1157 """The parallel version of the sidedata computation
1158
1159 This code spawn a pool of worker that precompute a buffer of sidedata
1160 before we actually need them"""
1161 # avoid circular import copies -> scmutil -> worker -> copies
1162 from . import worker
1163
1164 nbworkers = worker._numworkers(srcrepo.ui)
1165
1166 tokens = multiprocessing.BoundedSemaphore(nbworkers * BUFF_PER_WORKER)
1167 revsq = multiprocessing.Queue()
1168 sidedataq = multiprocessing.Queue()
1169
1170 assert srcrepo.filtername is None
1171 # queue all tasks beforehand, revision numbers are small and it make
1172 # synchronisation simpler
1173 #
1174 # Since the computation for each node can be quite expensive, the overhead
1175 # of using a single queue is not revelant. In practice, most computation
1176 # are fast but some are very expensive and dominate all the other smaller
1177 # cost.
1178 for r in srcrepo.changelog.revs():
1179 revsq.put(r)
1180 # queue the "no more tasks" markers
1181 for i in range(nbworkers):
1182 revsq.put(None)
1183
1184 allworkers = []
1185 for i in range(nbworkers):
1186 args = (srcrepo, revsq, sidedataq, tokens)
1187 w = multiprocessing.Process(target=_sidedata_worker, args=args)
1188 allworkers.append(w)
1189 w.start()
1190
1191 # dictionnary to store results for revision higher than we one we are
1192 # looking for. For example, if we need the sidedatamap for 42, and 43 is
1193 # received, when shelve 43 for later use.
1194 staging = {}
1195
1196 def sidedata_companion(revlog, rev):
1197 sidedata = {}
1198 if util.safehasattr(revlog, b'filteredrevs'): # this is a changelog
1199 # Is the data previously shelved ?
1200 sidedata = staging.pop(rev, None)
1201 if sidedata is None:
1202 # look at the queued result until we find the one we are lookig
1203 # for (shelve the other ones)
1204 r, sidedata = sidedataq.get()
1205 while r != rev:
1206 staging[r] = sidedata
1207 r, sidedata = sidedataq.get()
1208 tokens.release()
1209 return False, (), sidedata
1210
1211 return sidedata_companion
1212
1213
1214 def _get_simple_sidedata_adder(srcrepo, destrepo):
1215 """The simple version of the sidedata computation
1216
1217 It just compute it in the same thread on request"""
1218
1219 def sidedatacompanion(revlog, rev):
1220 sidedata = {}
1221 if util.safehasattr(revlog, 'filteredrevs'): # this is a changelog
1222 sidedata = _getsidedata(srcrepo, rev)
1223 return False, (), sidedata
1224
1225 return sidedatacompanion
1226
1227
1228 def getsidedataremover(srcrepo, destrepo):
1229 def sidedatacompanion(revlog, rev):
1230 f = ()
1231 if util.safehasattr(revlog, 'filteredrevs'): # this is a changelog
1232 if revlog.flags(rev) & REVIDX_SIDEDATA:
1233 f = (
1234 sidedatamod.SD_P1COPIES,
1235 sidedatamod.SD_P2COPIES,
1236 sidedatamod.SD_FILESADDED,
1237 sidedatamod.SD_FILESREMOVED,
1238 )
1239 return False, f, {}
1240
1241 return sidedatacompanion
@@ -13,12 +13,12 b' from .i18n import _'
13 from .pycompat import getattr
13 from .pycompat import getattr
14 from . import (
14 from . import (
15 changelog,
15 changelog,
16 copies,
17 error,
16 error,
18 filelog,
17 filelog,
19 hg,
18 hg,
20 localrepo,
19 localrepo,
21 manifest,
20 manifest,
21 metadata,
22 pycompat,
22 pycompat,
23 revlog,
23 revlog,
24 scmutil,
24 scmutil,
@@ -734,9 +734,9 b' def getsidedatacompanion(srcrepo, dstrep'
734 return False, (), {}
734 return False, (), {}
735
735
736 elif localrepo.COPIESSDC_REQUIREMENT in addedreqs:
736 elif localrepo.COPIESSDC_REQUIREMENT in addedreqs:
737 sidedatacompanion = copies.getsidedataadder(srcrepo, dstrepo)
737 sidedatacompanion = metadata.getsidedataadder(srcrepo, dstrepo)
738 elif localrepo.COPIESSDC_REQUIREMENT in removedreqs:
738 elif localrepo.COPIESSDC_REQUIREMENT in removedreqs:
739 sidedatacompanion = copies.getsidedataremover(srcrepo, dstrepo)
739 sidedatacompanion = metadata.getsidedataremover(srcrepo, dstrepo)
740 return sidedatacompanion
740 return sidedatacompanion
741
741
742
742
General Comments 0
You need to be logged in to leave comments. Login now