gvsig-scripting / org.gvsig.scripting / trunk / org.gvsig.scripting / org.gvsig.scripting.app / org.gvsig.scripting.app.mainplugin / src / main / resources-plugin / scripting / lib / dulwich / tests / utils.py @ 959
History | View | Annotate | Download (12.3 KB)
1 |
# utils.py -- Test utilities for Dulwich.
|
---|---|
2 |
# Copyright (C) 2010 Google, Inc.
|
3 |
#
|
4 |
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
|
5 |
# General Public License as public by the Free Software Foundation; version 2.0
|
6 |
# or (at your option) any later version. You can redistribute it and/or
|
7 |
# modify it under the terms of either of these two licenses.
|
8 |
#
|
9 |
# Unless required by applicable law or agreed to in writing, software
|
10 |
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
# See the License for the specific language governing permissions and
|
13 |
# limitations under the License.
|
14 |
#
|
15 |
# You should have received a copy of the licenses; if not, see
|
16 |
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
|
17 |
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
|
18 |
# License, Version 2.0.
|
19 |
#
|
20 |
|
21 |
"""Utility functions common to Dulwich tests."""
|
22 |
|
23 |
|
24 |
import datetime |
25 |
import os |
26 |
import shutil |
27 |
import tempfile |
28 |
import time |
29 |
import types |
30 |
|
31 |
import warnings |
32 |
|
33 |
from dulwich.index import ( |
34 |
commit_tree, |
35 |
) |
36 |
from dulwich.objects import ( |
37 |
FixedSha, |
38 |
Commit, |
39 |
Tag, |
40 |
object_class, |
41 |
) |
42 |
from dulwich.pack import ( |
43 |
OFS_DELTA, |
44 |
REF_DELTA, |
45 |
DELTA_TYPES, |
46 |
obj_sha, |
47 |
SHA1Writer, |
48 |
write_pack_header, |
49 |
write_pack_object, |
50 |
create_delta, |
51 |
) |
52 |
from dulwich.repo import Repo |
53 |
from dulwich.tests import ( |
54 |
SkipTest, |
55 |
skipIf, |
56 |
) |
57 |
|
58 |
|
59 |
# Plain files are very frequently used in tests, so let the mode be very short.
F = 0o100644  # Shorthand mode for Files.
61 |
|
62 |
|
63 |
def open_repo(name, temp_dir=None):
    """Open a copy of a repo in a temporary directory.

    Use this helper to access the repositories under
    dulwich/tests/data/repos without risking accidental (or intentional)
    in-place modification of the checked-in fixtures; call tear_down_repo
    afterwards to remove the copy.

    :param name: Name of the repository, relative to
        dulwich/tests/data/repos.
    :param temp_dir: Temporary directory to copy into. When not supplied,
        a fresh one is created.
    :returns: An initialized Repo object backed by the temporary copy.
    """
    # Create a scratch directory unless the caller supplied one.
    if temp_dir is None:
        temp_dir = tempfile.mkdtemp()
    source = os.path.join(os.path.dirname(__file__), 'data', 'repos', name)
    destination = os.path.join(temp_dir, name)
    # symlinks=True keeps the copy identical to the checked-in fixture.
    shutil.copytree(source, destination, symlinks=True)
    return Repo(destination)
|
82 |
|
83 |
|
84 |
def tear_down_repo(repo):
    """Tear down a test repository created by open_repo.

    Closes the repo, then deletes the whole temporary directory containing
    it (the parent of repo.path).
    """
    repo.close()
    # repo.path may carry a trailing separator; strip it so dirname yields
    # the containing temp directory rather than the repo directory itself.
    container = os.path.dirname(repo.path.rstrip(os.sep))
    shutil.rmtree(container)
89 |
|
90 |
|
91 |
def make_object(cls, **attrs):
    """Make an object for testing and assign some members.

    A throwaway subclass of cls is created so that arbitrary attributes can
    be assigned, which the original class would forbid via __slots__.

    :param attrs: dict of attributes to set on the new object.
    :return: A newly initialized object of type cls.
    """
    # A subclass declared without __slots__ regains a per-instance __dict__,
    # which makes monkey-patching attributes onto instances possible.
    subclass = type('TestObject_' + cls.__name__, (cls,), {})

    obj = subclass()
    for key, value in attrs.items():
        if key == 'id':
            # The id property is read-only, so overwrite sha() instead.
            fixed = FixedSha(value)
            obj.sha = lambda: fixed
        else:
            setattr(obj, key, value)
    return obj
|
120 |
|
121 |
|
122 |
def make_commit(**attrs):
    """Make a Commit object with a default set of members.

    :param attrs: dict of attributes to overwrite from the default values.
    :return: A newly initialized Commit object.
    """
    # Fixed timestamp (2010-01-01, local time) keeps test objects stable.
    default_time = int(time.mktime(datetime.datetime(2010, 1, 1).timetuple()))
    defaults = dict(
        author=b'Test Author <test@nodomain.com>',
        author_time=default_time,
        author_timezone=0,
        committer=b'Test Committer <test@nodomain.com>',
        commit_time=default_time,
        commit_timezone=0,
        message=b'Test message.',
        parents=[],
        tree=b'0' * 40,
    )
    defaults.update(attrs)
    return make_object(Commit, **defaults)
|
140 |
|
141 |
|
142 |
def make_tag(target, **attrs):
    """Make a Tag object with a default set of values.

    :param target: object to be tagged (Commit, Blob, Tree, etc)
    :param attrs: dict of attributes to overwrite from the default values.
    :return: A newly initialized Tag object.
    """
    # Same fixed timestamp as make_commit, for reproducible fixtures.
    default_time = int(time.mktime(datetime.datetime(2010, 1, 1).timetuple()))
    defaults = dict(
        tagger=b'Test Author <test@nodomain.com>',
        tag_time=default_time,
        tag_timezone=0,
        message=b'Test message.',
        object=(object_class(target.type_name), target.id),
        name=b'Test Tag',
    )
    defaults.update(attrs)
    return make_object(Tag, **defaults)
|
161 |
|
162 |
|
163 |
def functest_builder(method, func):
    """Generate a test method that tests the given function.

    :param method: the shared test body; invoked as method(self, func).
    :param func: the function implementation under test.
    :return: a test method (named do_test) that runs the shared body
        against func.
    """

    def do_test(self):
        method(self, func)

    return do_test
|
170 |
|
171 |
|
172 |
def ext_functest_builder(method, func):
    """Generate a test method that tests the given extension function.

    This is intended to generate test methods that test both a pure-Python
    version and an extension version using common test code. The extension
    test will raise SkipTest if the extension is not found.

    Sample usage:

    class MyTest(TestCase):
        def _do_some_test(self, func_impl):
            self.assertEqual('foo', func_impl())

        test_foo = functest_builder(_do_some_test, foo_py)
        test_foo_extension = ext_functest_builder(_do_some_test, _foo_c)

    :param method: The method to run. It must take two parameters, self and
        the function implementation to test.
    :param func: The function implementation to pass to method.
    """

    def do_test(self):
        # A compiled extension function is a BuiltinFunctionType; a pure
        # Python fallback means the extension failed to load, so skip the
        # test instead of silently re-testing the Python implementation.
        if not isinstance(func, types.BuiltinFunctionType):
            raise SkipTest("%s extension not found" % func)
        method(self, func)

    return do_test
|
199 |
|
200 |
|
201 |
def build_pack(f, objects_spec, store=None):
    """Write test pack data from a concise spec.

    :param f: A file-like object to write the pack to.
    :param objects_spec: A list of (type_num, obj). For non-delta types, obj
        is the string of that object's data.
        For delta types, obj is a tuple of (base, data), where:

        * base can be either an index in objects_spec of the base for that
          delta; or for a ref delta, a SHA, in which case the resulting pack
          will be thin and the base will be an external ref.
        * data is a string of the full, non-deltified data for that object.

        Note that offsets/refs and deltas are computed within this function.
    :param store: An optional ObjectStore for looking up external refs.
    :return: A list of tuples in the order specified by objects_spec:
        (offset, type num, data, sha, CRC32)
    """
    sf = SHA1Writer(f)
    num_objects = len(objects_spec)
    write_pack_header(sf, num_objects)

    # full_objects maps spec index -> (type_num, full data, sha), with every
    # delta resolved to the type of its (transitive) non-delta base.
    full_objects = {}
    offsets = {}
    crc32s = {}

    # Repeatedly sweep the spec until every entry is resolved; a delta whose
    # base appears later in the spec gets picked up on a subsequent pass.
    # NOTE(review): a spec with an unresolvable in-pack base would loop
    # forever here -- specs are assumed to be well-formed.
    while len(full_objects) < num_objects:
        for i, (type_num, data) in enumerate(objects_spec):
            if type_num not in DELTA_TYPES:
                full_objects[i] = (type_num, data,
                                   obj_sha(type_num, [data]))
                continue
            base, data = data
            if isinstance(base, int):
                if base not in full_objects:
                    # Base not resolved yet; retry on the next sweep.
                    continue
                base_type_num, _, _ = full_objects[base]
            else:
                # SHA base: external ref (thin pack), looked up in the store.
                base_type_num, _ = store.get_raw(base)
            full_objects[i] = (base_type_num, data,
                               obj_sha(base_type_num, [data]))

    # Second pass: actually serialize each object, converting delta entries
    # into (base, delta-bytes) pairs as required by write_pack_object.
    for i, (type_num, obj) in enumerate(objects_spec):
        offset = f.tell()
        if type_num == OFS_DELTA:
            base_index, data = obj
            # OFS deltas encode the backwards distance to the base object.
            base = offset - offsets[base_index]
            _, base_data, _ = full_objects[base_index]
            obj = (base, create_delta(base_data, data))
        elif type_num == REF_DELTA:
            base_ref, data = obj
            if isinstance(base_ref, int):
                _, base_data, base = full_objects[base_ref]
            else:
                base_type_num, base_data = store.get_raw(base_ref)
                base = obj_sha(base_type_num, base_data)
            obj = (base, create_delta(base_data, data))

        crc32 = write_pack_object(sf, type_num, obj)
        offsets[i] = offset
        crc32s[i] = crc32

    # Assemble the expected-results list, in spec order, for callers to
    # assert against.
    expected = []
    for i in range(num_objects):
        type_num, data, sha = full_objects[i]
        assert len(sha) == 20
        expected.append((offsets[i], type_num, data, sha, crc32s[i]))

    sf.write_sha()
    # Rewind so callers can read the finished pack back from the start.
    f.seek(0)
    return expected
|
272 |
|
273 |
|
274 |
def build_commit_graph(object_store, commit_spec, trees=None, attrs=None):
    """Build a commit graph from a concise specification.

    Sample usage:
    >>> c1, c2, c3 = build_commit_graph(store, [[1], [2, 1], [3, 1, 2]])
    >>> store[store[c3].parents[0]] == c1
    True
    >>> store[store[c3].parents[1]] == c2
    True

    If not otherwise specified, commits will refer to the empty tree and have
    commit times increasing in the same order as the commit spec.

    :param object_store: An ObjectStore to commit objects to.
    :param commit_spec: An iterable of iterables of ints defining the commit
        graph. Each entry defines one commit, and entries must be in
        topological order. The first element of each entry is a commit
        number, and the remaining elements are its parents. The commit
        numbers are only meaningful for this call; since real commit objects
        are created, they will get created with real, opaque SHAs.
    :param trees: An optional dict of commit number -> tree spec for building
        trees for commits. The tree spec is an iterable of (path, blob, mode)
        or (path, blob) entries; if mode is omitted, it defaults to the
        normal file mode (0100644).
    :param attrs: A dict of commit number -> (dict of attribute -> value)
        for assigning additional values to the commits.
    :return: The list of commit objects created.
    :raise ValueError: If an undefined commit identifier is listed as a
        parent.
    """
    if trees is None:
        trees = {}
    if attrs is None:
        attrs = {}
    num_to_sha = {}
    result = []
    next_time = 0

    for spec in commit_spec:
        num, *parent_nums = spec
        try:
            parent_ids = [num_to_sha[p] for p in parent_nums]
        except KeyError as exc:
            missing_parent, = exc.args
            raise ValueError('Unknown parent %i' % missing_parent)

        blobs = []
        for entry in trees.get(num, []):
            if len(entry) == 2:
                # Two-tuple entries get the default regular-file mode.
                path, blob = entry
                entry = (path, blob, F)
            path, blob, mode = entry
            object_store.add_object(blob)
            blobs.append((path, blob.id, mode))
        tree_id = commit_tree(object_store, blobs)

        commit_attrs = {
            'message': ('Commit %i' % num).encode('ascii'),
            'parents': parent_ids,
            'tree': tree_id,
            'commit_time': next_time,
        }
        commit_attrs.update(attrs.get(num, {}))
        commit_obj = make_commit(**commit_attrs)

        # By default, space commits far apart. Deliberately out-of-order
        # commits should sit closer together than this, since their main
        # real-world cause is clock skew.
        next_time = commit_attrs['commit_time'] + 100
        num_to_sha[num] = commit_obj.id
        object_store.add_object(commit_obj)
        result.append(commit_obj)

    return result
|
346 |
|
347 |
|
348 |
def setup_warning_catcher():
    """Wrap warnings.showwarning with code that records warnings.

    :return: A tuple (caught, restore) where caught is a list that
        accumulates the warning message objects as they are emitted, and
        restore is a zero-argument callable that reinstates the previous
        warnings.showwarning hook.
    """
    caught = []
    previous_hook = warnings.showwarning

    def recording_hook(*args, **kwargs):
        # args[0] is the warning message instance handed to showwarning.
        caught.append(args[0])

    def restore():
        warnings.showwarning = previous_hook

    warnings.showwarning = recording_hook
    return caught, restore
|