gvsig-scripting / org.gvsig.scripting / trunk / org.gvsig.scripting / org.gvsig.scripting.app / org.gvsig.scripting.app.mainplugin / src / main / resources-plugin / scripting / lib / dulwich / fastexport.py @ 959
History | View | Annotate | Download (8.45 KB)
1 |
# __init__.py -- Fast export/import functionality
|
---|---|
2 |
# Copyright (C) 2010-2013 Jelmer Vernooij <jelmer@samba.org>
|
3 |
#
|
4 |
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
|
5 |
# General Public License as public by the Free Software Foundation; version 2.0
|
6 |
# or (at your option) any later version. You can redistribute it and/or
|
7 |
# modify it under the terms of either of these two licenses.
|
8 |
#
|
9 |
# Unless required by applicable law or agreed to in writing, software
|
10 |
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
# See the License for the specific language governing permissions and
|
13 |
# limitations under the License.
|
14 |
#
|
15 |
# You should have received a copy of the licenses; if not, see
|
16 |
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
|
17 |
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
|
18 |
# License, Version 2.0.
|
19 |
#
|
20 |
|
21 |
|
22 |
"""Fast export/import functionality."""
|
23 |
|
24 |
import sys |
25 |
|
26 |
from dulwich.index import ( |
27 |
commit_tree, |
28 |
) |
29 |
from dulwich.objects import ( |
30 |
Blob, |
31 |
Commit, |
32 |
Tag, |
33 |
) |
34 |
from fastimport import __version__ as fastimport_version |
35 |
if fastimport_version <= (0, 9, 5) and sys.version_info[0] == 3 and sys.version_info[1] < 5: |
36 |
raise ImportError("Older versions of fastimport don't support python3<3.5") |
37 |
from fastimport import ( |
38 |
commands, |
39 |
errors as fastimport_errors,
|
40 |
parser, |
41 |
processor, |
42 |
) |
43 |
|
44 |
import stat |
45 |
|
46 |
|
47 |
def split_email(text):
    """Split a ``Name <email>`` byte string into a (name, email) pair.

    The split happens at the last ``" <"`` occurrence and the trailing
    ``>`` is stripped from the address part.
    """
    name, address = text.rsplit(b" <", 1)
    return name, address.rstrip(b">")
50 |
|
51 |
|
52 |
class GitFastExporter(object):
    """Generate a git fast-export command stream from Dulwich objects."""

    def __init__(self, outf, store):
        """Create an exporter.

        :param outf: binary stream the fast-export commands are written to
        :param store: object store the exported objects are read from
        """
        self.outf = outf
        self.store = store
        # Maps allocated marks (byte strings) to the SHAs they stand for.
        self.markers = {}
        self._marker_idx = 0

    def print_cmd(self, cmd):
        """Serialize a fastimport command to the output stream."""
        # Prefer the command's __bytes__ serialization; fall back to repr.
        render = getattr(cmd, "__bytes__", None)
        if render is None:
            render = cmd.__repr__
        self.outf.write(render() + b"\n")

    def _allocate_marker(self):
        """Return a fresh, unique mark as an ASCII byte string."""
        self._marker_idx = self._marker_idx + 1
        return str(self._marker_idx).encode("ascii")

    def _export_blob(self, blob):
        """Build a BlobCommand for *blob* and record its mark."""
        mark = self._allocate_marker()
        self.markers[mark] = blob.id
        return commands.BlobCommand(mark, blob.data), mark

    def emit_blob(self, blob):
        """Write a blob command for *blob*; return the mark assigned to it."""
        blob_cmd, mark = self._export_blob(blob)
        self.print_cmd(blob_cmd)
        return mark

    def _iter_files(self, base_tree, new_tree):
        """Yield file commands for the tree diff from base_tree to new_tree."""
        changes = self.store.tree_changes(base_tree, new_tree)
        for (paths, modes, hexshas) in changes:
            old_path, new_path = paths
            old_mode, new_mode = modes
            old_hexsha, new_hexsha = hexshas
            if new_path is None:
                yield commands.FileDeleteCommand(old_path)
                continue
            if not stat.S_ISDIR(new_mode):
                marker = self.emit_blob(self.store[new_hexsha])
            if old_path is not None and old_path != new_path:
                yield commands.FileRenameCommand(old_path, new_path)
            if old_mode != new_mode or old_hexsha != new_hexsha:
                yield commands.FileModifyCommand(
                    new_path, new_mode, b":" + marker, None
                )

    def _export_commit(self, commit, ref, base_tree=None):
        """Build a CommitCommand (plus its file commands) for *commit*."""
        file_cmds = list(self._iter_files(base_tree, commit.tree))
        mark = self._allocate_marker()
        parents = commit.parents
        if parents:
            from_ = parents[0]
            merges = parents[1:]
        else:
            from_ = None
            merges = []
        author, author_email = split_email(commit.author)
        committer, committer_email = split_email(commit.committer)
        author_info = (author, author_email,
                       commit.author_time, commit.author_timezone)
        committer_info = (committer, committer_email,
                          commit.commit_time, commit.commit_timezone)
        commit_cmd = commands.CommitCommand(
            ref, mark, author_info, committer_info,
            commit.message, from_, merges, file_cmds)
        return commit_cmd, mark

    def emit_commit(self, commit, ref, base_tree=None):
        """Write a commit command for *commit*; return its mark."""
        commit_cmd, mark = self._export_commit(commit, ref, base_tree)
        self.print_cmd(commit_cmd)
        return mark
|
118 |
|
119 |
|
120 |
class GitImportProcessor(processor.ImportProcessor):
    """An import processor that imports into a Git repository using Dulwich."""
    # FIXME: Batch creation of objects?

    def __init__(self, repo, params=None, verbose=False, outf=None):
        """Create a processor that writes imported objects into *repo*.

        :param repo: Dulwich repository to import into
        :param params: optional parameters for the base ImportProcessor
        :param verbose: passed through to the base ImportProcessor
        :param outf: unused; accepted for interface compatibility
        """
        processor.ImportProcessor.__init__(self, params, verbose)
        self.repo = repo
        # SHA of the most recently imported commit (parent for the next one).
        self.last_commit = None
        # Maps fastimport marks to the SHAs of the objects they refer to.
        self.markers = {}
        # Current working tree contents: path -> (mode, hexsha).
        self._contents = {}

    def import_stream(self, stream):
        """Process a complete fastimport stream.

        :param stream: file-like object containing fast-export data
        :return: dict mapping marks to object SHAs
        """
        p = parser.ImportParser(stream)
        self.process(p.iter_commands)
        return self.markers

    def blob_handler(self, cmd):
        """Process a BlobCommand."""
        blob = Blob.from_string(cmd.data)
        self.repo.object_store.add_object(blob)
        if cmd.mark:
            self.markers[cmd.mark] = blob.id

    def checkpoint_handler(self, cmd):
        """Process a CheckpointCommand (no-op for an in-repo import)."""
        pass

    def commit_handler(self, cmd):
        """Process a CommitCommand."""
        commit = Commit()
        if cmd.author is not None:
            author = cmd.author
        else:
            # fast-export streams may omit the author; fall back to committer.
            author = cmd.committer
        (author_name, author_email, author_timestamp, author_timezone) = author
        (committer_name, committer_email, commit_timestamp,
            commit_timezone) = cmd.committer
        commit.author = author_name + b" <" + author_email + b">"
        commit.author_timezone = author_timezone
        commit.author_time = int(author_timestamp)
        commit.committer = committer_name + b" <" + committer_email + b">"
        commit.commit_timezone = commit_timezone
        commit.commit_time = int(commit_timestamp)
        commit.message = cmd.message
        commit.parents = []
        if cmd.from_:
            # Start from the state of the base commit's tree.
            self._reset_base(cmd.from_)
        for filecmd in cmd.iter_files():
            if filecmd.name == b"filemodify":
                if filecmd.data is not None:
                    blob = Blob.from_string(filecmd.data)
                    # BUGFIX: was object_store.add(), which does not exist;
                    # add_object() is the correct API (cf. blob_handler).
                    self.repo.object_store.add_object(blob)
                    blob_id = blob.id
                else:
                    assert filecmd.dataref.startswith(b":"), \
                        "non-marker refs not supported yet (%r)" % filecmd.dataref
                    blob_id = self.markers[filecmd.dataref[1:]]
                self._contents[filecmd.path] = (filecmd.mode, blob_id)
            elif filecmd.name == b"filedelete":
                del self._contents[filecmd.path]
            elif filecmd.name == b"filecopy":
                self._contents[filecmd.dest_path] = self._contents[
                    filecmd.src_path]
            elif filecmd.name == b"filerename":
                self._contents[filecmd.new_path] = self._contents[
                    filecmd.old_path]
                del self._contents[filecmd.old_path]
            elif filecmd.name == b"filedeleteall":
                self._contents = {}
            else:
                raise Exception("Command %s not supported" % filecmd.name)
        commit.tree = commit_tree(
            self.repo.object_store,
            ((path, hexsha, mode) for (path, (mode, hexsha)) in
                self._contents.items()))
        if self.last_commit is not None:
            commit.parents.append(self.last_commit)
        commit.parents += cmd.merges
        self.repo.object_store.add_object(commit)
        self.repo[cmd.ref] = commit.id
        self.last_commit = commit.id
        if cmd.mark:
            self.markers[cmd.mark] = commit.id

    def progress_handler(self, cmd):
        """Process a ProgressCommand (no-op)."""
        pass

    def _reset_base(self, commit_id):
        """Rebuild self._contents from the tree of *commit_id*.

        Skipped when we are already positioned on that commit.
        """
        if self.last_commit == commit_id:
            return
        self.last_commit = commit_id
        self._contents = {}
        tree_id = self.repo[commit_id].tree
        for (path, mode, hexsha) in (
                self.repo.object_store.iter_tree_contents(tree_id)):
            self._contents[path] = (mode, hexsha)

    def reset_handler(self, cmd):
        """Process a ResetCommand."""
        self._reset_base(cmd.from_)
        self.repo.refs[cmd.ref] = cmd.from_

    def tag_handler(self, cmd):
        """Process a TagCommand."""
        tag = Tag()
        tag.tagger = cmd.tagger
        tag.message = cmd.message
        tag.name = cmd.tag
        # BUGFIX: Repo has no add_object(); store the tag through the object
        # store, as the other handlers in this class do.
        self.repo.object_store.add_object(tag)
        # BUGFIX: tag.name is a byte string; the ref prefix must be bytes too
        # (str + bytes raises TypeError on Python 3).
        self.repo.refs[b"refs/tags/" + tag.name] = tag.id

    def feature_handler(self, cmd):
        """Process a FeatureCommand."""
        raise fastimport_errors.UnknownFeature(cmd.feature_name)
|