pygit.py
#
# Released under GPL3 license.
#
import collections
import difflib
import hashlib
import operator
import os
import stat
import struct
import sys
import time
import urllib.request
import zlib
# Data for one entry in the git index (.git/index)
IndexEntry = collections.namedtuple('IndexEntry', [
'ctime_s', 'ctime_n', 'mtime_s', 'mtime_n', 'dev', 'ino', 'mode',
'uid', 'gid', 'size', 'sha1', 'flags', 'path'
])
def read_file(path):
"""Read contents of file at guven path as bytes."""
with open(path, 'rb') as f:
return f.read()
def write_file(path, data):
"""Write data bytes to file at given path."""
with open(path, 'wb') as f:
f.write(data)
def init(repo):
""" Create directory for repo and initialize .git directory"""
os.mkdir(repo)
os.mkdir(os.path.join(repo, '.git'))
    for name in ['objects', 'refs', 'refs/heads']:
os.mkdir(os.path.join(repo, '.git', name))
write_file(os.path.join(repo, '.git', 'HEAD'),
b'ref: refs/heads/master')
print('Initialized empty repository: {}'.format(repo))
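# For illustration (the repo name here is hypothetical), init('myrepo') is
# expected to leave behind roughly this layout:
#
#   myrepo/.git/objects/        loose object store (empty until the first add)
#   myrepo/.git/refs/heads/     branch refs; master appears after the first commit
#   myrepo/.git/HEAD            contains "ref: refs/heads/master"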
def hash_object(data, obj_type, write=True):
"""Compute hash of object data of given type and write to object store
if 'write' is True. Return SHA-1 hash hex string.
"""
header = '{} {}'.format(obj_type, len(data)).encode()
full_data = header + b'\x00' + data
sha1 = hashlib.sha1(full_data).hexdigest()
if write:
path = os.path.join('.git', 'objects', sha1[:2], sha1[2:])
if not os.path.exists(path):
os.makedirs(os.path.dirname(path), exist_ok=True)
write_file(path, zlib.compress(full_data))
return sha1
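# A quick illustrative use (the file name is hypothetical): computing the blob
# hash of a file's contents without writing it to the object store, which
# should match what `git hash-object <file>` prints for the same bytes.
#
#   sha1 = hash_object(read_file('notes.txt'), 'blob', write=False)
#   print(sha1)   # 40-character SHA-1 hex digest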
def find_object(sha1_prefix):
"""Read object with given SHA-1 prefix and return tuple of
(object_type, data_bytes), or raise ValueError if not found.
"""
if len(sha1_prefix):
raise ValueError('hash prefix must be 2 or more characteres')
obj_dir = os.path.join('.git', 'objects', sha1_prefix[:2])
rest = sha1_prefix[2:]
objects = [name for name in os.listdir(obj_dir) if name.startswith(rest)]
if not objects:
raise ValueError('object {!r} not found'.format(sha1_prefix))
if len(objects) >= 2:
        raise ValueError('multiple objects ({}) with prefix {!r}'.format(
len(objects), sha1_prefix))
return os.path.join(obj_dir, objects[0])
def read_object(sha1_prefix):
"""Read object with given SHA-1 prefix and return tuple of
(object_type, data_bytes), or raise ValueError if not found.
"""
path = find_object(sha1_prefix)
    full_data = zlib.decompress(read_file(path))
    nul_index = full_data.index(b'\x00')
header = full_data[:nul_index]
obj_type, size_str = header.decode().split()
size = int(size_str)
data = full_data[nul_index + 1:]
assert size == len(data), 'expected size {}, got {} bytes'.format(
size, len(data))
    return (obj_type, data)
def cat_file(mode, sha1_prefix):
"""Write the contents of (or info about) object with given SHA-1 prefix to
stdout. If mode is 'commit', 'tree' or 'blob', print raw data bytes of
object. If mode is 'size', print the size of the object. If mode is
'type', print the type of the object. If mode is 'pretty', print a
prettified version of the object.
"""
obj_type, data = read_object(sha1_prefix)
if mode in ['commit', 'tree', 'blob']:
if obj_type != mode:
raise ValueError('expected object type {}, got {}'.format(
mode, obj_type))
        sys.stdout.buffer.write(data)
elif mode == 'size':
print(len(data))
elif mode == 'type':
print(obj_type)
elif mode == 'pretty':
if obj_type in ['commit', 'blob']:
sys.stdout.buffer.write(data)
elif obj_type == 'tree':
for mode, path, sha1 in read_tree(data=data):
type_str = 'tree' if stat.S_ISDIR(mode) else 'blob'
print('{:06o} {} {}\t{}'.format(mode, type_str, sha1, path))
else:
assert False, 'unhandled object type {!r}'.format(obj_type)
else:
raise ValueError('unexpected mode {!r}'.format(mode))
def read_index():
"""Read git index file and return list of IndexEntry objects."""
try:
        data = read_file(os.path.join('.git', 'index'))
except FileNotFoundError:
return []
digest = hashlib.sha1(data[:-20]).digest()
assert digest == data[-20:], 'invalid index checksum'
    signature, version, num_entries = struct.unpack('!4sLL', data[:12])
    assert signature == b'DIRC', \
        'invalid index signature {}'.format(signature)
    assert version == 2, 'unknown index version {}'.format(version)
entry_data = data[12:-20]
entries = []
i = 0
while i + 62 < len(entry_data):
fields_end = i + 62
fields = struct.unpack('!LLLLLLLLLL20sH',
entry_data[i:fields_end])
path_end = entry_data.index(b'\x00', fields_end)
path = entry_data[fields_end:path_end]
entry = IndexEntry(*(fields + (path.decode(),)))
entries.append(entry)
entry_len = ((62 + len(path) + 8) // 8) * 8
i += entry_len
assert len(entries) == num_entries
return entries
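# A rough sketch of the on-disk layout read_index() and write_index() assume
# (index format version 2): a 12-byte header (b'DIRC', version, entry count),
# then one entry per file -- 62 bytes of fixed fields followed by the
# NUL-terminated path, padded with NULs to a multiple of 8 bytes -- and
# finally a 20-byte SHA-1 digest over everything that precedes it.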
def ls_files(details=False):
"""Print list of files in index (including mode, SHA-1, and stage number
if "details" is True).
"""
for entry in read_index():
if details:
stage = (entry.flags >> 12) & 3
            print('{:6o} {} {:}\t{}'.format(
                entry.mode, entry.sha1.hex(), stage, entry.path))
else:
print(entry.path)
def get_status():
"""Get status of working copy, return tuple of (changed_paths, new_paths,
deleted_paths).
"""
paths = set()
for root, dirs, files in os.walk('.'):
dirs[:] = [d for d in dirs if d != '.git']
for file in files:
path = os.path.join(root, file)
path = path.replace('\\', '/')
            if path.startswith('./'):
path = path[2:]
paths.add(path)
entries_by_path = {e.path: e for e in read_index()}
entry_paths = set(entries_by_path)
changed = {p for p in (paths & entry_paths)
if hash_object(read_file(p), 'blob', write=False) !=
entries_by_path[p].sha1.hex()
}
new = paths - entry_paths
deleted = entry_paths - paths
return (sorted(changed), sorted(new), sorted(deleted))
def status():
"""Show status if working copy."""
changed, new, deleted = get_status()
if changed:
        print('changed files:')
        for path in changed:
            print(' ', path)
if new:
print('new files:')
for path in new:
            print(' ', path)
if deleted:
print('deleted files:')
for path in deleted:
            print(' ', path)
def diff():
"""Show diff of files changed (between index and working copy)."""
changed, _, _ = get_status()
entries_by_path = {e.path: e for e in read_index()}
for i, path in enumerate(changed):
sha1 = entries_by_path[path].sha1.hex()
obj_type, data = read_object(sha1)
assert obj_type == 'blob'
        index_lines = data.decode().splitlines()
        working_lines = read_file(path).decode().splitlines()
diff_lines = difflib.unified_diff(
index_lines, working_lines,
'{} (index)'.format(path),
'{} (working copy)'.format(path),
lineterm=''
)
for line in diff_lines:
print(line)
if i < len(changed) - 1:
print('-' * 70)
def write_index(entries):
"""Write list of IndexEntry objects to git index files."""
packed_entries = []
for entry in entries:
entry_head = struct.pack('!LLLLLLLLLL20sH',
entry.ctime_s, entry.ctime_n, entry.mtime_s,
entry.mtime_n, entry.dev, entry.ino, entry.mode,
entry.uid, entry.gid, entry.size, entry.sha1,
entry.flags
)
path = entry.path.encode()
length = ((62 + len(path) + 8) // 8) * 8
packed_entry = entry_head + path + b'\x00' * (length - 62 - len(path))
packed_entries.append(packed_entry)
    header = struct.pack('!4sLL', b'DIRC', 2, len(entries))
all_data = header + b''.join(packed_entries)
digest = hashlib.sha1(all_data).digest()
write_file(os.path.join('.git', 'index'), all_data + digest)
def add(paths):
"""Add all file paths to git index."""
paths = [p.replace('\\', '/') for p in paths]
all_entries = read_index()
entries = [e for e in all_entries if e.path not in paths]
for path in paths:
sha1 = hash_object(read_file(path), 'blob')
st = os.stat(path)
flags = len(path.encode())
assert flags < (1 << 12)
entry = IndexEntry(
int(st.st_ctime), 0, int(st.st_mtime), 0, st.st_dev,
st.st_ino, st.st_mode, st.st_uid, st.st_gid, st.st_size,
            bytes.fromhex(sha1), flags, path)
entries.append(entry)
    entries.sort(key=operator.attrgetter('path'))
write_index(entries)
def write_tree():
"""Write a tree object from the current index entries."""
tree_entries = []
for entry in read_index():
assert '/' not in entry.path, \
'currently only supports a single, top-level directory'
mode_path = '{:o} {}'.format(entry.mode, entry.path).encode()
tree_entry = mode_path + b'\x00' + entry.sha1
tree_entries.append(tree_entry)
    return hash_object(b''.join(tree_entries), 'tree')
def get_local_master_hash():
"""Get current commit hash (SHA-1 string) of local master branch."""
master_path = os.path.join('.git', 'refs', 'heads', 'master')
try:
return read_file(master_path).decode().strip()
except FileNotFoundError:
return None
def commit(message, author=None):
"""Commit the current state of the index to master with given message.
Return hash of commit object.
"""
tree = write_tree()
parent = get_local_master_hash()
    if author is None:
author = '{} <{}>'.format(
os.environ['GIT_AUTHOR_NAME'], os.environ['GIT_AUTHOR_EMAIL'])
timestamp = int(time.mktime(time.localtime()))
utc_offset = -time.timezone
    author_time = '{} {}{:02}{:02}'.format(
        timestamp,
        '+' if utc_offset > 0 else '-',
        abs(utc_offset) // 3600,
        (abs(utc_offset) // 60) % 60)
    lines = ['tree ' + tree]
if parent:
lines.append('parent ' + parent)
lines.append('author {} {}'.format(author, author_time))
    lines.append('committer {} {}'.format(author, author_time))
lines.append('')
lines.append(message)
lines.append('')
data = '\n'.join(lines).encode()
sha1 = hash_object(data, 'commit')
master_path = os.path.join('.git', 'refs', 'heads', 'master')
write_file(master_path, (sha1 + '\n').encode())
    print('committed to master: {:7}'.format(sha1))
return sha1
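# For reference, the commit object text assembled above looks roughly like
# this (values are placeholders; the parent line is omitted for a root
# commit):
#
#   tree <tree_sha1>
#   parent <parent_sha1>
#   author Jane Doe <jane@example.com> 1234567890 +0000
#   committer Jane Doe <jane@example.com> 1234567890 +0000
#
#   <commit message>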
def extract_lines(data):
"""Extract list of lines from given server data."""
lines = []
i = 0
for _ in range(1000):
line_length = int(data[i:i + 4], 16)
line = data[i + 4:i + line_length]
lines.append(line)
if line_length == 0:
i += 4
else:
i += line_length
if i >= len(data):
break
return lines
def build_lines_data(lines):
"""Build byte string from given lines to send to server."""
result = []
for line in lines:
        result.append('{:04x}'.format(len(line) + 5).encode())
result.append(line)
result.append(b'\n')
result.append(b'0000')
return b''.join(result)
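# Both helpers above implement the smart-HTTP "pkt-line" framing: every line
# is prefixed with its total length as 4 lowercase hex digits (the 4 length
# digits and the newline added here are included in the count), and b'0000'
# is the flush packet that terminates the stream. An illustrative round trip:
#
#   build_lines_data([b'hello'])          == b'000ahello\n0000'
#   extract_lines(b'000ahello\n0000')[0]  == b'hello\n'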
def http_request(url, username, password, data=None):
"""Make an authenticated HTTP request to given URL (GET by default, POST
id "data" is not None).
"""
password_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(None, url, username, password)
auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
opener = urllib.request.build_opener(auth_handler)
f = opener.open(url, data=data)
return f.read()
def get_remote_master_hash(git_url, username, password):
"""Get commit hash of remote master branch, return SHA-1 hex string or
None if no remote commits.
"""
url = git_url + '/info/refs?service=git-receive-pack'
response = http_request(url, username, password)
lines = extract_lines(response)
assert lines[0] == b'# service=git-receive-pack\n'
assert lines[1] == b''
if lines[2][:40] == b'0' * 40:
return None
master_sha1, master_ref = lines[2].split(b'\x00')[0].split()
assert master_ref == b'refs/heads/master'
return master_sha1.decode()
def read_tree(sha1=None, data=None):
"""Read tree object object with given SHA-1 (hex string) or data, and return list
of (mode, path, sha1) tuples.
"""
if sha1 is not None:
obj_type, data = read_object(sha1)
assert obj_type == 'tree'
elif data is None:
raise TypeError('must specify "sha1" or "data"')
i = 0
entries = []
for _ in range(1000):
end = data.find(b'\x00', i)
if end == -1:
break
mode_str, path = data[i:end].decode().split()
mode = int(mode_str, 8)
digest = data[end + 1:end + 21]
entries.append((mode, path, digest.hex()))
i = end + 1 + 20
return entries
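# Each entry in a tree object's data, as parsed above, is laid out as
# "<mode in octal> <path>" followed by a NUL byte and the entry's raw 20-byte
# SHA-1. For example (the hash bytes here are a placeholder):
#
#   b'100644 pygit.py\x00' + <20 raw SHA-1 bytes>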
def find_tree_objects(tree_sha1):
"""Return set of SHA-1 hashes of all objects in this tree (recursively),
    including the hash of the tree itself.
"""
objects = {tree_sha1}
for mode, path, sha1 in read_tree(sha1=tree_sha1):
if stat.S_ISDIR(mode):
objects.update(find_tree_objects(sha1))
else:
objects.add(sha1)
return objects
def find_commit_objects(commit_sha1):
"""Return set of SHA-1 hashes of all objects in this commit (recursively),
its tree, its parents, and the hash of the commit itself.
"""
objects = {commit_sha1}
obj_type, commit = read_object(commit_sha1)
assert obj_type == 'commit'
lines = commit.decode().splitlines()
tree = next(l[5:45] for l in lines if l.startswith('tree '))
objects.update(find_tree_objects(tree))
parents = (l[7:47] for l in lines if l.startswith('parent '))
for parent in parents:
objects.update(find_commit_objects(parent))
return objects
def find_missing_objects(local_sha1, remote_sha1):
"""Return set of SHA-1 hashes of objects in local commit that are missing
at the remote (based on the given remote commit hash).
"""
local_objects = find_commit_objects(local_sha1)
if remote_sha1 is None:
return local_objects
remote_objects = find_commit_objects(remote_sha1)
return local_objects - remote_objects
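# encode_pack_object() below looks up ObjectType, which does not appear
# elsewhere in this excerpt; a minimal sketch using the standard pack-format
# type numbers (commit = 1, tree = 2, blob = 3) would be:
import enum


class ObjectType(enum.Enum):
    """Object type enum used when writing pack file entry headers."""
    commit = 1
    tree = 2
    blob = 3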
def encode_pack_object(obj):
"""Encode a single object for a pack file and return bytes (variable-
length header followed by compressed data bytes).
"""
obj_type, data = read_object(obj)
type_num = ObjectType[obj_type].value
size = len(data)
byte = (type_num << 4) | (size & 0x0f)
size >>= 4
header = []
while size:
header.append(byte | 0x80)
byte = size & 0x7f
size >>= 7
header.append(byte)
return bytes(header) + zlib.compress(data)
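# A worked example of the header encoding above (values chosen for
# illustration): for a blob (type 3) of 100 bytes, the low 4 size bits (4) go
# into the first byte alongside the type, leaving size 6 for the next byte,
# so the header is bytes([0x80 | 0x34, 0x06]) == b'\xb4\x06', followed by the
# zlib-compressed blob data.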
def create_pack(objects):
"""Create pack file containing all objects in given set of SHA-1
hashes, return data bytes of full pack file.
"""
header = struct.pack('!4sLL', b'PACK', 2, len(objects))
    body = b''.join(encode_pack_object(o) for o in sorted(objects))
    contents = header + body
    sha1 = hashlib.sha1(contents).digest()
data = contents + sha1
return data