Skip to content

Commit 9636459

Browse files
committed
Add initial types to IndexFile __init__() to _to_relative_path()
1 parent 33346b2 commit 9636459

File tree

3 files changed

+82
-57
lines changed

3 files changed

+82
-57
lines changed

‎git/index/base.py

+72-50
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,19 @@
6363
git_working_dir
6464
)
6565

66+
# typing -----------------------------------------------------------------------------
67+
68+
from typing import Any, Callable, Dict, IO, Iterator, List, Sequence, TYPE_CHECKING, Tuple, Union
69+
70+
from git.types import PathLike, TBD
71+
72+
if TYPE_CHECKING:
73+
from subprocess import Popen
74+
from git.repo import Repo
75+
76+
StageType = int
77+
Treeish = Union[Tree, Commit, bytes]
78+
6679

6780
__all__ = ('IndexFile', 'CheckoutError')
6881

@@ -93,7 +106,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
93106
_VERSION = 2 # latest version we support
94107
S_IFGITLINK = S_IFGITLINK # a submodule
95108

96-
def __init__(self, repo, file_path=None):
109+
def __init__(self, repo: 'Repo', file_path: PathLike = None) -> None:
97110
"""Initialize this Index instance, optionally from the given ``file_path``.
98111
If no file_path is given, we will be created from the current index file.
99112
@@ -102,9 +115,9 @@ def __init__(self, repo, file_path=None):
102115
self.repo = repo
103116
self.version = self._VERSION
104117
self._extension_data = b''
105-
self._file_path = file_path or self._index_path()
118+
self._file_path = file_path or self._index_path() # type: PathLike
106119

107-
def _set_cache_(self, attr):
120+
def _set_cache_(self, attr: str) -> None:
108121
if attr == "entries":
109122
# read the current index
110123
# try memory map for speed
@@ -115,8 +128,8 @@ def _set_cache_(self, attr):
115128
ok = True
116129
except OSError:
117130
# in new repositories, there may be no index, which means we are empty
118-
self.entries = {}
119-
return
131+
self.entries = {} # type: Dict[Tuple[PathLike, StageType], IndexEntry]
132+
return None
120133
finally:
121134
if not ok:
122135
lfd.rollback()
@@ -133,15 +146,18 @@ def _set_cache_(self, attr):
133146
else:
134147
super(IndexFile, self)._set_cache_(attr)
135148

136-
def _index_path(self):
137-
return join_path_native(self.repo.git_dir, "index")
149+
def _index_path(self) -> PathLike:
150+
if self.repo.git_dir:
151+
return join_path_native(self.repo.git_dir, "index")
152+
else:
153+
raise GitCommandError("No git directory given to join index path")
138154

139155
@property
140-
def path(self):
156+
def path(self) -> PathLike:
141157
""" :return: Path to the index file we are representing """
142158
return self._file_path
143159

144-
def _delete_entries_cache(self):
160+
def _delete_entries_cache(self) -> None:
145161
"""Safely clear the entries cache so it can be recreated"""
146162
try:
147163
del(self.entries)
@@ -152,26 +168,26 @@ def _delete_entries_cache(self):
152168

153169
#{ Serializable Interface
154170

155-
def _deserialize(self, stream):
171+
def _deserialize(self, stream: IO) -> 'IndexFile':
156172
"""Initialize this instance with index values read from the given stream"""
157173
self.version, self.entries, self._extension_data, _conten_sha = read_cache(stream)
158174
return self
159175

160-
def _entries_sorted(self):
176+
def _entries_sorted(self) -> List[TBD]:
161177
""":return: list of entries, in a sorted fashion, first by path, then by stage"""
162178
return sorted(self.entries.values(), key=lambda e: (e.path, e.stage))
163179

164-
def _serialize(self, stream, ignore_extension_data=False):
180+
def _serialize(self, stream: IO, ignore_extension_data: bool = False) -> 'IndexFile':
165181
entries = self._entries_sorted()
166-
extension_data = self._extension_data
182+
extension_data = self._extension_data # type: Union[None, bytes]
167183
if ignore_extension_data:
168184
extension_data = None
169185
write_cache(entries, stream, extension_data)
170186
return self
171187

172188
#} END serializable interface
173189

174-
def write(self, file_path=None, ignore_extension_data=False):
190+
def write(self, file_path: Union[None, PathLike] = None, ignore_extension_data: bool = False) -> None:
175191
"""Write the current state to our file path or to the given one
176192
177193
:param file_path:
@@ -191,7 +207,7 @@ def write(self, file_path=None, ignore_extension_data=False):
191207
Alternatively, use IndexFile.write_tree() to handle this case
192208
automatically
193209
194-
:return: self"""
210+
:return: self # does it? or returns None?"""
195211
# make sure we have our entries read before getting a write lock
196212
# else it would be done when streaming. This can happen
197213
# if one doesn't change the index, but writes it right away
@@ -215,7 +231,7 @@ def write(self, file_path=None, ignore_extension_data=False):
215231

216232
@post_clear_cache
217233
@default_index
218-
def merge_tree(self, rhs, base=None):
234+
def merge_tree(self, rhs: Treeish, base: Union[None, Treeish] = None) -> 'IndexFile':
219235
"""Merge the given rhs treeish into the current index, possibly taking
220236
a common base treeish into account.
221237
@@ -242,7 +258,7 @@ def merge_tree(self, rhs, base=None):
242258
# -i : ignore working tree status
243259
# --aggressive : handle more merge cases
244260
# -m : do an actual merge
245-
args = ["--aggressive", "-i", "-m"]
261+
args = ["--aggressive", "-i", "-m"] # type: List[Union[Treeish, str]]
246262
if base is not None:
247263
args.append(base)
248264
args.append(rhs)
@@ -251,7 +267,7 @@ def merge_tree(self, rhs, base=None):
251267
return self
252268

253269
@classmethod
254-
def new(cls, repo, *tree_sha):
270+
def new(cls, repo: 'Repo', *tree_sha: bytes) -> 'IndexFile':
255271
""" Merge the given treeish revisions into a new index which is returned.
256272
This method behaves like git-read-tree --aggressive when doing the merge.
257273
@@ -275,7 +291,7 @@ def new(cls, repo, *tree_sha):
275291
return inst
276292

277293
@classmethod
278-
def from_tree(cls, repo, *treeish, **kwargs):
294+
def from_tree(cls, repo: 'Repo', *treeish: Treeish, **kwargs: Any) -> 'IndexFile':
279295
"""Merge the given treeish revisions into a new index which is returned.
280296
The original index will remain unaltered
281297
@@ -312,7 +328,7 @@ def from_tree(cls, repo, *treeish, **kwargs):
312328
if len(treeish) == 0 or len(treeish) > 3:
313329
raise ValueError("Please specify between 1 and 3 treeish, got %i" % len(treeish))
314330

315-
arg_list = []
331+
arg_list = [] # type: List[Union[Treeish, str]]
316332
# ignore that working tree and index possibly are out of date
317333
if len(treeish) > 1:
318334
# drop unmerged entries when reading our index and merging
@@ -331,7 +347,8 @@ def from_tree(cls, repo, *treeish, **kwargs):
331347
# as it considers existing entries. moving it essentially clears the index.
332348
# Unfortunately there is no 'soft' way to do it.
333349
# The TemporaryFileSwap assure the original file get put back
334-
index_handler = TemporaryFileSwap(join_path_native(repo.git_dir, 'index'))
350+
if repo.git_dir:
351+
index_handler = TemporaryFileSwap(join_path_native(repo.git_dir, 'index'))
335352
try:
336353
repo.git.read_tree(*arg_list, **kwargs)
337354
index = cls(repo, tmp_index)
@@ -346,18 +363,18 @@ def from_tree(cls, repo, *treeish, **kwargs):
346363

347364
# UTILITIES
348365
@unbare_repo
349-
def _iter_expand_paths(self, paths):
366+
def _iter_expand_paths(self, paths: Sequence[PathLike]) -> Iterator[PathLike]:
350367
"""Expand the directories in list of paths to the corresponding paths accordingly,
351368
352369
Note: git will add items multiple times even if a glob overlapped
353370
with manually specified paths or if paths where specified multiple
354371
times - we respect that and do not prune"""
355372
def raise_exc(e):
356373
raise e
357-
r = self.repo.working_tree_dir
374+
r = str(self.repo.working_tree_dir)
358375
rs = r + os.sep
359376
for path in paths:
360-
abs_path = path
377+
abs_path = str(path)
361378
if not osp.isabs(abs_path):
362379
abs_path = osp.join(r, path)
363380
# END make absolute path
@@ -374,7 +391,7 @@ def raise_exc(e):
374391
# end check symlink
375392

376393
# if the path is not already pointing to an existing file, resolve globs if possible
377-
if not os.path.exists(path) and ('?' in path or '*' in path or '[' in path):
394+
if not os.path.exists(abs_path) and ('?' in abs_path or '*' in abs_path or '[' in abs_path):
378395
resolved_paths = glob.glob(abs_path)
379396
# not abs_path in resolved_paths:
380397
# a glob() resolving to the same path we are feeding it with
@@ -396,12 +413,12 @@ def raise_exc(e):
396413
# END for each subdirectory
397414
except OSError:
398415
# was a file or something that could not be iterated
399-
yield path.replace(rs, '')
416+
yield abs_path.replace(rs, '')
400417
# END path exception handling
401418
# END for each path
402419

403-
def _write_path_to_stdin(self, proc, filepath, item, fmakeexc, fprogress,
404-
read_from_stdout=True):
420+
def _write_path_to_stdin(self, proc: 'Popen', filepath: PathLike, item, fmakeexc, fprogress,
421+
read_from_stdout: bool = True) -> Union[None, str]:
405422
"""Write path to proc.stdin and make sure it processes the item, including progress.
406423
407424
:return: stdout string
@@ -417,20 +434,24 @@ def _write_path_to_stdin(self, proc, filepath, item, fmakeexc, fprogress,
417434
we will close stdin to break the pipe."""
418435

419436
fprogress(filepath, False, item)
420-
rval = None
421-
try:
422-
proc.stdin.write(("%s\n" % filepath).encode(defenc))
423-
except IOError as e:
424-
# pipe broke, usually because some error happened
425-
raise fmakeexc() from e
426-
# END write exception handling
427-
proc.stdin.flush()
428-
if read_from_stdout:
437+
rval = None # type: Union[None, str]
438+
439+
if proc.stdin is not None:
440+
try:
441+
proc.stdin.write(("%s\n" % filepath).encode(defenc))
442+
except IOError as e:
443+
# pipe broke, usually because some error happened
444+
raise fmakeexc() from e
445+
# END write exception handling
446+
proc.stdin.flush()
447+
448+
if read_from_stdout and proc.stdout is not None:
429449
rval = proc.stdout.readline().strip()
430450
fprogress(filepath, True, item)
431451
return rval
432452

433-
def iter_blobs(self, predicate=lambda t: True):
453+
def iter_blobs(self, predicate: Callable[[Tuple[StageType, Blob]], bool] = lambda t: True
454+
) -> Iterator[Tuple[StageType, Blob]]:
434455
"""
435456
:return: Iterator yielding tuples of Blob objects and stages, tuple(stage, Blob)
436457
@@ -446,20 +467,21 @@ def iter_blobs(self, predicate=lambda t: True):
446467
yield output
447468
# END for each entry
448469

449-
def unmerged_blobs(self):
470+
def unmerged_blobs(self) -> Dict[PathLike, List[Tuple[StageType, Blob]]]:
450471
"""
451472
:return:
452473
Iterator yielding dict(path : list( tuple( stage, Blob, ...))), being
453474
a dictionary associating a path in the index with a list containing
454475
sorted stage/blob pairs
476+
##### Does it return iterator? or just the Dict?
455477
456478
:note:
457479
Blobs that have been removed in one side simply do not exist in the
458480
given stage. I.e. a file removed on the 'other' branch whose entries
459481
are at stage 3 will not have a stage 3 entry.
460482
"""
461483
is_unmerged_blob = lambda t: t[0] != 0
462-
path_map = {}
484+
path_map = {} # type: Dict[PathLike, List[Tuple[TBD, Blob]]]
463485
for stage, blob in self.iter_blobs(is_unmerged_blob):
464486
path_map.setdefault(blob.path, []).append((stage, blob))
465487
# END for each unmerged blob
@@ -468,10 +490,10 @@ def unmerged_blobs(self):
468490
return path_map
469491

470492
@classmethod
471-
def entry_key(cls, *entry):
472-
return entry_key(*entry)
493+
def entry_key(cls, entry: Union[Tuple[BaseIndexEntry], Tuple[PathLike, StageType]]) -> Tuple[PathLike, StageType]:
494+
return entry_key(entry)
473495

474-
def resolve_blobs(self, iter_blobs):
496+
def resolve_blobs(self, iter_blobs: Iterator[Blob]) -> 'IndexFile':
475497
"""Resolve the blobs given in blob iterator. This will effectively remove the
476498
index entries of the respective path at all non-null stages and add the given
477499
blob as new stage null blob.
@@ -489,9 +511,9 @@ def resolve_blobs(self, iter_blobs):
489511
for blob in iter_blobs:
490512
stage_null_key = (blob.path, 0)
491513
if stage_null_key in self.entries:
492-
raise ValueError("Path %r already exists at stage 0" % blob.path)
514+
raise ValueError("Path %r already exists at stage 0" % str(blob.path))
493515
# END assert blob is not stage 0 already
494-
516+
495517
# delete all possible stages
496518
for stage in (1, 2, 3):
497519
try:
@@ -506,7 +528,7 @@ def resolve_blobs(self, iter_blobs):
506528

507529
return self
508530

509-
def update(self):
531+
def update(self) -> 'IndexFile':
510532
"""Reread the contents of our index file, discarding all cached information
511533
we might have.
512534
@@ -517,7 +539,7 @@ def update(self):
517539
# allows to lazily reread on demand
518540
return self
519541

520-
def write_tree(self):
542+
def write_tree(self) -> Tree:
521543
"""Writes this index to a corresponding Tree object into the repository's
522544
object database and return it.
523545
@@ -542,22 +564,22 @@ def write_tree(self):
542564
root_tree._cache = tree_items
543565
return root_tree
544566

545-
def _process_diff_args(self, args):
567+
def _process_diff_args(self, args: Any) -> List[Any]:
546568
try:
547569
args.pop(args.index(self))
548570
except IndexError:
549571
pass
550572
# END remove self
551573
return args
552574

553-
def _to_relative_path(self, path):
575+
def _to_relative_path(self, path: PathLike) -> PathLike:
554576
""":return: Version of path relative to our git directory or raise ValueError
555577
if it is not within our git directory"""
556578
if not osp.isabs(path):
557579
return path
558580
if self.repo.bare:
559581
raise InvalidGitRepositoryError("require non-bare repository")
560-
if not path.startswith(self.repo.working_tree_dir):
582+
if not str(path).startswith(str(self.repo.working_tree_dir)):
561583
raise ValueError("Absolute path %r is not in git repository at %r" % (path, self.repo.working_tree_dir))
562584
return os.path.relpath(path, self.repo.working_tree_dir)
563585

‎git/index/fun.py

+7-4
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
# Contains standalone functions to accompany the index implementation and make it
22
# more versatile
33
# NOTE: Autodoc hates it if this is a docstring
4+
from git.types import PathLike, TBD
45
from io import BytesIO
56
import os
67
from stat import (
@@ -12,6 +13,7 @@
1213
S_IFREG,
1314
)
1415
import subprocess
16+
from typing import List, Tuple, Union, cast
1517

1618
from git.cmd import PROC_CREATIONFLAGS, handle_process_output
1719
from git.compat import (
@@ -166,11 +168,12 @@ def read_header(stream):
166168
return version, num_entries
167169

168170

169-
def entry_key(*entry):
171+
def entry_key(entry: Union[Tuple[BaseIndexEntry], Tuple[PathLike, TBD]]):
170172
""":return: Key suitable to be used for the index.entries dictionary
171173
:param entry: One instance of type BaseIndexEntry or the path and the stage"""
172174
if len(entry) == 1:
173-
return (entry[0].path, entry[0].stage)
175+
entry_first = cast(BaseIndexEntry, entry[0]) # type: BaseIndexEntry
176+
return (entry_first.path, entry_first.stage)
174177
return tuple(entry)
175178
# END handle entry
176179

@@ -283,7 +286,7 @@ def _tree_entry_to_baseindexentry(tree_entry, stage):
283286
return BaseIndexEntry((tree_entry[1], tree_entry[0], stage << CE_STAGESHIFT, tree_entry[2]))
284287

285288

286-
def aggressive_tree_merge(odb, tree_shas):
289+
def aggressive_tree_merge(odb, tree_shas) -> List[BaseIndexEntry]:
287290
"""
288291
:return: list of BaseIndexEntries representing the aggressive merge of the given
289292
trees. All valid entries are on stage 0, whereas the conflicting ones are left
@@ -292,7 +295,7 @@ def aggressive_tree_merge(odb, tree_shas):
292295
:param tree_shas: 1, 2 or 3 trees as identified by their binary 20 byte shas
293296
If 1 or two, the entries will effectively correspond to the last given tree
294297
If 3 are given, a 3 way merge is performed"""
295-
out = []
298+
out = [] # type: List[BaseIndexEntry]
296299
out_append = out.append
297300

298301
# one and two way is the same for us, as we don't have to handle an existing

0 commit comments

Comments
 (0)