From dd07fa946b200c3b1cd692a64992bcbae7e20a54 Mon Sep 17 00:00:00 2001 From: monnerat Date: Mon, 26 Aug 2019 00:47:38 +0200 Subject: [PATCH 01/22] Allow configured email addresses to be pre-quoted as "<...>". (#722) Allow configured email addresses to be pre-quoted as "<...>". --- dulwich/repo.py | 6 ++++++ dulwich/tests/test_repository.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/dulwich/repo.py b/dulwich/repo.py index 9e6fd9e96..aaa066e6b 100644 --- a/dulwich/repo.py +++ b/dulwich/repo.py @@ -34,6 +34,7 @@ import sys import stat import time +import re from dulwich.errors import ( NoIndexPresent, @@ -112,6 +113,8 @@ DEFAULT_REF = b'refs/heads/master' +quoted_email_re = re.compile(br"^\s*<\s*(.*)\s*>\s*$") + class InvalidUserIdentity(Exception): """User identity is not of the format 'user '""" @@ -175,6 +178,9 @@ def get_user_identity(config, kind=None): email = default_email if not isinstance(email, bytes): email = email.encode('utf-8') + m = quoted_email_re.match(email) + if m: + email = m.group(1) return (user + b" <" + email + b">") diff --git a/dulwich/tests/test_repository.py b/dulwich/tests/test_repository.py index c05e6ef86..368f943de 100644 --- a/dulwich/tests/test_repository.py +++ b/dulwich/tests/test_repository.py @@ -882,7 +882,7 @@ def test_commit_config_identity(self): r = self._repo c = r.get_config() c.set((b"user", ), b"name", b"Jelmer") - c.set((b"user", ), b"email", b"jelmer@apache.org") + c.set((b"user", ), b"email", b"") c.write_to_path() commit_sha = r.do_commit(b'message') self.assertEqual( From 1beda2132a344fbee909076aee82ad829bf9755d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 25 Aug 2019 22:48:40 +0000 Subject: [PATCH 02/22] Update NEWS. --- NEWS | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/NEWS b/NEWS index 0ddfae524..2e60b84e4 100644 --- a/NEWS +++ b/NEWS @@ -1,3 +1,7 @@ +0.19.14 UNRELEASED + + * Strip superfluous <> around email. 
(monnerat) + 0.19.13 2019-08-19 BUG FIXES From 3f87027ed1f16a667731778f719d4c790a6d6ea1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 25 Aug 2019 22:52:05 +0000 Subject: [PATCH 03/22] Avoid re module. --- dulwich/repo.py | 9 ++------- dulwich/tests/test_repository.py | 16 ++++++++++++++++ 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/dulwich/repo.py b/dulwich/repo.py index aaa066e6b..dbd6417b8 100644 --- a/dulwich/repo.py +++ b/dulwich/repo.py @@ -34,7 +34,6 @@ import sys import stat import time -import re from dulwich.errors import ( NoIndexPresent, @@ -113,9 +112,6 @@ DEFAULT_REF = b'refs/heads/master' -quoted_email_re = re.compile(br"^\s*<\s*(.*)\s*>\s*$") - - class InvalidUserIdentity(Exception): """User identity is not of the format 'user '""" @@ -178,9 +174,8 @@ def get_user_identity(config, kind=None): email = default_email if not isinstance(email, bytes): email = email.encode('utf-8') - m = quoted_email_re.match(email) - if m: - email = m.group(1) + if email.startswith(b'<') and email.endswith(b'>'): + email = email[1:-1] return (user + b" <" + email + b">") diff --git a/dulwich/tests/test_repository.py b/dulwich/tests/test_repository.py index 368f943de..1529917ab 100644 --- a/dulwich/tests/test_repository.py +++ b/dulwich/tests/test_repository.py @@ -882,6 +882,22 @@ def test_commit_config_identity(self): r = self._repo c = r.get_config() c.set((b"user", ), b"name", b"Jelmer") + c.set((b"user", ), b"email", b"jelmer@apache.org") + c.write_to_path() + commit_sha = r.do_commit(b'message') + self.assertEqual( + b"Jelmer ", + r[commit_sha].author) + self.assertEqual( + b"Jelmer ", + r[commit_sha].committer) + + def test_commit_config_identity_strips_than(self): + # commit falls back to the users' identity if it wasn't specified, + # and strips superfluous <> + r = self._repo + c = r.get_config() + c.set((b"user", ), b"name", b"Jelmer") c.set((b"user", ), b"email", b"") c.write_to_path() commit_sha = 
r.do_commit(b'message') From cb99baa336f96c2f04fd94d758c6b8e3b0d5d07d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 25 Aug 2019 23:07:44 +0000 Subject: [PATCH 04/22] Fix style. --- dulwich/repo.py | 1 + 1 file changed, 1 insertion(+) diff --git a/dulwich/repo.py b/dulwich/repo.py index dbd6417b8..ccb30f1a2 100644 --- a/dulwich/repo.py +++ b/dulwich/repo.py @@ -112,6 +112,7 @@ DEFAULT_REF = b'refs/heads/master' + class InvalidUserIdentity(Exception): """User identity is not of the format 'user '""" From c33607e8d76643c6ec44b3010b138d2039c9acec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 29 Sep 2019 21:23:18 +0000 Subject: [PATCH 05/22] Add another test for ref setting. --- dulwich/tests/test_refs.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dulwich/tests/test_refs.py b/dulwich/tests/test_refs.py index 7c5bf1db0..b9c76457f 100644 --- a/dulwich/tests/test_refs.py +++ b/dulwich/tests/test_refs.py @@ -224,6 +224,10 @@ def test_set_if_equals(self): b'HEAD', b'42d06bd4b77fed026b154d16493e5deab78f02ec', nines)) self.assertEqual(nines, self._refs[b'HEAD']) + # Setting the ref again is a no-op, but will return True. + self.assertTrue(self._refs.set_if_equals(b'HEAD', nines, nines)) + self.assertEqual(nines, self._refs[b'HEAD']) + self.assertTrue(self._refs.set_if_equals(b'refs/heads/master', None, nines)) self.assertEqual(nines, self._refs[b'refs/heads/master']) From 15b00efb3106083912b9af0955ccb82743a0104d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 20 Oct 2019 00:38:03 +0000 Subject: [PATCH 06/22] Stop checking for ref validity client-side. --- NEWS | 3 +++ dulwich/client.py | 2 -- dulwich/tests/test_client.py | 14 -------------- 3 files changed, 3 insertions(+), 16 deletions(-) diff --git a/NEWS b/NEWS index 2e60b84e4..4d0bd7e09 100644 --- a/NEWS +++ b/NEWS @@ -2,6 +2,9 @@ * Strip superfluous <> around email. (monnerat) + * Stop checking for ref validity client-side. 
Users can + still call check_wants manually. (Jelmer Vernooij) + 0.19.13 2019-08-19 BUG FIXES diff --git a/dulwich/client.py b/dulwich/client.py index 448c1a9a8..4db976d74 100644 --- a/dulwich/client.py +++ b/dulwich/client.py @@ -832,7 +832,6 @@ def fetch_pack(self, path, determine_wants, graph_walker, pack_data, if not wants: proto.write_pkt_line(None) return FetchPackResult(refs, symrefs, agent) - check_wants(wants, refs) (new_shallow, new_unshallow) = self._handle_upload_pack_head( proto, negotiated_capabilities, graph_walker, wants, can_read, depth=depth) @@ -1652,7 +1651,6 @@ def fetch_pack(self, path, determine_wants, graph_walker, pack_data, return FetchPackResult(refs, symrefs, agent) if self.dumb: raise NotImplementedError(self.send_pack) - check_wants(wants, refs) req_data = BytesIO() req_proto = Protocol(None, req_data.write) (new_shallow, new_unshallow) = self._handle_upload_pack_head( diff --git a/dulwich/tests/test_client.py b/dulwich/tests/test_client.py index 2e105db3d..50f11b9ca 100644 --- a/dulwich/tests/test_client.py +++ b/dulwich/tests/test_client.py @@ -193,20 +193,6 @@ def test_fetch_pack_none(self): self.assertEqual({}, ret.symrefs) self.assertEqual(self.rout.getvalue(), b'0000') - def test_fetch_pack_sha_not_in_ref(self): - self.rin.write( - b'008855dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7 HEAD\x00multi_ack ' - b'thin-pack side-band side-band-64k ofs-delta shallow no-progress ' - b'include-tag\n' - b'0000') - self.rin.seek(0) - self.assertRaises( - InvalidWants, self.client.fetch_pack, - b'bla', - lambda heads: ['aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'], - None, None, - None) - def test_send_pack_no_sideband64k_with_update_ref_error(self): # No side-bank-64k reported by server shouldn't try to parse # side band data From a367410f2c014e2a3bc147c5e6f3b6c7a6ac211f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 20 Oct 2019 14:58:11 +0000 Subject: [PATCH 07/22] Fix error message if name happens to be a tuple. 
--- dulwich/object_store.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dulwich/object_store.py b/dulwich/object_store.py index 3ad3fd8c7..ab0e245ff 100644 --- a/dulwich/object_store.py +++ b/dulwich/object_store.py @@ -456,7 +456,7 @@ def get_raw(self, name): sha = name hexsha = None else: - raise AssertionError("Invalid object name %r" % name) + raise AssertionError("Invalid object name %r" % (name, )) for pack in self._iter_cached_packs(): try: return pack.get_raw(sha) From d95f47c9c8b9c597a36445ec4f7be031a658b9b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 20 Oct 2019 15:00:11 +0000 Subject: [PATCH 08/22] Fix get_summary on python3. --- dulwich/patch.py | 2 +- dulwich/tests/test_patch.py | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/dulwich/patch.py b/dulwich/patch.py index 4ff7caaef..5a49e89a7 100644 --- a/dulwich/patch.py +++ b/dulwich/patch.py @@ -83,7 +83,7 @@ def get_summary(commit): :param commit: Commit :return: Summary string """ - return commit.message.splitlines()[0].replace(" ", "-") + return commit.message.decode(errors='replace').splitlines()[0].replace(" ", "-") # Unified Diff diff --git a/dulwich/tests/test_patch.py b/dulwich/tests/test_patch.py index 942b3bffa..0c407277d 100644 --- a/dulwich/tests/test_patch.py +++ b/dulwich/tests/test_patch.py @@ -32,6 +32,7 @@ MemoryObjectStore, ) from dulwich.patch import ( + get_summary, git_am_patch_split, write_blob_diff, write_commit_patch, @@ -537,3 +538,15 @@ def test_object_diff_kind_change(self): b'-same', b'+Submodule commit 06d0bdd9e2e20377b3180e4986b14c8549b393e4', ], f.getvalue().splitlines()) + + +class GetSummaryTests(TestCase): + + def test_simple(self): + c = Commit() + c.committer = c.author = b"Jelmer " + c.commit_time = c.author_time = 1271350201 + c.commit_timezone = c.author_timezone = 0 + c.message = b"This is the first line\nAnd this is the second line.\n" + c.tree = Tree().id + 
self.assertEqual('This-is-the-first-line', get_summary(c)) From 07fab8de00f0d3a9c21c13cae5f0832d4ec8b612 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 20 Oct 2019 15:08:21 +0000 Subject: [PATCH 09/22] Split line. --- dulwich/patch.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dulwich/patch.py b/dulwich/patch.py index 5a49e89a7..272488eb4 100644 --- a/dulwich/patch.py +++ b/dulwich/patch.py @@ -83,7 +83,8 @@ def get_summary(commit): :param commit: Commit :return: Summary string """ - return commit.message.decode(errors='replace').splitlines()[0].replace(" ", "-") + decoded = commit.message.decode(errors='replace') + return decoded.splitlines()[0].replace(" ", "-") # Unified Diff From 1d7a1e155ae43529a70a0ce26309f79c48f1620f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 20 Oct 2019 15:08:51 +0000 Subject: [PATCH 10/22] Run travis tests on 3.8. --- .travis.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.travis.yml b/.travis.yml index 0db082035..0a9b79261 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,6 +23,10 @@ matrix: env: TEST_REQUIRE=fastimport dist: xenial sudo: true + - python: 3.8 + env: TEST_REQUIRE=fastimport + dist: xenial + sudo: true - python: 3.6 env: PURE=true - python: 2.7 From 9625281f98e3fd301b7e988ef5f21d4866cbb6c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 28 Oct 2019 23:21:53 +0000 Subject: [PATCH 11/22] Switch to google style docstrings. --- NEWS | 3 +++ docs/conf.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/NEWS b/NEWS index 4d0bd7e09..5b09f7c4a 100644 --- a/NEWS +++ b/NEWS @@ -5,6 +5,9 @@ * Stop checking for ref validity client-side. Users can still call check_wants manually. (Jelmer Vernooij) + * Switch over to Google-style docstrings. 
+ (Jelmer Vernooij) + 0.19.13 2019-08-19 BUG FIXES diff --git a/docs/conf.py b/docs/conf.py index 0374178e7..4fbecc197 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -30,7 +30,7 @@ 'sphinx.ext.autodoc', 'sphinx.ext.ifconfig', 'sphinx.ext.intersphinx', - 'sphinx_epytext', + 'sphinx.ext.napoleon', ] try: import rst2pdf @@ -56,7 +56,7 @@ # General information about the project. project = u'dulwich' -copyright = u'2011-2018 Jelmer Vernooij' +copyright = u'2011-2019 Jelmer Vernooij' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the From 9a402c53d114801b55510d5398e8c4f028afbf5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 29 Oct 2019 02:09:01 +0000 Subject: [PATCH 12/22] Initial work migrating docstrings to google style. --- dulwich/archive.py | 12 +- dulwich/client.py | 466 +++++++++++++++++++++++++++------------------ dulwich/config.py | 47 +++-- 3 files changed, 322 insertions(+), 203 deletions(-) diff --git a/dulwich/archive.py b/dulwich/archive.py index b689779ae..f2322da4c 100644 --- a/dulwich/archive.py +++ b/dulwich/archive.py @@ -75,12 +75,14 @@ def tar_stream(store, tree, mtime, prefix=b'', format=''): pieces (bytestrings). To obtain the complete .tar.gz binary file, simply concatenate these chunks. 
- :param store: Object store to retrieve objects from - :param tree: Tree object for the tree root - :param mtime: UNIX timestamp that is assigned as the modification time for + Args: + store: Object store to retrieve objects from + tree: Tree object for the tree root + mtime: UNIX timestamp that is assigned as the modification time for all files, and the gzip header modification time if format='gz' - :param format: Optional compression format for tarball - :return: Bytestrings + format: Optional compression format for tarball + Returns: + Bytestrings """ buf = BytesIO() with closing(tarfile.open(None, "w:%s" % format, buf)) as tar: diff --git a/dulwich/client.py b/dulwich/client.py index 4db976d74..f271e1484 100644 --- a/dulwich/client.py +++ b/dulwich/client.py @@ -121,12 +121,14 @@ def __init__(self, wants): def _fileno_can_read(fileno): - """Check if a file descriptor is readable.""" + """Check if a file descriptor is readable. + """ return len(select.select([fileno], [], [], 0)[0]) > 0 def _win32_peek_avail(handle): - """Wrapper around PeekNamedPipe to check how many bytes are available.""" + """Wrapper around PeekNamedPipe to check how many bytes are available. + """ from ctypes import byref, wintypes, windll c_avail = wintypes.DWORD() c_message = wintypes.DWORD() @@ -146,8 +148,7 @@ def _win32_peek_avail(handle): class ReportStatusParser(object): - """Handle status as reported by servers with 'report-status' capability. - """ + """Handle status as reported by servers with 'report-status' capability.""" def __init__(self): self._done = False @@ -158,8 +159,9 @@ def __init__(self): def check(self): """Check if there were any errors and, if so, raise exceptions. 
- :raise SendPackError: Raised when the server could not unpack - :raise UpdateRefsError: Raised when refs could not be updated + Raises: + SendPackError: Raised when the server could not unpack + UpdateRefsError: Raised when refs could not be updated """ if self._pack_status not in (b'unpack ok', None): raise SendPackError(self._pack_status) @@ -187,8 +189,9 @@ def check(self): def handle_packet(self, pkt): """Handle a packet. - :raise GitProtocolError: Raised when packets are received after a - flush packet. + Raises: + GitProtocolError: Raised when packets are received after a flush + packet. """ if self._done: raise GitProtocolError("received more data after status report") @@ -226,9 +229,10 @@ def read_pkt_refs(proto): class FetchPackResult(object): """Result of a fetch-pack operation. - :var refs: Dictionary with all remote refs - :var symrefs: Dictionary with remote symrefs - :var agent: User agent string + Attributes: + refs: Dictionary with all remote refs + symrefs: Dictionary with remote symrefs + agent: User agent string """ _FORWARDED_ATTRS = [ @@ -304,15 +308,14 @@ def _read_shallow_updates(proto): # support some capabilities. This should work properly with servers # that don't support multi_ack. class GitClient(object): - """Git smart server client. - - """ + """Git smart server client.""" def __init__(self, thin_packs=True, report_activity=None, quiet=False): """Create a new GitClient instance. - :param thin_packs: Whether or not thin packs should be retrieved - :param report_activity: Optional callback for reporting transport + Args: + thin_packs: Whether or not thin packs should be retrieved + report_activity: Optional callback for reporting transport activity. """ self._report_activity = report_activity @@ -329,8 +332,12 @@ def __init__(self, thin_packs=True, report_activity=None, quiet=False): def get_url(self, path): """Retrieves full url to given path. 
- :param path: Repository path (as string) - :return: Url to path (as string) + Args: + path: Repository path (as string) + + Returns: + Url to path (as string) + """ raise NotImplementedError(self.get_url) @@ -338,8 +345,11 @@ def get_url(self, path): def from_parsedurl(cls, parsedurl, **kwargs): """Create an instance of this client from a urlparse.parsed object. - :param parsedurl: Result of urlparse.urlparse() - :return: A `GitClient` object + Args: + parsedurl: Result of urlparse.urlparse() + + Returns: + A `GitClient` object """ raise NotImplementedError(cls.from_parsedurl) @@ -347,19 +357,24 @@ def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload a pack to a remote repository. - :param path: Repository path (as bytestring) - :param update_refs: Function to determine changes to remote refs. - Receive dict with existing remote refs, returns dict with + Args: + path: Repository path (as bytestring) + update_refs: Function to determine changes to remote refs. Receive + dict with existing remote refs, returns dict with changed refs (name -> sha, where sha=ZERO_SHA for deletions) - :param generate_pack_data: Function that can return a tuple + generate_pack_data: Function that can return a tuple with number of objects and list of pack data to include - :param progress: Optional progress function + progress: Optional progress function - :raises SendPackError: if server rejects the pack data - :raises UpdateRefsError: if the server supports report-status - and rejects ref updates - :return: new_refs dictionary containing the changes that were made + Returns: + new_refs dictionary containing the changes that were made {refname: new_ref}, including deleted refs. 
+ + Raises: + SendPackError: if server rejects the pack data + UpdateRefsError: if the server supports report-status + and rejects ref updates + """ raise NotImplementedError(self.send_pack) @@ -367,14 +382,18 @@ def fetch(self, path, target, determine_wants=None, progress=None, depth=None): """Fetch into a target repository. - :param path: Path to fetch from (as bytestring) - :param target: Target repository to fetch into - :param determine_wants: Optional function to determine what refs - to fetch. Receives dictionary of name->sha, should return + Args: + path: Path to fetch from (as bytestring) + target: Target repository to fetch into + determine_wants: Optional function to determine what refs to fetch. + Receives dictionary of name->sha, should return list of shas to fetch. Defaults to all shas. - :param progress: Optional progress function - :param depth: Depth to fetch at - :return: Dictionary with all remote refs (not just those fetched) + progress: Optional progress function + depth: Depth to fetch at + + Returns: + Dictionary with all remote refs (not just those fetched) + """ if determine_wants is None: determine_wants = target.object_store.determine_wants_all @@ -408,22 +427,30 @@ def fetch_pack(self, path, determine_wants, graph_walker, pack_data, progress=None, depth=None): """Retrieve a pack from a git smart server. - :param path: Remote path to fetch from - :param determine_wants: Function determine what refs - to fetch. Receives dictionary of name->sha, should return - list of shas to fetch. - :param graph_walker: Object with next() and ack(). - :param pack_data: Callback called for each bit of data in the pack - :param progress: Callback for progress reports (strings) - :param depth: Shallow fetch depth - :return: FetchPackResult object + Args: + path: Remote path to fetch from + determine_wants: Function determine what refs + to fetch. Receives dictionary of name->sha, should return + list of shas to fetch. 
+ graph_walker: Object with next() and ack(). + pack_data: Callback called for each bit of data in the pack + progress: Callback for progress reports (strings) + depth: Shallow fetch depth + + Returns: + FetchPackResult object + """ raise NotImplementedError(self.fetch_pack) def get_refs(self, path): """Retrieve the current refs from a git smart server. - :param path: Path to the repo to fetch from. (as bytestring) + Args: + path: Path to the repo to fetch from. (as bytestring) + + Returns: + """ raise NotImplementedError(self.get_refs) @@ -469,8 +496,9 @@ def _read_side_band64k_data(self, proto, channel_callbacks): This requires the side-band-64k capability. - :param proto: Protocol object to read from - :param channel_callbacks: Dictionary mapping channels to packet + Args: + proto: Protocol object to read from + channel_callbacks: Dictionary mapping channels to packet handlers to use. None for a callback discards channel data. """ for pkt in proto.read_pkt_seq(): @@ -488,11 +516,15 @@ def _handle_receive_pack_head(self, proto, capabilities, old_refs, new_refs): """Handle the head of a 'git-receive-pack' request. - :param proto: Protocol object to read from - :param capabilities: List of negotiated capabilities - :param old_refs: Old refs, as received from the server - :param new_refs: Refs to change - :return: (have, want) tuple + Args: + proto: Protocol object to read from + capabilities: List of negotiated capabilities + old_refs: Old refs, as received from the server + new_refs: Refs to change + + Returns: + have, want) tuple + """ want = [] have = [x for x in old_refs.values() if not x == ZERO_SHA] @@ -536,9 +568,13 @@ def _negotiate_receive_pack_capabilities(self, server_capabilities): def _handle_receive_pack_tail(self, proto, capabilities, progress=None): """Handle the tail of a 'git-receive-pack' request. 
- :param proto: Protocol object to read from - :param capabilities: List of negotiated capabilities - :param progress: Optional progress reporting function + Args: + proto: Protocol object to read from + capabilities: List of negotiated capabilities + progress: Optional progress reporting function + + Returns: + """ if CAPABILITY_SIDE_BAND_64K in capabilities: if progress is None: @@ -579,13 +615,17 @@ def _handle_upload_pack_head(self, proto, capabilities, graph_walker, wants, can_read, depth): """Handle the head of a 'git-upload-pack' request. - :param proto: Protocol object to read from - :param capabilities: List of negotiated capabilities - :param graph_walker: GraphWalker instance to call .ack() on - :param wants: List of commits to fetch - :param can_read: function that returns a boolean that indicates - whether there is extra graph data to read on proto - :param depth: Depth for request + Args: + proto: Protocol object to read from + capabilities: List of negotiated capabilities + graph_walker: GraphWalker instance to call .ack() on + wants: List of commits to fetch + can_read: function that returns a boolean that indicates + whether there is extra graph data to read on proto + depth: Depth for request + + Returns: + """ assert isinstance(wants, list) and isinstance(wants[0], bytes) proto.write_pkt_line(COMMAND_WANT + b' ' + wants[0] + b' ' + @@ -633,12 +673,16 @@ def _handle_upload_pack_tail(self, proto, capabilities, graph_walker, pack_data, progress=None, rbufsize=_RBUFSIZE): """Handle the tail of a 'git-upload-pack' request. 
- :param proto: Protocol object to read from - :param capabilities: List of negotiated capabilities - :param graph_walker: GraphWalker instance to call .ack() on - :param pack_data: Function to call with pack data - :param progress: Optional progress reporting function - :param rbufsize: Read buffer size + Args: + proto: Protocol object to read from + capabilities: List of negotiated capabilities + graph_walker: GraphWalker instance to call .ack() on + pack_data: Function to call with pack data + progress: Optional progress reporting function + rbufsize: Read buffer size + + Returns: + """ pkt = proto.read_pkt_line() while pkt: @@ -670,8 +714,12 @@ def progress(x): def check_wants(wants, refs): """Check that a set of wants is valid. - :param wants: Set of object SHAs to fetch - :param refs: Refs dictionary to check against + Args: + wants: Set of object SHAs to fetch + refs: Refs dictionary to check against + + Returns: + """ missing = set(wants) - { v for (k, v) in refs.items() @@ -681,7 +729,6 @@ def check_wants(wants, refs): def remote_error_from_stderr(stderr): - """Return an appropriate exception based on stderr output. """ if stderr is None: return HangupException() for l in stderr.readlines(): @@ -710,8 +757,9 @@ def _connect(self, cmd, path): for use and a can_read function which may be used to see if reads would block. - :param cmd: The git service name to which we should connect. - :param path: The path we should pass to the service. (as bytestirng) + Args: + cmd: The git service name to which we should connect. + path: The path we should pass to the service. (as bytestirng) """ raise NotImplementedError() @@ -719,19 +767,24 @@ def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload a pack to a remote repository. - :param path: Repository path (as bytestring) - :param update_refs: Function to determine changes to remote refs. 
- Receive dict with existing remote refs, returns dict with - changed refs (name -> sha, where sha=ZERO_SHA for deletions) - :param generate_pack_data: Function that can return a tuple with - number of objects and pack data to upload. - :param progress: Optional callback called with progress updates - - :raises SendPackError: if server rejects the pack data - :raises UpdateRefsError: if the server supports report-status - and rejects ref updates - :return: new_refs dictionary containing the changes that were made - {refname: new_ref}, including deleted refs. + Args: + path: Repository path (as bytestring) + update_refs: Function to determine changes to remote refs. + Receive dict with existing remote refs, returns dict with + changed refs (name -> sha, where sha=ZERO_SHA for deletions) + generate_pack_data: Function that can return a tuple with + number of objects and pack data to upload. + progress: Optional callback called with progress updates + + Returns: + new_refs dictionary containing the changes that were made + {refname: new_ref}, including deleted refs. + + Raises: + SendPackError: if server rejects the pack data + UpdateRefsError: if the server supports report-status + and rejects ref updates + """ proto, unused_can_read, stderr = self._connect(b'receive-pack', path) with proto: @@ -798,15 +851,19 @@ def fetch_pack(self, path, determine_wants, graph_walker, pack_data, progress=None, depth=None): """Retrieve a pack from a git smart server. - :param path: Remote path to fetch from - :param determine_wants: Function determine what refs - to fetch. Receives dictionary of name->sha, should return - list of shas to fetch. - :param graph_walker: Object with next() and ack(). 
- :param pack_data: Callback called for each bit of data in the pack - :param progress: Callback for progress reports (strings) - :param depth: Shallow fetch depth - :return: FetchPackResult object + Args: + path: Remote path to fetch from + determine_wants: Function determine what refs + to fetch. Receives dictionary of name->sha, should return + list of shas to fetch. + graph_walker: Object with next() and ack(). + pack_data: Callback called for each bit of data in the pack + progress: Callback for progress reports (strings) + depth: Shallow fetch depth + + Returns: + FetchPackResult object + """ proto, can_read, stderr = self._connect(b'upload-pack', path) with proto: @@ -842,7 +899,8 @@ def fetch_pack(self, path, determine_wants, graph_walker, pack_data, refs, symrefs, agent, new_shallow, new_unshallow) def get_refs(self, path): - """Retrieve the current refs from a git smart server.""" + """Retrieve the current refs from a git smart server. + """ # stock `git ls-remote` uses upload-pack proto, _, stderr = self._connect(b'upload-pack', path) with proto: @@ -982,8 +1040,7 @@ def close(self): def find_git_command(): - """Find command to run for system Git (usually C Git). - """ + """Find command to run for system Git (usually C Git).""" if sys.platform == 'win32': # support .exe, .bat and .cmd try: # to avoid overhead import win32api @@ -1028,8 +1085,9 @@ class LocalGitClient(GitClient): def __init__(self, thin_packs=True, report_activity=None, config=None): """Create a new LocalGitClient instance. - :param thin_packs: Whether or not thin packs should be retrieved - :param report_activity: Optional callback for reporting transport + Args: + thin_packs: Whether or not thin packs should be retrieved + report_activity: Optional callback for reporting transport activity. """ self._report_activity = report_activity @@ -1053,19 +1111,24 @@ def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload a pack to a remote repository. 
- :param path: Repository path (as bytestring) - :param update_refs: Function to determine changes to remote refs. - Receive dict with existing remote refs, returns dict with - changed refs (name -> sha, where sha=ZERO_SHA for deletions) - :param generate_pack_data: Function that can return a tuple - with number of items and pack data to upload. - :param progress: Optional progress function - - :raises SendPackError: if server rejects the pack data - :raises UpdateRefsError: if the server supports report-status - and rejects ref updates - :return: new_refs dictionary containing the changes that were made - {refname: new_ref}, including deleted refs. + Args: + path: Repository path (as bytestring) + update_refs: Function to determine changes to remote refs. + Receive dict with existing remote refs, returns dict with + changed refs (name -> sha, where sha=ZERO_SHA for deletions) + generate_pack_data: Function that can return a tuple + with number of items and pack data to upload. + progress: Optional progress function + + Returns: + new_refs dictionary containing the changes that were made + {refname: new_ref}, including deleted refs. + + Raises: + SendPackError: if server rejects the pack data + UpdateRefsError: if the server supports report-status + and rejects ref updates + """ if not progress: def progress(x): @@ -1107,14 +1170,18 @@ def fetch(self, path, target, determine_wants=None, progress=None, depth=None): """Fetch into a target repository. - :param path: Path to fetch from (as bytestring) - :param target: Target repository to fetch into - :param determine_wants: Optional function determine what refs - to fetch. Receives dictionary of name->sha, should return - list of shas to fetch. Defaults to all shas. 
- :param progress: Optional progress function - :param depth: Shallow fetch depth - :return: FetchPackResult object + Args: + path: Path to fetch from (as bytestring) + target: Target repository to fetch into + determine_wants: Optional function determine what refs + to fetch. Receives dictionary of name->sha, should return + list of shas to fetch. Defaults to all shas. + progress: Optional progress function + depth: Shallow fetch depth + + Returns: + FetchPackResult object + """ with self._open_repo(path) as r: refs = r.fetch(target, determine_wants=determine_wants, @@ -1126,15 +1193,19 @@ def fetch_pack(self, path, determine_wants, graph_walker, pack_data, progress=None, depth=None): """Retrieve a pack from a git smart server. - :param path: Remote path to fetch from - :param determine_wants: Function determine what refs - to fetch. Receives dictionary of name->sha, should return - list of shas to fetch. - :param graph_walker: Object with next() and ack(). - :param pack_data: Callback called for each bit of data in the pack - :param progress: Callback for progress reports (strings) - :param depth: Shallow fetch depth - :return: FetchPackResult object + Args: + path: Remote path to fetch from + determine_wants: Function determine what refs + to fetch. Receives dictionary of name->sha, should return + list of shas to fetch. + graph_walker: Object with next() and ack(). + pack_data: Callback called for each bit of data in the pack + progress: Callback for progress reports (strings) + depth: Shallow fetch depth + + Returns: + FetchPackResult object + """ with self._open_repo(path) as r: objects_iter = r.fetch_objects( @@ -1151,7 +1222,8 @@ def fetch_pack(self, path, determine_wants, graph_walker, pack_data, return FetchPackResult(r.get_refs(), symrefs, agent) def get_refs(self, path): - """Retrieve the current refs from a git smart server.""" + """Retrieve the current refs from a git smart server. 
+ """ with self._open_repo(path) as target: return target.get_refs() @@ -1181,12 +1253,16 @@ def run_command(self, host, command, username=None, port=None, Run a command remotely and return a file-like object for interaction with the remote command. - :param host: Host name - :param command: Command to run (as argv array) - :param username: Optional ame of user to log in as - :param port: Optional SSH port to use - :param password: Optional ssh password for login or private key - :param key_filename: Optional path to private keyfile + Args: + host: Host name + command: Command to run (as argv array) + username: Optional ame of user to log in as + port: Optional SSH port to use + password: Optional ssh password for login or private key + key_filename: Optional path to private keyfile + + Returns: + """ raise NotImplementedError(self.run_command) @@ -1348,10 +1424,14 @@ def default_urllib3_manager(config, **override_kwargs): Honour detected proxy configurations. - :param config: `dulwich.config.ConfigDict` instance with Git configuration. - :param kwargs: Additional arguments for urllib3.ProxyManager - :return: `urllib3.ProxyManager` instance for proxy configurations, - `urllib3.PoolManager` otherwise. + Args: + config: dulwich.config.ConfigDict` instance with Git configuration. + kwargs: Additional arguments for urllib3.ProxyManager + + Returns: + urllib3.ProxyManager` instance for proxy configurations, + `urllib3.PoolManager` otherwise. + """ proxy_server = user_agent = None ca_certs = ssl_verify = None @@ -1478,14 +1558,18 @@ def _http_request(self, url, headers=None, data=None, allow_compression=False): """Perform HTTP request. - :param url: Request URL. - :param headers: Optional custom headers to override defaults. - :param data: Request data. - :param allow_compression: Allow GZipped communication. 
- :return: Tuple (`response`, `read`), where response is an `urllib3` - response object with additional `content_type` and - `redirect_location` properties, and `read` is a consumable read - method for the response data. + Args: + url: Request URL. + headers: Optional custom headers to override defaults. + data: Request data. + allow_compression: Allow GZipped communication. + + Returns: + Tuple (`response`, `read`), where response is an `urllib3` + response object with additional `content_type` and + `redirect_location` properties, and `read` is a consumable read + method for the response data. + """ req_headers = self.pool_manager.headers.copy() if headers is not None: @@ -1576,19 +1660,24 @@ def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload a pack to a remote repository. - :param path: Repository path (as bytestring) - :param update_refs: Function to determine changes to remote refs. - Receive dict with existing remote refs, returns dict with - changed refs (name -> sha, where sha=ZERO_SHA for deletions) - :param generate_pack_data: Function that can return a tuple - with number of elements and pack data to upload. - :param progress: Optional progress function - - :raises SendPackError: if server rejects the pack data - :raises UpdateRefsError: if the server supports report-status - and rejects ref updates - :return: new_refs dictionary containing the changes that were made - {refname: new_ref}, including deleted refs. + Args: + path: Repository path (as bytestring) + update_refs: Function to determine changes to remote refs. + Receive dict with existing remote refs, returns dict with + changed refs (name -> sha, where sha=ZERO_SHA for deletions) + generate_pack_data: Function that can return a tuple + with number of elements and pack data to upload. + progress: Optional progress function + + Returns: + new_refs dictionary containing the changes that were made + {refname: new_ref}, including deleted refs. 
+ + Raises: + SendPackError: if server rejects the pack data + UpdateRefsError: if the server supports report-status + and rejects ref updates + """ url = self._get_url(path) old_refs, server_capabilities, url = self._discover_references( @@ -1631,12 +1720,17 @@ def fetch_pack(self, path, determine_wants, graph_walker, pack_data, progress=None, depth=None): """Retrieve a pack from a git smart server. - :param determine_wants: Callback that returns list of commits to fetch - :param graph_walker: Object with next() and ack(). - :param pack_data: Callback called for each bit of data in the pack - :param progress: Callback for progress reports (strings) - :param depth: Depth for request - :return: FetchPackResult object + Args: + path: Path to fetch from + determine_wants: Callback that returns list of commits to fetch + graph_walker: Object with next() and ack(). + pack_data: Callback called for each bit of data in the pack + progress: Callback for progress reports (strings) + depth: Depth for request + + Returns: + FetchPackResult object + """ url = self._get_url(path) refs, server_capabilities, url = self._discover_references( @@ -1672,7 +1766,8 @@ def fetch_pack(self, path, determine_wants, graph_walker, pack_data, resp.close() def get_refs(self, path): - """Retrieve the current refs from a git smart server.""" + """Retrieve the current refs from a git smart server. + """ url = self._get_url(path) refs, _, _ = self._discover_references( b"git-upload-pack", url) @@ -1682,12 +1777,16 @@ def get_refs(self, path): def get_transport_and_path_from_url(url, config=None, **kwargs): """Obtain a git client from a URL. 
- :param url: URL to open (a unicode string) - :param config: Optional config object - :param thin_packs: Whether or not thin packs should be retrieved - :param report_activity: Optional callback for reporting transport + Args: + url: URL to open (a unicode string) + config: Optional config object + thin_packs: Whether or not thin packs should be retrieved + report_activity: Optional callback for reporting transport activity. - :return: Tuple with client instance and relative path. + + Returns: + Tuple with client instance and relative path. + """ parsed = urlparse.urlparse(url) if parsed.scheme == 'git': @@ -1706,7 +1805,8 @@ def get_transport_and_path_from_url(url, config=None, **kwargs): def parse_rsync_url(location): - """Parse a rsync-style URL.""" + """Parse a rsync-style URL. + """ if ':' in location and '@' not in location: # SSH with no user@, zero or one leading slash. (host, path) = location.split(':', 1) @@ -1727,12 +1827,16 @@ def parse_rsync_url(location): def get_transport_and_path(location, **kwargs): """Obtain a git client from a URL. - :param location: URL or path (a string) - :param config: Optional config object - :param thin_packs: Whether or not thin packs should be retrieved - :param report_activity: Optional callback for reporting transport + Args: + location: URL or path (a string) + config: Optional config object + thin_packs: Whether or not thin packs should be retrieved + report_activity: Optional callback for reporting transport activity. - :return: Tuple with client instance and relative path. + + Returns: + Tuple with client instance and relative path. + """ # First, try to parse it as a URL try: diff --git a/dulwich/config.py b/dulwich/config.py index e7bf6ab16..aebd6fd16 100644 --- a/dulwich/config.py +++ b/dulwich/config.py @@ -119,21 +119,27 @@ class Config(object): def get(self, section, name): """Retrieve the contents of a configuration setting. 
- :param section: Tuple with section name and optional subsection namee - :param subsection: Subsection name - :return: Contents of the setting - :raise KeyError: if the value is not set + Args: + section: Tuple with section name and optional subsection name + subsection: Subsection name + Returns: + Contents of the setting + Raises: + KeyError: if the value is not set """ raise NotImplementedError(self.get) def get_boolean(self, section, name, default=None): """Retrieve a configuration setting as boolean. - :param section: Tuple with section name and optional subsection name - :param name: Name of the setting, including section and possible + Args: + section: Tuple with section name and optional subsection name + name: Name of the setting, including section and possible subsection. - :return: Contents of the setting - :raise KeyError: if the value is not set + Returns: + Contents of the setting + Raises: + KeyError: if the value is not set """ try: value = self.get(section, name) @@ -148,18 +154,21 @@ def get_boolean(self, section, name, default=None): def set(self, section, name, value): """Set a configuration value. - :param section: Tuple with section name and optional subsection namee - :param name: Name of the configuration value, including section + Args: + section: Tuple with section name and optional subsection name + name: Name of the configuration value, including section and optional subsection - :param: Value of the setting + value: Value of the setting """ raise NotImplementedError(self.set) def iteritems(self, section): """Iterate over the configuration pairs for a specific section.
- :param section: Tuple with section name and optional subsection namee - :return: Iterator over (name, value) pairs + Args: + section: Tuple with section name and optional subsection namee + Returns: + Iterator over (name, value) pairs """ raise NotImplementedError(self.iteritems) @@ -173,8 +182,10 @@ def itersections(self): def has_section(self, name): """Check if a specified section exists. - :param name: Name of section to check for - :return: boolean indicating whether the section exists + Args: + name: Name of section to check for + Returns: + boolean indicating whether the section exists """ return (name in self.itersections()) @@ -538,8 +549,10 @@ def set(self, section, name, value): def parse_submodules(config): """Parse a gitmodules GitConfig file, returning submodules. - :param config: A `ConfigFile` - :return: list of tuples (submodule path, url, name), + Args: + config: A `ConfigFile` + Returns: + list of tuples (submodule path, url, name), where name is quoted part of the section's name. """ for section in config.keys(): From 7cc90f00dcca66db17e073f1aafd65bde926d827 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 29 Oct 2019 02:43:21 +0000 Subject: [PATCH 13/22] Disable PDF building. --- docs/conf.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 4fbecc197..e9f070e78 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -32,13 +32,6 @@ 'sphinx.ext.intersphinx', 'sphinx.ext.napoleon', ] -try: - import rst2pdf - if rst2pdf.version >= '0.16': - extensions.append('rst2pdf.pdfbuilder') -except ImportError: - print("[NOTE] In order to build PDF you need rst2pdf with version >=0.16") - autoclass_content = "both" @@ -213,11 +206,3 @@ # If false, no module index is generated. 
# latex_use_modindex = True - -pdf_documents = [ - ('index', u'dulwich', u'Documentation for dulwich', - u'Jelmer Vernooij'), -] -pdf_stylesheets = ['sphinx', 'kerning', 'a4'] -pdf_break_level = 2 -pdf_inline_footnotes = True From 0dee0a70433f43851ba3cd17411153cd33a26890 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 29 Oct 2019 02:49:19 +0000 Subject: [PATCH 14/22] Convert more docstrings to google style. --- dulwich/diff_tree.py | 103 +++++++++++++++++++++++++------------------ dulwich/hooks.py | 28 +++++++----- dulwich/ignore.py | 20 +++++---- 3 files changed, 88 insertions(+), 63 deletions(-) diff --git a/dulwich/diff_tree.py b/dulwich/diff_tree.py index 167cb28ef..7166c97a8 100644 --- a/dulwich/diff_tree.py +++ b/dulwich/diff_tree.py @@ -77,10 +77,12 @@ def _tree_entries(path, tree): def _merge_entries(path, tree1, tree2): """Merge the entries of two trees. - :param path: A path to prepend to all tree entry names. - :param tree1: The first Tree object to iterate, or None. - :param tree2: The second Tree object to iterate, or None. - :return: A list of pairs of TreeEntry objects for each pair of entries in + Args: + path: A path to prepend to all tree entry names. + tree1: The first Tree object to iterate, or None. + tree2: The second Tree object to iterate, or None. + Returns: + A list of pairs of TreeEntry objects for each pair of entries in the trees. If an entry exists in one tree but not the other, the other entry will have all attributes set to None. If neither entry's path is None, they are guaranteed to match. @@ -124,11 +126,13 @@ def walk_trees(store, tree1_id, tree2_id, prune_identical=False): Iteration is depth-first pre-order, as in e.g. os.walk. - :param store: An ObjectStore for looking up objects. - :param tree1_id: The SHA of the first Tree object to iterate, or None. - :param tree2_id: The SHA of the second Tree object to iterate, or None. - :param prune_identical: If True, identical subtrees will not be walked. 
- :return: Iterator over Pairs of TreeEntry objects for each pair of entries + Args: + store: An ObjectStore for looking up objects. + tree1_id: The SHA of the first Tree object to iterate, or None. + tree2_id: The SHA of the second Tree object to iterate, or None. + prune_identical: If True, identical subtrees will not be walked. + Returns: + Iterator over Pairs of TreeEntry objects for each pair of entries in the trees and their subtrees recursively. If an entry exists in one tree but not the other, the other entry will have all attributes set to None. If neither entry's path is None, they are guaranteed to @@ -164,16 +168,18 @@ def tree_changes(store, tree1_id, tree2_id, want_unchanged=False, change_type_same=False): """Find the differences between the contents of two trees. - :param store: An ObjectStore for looking up objects. - :param tree1_id: The SHA of the source tree. - :param tree2_id: The SHA of the target tree. - :param want_unchanged: If True, include TreeChanges for unmodified entries + Args: + store: An ObjectStore for looking up objects. + tree1_id: The SHA of the source tree. + tree2_id: The SHA of the target tree. + want_unchanged: If True, include TreeChanges for unmodified entries as well. - :param include_trees: Whether to include trees - :param rename_detector: RenameDetector object for detecting renames. - :param change_type_same: Whether to report change types in the same + include_trees: Whether to include trees + rename_detector: RenameDetector object for detecting renames. + change_type_same: Whether to report change types in the same entry or as delete+add. - :return: Iterator over TreeChange instances for each change between the + Returns: + Iterator over TreeChange instances for each change between the source and target tree.
""" if include_trees and rename_detector is not None: @@ -232,21 +238,23 @@ def tree_changes_for_merge(store, parent_tree_ids, tree_id, rename_detector=None): """Get the tree changes for a merge tree relative to all its parents. - :param store: An ObjectStore for looking up objects. - :param parent_tree_ids: An iterable of the SHAs of the parent trees. - :param tree_id: The SHA of the merge tree. - :param rename_detector: RenameDetector object for detecting renames. + Args: + store: An ObjectStore for looking up objects. + parent_tree_ids: An iterable of the SHAs of the parent trees. + tree_id: The SHA of the merge tree. + rename_detector: RenameDetector object for detecting renames. - :return: Iterator over lists of TreeChange objects, one per conflicted path - in the merge. + Returns: + Iterator over lists of TreeChange objects, one per conflicted path + in the merge. - Each list contains one element per parent, with the TreeChange for that - path relative to that parent. An element may be None if it never - existed in one parent and was deleted in two others. + Each list contains one element per parent, with the TreeChange for that + path relative to that parent. An element may be None if it never + existed in one parent and was deleted in two others. - A path is only included in the output if it is a conflict, i.e. its SHA - in the merge tree is not found in any of the parents, or in the case of - deletes, if not all of the old SHAs match. + A path is only included in the output if it is a conflict, i.e. its SHA + in the merge tree is not found in any of the parents, or in the case of + deletes, if not all of the old SHAs match. """ all_parent_changes = [tree_changes(store, t, tree_id, rename_detector=rename_detector) @@ -293,8 +301,10 @@ def _count_blocks(obj): Splits the data into blocks either on lines or <=64-byte chunks of lines. - :param obj: The object to count blocks for. - :return: A dict of block hashcode -> total bytes occurring. 
+ Args: + obj: The object to count blocks for. + Returns: + A dict of block hashcode -> total bytes occurring. """ block_counts = defaultdict(int) block = BytesIO() @@ -326,10 +336,12 @@ def _count_blocks(obj): def _common_bytes(blocks1, blocks2): """Count the number of common bytes in two block count dicts. - :param block1: The first dict of block hashcode -> total bytes. - :param block2: The second dict of block hashcode -> total bytes. - :return: The number of bytes in common between blocks1 and blocks2. This is - only approximate due to possible hash collisions. + Args: + block1: The first dict of block hashcode -> total bytes. + block2: The second dict of block hashcode -> total bytes. + Returns: + The number of bytes in common between blocks1 and blocks2. This is + only approximate due to possible hash collisions. """ # Iterate over the smaller of the two dicts, since this is symmetrical. if len(blocks1) > len(blocks2): @@ -345,11 +357,13 @@ def _common_bytes(blocks1, blocks2): def _similarity_score(obj1, obj2, block_cache=None): """Compute a similarity score for two objects. - :param obj1: The first object to score. - :param obj2: The second object to score. - :param block_cache: An optional dict of SHA to block counts to cache + Args: + obj1: The first object to score. + obj2: The second object to score. + block_cache: An optional dict of SHA to block counts to cache results between calls. - :return: The similarity score between the two objects, defined as the + Returns: + The similarity score between the two objects, defined as the number of bytes in common between the two objects divided by the maximum size, scaled to the range 0-100. """ @@ -387,18 +401,19 @@ def __init__(self, store, rename_threshold=RENAME_THRESHOLD, find_copies_harder=False): """Initialize the rename detector. - :param store: An ObjectStore for looking up objects. 
- :param rename_threshold: The threshold similarity score for considering + Args: + store: An ObjectStore for looking up objects. + rename_threshold: The threshold similarity score for considering an add/delete pair to be a rename/copy; see _similarity_score. - :param max_files: The maximum number of adds and deletes to consider, + max_files: The maximum number of adds and deletes to consider, or None for no limit. The detector is guaranteed to compare no more than max_files ** 2 add/delete pairs. This limit is provided because rename detection can be quadratic in the project size. If the limit is exceeded, no content rename detection is attempted. - :param rewrite_threshold: The threshold similarity score below which a + rewrite_threshold: The threshold similarity score below which a modify should be considered a delete/add, or None to not break modifies; see _similarity_score. - :param find_copies_harder: If True, consider unmodified files when + find_copies_harder: If True, consider unmodified files when detecting copies. 
""" self._store = store diff --git a/dulwich/hooks.py b/dulwich/hooks.py index 9ff0ba21e..def9c6001 100644 --- a/dulwich/hooks.py +++ b/dulwich/hooks.py @@ -36,9 +36,12 @@ class Hook(object): def execute(self, *args): """Execute the hook with the given args - :param args: argument list to hook - :raise HookError: hook execution failure - :return: a hook may return a useful value + Args: + args: argument list to hook + Raises: + HookError: hook execution failure + Returns: + a hook may return a useful value """ raise NotImplementedError(self.execute) @@ -56,18 +59,19 @@ def __init__(self, name, path, numparam, cwd=None): """Setup shell hook definition - :param name: name of hook for error messages - :param path: absolute path to executable file - :param numparam: number of requirements parameters - :param pre_exec_callback: closure for setup before execution + Args: + name: name of hook for error messages + path: absolute path to executable file + numparam: number of requirements parameters + pre_exec_callback: closure for setup before execution Defaults to None. Takes in the variable argument list from the execute functions and returns a modified argument list for the shell hook. - :param post_exec_callback: closure for cleanup after execution + post_exec_callback: closure for cleanup after execution Defaults to None. 
Takes in a boolean for hook success and the modified argument list and returns the final hook return value if applicable - :param cwd: working directory to switch to when executing the hook + cwd: working directory to switch to when executing the hook """ self.name = name self.filepath = path @@ -129,8 +133,10 @@ def __init__(self, controldir): class CommitMsgShellHook(ShellHook): """commit-msg shell hook - :param args[0]: commit message - :return: new commit message or None + Args: + args[0]: commit message + Returns: + new commit message or None """ def __init__(self, controldir): diff --git a/dulwich/ignore.py b/dulwich/ignore.py index 9c4267279..51dcf178c 100644 --- a/dulwich/ignore.py +++ b/dulwich/ignore.py @@ -102,8 +102,9 @@ def translate(pat): def read_ignore_patterns(f): """Read a git ignore file. - :param f: File-like object to read from - :return: List of patterns + Args: + f: File-like object to read from + Returns: List of patterns """ for line in f: @@ -128,10 +129,12 @@ def read_ignore_patterns(f): def match_pattern(path, pattern, ignorecase=False): """Match a gitignore-style pattern against a path. - :param path: Path to match - :param pattern: Pattern to match - :param ignorecase: Whether to do case-sensitive matching - :return: bool indicating whether the pattern matched + Args: + path: Path to match + pattern: Pattern to match + ignorecase: Whether to do case-sensitive matching + Returns: + bool indicating whether the pattern matched """ return Pattern(pattern, ignorecase).match(path) @@ -172,8 +175,9 @@ def __repr__(self): def match(self, path): """Try to match a path against this ignore pattern. 
- :param path: Path to match (relative to ignore location) - :return: boolean + Args: + path: Path to match (relative to ignore location) + Returns: boolean """ return bool(self._re.match(path)) From 5db0caac6e578f40eb9332dd6ec52905fca357a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 9 Nov 2019 03:03:17 +0000 Subject: [PATCH 15/22] Add a active_branch() function. --- NEWS | 3 +++ dulwich/porcelain.py | 27 +++++++++++++++++++++++---- dulwich/tests/test_porcelain.py | 6 ++++++ 3 files changed, 32 insertions(+), 4 deletions(-) diff --git a/NEWS b/NEWS index 5b09f7c4a..1a6ecc4d1 100644 --- a/NEWS +++ b/NEWS @@ -8,6 +8,9 @@ * Switch over to Google-style docstrings. (Jelmer Vernooij) + * Add a ``dulwich.porcelain.active_branch`` function. + (Jelmer Vernooij) + 0.19.13 2019-08-19 BUG FIXES diff --git a/dulwich/porcelain.py b/dulwich/porcelain.py index dd25b66b5..cf5b21698 100644 --- a/dulwich/porcelain.py +++ b/dulwich/porcelain.py @@ -123,6 +123,7 @@ ) from dulwich.refs import ( ANNOTATED_TAG_SUFFIX, + LOCAL_BRANCH_PREFIX, strip_peeled_refs, ) from dulwich.repo import (BaseRepo, Repo) @@ -1113,7 +1114,7 @@ def send_fn(data): def _make_branch_ref(name): if getattr(name, 'encode', None): name = name.encode(DEFAULT_ENCODING) - return b"refs/heads/" + name + return LOCAL_BRANCH_PREFIX + name def _make_tag_ref(name): @@ -1164,7 +1165,25 @@ def branch_list(repo): :param repo: Path to the repository """ with open_repo_closing(repo) as r: - return r.refs.keys(base=b"refs/heads/") + return r.refs.keys(base=LOCAL_BRANCH_PREFIX) + + +def active_branch(repo): + """Return the active branch in the repository, if any. 
+ + Args: + repo: Repository to open + Returns: + branch name + Raises: + KeyError: if the repository does not have a working tree + IndexError: if HEAD is floating + """ + with open_repo_closing(repo) as r: + active_ref = r.refs.follow(b'HEAD')[0][1] + if not active_ref.startswith(LOCAL_BRANCH_PREFIX): + raise ValueError(active_ref) + return active_ref[len(LOCAL_BRANCH_PREFIX):] def fetch(repo, remote_location, remote_name=b'origin', outstream=sys.stdout, @@ -1192,8 +1211,8 @@ def fetch(repo, remote_location, remote_name=b'origin', outstream=sys.stdout, depth=depth) stripped_refs = strip_peeled_refs(fetch_result.refs) branches = { - n[len(b'refs/heads/'):]: v for (n, v) in stripped_refs.items() - if n.startswith(b'refs/heads/')} + n[len(LOCAL_BRANCH_PREFIX):]: v for (n, v) in stripped_refs.items() + if n.startswith(LOCAL_BRANCH_PREFIX)} r.refs.import_refs( b'refs/remotes/' + remote_name, branches, message=message, prune=prune) diff --git a/dulwich/tests/test_porcelain.py b/dulwich/tests/test_porcelain.py index 1c4619c76..0be96ffc7 100644 --- a/dulwich/tests/test_porcelain.py +++ b/dulwich/tests/test_porcelain.py @@ -1795,3 +1795,9 @@ def test_simple(self): self.assertEqual( b'd2092c8a9f311f0311083bf8d177f2ca0ab5b241', porcelain.write_tree(self.repo)) + + +class ActiveBranchTests(PorcelainTestCase): + + def test_simple(self): + self.assertEqual(b'master', porcelain.active_branch(self.repo)) From 7a21a354ff17e72151bd3fa1267b935cdfa6b702 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 9 Nov 2019 03:08:11 +0000 Subject: [PATCH 16/22] Port more docstrings to Google style. 
--- dulwich/contrib/release_robot.py | 16 +++--- dulwich/contrib/swift.py | 91 +++++++++++++++++++------------- dulwich/ignore.py | 40 +++++++++----- dulwich/index.py | 31 +++++++---- dulwich/porcelain.py | 30 ++++++----- 5 files changed, 128 insertions(+), 80 deletions(-) diff --git a/dulwich/contrib/release_robot.py b/dulwich/contrib/release_robot.py index a54e1d4e1..f2f280e02 100644 --- a/dulwich/contrib/release_robot.py +++ b/dulwich/contrib/release_robot.py @@ -59,8 +59,10 @@ def get_recent_tags(projdir=PROJDIR): """Get list of tags in order from newest to oldest and their datetimes. - :param projdir: path to ``.git`` - :returns: list of tags sorted by commit time from newest to oldest + Args: + projdir: path to ``.git`` + Returns: + list of tags sorted by commit time from newest to oldest Each tag in the list contains the tag name, commit time, commit id, author and any tag meta. If a tag isn't annotated, then its tag meta is ``None``. @@ -115,10 +117,12 @@ def get_current_version(projdir=PROJDIR, pattern=PATTERN, logger=None): version. *EG*: "Release-0.2.1-rc.1" will be come "0.2.1-rc.1". If no match is found, then the most recent tag is return without modification. 
- :param projdir: path to ``.git`` - :param pattern: regular expression pattern with group that matches version - :param logger: a Python logging instance to capture exception - :returns: tag matching first group in regular expression pattern + Args: + projdir: path to ``.git`` + pattern: regular expression pattern with group that matches version + logger: a Python logging instance to capture exception + Returns: + tag matching first group in regular expression pattern """ tags = get_recent_tags(projdir) try: diff --git a/dulwich/contrib/swift.py b/dulwich/contrib/swift.py index bdad92edf..8436c252f 100644 --- a/dulwich/contrib/swift.py +++ b/dulwich/contrib/swift.py @@ -167,8 +167,9 @@ def next(self): def load_conf(path=None, file=None): """Load configuration in global var CONF - :param path: The path to the configuration file - :param file: If provided read instead the file like object + Args: + path: The path to the configuration file + file: If provided read instead the file like object """ conf = ConfigParser() if file: @@ -195,9 +196,10 @@ def load_conf(path=None, file=None): def swift_load_pack_index(scon, filename): """Read a pack index file from Swift - :param scon: a `SwiftConnector` instance - :param filename: Path to the index file objectise - :return: a `PackIndexer` instance + Args: + scon: a `SwiftConnector` instance + filename: Path to the index file objectise + Returns: a `PackIndexer` instance """ with scon.get_object(filename) as f: return load_pack_index_file(filename, f) @@ -248,8 +250,9 @@ class SwiftConnector(object): def __init__(self, root, conf): """ Initialize a SwiftConnector - :param root: The swift container that will act as Git bare repository - :param conf: A ConfigParser Object + Args: + root: The swift container that will act as Git bare repository + conf: A ConfigParser Object """ self.conf = conf self.auth_ver = self.conf.get("swift", "auth_ver") @@ -394,9 +397,10 @@ def get_container_objects(self): def get_object_stat(self, 
name): """Retrieve object stat - :param name: The object name - :return: A dict that describe the object - or None if object does not exist + Args: + name: The object name + Returns: + A dict that describe the object or None if object does not exist """ path = self.base_path + '/' + name ret = self.httpclient.request('HEAD', path) @@ -413,9 +417,11 @@ def get_object_stat(self, name): def put_object(self, name, content): """Put an object - :param name: The object name - :param content: A file object - :raise: `SwiftException` if unable to create + Args: + name: The object name + content: A file object + Raises: + SwiftException: if unable to create """ content.seek(0) data = content.read() @@ -442,11 +448,12 @@ def _send(): def get_object(self, name, range=None): """Retrieve an object - :param name: The object name - :param range: A string range like "0-10" to - retrieve specified bytes in object content - :return: A file like instance - or bytestring if range is specified + Args: + name: The object name + range: A string range like "0-10" to + retrieve specified bytes in object content + Returns: + A file like instance or bytestring if range is specified """ headers = {} if range: @@ -467,8 +474,10 @@ def get_object(self, name, range=None): def del_object(self, name): """Delete an object - :param name: The object name - :raise: `SwiftException` if unable to delete + Args: + name: The object name + Raises: + SwiftException: if unable to delete """ path = self.base_path + '/' + name ret = self.httpclient.request('DELETE', path) @@ -502,9 +511,10 @@ class SwiftPackReader(object): def __init__(self, scon, filename, pack_length): """Initialize a SwiftPackReader - :param scon: a `SwiftConnector` instance - :param filename: the pack filename - :param pack_length: The size of the pack object + Args: + scon: a `SwiftConnector` instance + filename: the pack filename + pack_length: The size of the pack object """ self.scon = scon self.filename = filename @@ -525,8 +535,10 @@ 
def _read(self, more=False): def read(self, length): """Read a specified amount of Bytes form the pack object - :param length: amount of bytes to read - :return: bytestring + Args: + length: amount of bytes to read + Returns: + a bytestring """ end = self.offset+length if self.base_offset + end > self.pack_length: @@ -544,7 +556,8 @@ def read(self, length): def seek(self, offset): """Seek to a specified offset - :param offset: the offset to seek to + Args: + offset: the offset to seek to """ self.base_offset = offset self._read() @@ -568,8 +581,9 @@ class SwiftPackData(PackData): def __init__(self, scon, filename): """ Initialize a SwiftPackReader - :param scon: a `SwiftConnector` instance - :param filename: the pack filename + Args: + scon: a `SwiftConnector` instance + filename: the pack filename """ self.scon = scon self._filename = filename @@ -639,7 +653,8 @@ class SwiftObjectStore(PackBasedObjectStore): def __init__(self, scon): """Open a Swift object store. - :param scon: A `SwiftConnector` instance + Args: + scon: A `SwiftConnector` instance """ super(SwiftObjectStore, self).__init__() self.scon = scon @@ -901,8 +916,9 @@ def __init__(self, root, conf): `SwiftInfoRefsContainer`. The root attribute is the Swift container that contain the Git bare repository. 
- :param root: The container which contains the bare repo - :param conf: A ConfigParser object + Args: + root: The container which contains the bare repo + conf: A ConfigParser object """ self.root = root.lstrip('/') self.conf = conf @@ -929,8 +945,9 @@ def _determine_file_mode(self): def _put_named_file(self, filename, contents): """Put an object in a Swift container - :param filename: the path to the object to put on Swift - :param contents: the content as bytestring + Args: + filename: the path to the object to put on Swift + contents: the content as bytestring """ with BytesIO() as f: f.write(contents) @@ -940,9 +957,11 @@ def _put_named_file(self, filename, contents): def init_bare(cls, scon, conf): """Create a new bare repository. - :param scon: a `SwiftConnector` instance - :param conf: a ConfigParser object - :return: a `SwiftRepo` instance + Args: + scon: a `SwiftConnector` instance + conf: a ConfigParser object + Returns: + a `SwiftRepo` instance """ scon.create_root() for obj in [posixpath.join(OBJECTDIR, PACKDIR), diff --git a/dulwich/ignore.py b/dulwich/ignore.py index 51dcf178c..a04d29dcf 100644 --- a/dulwich/ignore.py +++ b/dulwich/ignore.py @@ -197,8 +197,10 @@ def append_pattern(self, pattern): def find_matching(self, path): """Yield all matching patterns for path. - :param path: Path to match - :return: Iterator over iterators + Args: + path: Path to match + Returns: + Iterator over iterators """ if not isinstance(path, bytes): path = path.encode(sys.getfilesystemencoding()) @@ -242,9 +244,11 @@ def __init__(self, filters): def is_ignored(self, path): """Check whether a path is explicitly included or excluded in ignores. - :param path: Path to check - :return: None if the file is not mentioned, True if it is included, - False if it is explicitly excluded. + Args: + path: Path to check + Returns: + None if the file is not mentioned, True if it is included, + False if it is explicitly excluded. 
""" status = None for filter in self._filters: @@ -257,8 +261,10 @@ def is_ignored(self, path): def default_user_ignore_filter_path(config): """Return default user ignore filter path. - :param config: A Config object - :return: Path to a global ignore file + Args: + config: A Config object + Returns: + Path to a global ignore file """ try: return config.get((b'core', ), b'excludesFile') @@ -305,8 +311,10 @@ def find_matching(self, path): Stops after the first ignore file with matches. - :param path: Path to check - :return: Iterator over Pattern instances + Args: + path: Path to check + Returns: + Iterator over Pattern instances """ if os.path.isabs(path): raise ValueError('%s is an absolute path' % path) @@ -333,9 +341,11 @@ def find_matching(self, path): def is_ignored(self, path): """Check whether a path is explicitly included or excluded in ignores. - :param path: Path to check - :return: None if the file is not mentioned, True if it is included, - False if it is explicitly excluded. + Args: + path: Path to check + Returns: + None if the file is not mentioned, True if it is included, + False if it is explicitly excluded. """ matches = list(self.find_matching(path)) if matches: @@ -346,8 +356,10 @@ def is_ignored(self, path): def from_repo(cls, repo): """Create a IgnoreFilterManager from a repository. - :param repo: Repository object - :return: A `IgnoreFilterManager` object + Args: + repo: Repository object + Returns: + A `IgnoreFilterManager` object """ global_filters = [] for p in [ diff --git a/dulwich/index.py b/dulwich/index.py index bc61ce98c..b938e4100 100644 --- a/dulwich/index.py +++ b/dulwich/index.py @@ -56,8 +56,10 @@ def pathsplit(path): """Split a /-delimited path into a directory part and a basename. - :param path: The path to split. - :return: Tuple with directory name and basename + Args: + path: The path to split. 
+ Returns: + Tuple with directory name and basename """ try: (dirname, basename) = path.rsplit(b"/", 1) @@ -77,8 +79,10 @@ def pathjoin(*args): def read_cache_time(f): """Read a cache time. - :param f: File-like object to read from - :return: Tuple with seconds and nanoseconds + Args: + f: File-like object to read from + Returns: + Tuple with seconds and nanoseconds """ return struct.unpack(">LL", f.read(8)) @@ -86,8 +90,9 @@ def read_cache_time(f): def write_cache_time(f, t): """Write a cache time. - :param f: File-like object to write to - :param t: Time to write (as int, float or tuple with secs and nsecs) + Args: + f: File-like object to write to + t: Time to write (as int, float or tuple with secs and nsecs) """ if isinstance(t, int): t = (t, 0) @@ -102,8 +107,10 @@ def write_cache_time(f, t): def read_cache_entry(f): """Read an entry from a cache file. - :param f: File-like object to read from - :return: tuple with: device, inode, mode, uid, gid, size, sha, flags + Args: + f: File-like object to read from + Returns: + tuple with: device, inode, mode, uid, gid, size, sha, flags """ beginoffset = f.tell() ctime = read_cache_time(f) @@ -121,8 +128,9 @@ def read_cache_entry(f): def write_cache_entry(f, entry): """Write an index entry to a file. - :param f: File object - :param entry: Entry to write, tuple with: + Args: + f: File object + entry: Entry to write, tuple with: (name, ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) """ beginoffset = f.tell() @@ -152,7 +160,8 @@ def read_index(f): def read_index_dict(f): """Read an index file and return it as a dictionary. 
- :param f: File object to read from + Args: + f: File object to read from """ ret = {} for x in read_index(f): diff --git a/dulwich/porcelain.py b/dulwich/porcelain.py index cf5b21698..210c74fb8 100644 --- a/dulwich/porcelain.py +++ b/dulwich/porcelain.py @@ -1191,16 +1191,18 @@ def fetch(repo, remote_location, remote_name=b'origin', outstream=sys.stdout, prune=False, prune_tags=False, **kwargs): """Fetch objects from a remote server. - :param repo: Path to the repository - :param remote_location: String identifying a remote server - :param remote_name: Name for remote server - :param outstream: Output stream (defaults to stdout) - :param errstream: Error stream (defaults to stderr) - :param message: Reflog message (defaults to b"fetch: from ") - :param depth: Depth to fetch at - :param prune: Prune remote removed refs - :param prune_tags: Prune reomte removed tags - :return: Dictionary with refs on the remote + Args: + repo: Path to the repository + remote_location: String identifying a remote server + remote_name: Name for remote server + outstream: Output stream (defaults to stdout) + errstream: Error stream (defaults to stderr) + message: Reflog message (defaults to b"fetch: from ") + depth: Depth to fetch at + prune: Prune remote removed refs + prune_tags: Prune reomte removed tags + Returns: + Dictionary with refs on the remote """ if message is None: message = b'fetch: from ' + remote_location.encode("utf-8") @@ -1229,9 +1231,11 @@ def fetch(repo, remote_location, remote_name=b'origin', outstream=sys.stdout, def ls_remote(remote, config=None, **kwargs): """List the refs in a remote. 
- :param remote: Remote repository location - :param config: Configuration to use - :return: Dictionary with remote refs + Args: + remote: Remote repository location + config: Configuration to use + Returns: + Dictionary with remote refs """ if config is None: config = StackedConfig.default() From d6d3c089a843243a21364d88df73c1886a1903d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 9 Nov 2019 03:25:15 +0000 Subject: [PATCH 17/22] Convert more docstrings to Google style. --- dulwich/index.py | 165 +++++++++++++----------- dulwich/line_ending.py | 20 +-- dulwich/lru_cache.py | 23 ++-- dulwich/mailmap.py | 10 +- dulwich/object_store.py | 215 ++++++++++++++++++-------------- dulwich/objects.py | 161 ++++++++++++++---------- dulwich/objectspec.py | 88 +++++++------ dulwich/pack.py | 270 +++++++++++++++++++++++----------------- 8 files changed, 549 insertions(+), 403 deletions(-) diff --git a/dulwich/index.py b/dulwich/index.py index b938e4100..1b2289f67 100644 --- a/dulwich/index.py +++ b/dulwich/index.py @@ -172,8 +172,9 @@ def read_index_dict(f): def write_index(f, entries): """Write an index file. - :param f: File-like object to write to - :param entries: Iterable over the entries to write + Args: + f: File-like object to write to + entries: Iterable over the entries to write """ f.write(b'DIRC') f.write(struct.pack(b'>LL', 2, len(entries))) @@ -196,7 +197,8 @@ def cleanup_mode(mode): This will return a mode that can be stored in a tree object. - :param mode: Mode to clean up. + Args: + mode: Mode to clean up. """ if stat.S_ISLNK(mode): return stat.S_IFLNK @@ -215,7 +217,8 @@ class Index(object): def __init__(self, filename): """Open an index file. - :param filename: Path to the index file + Args: + filename: Path to the index file """ self._filename = filename self.clear() @@ -259,7 +262,7 @@ def __len__(self): def __getitem__(self, name): """Retrieve entry by relative path. 
- :return: tuple with (ctime, mtime, dev, ino, mode, uid, gid, size, sha, + Returns: tuple with (ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) """ return self._byname[name] @@ -314,10 +317,11 @@ def update(self, entries): def changes_from_tree(self, object_store, tree, want_unchanged=False): """Find the differences between the contents of this index and a tree. - :param object_store: Object store to use for retrieving tree contents - :param tree: SHA1 of the root tree - :param want_unchanged: Whether unchanged files should be reported - :return: Iterator over tuples with (oldpath, newpath), (oldmode, + Args: + object_store: Object store to use for retrieving tree contents + tree: SHA1 of the root tree + want_unchanged: Whether unchanged files should be reported + Returns: Iterator over tuples with (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) """ def lookup_entry(path): @@ -331,8 +335,10 @@ def lookup_entry(path): def commit(self, object_store): """Create a new tree from an index. - :param object_store: Object store to save the tree in - :return: Root tree SHA + Args: + object_store: Object store to save the tree in + Returns: + Root tree SHA """ return commit_tree(object_store, self.iterobjects()) @@ -340,9 +346,11 @@ def commit(self, object_store): def commit_tree(object_store, blobs): """Commit a new tree. - :param object_store: Object store to add trees to - :param blobs: Iterable over blob path, sha, mode entries - :return: SHA1 of the created tree. + Args: + object_store: Object store to add trees to + blobs: Iterable over blob path, sha, mode entries + Returns: + SHA1 of the created tree. """ trees = {b'': {}} @@ -380,10 +388,11 @@ def build_tree(path): def commit_index(object_store, index): """Create a new tree from an index. - :param object_store: Object store to save the tree in - :param index: Index file - :note: This function is deprecated, use index.commit() instead. - :return: Root tree sha. 
+ Args: + object_store: Object store to save the tree in + index: Index file + Note: This function is deprecated, use index.commit() instead. + Returns: Root tree sha. """ return commit_tree(object_store, index.iterobjects()) @@ -393,12 +402,13 @@ def changes_from_tree(names, lookup_entry, object_store, tree, """Find the differences between the contents of a tree and a working copy. - :param names: Iterable of names in the working copy - :param lookup_entry: Function to lookup an entry in the working copy - :param object_store: Object store to use for retrieving tree contents - :param tree: SHA1 of the root tree, or None for an empty tree - :param want_unchanged: Whether unchanged files should be reported - :return: Iterator over tuples with (oldpath, newpath), (oldmode, newmode), + Args: + names: Iterable of names in the working copy + lookup_entry: Function to lookup an entry in the working copy + object_store: Object store to use for retrieving tree contents + tree: SHA1 of the root tree, or None for an empty tree + want_unchanged: Whether unchanged files should be reported + Returns: Iterator over tuples with (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) """ # TODO(jelmer): Support a include_trees option @@ -429,9 +439,10 @@ def changes_from_tree(names, lookup_entry, object_store, tree, def index_entry_from_stat(stat_val, hex_sha, flags, mode=None): """Create a new index entry from a stat value. - :param stat_val: POSIX stat_result instance - :param hex_sha: Hex sha of the object - :param flags: Index flags + Args: + stat_val: POSIX stat_result instance + hex_sha: Hex sha of the object + flags: Index flags """ if mode is None: mode = cleanup_mode(stat_val.st_mode) @@ -445,12 +456,13 @@ def index_entry_from_stat(stat_val, hex_sha, flags, mode=None): def build_file_from_blob(blob, mode, target_path, honor_filemode=True): """Build a file or symlink on disk based on a Git object. 
- :param obj: The git object - :param mode: File mode - :param target_path: Path to write to - :param honor_filemode: An optional flag to honor core.filemode setting in + Args: + obj: The git object + mode: File mode + target_path: Path to write to + honor_filemode: An optional flag to honor core.filemode setting in config file, default is core.filemode=True, change executable bit - :return: stat object for the file + Returns: stat object for the file """ try: oldstat = os.lstat(target_path) @@ -518,16 +530,17 @@ def build_index_from_tree(root_path, index_path, object_store, tree_id, validate_path_element=validate_path_element_default): """Generate and materialize index from a tree - :param tree_id: Tree to materialize - :param root_path: Target dir for materialized index files - :param index_path: Target path for generated index - :param object_store: Non-empty object store holding tree contents - :param honor_filemode: An optional flag to honor core.filemode setting in + Args: + tree_id: Tree to materialize + root_path: Target dir for materialized index files + index_path: Target path for generated index + object_store: Non-empty object store holding tree contents + honor_filemode: An optional flag to honor core.filemode setting in config file, default is core.filemode=True, change executable bit - :param validate_path_element: Function to validate path elements to check + validate_path_element: Function to validate path elements to check out; default just refuses .git and .. directories. - :note:: existing index is wiped and contents are not merged + Note: existing index is wiped and contents are not merged in a working dir. Suitable only for fresh clones. """ @@ -570,9 +583,10 @@ def build_index_from_tree(root_path, index_path, object_store, tree_id, def blob_from_path_and_stat(fs_path, st): """Create a blob from a path and a stat object. 
- :param fs_path: Full file system path to file - :param st: A stat object - :return: A `Blob` object + Args: + fs_path: Full file system path to file + st: A stat object + Returns: A `Blob` object """ assert isinstance(fs_path, bytes) blob = Blob() @@ -594,8 +608,9 @@ def blob_from_path_and_stat(fs_path, st): def read_submodule_head(path): """Read the head commit of a submodule. - :param path: path to the submodule - :return: HEAD sha, None if not a valid head/repository + Args: + path: path to the submodule + Returns: HEAD sha, None if not a valid head/repository """ from dulwich.errors import NotGitRepository from dulwich.repo import Repo @@ -641,9 +656,10 @@ def _has_directory_changed(tree_path, entry): def get_unstaged_changes(index, root_path, filter_blob_callback=None): """Walk through an index and check for differences against working tree. - :param index: index to check - :param root_path: path in which to find files - :return: iterator over paths with unstaged changes + Args: + index: index to check + root_path: path in which to find files + Returns: iterator over paths with unstaged changes """ # For each entry in the index check the sha1 & ensure not staged if not isinstance(root_path, bytes): @@ -680,10 +696,11 @@ def get_unstaged_changes(index, root_path, filter_blob_callback=None): def _tree_to_fs_path(root_path, tree_path): """Convert a git tree path to a file system path. - :param root_path: Root filesystem path - :param tree_path: Git tree path as bytes + Args: + root_path: Root filesystem path + tree_path: Git tree path as bytes - :return: File system path. + Returns: File system path. """ assert isinstance(tree_path, bytes) if os_sep_bytes != b'/': @@ -696,10 +713,11 @@ def _tree_to_fs_path(root_path, tree_path): def _fs_to_tree_path(fs_path, fs_encoding=None): """Convert a file system path to a git tree path. - :param fs_path: File system path. - :param fs_encoding: File system encoding + Args: + fs_path: File system path. 
+ fs_encoding: File system encoding - :return: Git tree path as bytes + Returns: Git tree path as bytes """ if fs_encoding is None: fs_encoding = sys.getfilesystemencoding() @@ -721,10 +739,11 @@ def index_entry_from_path(path, object_store=None): and tree references. for directories and non-existant files it returns None - :param path: Path to create an index entry for - :param object_store: Optional object store to + Args: + path: Path to create an index entry for + object_store: Optional object store to save new blobs in - :return: An index entry; None for directories + Returns: An index entry; None for directories """ assert isinstance(path, bytes) st = os.lstat(path) @@ -746,10 +765,11 @@ def index_entry_from_path(path, object_store=None): def iter_fresh_entries(paths, root_path, object_store=None): """Iterate over current versions of index entries on disk. - :param paths: Paths to iterate over - :param root_path: Root path to access from - :param store: Optional store to save new blobs in - :return: Iterator over path, index_entry + Args: + paths: Paths to iterate over + root_path: Root path to access from + store: Optional store to save new blobs in + Returns: Iterator over path, index_entry """ for path in paths: p = _tree_to_fs_path(root_path, path) @@ -768,11 +788,12 @@ def iter_fresh_blobs(index, root_path): Don't use this function; it removes missing entries from index. - :param index: Index file - :param root_path: Root path to access from - :param include_deleted: Include deleted entries with sha and + Args: + index: Index file + root_path: Root path to access from + include_deleted: Include deleted entries with sha and mode set to None - :return: Iterator over path, sha, mode + Returns: Iterator over path, sha, mode """ import warnings warnings.warn(PendingDeprecationWarning, @@ -789,12 +810,13 @@ def iter_fresh_objects(paths, root_path, include_deleted=False, object_store=None): """Iterate over versions of objecs on disk referenced by index. 
- :param index: Index file - :param root_path: Root path to access from - :param include_deleted: Include deleted entries with sha and + Args: + index: Index file + root_path: Root path to access from + include_deleted: Include deleted entries with sha and mode set to None - :param object_store: Optional object store to report new items to - :return: Iterator over path, sha, mode + object_store: Optional object store to report new items to + Returns: Iterator over path, sha, mode """ for path, entry in iter_fresh_entries(paths, root_path, object_store=object_store): @@ -811,8 +833,9 @@ def refresh_index(index, root_path): This is the equivalent to running 'git commit -a'. - :param index: Index to update - :param root_path: Root filesystem path + Args: + index: Index to update + root_path: Root filesystem path """ for path, entry in iter_fresh_entries(index, root_path): index[path] = path diff --git a/dulwich/line_ending.py b/dulwich/line_ending.py index 14f94ae0f..b17d9315f 100644 --- a/dulwich/line_ending.py +++ b/dulwich/line_ending.py @@ -136,8 +136,9 @@ def convert_crlf_to_lf(text_hunk): """Convert CRLF in text hunk into LF - :param text_hunk: A bytes string representing a text hunk - :return: The text hunk with the same type, with CRLF replaced into LF + Args: + text_hunk: A bytes string representing a text hunk + Returns: The text hunk with the same type, with CRLF replaced into LF """ return text_hunk.replace(CRLF, LF) @@ -145,8 +146,9 @@ def convert_crlf_to_lf(text_hunk): def convert_lf_to_crlf(text_hunk): """Convert LF in text hunk into CRLF - :param text_hunk: A bytes string representing a text hunk - :return: The text hunk with the same type, with LF replaced into CRLF + Args: + text_hunk: A bytes string representing a text hunk + Returns: The text hunk with the same type, with LF replaced into CRLF """ # TODO find a more efficient way of doing it intermediary = text_hunk.replace(CRLF, LF) @@ -174,9 +176,10 @@ def get_checkin_filter(core_eol, 
core_autocrlf, git_attributes): def get_checkout_filter_autocrlf(core_autocrlf): """ Returns the correct checkout filter base on autocrlf value - :param core_autocrlf: The bytes configuration value of core.autocrlf. + Args: + core_autocrlf: The bytes configuration value of core.autocrlf. Valid values are: b'true', b'false' or b'input'. - :return: Either None if no filter has to be applied or a function + Returns: Either None if no filter has to be applied or a function accepting a single argument, a binary text hunk """ @@ -189,9 +192,10 @@ def get_checkout_filter_autocrlf(core_autocrlf): def get_checkin_filter_autocrlf(core_autocrlf): """ Returns the correct checkin filter base on autocrlf value - :param core_autocrlf: The bytes configuration value of core.autocrlf. + Args: + core_autocrlf: The bytes configuration value of core.autocrlf. Valid values are: b'true', b'false' or b'input'. - :return: Either None if no filter has to be applied or a function + Returns: Either None if no filter has to be applied or a function accepting a single argument, a binary text hunk """ diff --git a/dulwich/lru_cache.py b/dulwich/lru_cache.py index 821da5b80..913ab865c 100644 --- a/dulwich/lru_cache.py +++ b/dulwich/lru_cache.py @@ -140,9 +140,10 @@ def add(self, key, value, cleanup=None): Also, if the entry is ever removed from the cache, call cleanup(key, value). - :param key: The key to store it under - :param value: The object to store - :param cleanup: None or a function taking (key, value) to indicate + Args: + key: The key to store it under + value: The object to store + cleanup: None or a function taking (key, value) to indicate 'value' should be cleaned up. """ if key is _null_key: @@ -179,7 +180,7 @@ def keys(self): request them later. This is simply meant as a peak into the current state. - :return: An unordered list of keys that are currently cached. + Returns: An unordered list of keys that are currently cached. 
""" return self._cache.keys() @@ -288,11 +289,12 @@ def __init__(self, max_size=1024*1024, after_cleanup_size=None, compute_size=None): """Create a new LRUSizeCache. - :param max_size: The max number of bytes to store before we start + Args: + max_size: The max number of bytes to store before we start clearing out entries. - :param after_cleanup_size: After cleaning up, shrink everything to this + after_cleanup_size: After cleaning up, shrink everything to this size. - :param compute_size: A function to compute the size of the values. We + compute_size: A function to compute the size of the values. We use a function here, so that you can pass 'len' if you are just using simple strings, or a more complex function if you are using something like a list of strings, or even a custom object. @@ -312,9 +314,10 @@ def add(self, key, value, cleanup=None): Also, if the entry is ever removed from the cache, call cleanup(key, value). - :param key: The key to store it under - :param value: The object to store - :param cleanup: None or a function taking (key, value) to indicate + Args: + key: The key to store it under + value: The object to store + cleanup: None or a function taking (key, value) to indicate 'value' should be cleaned up. """ if key is _null_key: diff --git a/dulwich/mailmap.py b/dulwich/mailmap.py index be4737bfa..12d6445b4 100644 --- a/dulwich/mailmap.py +++ b/dulwich/mailmap.py @@ -37,8 +37,9 @@ def parse_identity(text): def read_mailmap(f): """Read a mailmap. - :param f: File-like object to read from - :return: Iterator over + Args: + f: File-like object to read from + Returns: Iterator over ((canonical_name, canonical_email), (from_name, from_email)) tuples """ for line in f: @@ -72,8 +73,9 @@ def add_entry(self, canonical_identity, from_identity=None): Any of the fields can be None, but at least one of them needs to be set. 
- :param canonical_identity: The canonical identity (tuple) - :param from_identity: The from identity (tuple) + Args: + canonical_identity: The canonical identity (tuple) + from_identity: The from identity (tuple) """ if from_identity is None: from_name, from_email = None, None diff --git a/dulwich/object_store.py b/dulwich/object_store.py index ab0e245ff..3a0738230 100644 --- a/dulwich/object_store.py +++ b/dulwich/object_store.py @@ -82,8 +82,9 @@ def determine_wants_all(self, refs): def iter_shas(self, shas): """Iterate over the objects for the specified shas. - :param shas: Iterable object with SHAs - :return: Object iterator + Args: + shas: Iterable object with SHAs + Returns: Object iterator """ return ObjectStoreIterator(self, shas) @@ -110,8 +111,9 @@ def packs(self): def get_raw(self, name): """Obtain the raw text for an object. - :param name: sha for the object. - :return: tuple with numeric type and object contents. + Args: + name: sha for the object. + Returns: tuple with numeric type and object contents. """ raise NotImplementedError(self.get_raw) @@ -133,15 +135,17 @@ def add_object(self, obj): def add_objects(self, objects, progress=None): """Add a set of objects to this object store. - :param objects: Iterable over a list of (object, path) tuples + Args: + objects: Iterable over a list of (object, path) tuples """ raise NotImplementedError(self.add_objects) def add_pack_data(self, count, pack_data, progress=None): """Add pack data to this object store. 
- :param num_items: Number of items to add - :param pack_data: Iterator over pack data tuples + Args: + num_items: Number of items to add + pack_data: Iterator over pack data tuples """ if count == 0: # Don't bother writing an empty pack file @@ -159,13 +163,14 @@ def tree_changes(self, source, target, want_unchanged=False, include_trees=False, change_type_same=False): """Find the differences between the contents of two trees - :param source: SHA1 of the source tree - :param target: SHA1 of the target tree - :param want_unchanged: Whether unchanged files should be reported - :param include_trees: Whether to include trees - :param change_type_same: Whether to report files changing + Args: + source: SHA1 of the source tree + target: SHA1 of the target tree + want_unchanged: Whether unchanged files should be reported + include_trees: Whether to include trees + change_type_same: Whether to report files changing type in the same entry. - :return: Iterator over tuples with + Returns: Iterator over tuples with (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) """ for change in tree_changes(self, source, target, @@ -181,9 +186,10 @@ def iter_tree_contents(self, tree_id, include_trees=False): Iteration is depth-first pre-order, as in e.g. os.walk. - :param tree_id: SHA1 of the tree. - :param include_trees: If True, include tree objects in the iteration. - :return: Iterator over TreeEntry namedtuples for all the objects in a + Args: + tree_id: SHA1 of the tree. + include_trees: If True, include tree objects in the iteration. + Returns: Iterator over TreeEntry namedtuples for all the objects in a tree. """ for entry, _ in walk_trees(self, tree_id, None): @@ -197,15 +203,16 @@ def find_missing_objects(self, haves, wants, progress=None, depth=None): """Find the missing objects required for a set of revisions. - :param haves: Iterable over SHAs already in common. - :param wants: Iterable over SHAs of objects to fetch. 
- :param progress: Simple progress function that will be called with + Args: + haves: Iterable over SHAs already in common. + wants: Iterable over SHAs of objects to fetch. + progress: Simple progress function that will be called with updated progress strings. - :param get_tagged: Function that returns a dict of pointed-to sha -> + get_tagged: Function that returns a dict of pointed-to sha -> tag sha for including tags. - :param get_parents: Optional function for getting the parents of a + get_parents: Optional function for getting the parents of a commit. - :return: Iterator over (sha, path) pairs. + Returns: Iterator over (sha, path) pairs. """ finder = MissingObjectFinder(self, haves, wants, progress, get_tagged, get_parents=get_parents) @@ -214,8 +221,9 @@ def find_missing_objects(self, haves, wants, progress=None, def find_common_revisions(self, graphwalker): """Find which revisions this store has in common using graphwalker. - :param graphwalker: A graphwalker object. - :return: List of SHAs that are in common + Args: + graphwalker: A graphwalker object. + Returns: List of SHAs that are in common """ haves = [] sha = next(graphwalker) @@ -229,19 +237,21 @@ def find_common_revisions(self, graphwalker): def generate_pack_contents(self, have, want, progress=None): """Iterate over the contents of a pack file. - :param have: List of SHA1s of objects that should not be sent - :param want: List of SHA1s of objects that should be sent - :param progress: Optional progress reporting method + Args: + have: List of SHA1s of objects that should not be sent + want: List of SHA1s of objects that should be sent + progress: Optional progress reporting method """ return self.iter_shas(self.find_missing_objects(have, want, progress)) def generate_pack_data(self, have, want, progress=None, ofs_delta=True): """Generate pack data objects for a set of wants/haves. 
- :param have: List of SHA1s of objects that should not be sent - :param want: List of SHA1s of objects that should be sent - :param ofs_delta: Whether OFS deltas can be included - :param progress: Optional progress reporting method + Args: + have: List of SHA1s of objects that should not be sent + want: List of SHA1s of objects that should be sent + ofs_delta: Whether OFS deltas can be included + progress: Optional progress reporting method """ # TODO(jelmer): More efficient implementation return pack_objects_to_data( @@ -250,8 +260,9 @@ def generate_pack_data(self, have, want, progress=None, ofs_delta=True): def peel_sha(self, sha): """Peel all tags from a SHA. - :param sha: The object SHA to peel. - :return: The fully-peeled SHA1 of a tag object, after peeling all + Args: + sha: The object SHA to peel. + Returns: The fully-peeled SHA1 of a tag object, after peeling all intermediate tags; if the original ref does not point to a tag, this will equal the original SHA1. """ @@ -266,12 +277,13 @@ def _collect_ancestors(self, heads, common=set(), get_parents=lambda commit: commit.parents): """Collect all ancestors of heads up to (excluding) those in common. - :param heads: commits to start from - :param common: commits to end at, or empty set to walk repository + Args: + heads: commits to start from + common: commits to end at, or empty set to walk repository completely - :param get_parents: Optional function for getting the parents of a + get_parents: Optional function for getting the parents of a commit. - :return: a tuple (A, B) where A - all commits reachable + Returns: a tuple (A, B) where A - all commits reachable from heads but not present in common, B - common (shared) elements that are directly reachable from heads """ @@ -382,7 +394,7 @@ def _remove_pack(self, name): def pack_loose_objects(self): """Pack loose objects. 
- :return: Number of objects packed + Returns: Number of objects packed """ objects = set() for sha in self._iter_loose_objects(): @@ -444,8 +456,9 @@ def contains_loose(self, sha): def get_raw(self, name): """Obtain the raw fulltext for an object. - :param name: sha for the object. - :return: tuple with numeric type and object contents. + Args: + name: sha for the object. + Returns: tuple with numeric type and object contents. """ if name == ZERO_SHA: raise KeyError(name) @@ -484,9 +497,10 @@ def get_raw(self, name): def add_objects(self, objects, progress=None): """Add a set of objects to this object store. - :param objects: Iterable over (object, path) tuples, should support + Args: + objects: Iterable over (object, path) tuples, should support __len__. - :return: Pack object of the objects written. + Returns: Pack object of the objects written. """ return self.add_pack_data( *pack_objects_to_data(objects), @@ -499,7 +513,8 @@ class DiskObjectStore(PackBasedObjectStore): def __init__(self, path): """Open an object store. - :param path: Path of the object store. + Args: + path: Path of the object store. """ super(DiskObjectStore, self).__init__() self.path = path @@ -632,13 +647,14 @@ def _get_pack_basepath(self, entries): def _complete_thin_pack(self, f, path, copier, indexer): """Move a specific file containing a pack into the pack directory. - :note: The file should be on the same file system as the + Note: The file should be on the same file system as the packs directory. - :param f: Open file object for the pack. - :param path: Path to the pack file. - :param copier: A PackStreamCopier to use for writing pack data. - :param indexer: A PackIndexer for indexing the pack. + Args: + f: Open file object for the pack. + path: Path to the pack file. + copier: A PackStreamCopier to use for writing pack data. + indexer: A PackIndexer for indexing the pack. 
""" entries = list(indexer) @@ -701,11 +717,12 @@ def add_thin_pack(self, read_all, read_some): outside the pack. They should never be placed in the object store directly, and always indexed and completed as they are copied. - :param read_all: Read function that blocks until the number of + Args: + read_all: Read function that blocks until the number of requested bytes are read. - :param read_some: Read function that returns at least one byte, but may + read_some: Read function that returns at least one byte, but may not return the number of bytes requested. - :return: A Pack object pointing at the now-completed thin pack in the + Returns: A Pack object pointing at the now-completed thin pack in the objects/pack directory. """ fd, path = tempfile.mkstemp(dir=self.path, prefix='tmp_pack_') @@ -719,10 +736,11 @@ def add_thin_pack(self, read_all, read_some): def move_in_pack(self, path): """Move a specific file containing a pack into the pack directory. - :note: The file should be on the same file system as the + Note: The file should be on the same file system as the packs directory. - :param path: Path to the pack file. + Args: + path: Path to the pack file. """ with PackData(path) as p: entries = p.sorted_entries() @@ -751,7 +769,7 @@ def move_in_pack(self, path): def add_pack(self): """Add a new pack to this object store. - :return: Fileobject to write to, a commit function to + Returns: Fileobject to write to, a commit function to call when the pack is finished and an abort function. """ @@ -776,7 +794,8 @@ def abort(): def add_object(self, obj): """Add a single object to this object store. - :param obj: Object to add + Args: + obj: Object to add """ path = self._get_shafile_path(obj.id) dir = os.path.dirname(path) @@ -837,8 +856,9 @@ def packs(self): def get_raw(self, name): """Obtain the raw text for an object. - :param name: sha for the object. - :return: tuple with numeric type and object contents. + Args: + name: sha for the object. 
+ Returns: tuple with numeric type and object contents. """ obj = self[self._to_hexsha(name)] return obj.type_num, obj.as_raw_string() @@ -859,7 +879,8 @@ def add_object(self, obj): def add_objects(self, objects, progress=None): """Add a set of objects to this object store. - :param objects: Iterable over a list of (object, path) tuples + Args: + objects: Iterable over a list of (object, path) tuples """ for obj, path in objects: self.add_object(obj) @@ -870,7 +891,7 @@ def add_pack(self): Because this object store doesn't support packs, we extract and add the individual objects. - :return: Fileobject to write to and a commit function to + Returns: Fileobject to write to and a commit function to call when the pack is finished. """ f = BytesIO() @@ -888,8 +909,9 @@ def abort(): def _complete_thin_pack(self, f, indexer): """Complete a thin pack by adding external references. - :param f: Open file object for the pack. - :param indexer: A PackIndexer for indexing the pack. + Args: + f: Open file object for the pack. + indexer: A PackIndexer for indexing the pack. """ entries = list(indexer) @@ -915,9 +937,10 @@ def add_thin_pack(self, read_all, read_some): outside the pack. Because this object store doesn't support packs, we extract and add the individual objects. - :param read_all: Read function that blocks until the number of + Args: + read_all: Read function that blocks until the number of requested bytes are read. - :param read_some: Read function that returns at least one byte, but may + read_some: Read function that returns at least one byte, but may not return the number of bytes requested. """ f, commit, abort = self.add_pack() @@ -947,8 +970,9 @@ class ObjectStoreIterator(ObjectIterator): def __init__(self, store, sha_iter): """Create a new ObjectIterator. 
- :param store: Object store to retrieve from - :param sha_iter: Iterator over (sha, path) tuples + Args: + store: Object store to retrieve from + sha_iter: Iterator over (sha, path) tuples """ self.store = store self.sha_iter = sha_iter @@ -975,11 +999,12 @@ def itershas(self): def __contains__(self, needle): """Check if an object is present. - :note: This checks if the object is present in + Note: This checks if the object is present in the underlying object store, not if it would be yielded by the iterator. - :param needle: SHA1 of the object to check for + Args: + needle: SHA1 of the object to check for """ if needle == ZERO_SHA: return False @@ -988,7 +1013,7 @@ def __contains__(self, needle): def __getitem__(self, key): """Find an object by SHA1. - :note: This retrieves the object from the underlying + Note: This retrieves the object from the underlying object store. It will also succeed if the object would not be returned by the iterator. """ @@ -1020,10 +1045,11 @@ def __bool__(self): def tree_lookup_path(lookup_obj, root_sha, path): """Look up an object in a Git tree. - :param lookup_obj: Callback for retrieving object by SHA1 - :param root_sha: SHA1 of the root tree - :param path: Path to lookup - :return: A tuple of (mode, SHA) of the resulting path. + Args: + lookup_obj: Callback for retrieving object by SHA1 + root_sha: SHA1 of the root tree + path: Path to lookup + Returns: A tuple of (mode, SHA) of the resulting path. """ tree = lookup_obj(root_sha) if not isinstance(tree, Tree): @@ -1034,9 +1060,10 @@ def tree_lookup_path(lookup_obj, root_sha, path): def _collect_filetree_revs(obj_store, tree_sha, kset): """Collect SHA1s of files and directories for specified tree. 
- :param obj_store: Object store to get objects by SHA from - :param tree_sha: tree reference to walk - :param kset: set to fill with references to files and directories + Args: + obj_store: Object store to get objects by SHA from + tree_sha: tree reference to walk + kset: set to fill with references to files and directories """ filetree = obj_store[tree_sha] for name, mode, sha in filetree.iteritems(): @@ -1054,11 +1081,12 @@ def _split_commits_and_tags(obj_store, lst, ignore_unknown=False): through, and unless ignore_unknown argument is True, KeyError is thrown for SHA1 missing in the repository - :param obj_store: Object store to get objects by SHA1 from - :param lst: Collection of commit and tag SHAs - :param ignore_unknown: True to skip SHA1 missing in the repository + Args: + obj_store: Object store to get objects by SHA1 from + lst: Collection of commit and tag SHAs + ignore_unknown: True to skip SHA1 missing in the repository silently. - :return: A tuple of (commits, tags, others) SHA1s + Returns: A tuple of (commits, tags, others) SHA1s """ commits = set() tags = set() @@ -1088,15 +1116,16 @@ def _split_commits_and_tags(obj_store, lst, ignore_unknown=False): class MissingObjectFinder(object): """Find the objects missing from another object store. - :param object_store: Object store containing at least all objects to be + Args: + object_store: Object store containing at least all objects to be sent - :param haves: SHA1s of commits not to send (already present in target) - :param wants: SHA1s of commits to send - :param progress: Optional function to report progress to. - :param get_tagged: Function that returns a dict of pointed-to sha -> tag + haves: SHA1s of commits not to send (already present in target) + wants: SHA1s of commits to send + progress: Optional function to report progress to. + get_tagged: Function that returns a dict of pointed-to sha -> tag sha for including tags. 
- :param get_parents: Optional function for getting the parents of a commit. - :param tagged: dict of pointed-to sha -> tag sha for including tags + get_parents: Optional function for getting the parents of a commit. + tagged: dict of pointed-to sha -> tag sha for including tags """ def __init__(self, object_store, haves, wants, progress=None, @@ -1190,8 +1219,9 @@ class ObjectStoreGraphWalker(object): def __init__(self, local_heads, get_parents, shallow=None): """Create a new instance. - :param local_heads: Heads to start search with - :param get_parents: Function for finding the parents of a SHA1. + Args: + local_heads: Heads to start search with + get_parents: Function for finding the parents of a SHA1. """ self.heads = set(local_heads) self.get_parents = get_parents @@ -1254,11 +1284,12 @@ def commit_tree_changes(object_store, tree, changes): number of changes to a big tree. For a large number of changes to a large tree, use e.g. commit_tree. - :param object_store: Object store to store new objects in + Args: + object_store: Object store to store new objects in and retrieve old ones from. - :param tree: Original tree root - :param changes: changes to apply - :return: New tree root object + tree: Original tree root + changes: changes to apply + Returns: New tree root object """ # TODO(jelmer): Save up the objects and add them using .add_objects # rather than with individual calls to .add_object. diff --git a/dulwich/objects.py b/dulwich/objects.py index 3a41732d2..8976d4fd5 100644 --- a/dulwich/objects.py +++ b/dulwich/objects.py @@ -73,8 +73,9 @@ def S_ISGITLINK(m): """Check if a mode indicates a submodule. - :param m: Mode to check - :return: a ``boolean`` + Args: + m: Mode to check + Returns: a ``boolean`` """ return (stat.S_IFMT(m) == S_IFGITLINK) @@ -162,8 +163,9 @@ def get(obj): def object_class(type): """Get the object class corresponding to the given type. - :param type: Either a type name string or a numeric type. 
- :return: The ShaFile subclass corresponding to the given type, or None if + Args: + type: Either a type name string or a numeric type. + Returns: The ShaFile subclass corresponding to the given type, or None if type is not a valid type name/number. """ return _TYPE_MAP.get(type, None) @@ -172,9 +174,11 @@ def object_class(type): def check_hexsha(hex, error_msg): """Check if a string is a valid hex sha string. - :param hex: Hex string to check - :param error_msg: Error message to use in exception - :raise ObjectFormatException: Raised when the string is not valid + Args: + hex: Hex string to check + error_msg: Error message to use in exception + Raises: + ObjectFormatException: Raised when the string is not valid """ if not valid_hexsha(hex): raise ObjectFormatException("%s %s" % (error_msg, hex)) @@ -185,8 +189,9 @@ def check_identity(identity, error_msg): This will raise an exception if the identity is not valid. - :param identity: Identity string - :param error_msg: Error message to use in exception + Args: + identity: Identity string + error_msg: Error message to use in exception """ email_start = identity.find(b'<') email_end = identity.find(b'>') @@ -202,7 +207,8 @@ def check_time(time_seconds): This will raise an exception if the time is not valid. - :param time_info: author/committer/tagger info + Args: + time_info: author/committer/tagger info """ # Prevent overflow error @@ -279,7 +285,7 @@ def _parse_legacy_object(self, map): def as_legacy_object_chunks(self): """Return chunks representing the object in the experimental format. - :return: List of strings + Returns: List of strings """ compobj = zlib.compressobj() yield compobj.compress(self._header()) @@ -295,7 +301,7 @@ def as_legacy_object(self): def as_raw_chunks(self): """Return chunks with serialization of the object. 
- :return: List of strings, not necessarily one per line + Returns: List of strings, not necessarily one per line """ if self._needs_serialization: self._sha = None @@ -306,7 +312,7 @@ def as_raw_chunks(self): def as_raw_string(self): """Return raw string with serialization of the object. - :return: String object + Returns: String object """ return b''.join(self.as_raw_chunks()) @@ -417,9 +423,10 @@ def from_file(cls, f): def from_raw_string(type_num, string, sha=None): """Creates an object of the indicated type from the raw string given. - :param type_num: The numeric type of the object. - :param string: The raw uncompressed contents. - :param sha: Optional known sha for the object + Args: + type_num: The numeric type of the object. + string: The raw uncompressed contents. + sha: Optional known sha for the object """ obj = object_class(type_num)() obj.set_raw_string(string, sha) @@ -429,9 +436,10 @@ def from_raw_string(type_num, string, sha=None): def from_raw_chunks(type_num, chunks, sha=None): """Creates an object of the indicated type from the raw chunks given. - :param type_num: The numeric type of the object. - :param chunks: An iterable of the raw uncompressed contents. - :param sha: Optional known sha for the object + Args: + type_num: The numeric type of the object. + chunks: An iterable of the raw uncompressed contents. + sha: Optional known sha for the object """ obj = object_class(type_num)() obj.set_raw_chunks(chunks, sha) @@ -447,9 +455,11 @@ def from_string(cls, string): def _check_has_member(self, member, error_msg): """Check that the object has a given member variable. 
- :param member: the member variable to check for - :param error_msg: the message for an error if the member is missing - :raise ObjectFormatException: with the given error_msg if member is + Args: + member: the member variable to check for + error_msg: the message for an error if the member is missing + Raises: + ObjectFormatException: with the given error_msg if member is missing or is None """ if getattr(self, member, None) is None: @@ -458,8 +468,9 @@ def _check_has_member(self, member, error_msg): def check(self): """Check this object for internal consistency. - :raise ObjectFormatException: if the object is malformed in some way - :raise ChecksumMismatch: if the object was created with a SHA that does + Raises: + ObjectFormatException: if the object is malformed in some way + ChecksumMismatch: if the object was created with a SHA that does not match its contents """ # TODO: if we find that error-checking during object parsing is a @@ -603,7 +614,8 @@ def from_path(cls, path): def check(self): """Check this object for internal consistency. - :raise ObjectFormatException: if the object is malformed in some way + Raises: + ObjectFormatException: if the object is malformed in some way """ super(Blob, self).check() @@ -638,8 +650,9 @@ def splitlines(self): def _parse_message(chunks): """Parse a message with a list of fields and a body. - :param chunks: the raw chunks of the tag or commit object. - :return: iterator of tuples of (field, value), one per header line, in the + Args: + chunks: the raw chunks of the tag or commit object. + Returns: iterator of tuples of (field, value), one per header line, in the order read from the text, possibly including duplicates. Includes a field named None for the freeform tag/commit text. """ @@ -716,7 +729,8 @@ def from_path(cls, filename): def check(self): """Check this object for internal consistency. 
- :raise ObjectFormatException: if the object is malformed in some way + Raises: + ObjectFormatException: if the object is malformed in some way """ super(Tag, self).check() self._check_has_member("_object_sha", "missing object sha") @@ -807,7 +821,7 @@ def _deserialize(self, chunks): def _get_object(self): """Get the object pointed to by this tag. - :return: tuple of (object class, sha). + Returns: tuple of (object class, sha). """ return (self._object_class, self._object_sha) @@ -848,9 +862,11 @@ def in_path(self, path): def parse_tree(text, strict=False): """Parse a tree text. - :param text: Serialized text to parse - :return: iterator of tuples of (name, mode, sha) - :raise ObjectFormatException: if the object was malformed in some way + Args: + text: Serialized text to parse + Returns: iterator of tuples of (name, mode, sha) + Raises: + ObjectFormatException: if the object was malformed in some way """ count = 0 length = len(text) @@ -876,8 +892,9 @@ def parse_tree(text, strict=False): def serialize_tree(items): """Serialize the items in a tree to a text. - :param items: Sorted iterable over (name, mode, sha) tuples - :return: Serialized tree text as chunks + Args: + items: Sorted iterable over (name, mode, sha) tuples + Returns: Serialized tree text as chunks """ for name, mode, hexsha in items: yield (("%04o" % mode).encode('ascii') + b' ' + name + @@ -887,11 +904,12 @@ def serialize_tree(items): def sorted_tree_items(entries, name_order): """Iterate over a tree entries dictionary. - :param name_order: If True, iterate entries in order of their name. If + Args: + name_order: If True, iterate entries in order of their name. If False, iterate entries in tree order, that is, treat subtree entries as having '/' appended. 
- :param entries: Dictionary mapping names to (mode, sha) tuples - :return: Iterator over (name, mode, hexsha) + entries: Dictionary mapping names to (mode, sha) tuples + Returns: Iterator over (name, mode, hexsha) """ key_func = name_order and key_entry_name_order or key_entry for name, entry in sorted(entries.items(), key=key_func): @@ -906,7 +924,8 @@ def sorted_tree_items(entries, name_order): def key_entry(entry): """Sort key for tree entry. - :param entry: (name, value) tuplee + Args: + entry: (name, value) tuplee """ (name, value) = entry if stat.S_ISDIR(value[0]): @@ -922,10 +941,11 @@ def key_entry_name_order(entry): def pretty_format_tree_entry(name, mode, hexsha, encoding="utf-8"): """Pretty format tree entry. - :param name: Name of the directory entry - :param mode: Mode of entry - :param hexsha: Hexsha of the referenced object - :return: string describing the tree entry + Args: + name: Name of the directory entry + mode: Mode of entry + hexsha: Hexsha of the referenced object + Returns: string describing the tree entry """ if mode & stat.S_IFDIR: kind = "tree" @@ -964,8 +984,9 @@ def __getitem__(self, name): def __setitem__(self, name, value): """Set a tree entry by name. - :param name: The name of the entry, as a string. - :param value: A tuple of (mode, hexsha), where mode is the mode of the + Args: + name: The name of the entry, as a string. + value: A tuple of (mode, hexsha), where mode is the mode of the entry as an integral type and hexsha is the hex SHA of the entry as a string. """ @@ -986,10 +1007,11 @@ def __iter__(self): def add(self, name, mode, hexsha): """Add an entry to the tree. - :param mode: The mode of the entry as an integral type. Not all + Args: + mode: The mode of the entry as an integral type. Not all possible modes are supported by git; see check() for details. - :param name: The name of the entry, as a string. - :param hexsha: The hex SHA of the entry as a string. + name: The name of the entry, as a string. 
+ hexsha: The hex SHA of the entry as a string. """ if isinstance(name, int) and isinstance(mode, bytes): (name, mode) = (mode, name) @@ -1002,16 +1024,17 @@ def add(self, name, mode, hexsha): def iteritems(self, name_order=False): """Iterate over entries. - :param name_order: If True, iterate in name order instead of tree + Args: + name_order: If True, iterate in name order instead of tree order. - :return: Iterator over (name, mode, sha) tuples + Returns: Iterator over (name, mode, sha) tuples """ return sorted_tree_items(self._entries, name_order) def items(self): """Return the sorted entries in this tree. - :return: List with (name, mode, sha) tuples + Returns: List with (name, mode, sha) tuples """ return list(self.iteritems()) @@ -1029,7 +1052,8 @@ def _deserialize(self, chunks): def check(self): """Check this object for internal consistency. - :raise ObjectFormatException: if the object is malformed in some way + Raises: + ObjectFormatException: if the object is malformed in some way """ super(Tree, self).check() last = None @@ -1068,9 +1092,10 @@ def as_pretty_string(self): def lookup_path(self, lookup_obj, path): """Look up an object in a Git tree. - :param lookup_obj: Callback for retrieving object by SHA1 - :param path: Path to lookup - :return: A tuple of (mode, SHA) of the resulting path. + Args: + lookup_obj: Callback for retrieving object by SHA1 + path: Path to lookup + Returns: A tuple of (mode, SHA) of the resulting path. """ parts = path.split(b'/') sha = self.id @@ -1088,8 +1113,9 @@ def lookup_path(self, lookup_obj, path): def parse_timezone(text): """Parse a timezone text fragment (e.g. '+0100'). - :param text: Text to parse. - :return: Tuple with timezone as seconds difference to UTC + Args: + text: Text to parse. + Returns: Tuple with timezone as seconds difference to UTC and a boolean indicating whether this was a UTC timezone prefixed with a negative sign (-0000). 
""" @@ -1114,8 +1140,9 @@ def parse_timezone(text): def format_timezone(offset, unnecessary_negative_timezone=False): """Format a timezone for Git serialization. - :param offset: Timezone offset as seconds difference to UTC - :param unnecessary_negative_timezone: Whether to use a minus sign for + Args: + offset: Timezone offset as seconds difference to UTC + unnecessary_negative_timezone: Whether to use a minus sign for UTC or positive timezones (-0000 and --700 rather than +0000 / +0700). """ if offset % 60 != 0: @@ -1132,10 +1159,12 @@ def format_timezone(offset, unnecessary_negative_timezone=False): def parse_time_entry(value): """Parse time entry behavior - :param value: Bytes representing a git commit/tag line - :raise: ObjectFormatException in case of parsing error (malformed - field date) - :return: Tuple of (author, time, (timezone, timezone_neg_utc)) + Args: + value: Bytes representing a git commit/tag line + Raises: + ObjectFormatException in case of parsing error (malformed + field date) + Returns: Tuple of (author, time, (timezone, timezone_neg_utc)) """ try: sep = value.rindex(b'> ') @@ -1155,8 +1184,9 @@ def parse_time_entry(value): def parse_commit(chunks): """Parse a commit object from chunks. - :param chunks: Chunks to parse - :return: Tuple of (tree, parents, author_info, commit_info, + Args: + chunks: Chunks to parse + Returns: Tuple of (tree, parents, author_info, commit_info, encoding, mergetag, gpgsig, message, extra) """ parents = [] @@ -1234,7 +1264,8 @@ def _deserialize(self, chunks): def check(self): """Check this object for internal consistency. 
- :raise ObjectFormatException: if the object is malformed in some way + Raises: + ObjectFormatException: if the object is malformed in some way """ super(Commit, self).check() self._check_has_member("_tree", "missing tree") diff --git a/dulwich/objectspec.py b/dulwich/objectspec.py index 48c0116da..f588d3132 100644 --- a/dulwich/objectspec.py +++ b/dulwich/objectspec.py @@ -30,10 +30,12 @@ def to_bytes(text): def parse_object(repo, objectish): """Parse a string referring to an object. - :param repo: A `Repo` object - :param objectish: A string referring to an object - :return: A git object - :raise KeyError: If the object can not be found + Args: + repo: A `Repo` object + objectish: A string referring to an object + Returns: A git object + Raises: + KeyError: If the object can not be found """ objectish = to_bytes(objectish) return repo[objectish] @@ -42,10 +44,12 @@ def parse_object(repo, objectish): def parse_tree(repo, treeish): """Parse a string referring to a tree. - :param repo: A `Repo` object - :param treeish: A string referring to a tree - :return: A git object - :raise KeyError: If the object can not be found + Args: + repo: A `Repo` object + treeish: A string referring to a tree + Returns: A git object + Raises: + KeyError: If the object can not be found """ treeish = to_bytes(treeish) o = repo[treeish] @@ -57,10 +61,12 @@ def parse_tree(repo, treeish): def parse_ref(container, refspec): """Parse a string referring to a reference. - :param container: A RefsContainer object - :param refspec: A string referring to a ref - :return: A ref - :raise KeyError: If the ref can not be found + Args: + container: A RefsContainer object + refspec: A string referring to a ref + Returns: A ref + Raises: + KeyError: If the ref can not be found """ refspec = to_bytes(refspec) possible_refs = [ @@ -80,11 +86,13 @@ def parse_ref(container, refspec): def parse_reftuple(lh_container, rh_container, refspec): """Parse a reftuple spec. 
- :param lh_container: A RefsContainer object - :param hh_container: A RefsContainer object - :param refspec: A string - :return: A tuple with left and right ref - :raise KeyError: If one of the refs can not be found + Args: + lh_container: A RefsContainer object + hh_container: A RefsContainer object + refspec: A string + Returns: A tuple with left and right ref + Raises: + KeyError: If one of the refs can not be found """ refspec = to_bytes(refspec) if refspec.startswith(b"+"): @@ -115,11 +123,13 @@ def parse_reftuple(lh_container, rh_container, refspec): def parse_reftuples(lh_container, rh_container, refspecs): """Parse a list of reftuple specs to a list of reftuples. - :param lh_container: A RefsContainer object - :param hh_container: A RefsContainer object - :param refspecs: A list of refspecs or a string - :return: A list of refs - :raise KeyError: If one of the refs can not be found + Args: + lh_container: A RefsContainer object + hh_container: A RefsContainer object + refspecs: A list of refspecs or a string + Returns: A list of refs + Raises: + KeyError: If one of the refs can not be found """ if not isinstance(refspecs, list): refspecs = [refspecs] @@ -133,10 +143,12 @@ def parse_reftuples(lh_container, rh_container, refspecs): def parse_refs(container, refspecs): """Parse a list of refspecs to a list of refs. - :param container: A RefsContainer object - :param refspecs: A list of refspecs or a string - :return: A list of refs - :raise KeyError: If one of the refs can not be found + Args: + container: A RefsContainer object + refspecs: A list of refspecs or a string + Returns: A list of refs + Raises: + KeyError: If one of the refs can not be found """ # TODO: Support * in refspecs if not isinstance(refspecs, list): @@ -150,11 +162,13 @@ def parse_refs(container, refspecs): def parse_commit_range(repo, committishs): """Parse a string referring to a range of commits. 
- :param repo: A `Repo` object - :param committishs: A string referring to a range of commits. - :return: An iterator over `Commit` objects - :raise KeyError: When the reference commits can not be found - :raise ValueError: If the range can not be parsed + Args: + repo: A `Repo` object + committishs: A string referring to a range of commits. + Returns: An iterator over `Commit` objects + Raises: + KeyError: When the reference commits can not be found + ValueError: If the range can not be parsed """ committishs = to_bytes(committishs) # TODO(jelmer): Support more than a single commit.. @@ -187,11 +201,13 @@ def scan_for_short_id(object_store, prefix): def parse_commit(repo, committish): """Parse a string referring to a single commit. - :param repo: A` Repo` object - :param commitish: A string referring to a single commit. - :return: A Commit object - :raise KeyError: When the reference commits can not be found - :raise ValueError: If the range can not be parsed + Args: + repo: A` Repo` object + commitish: A string referring to a single commit. + Returns: A Commit object + Raises: + KeyError: When the reference commits can not be found + ValueError: If the range can not be parsed """ committish = to_bytes(committish) try: diff --git a/dulwich/pack.py b/dulwich/pack.py index e7da3cc8b..7b276c36a 100644 --- a/dulwich/pack.py +++ b/dulwich/pack.py @@ -100,7 +100,8 @@ def take_msb_bytes(read, crc32=None): """Read bytes marked with most significant bit. - :param read: Read function + Args: + read: Read function """ ret = [] while len(ret) == 0 or ret[-1] & 0x80: @@ -206,9 +207,10 @@ def read_zlib_chunks(read_some, unpacked, include_comp=False, This function requires that the buffer have additional data following the compressed data, which is guaranteed to be the case for git pack files. - :param read_some: Read function that returns at least one byte, but may + Args: + read_some: Read function that returns at least one byte, but may return less than the requested size. 
- :param unpacked: An UnpackedObject to write result data to. If its crc32 + unpacked: An UnpackedObject to write result data to. If its crc32 attr is not None, the CRC32 of the compressed bytes will be computed using this starting CRC32. After this function, will have the following attrs set: @@ -216,10 +218,11 @@ def read_zlib_chunks(read_some, unpacked, include_comp=False, * decomp_chunks * decomp_len * crc32 - :param include_comp: If True, include compressed data in the result. - :param buffer_size: Size of the read buffer. - :return: Leftover unused data from the decompression. - :raise zlib.error: if a decompression error occurred. + include_comp: If True, include compressed data in the result. + buffer_size: Size of the read buffer. + Returns: Leftover unused data from the decompression. + Raises: + zlib.error: if a decompression error occurred. """ if unpacked.decomp_len <= -1: raise ValueError('non-negative zlib data stream size expected') @@ -263,8 +266,9 @@ def read_zlib_chunks(read_some, unpacked, include_comp=False, def iter_sha1(iter): """Return the hexdigest of the SHA1 over a set of names. - :param iter: Iterator over string objects - :return: 40-byte hex sha1 digest + Args: + iter: Iterator over string objects + Returns: 40-byte hex sha1 digest """ sha = sha1() for name in iter: @@ -275,8 +279,9 @@ def iter_sha1(iter): def load_pack_index(path): """Load an index file by path. - :param filename: Path to the index file - :return: A PackIndex loaded from the given path + Args: + filename: Path to the index file + Returns: A PackIndex loaded from the given path """ with GitFile(path, 'rb') as f: return load_pack_index_file(path, f) @@ -307,9 +312,10 @@ def _load_file_contents(f, size=None): def load_pack_index_file(path, f): """Load an index file from a file-like object. 
- :param path: Path for the index file - :param f: File-like object - :return: A PackIndex loaded from the given file + Args: + path: Path for the index file + f: File-like object + Returns: A PackIndex loaded from the given file """ contents, size = _load_file_contents(f) if contents[:4] == b'\377tOc': @@ -326,11 +332,12 @@ def load_pack_index_file(path, f): def bisect_find_sha(start, end, sha, unpack_name): """Find a SHA in a data blob with sorted SHAs. - :param start: Start index of range to search - :param end: End index of range to search - :param sha: Sha to find - :param unpack_name: Callback to retrieve SHA by index - :return: Index of the SHA, or None if it wasn't found + Args: + start: Start index of range to search + end: End index of range to search + sha: Sha to find + unpack_name: Callback to retrieve SHA by index + Returns: Index of the SHA, or None if it wasn't found """ assert start <= end while start <= end: @@ -376,7 +383,7 @@ def __iter__(self): def iterentries(self): """Iterate over the entries in this pack index. - :return: iterator over tuples with object name, offset in packfile and + Returns: iterator over tuples with object name, offset in packfile and crc32 checksum. """ raise NotImplementedError(self.iterentries) @@ -384,7 +391,7 @@ def iterentries(self): def get_pack_checksum(self): """Return the SHA1 checksum stored for the corresponding packfile. - :return: 20-byte binary digest + Returns: 20-byte binary digest """ raise NotImplementedError(self.get_pack_checksum) @@ -418,14 +425,15 @@ def object_sha1(self, index): def _object_index(self, sha): """See object_index. - :param sha: A *binary* SHA string. (20 characters long)_ + Args: + sha: A *binary* SHA string. (20 characters long)_ """ raise NotImplementedError(self._object_index) def objects_sha1(self): """Return the hex SHA1 over all the shas of all objects in this pack. - :note: This is used for the filename of the pack. + Note: This is used for the filename of the pack. 
""" return iter_sha1(self._itersha()) @@ -440,8 +448,9 @@ class MemoryPackIndex(PackIndex): def __init__(self, entries, pack_checksum=None): """Create a new MemoryPackIndex. - :param entries: Sequence of name, idx, crc32 (sorted) - :param pack_checksum: Optional pack checksum + Args: + entries: Sequence of name, idx, crc32 (sorted) + pack_checksum: Optional pack checksum """ self._by_sha = {} self._by_index = {} @@ -524,7 +533,7 @@ def __len__(self): def _unpack_entry(self, i): """Unpack the i-th entry in the index file. - :return: Tuple with object name (SHA), offset in pack file and CRC32 + Returns: Tuple with object name (SHA), offset in pack file and CRC32 checksum (if known). """ raise NotImplementedError(self._unpack_entry) @@ -549,7 +558,7 @@ def _itersha(self): def iterentries(self): """Iterate over the entries in this pack index. - :return: iterator over tuples with object name, offset in packfile and + Returns: iterator over tuples with object name, offset in packfile and crc32 checksum. """ for i in range(len(self)): @@ -573,28 +582,29 @@ def check(self): def calculate_checksum(self): """Calculate the SHA1 checksum over this pack index. - :return: This is a 20-byte binary digest + Returns: This is a 20-byte binary digest """ return sha1(self._contents[:-20]).digest() def get_pack_checksum(self): """Return the SHA1 checksum stored for the corresponding packfile. - :return: 20-byte binary digest + Returns: 20-byte binary digest """ return bytes(self._contents[-40:-20]) def get_stored_checksum(self): """Return the SHA1 checksum stored for this index. - :return: 20-byte binary digest + Returns: 20-byte binary digest """ return bytes(self._contents[-20:]) def _object_index(self, sha): """See object_index. - :param sha: A *binary* SHA string. (20 characters long)_ + Args: + sha: A *binary* SHA string. 
(20 characters long)_ """ assert len(sha) == 20 idx = ord(sha[:1]) @@ -679,8 +689,9 @@ def _unpack_crc32_checksum(self, i): def read_pack_header(read): """Read the header of a pack file. - :param read: Read function - :return: Tuple of (pack version, number of objects). If no data is + Args: + read: Read function + Returns: Tuple of (pack version, number of objects). If no data is available to read, returns (None, None). """ header = read(12) @@ -706,15 +717,16 @@ def unpack_object(read_all, read_some=None, compute_crc32=False, include_comp=False, zlib_bufsize=_ZLIB_BUFSIZE): """Unpack a Git object. - :param read_all: Read function that blocks until the number of requested + Args: + read_all: Read function that blocks until the number of requested bytes are read. - :param read_some: Read function that returns at least one byte, but may not + read_some: Read function that returns at least one byte, but may not return the number of bytes requested. - :param compute_crc32: If True, compute the CRC32 of the compressed data. If + compute_crc32: If True, compute the CRC32 of the compressed data. If False, the returned CRC32 will be None. - :param include_comp: If True, include compressed data in the result. - :param zlib_bufsize: An optional buffer size for zlib operations. - :return: A tuple of (unpacked, unused), where unused is the unused data + include_comp: If True, include compressed data in the result. + zlib_bufsize: An optional buffer size for zlib operations. + Returns: A tuple of (unpacked, unused), where unused is the unused data leftover from decompression, and unpacked in an UnpackedObject with the following attrs set: @@ -799,8 +811,9 @@ def _read(self, read, size): As a side effect, update the verifier's hash (excluding the last 20 bytes read). - :param read: The read callback to read from. - :param size: The maximum number of bytes to read; the particular + Args: + read: The read callback to read from. 
+ size: The maximum number of bytes to read; the particular behavior is callback-specific. """ data = read(size) @@ -860,9 +873,10 @@ def __len__(self): def read_objects(self, compute_crc32=False): """Read the objects in this pack file. - :param compute_crc32: If True, compute the CRC32 of the compressed + Args: + compute_crc32: If True, compute the CRC32 of the compressed data. If False, the returned CRC32 will be None. - :return: Iterator over UnpackedObjects with the following members set: + Returns: Iterator over UnpackedObjects with the following members set: offset obj_type_num obj_chunks (for non-delta types) @@ -870,10 +884,11 @@ def read_objects(self, compute_crc32=False): decomp_chunks decomp_len crc32 (if compute_crc32 is True) - :raise ChecksumMismatch: if the checksum of the pack contents does not + Raises: + ChecksumMismatch: if the checksum of the pack contents does not match the checksum in the pack trailer. - :raise zlib.error: if an error occurred during zlib decompression. - :raise IOError: if an error occurred writing to the output file. + zlib.error: if an error occurred during zlib decompression. + IOError: if an error occurred writing to the output file. """ pack_version, self._num_objects = read_pack_header(self.read) if pack_version is None: @@ -917,12 +932,13 @@ class PackStreamCopier(PackStreamReader): def __init__(self, read_all, read_some, outfile, delta_iter=None): """Initialize the copier. - :param read_all: Read function that blocks until the number of + Args: + read_all: Read function that blocks until the number of requested bytes are read. - :param read_some: Read function that returns at least one byte, but may + read_some: Read function that returns at least one byte, but may not return the number of bytes requested. - :param outfile: File-like object to write output through. - :param delta_iter: Optional DeltaChainIterator to record deltas as we + outfile: File-like object to write output through. 
+ delta_iter: Optional DeltaChainIterator to record deltas as we read them. """ super(PackStreamCopier, self).__init__(read_all, read_some=read_some) @@ -964,12 +980,13 @@ def obj_sha(type, chunks): def compute_file_sha(f, start_ofs=0, end_ofs=0, buffer_size=1 << 16): """Hash a portion of a file into a new SHA. - :param f: A file-like object to read from that supports seek(). - :param start_ofs: The offset in the file to start reading at. - :param end_ofs: The offset in the file to end reading at, relative to the + Args: + f: A file-like object to read from that supports seek(). + start_ofs: The offset in the file to start reading at. + end_ofs: The offset in the file to end reading at, relative to the end of the file. - :param buffer_size: A buffer size for reading. - :return: A new SHA object updated with data read from the file. + buffer_size: A buffer size for reading. + Returns: A new SHA object updated with data read from the file. """ sha = sha1() f.seek(0, SEEK_END) @@ -1078,7 +1095,7 @@ def __len__(self): def calculate_checksum(self): """Calculate the checksum for this pack. - :return: 20-byte binary SHA1 digest + Returns: 20-byte binary SHA1 digest """ return compute_file_sha(self._file, end_ofs=-20).digest() @@ -1102,7 +1119,7 @@ def get_ref(self, sha): def resolve_object(self, offset, type, obj, get_ref=None): """Resolve an object, possibly resolving deltas when necessary. - :return: Tuple with object type and contents. + Returns: Tuple with object type and contents. """ # Walk down the delta chain, building a stack of deltas to reach # the requested object. @@ -1174,9 +1191,10 @@ def _iter_unpacked(self): def iterentries(self, progress=None): """Yield entries summarizing the contents of this pack. - :param progress: Progress function, called with current and total + Args: + progress: Progress function, called with current and total object count. 
- :return: iterator of tuples with (sha, offset, crc32) + Returns: iterator of tuples with (sha, offset, crc32) """ num_objects = self._num_objects resolve_ext_ref = ( @@ -1191,9 +1209,10 @@ def iterentries(self, progress=None): def sorted_entries(self, progress=None): """Return entries in this pack, sorted by SHA. - :param progress: Progress function, called with current and total + Args: + progress: Progress function, called with current and total object count - :return: List of tuples with (sha, offset, crc32) + Returns: List of tuples with (sha, offset, crc32) """ ret = sorted(self.iterentries(progress=progress)) return ret @@ -1201,9 +1220,10 @@ def sorted_entries(self, progress=None): def create_index_v1(self, filename, progress=None): """Create a version 1 file for this data file. - :param filename: Index filename. - :param progress: Progress report function - :return: Checksum of index file + Args: + filename: Index filename. + progress: Progress report function + Returns: Checksum of index file """ entries = self.sorted_entries(progress=progress) with GitFile(filename, 'wb') as f: @@ -1212,9 +1232,10 @@ def create_index_v1(self, filename, progress=None): def create_index_v2(self, filename, progress=None): """Create a version 2 index file for this data file. - :param filename: Index filename. - :param progress: Progress report function - :return: Checksum of index file + Args: + filename: Index filename. + progress: Progress report function + Returns: Checksum of index file """ entries = self.sorted_entries(progress=progress) with GitFile(filename, 'wb') as f: @@ -1224,9 +1245,10 @@ def create_index(self, filename, progress=None, version=2): """Create an index file for this data file. - :param filename: Index filename. - :param progress: Progress report function - :return: Checksum of index file + Args: + filename: Index filename. 
+ progress: Progress report function + Returns: Checksum of index file """ if version == 1: return self.create_index_v1(filename, progress) @@ -1481,10 +1503,11 @@ def tell(self): def pack_object_header(type_num, delta_base, size): """Create a pack object header for the given object info. - :param type_num: Numeric type of the object. - :param delta_base: Delta base offset or ref, or None for whole objects. - :param size: Uncompressed object size. - :return: A header for a packed object. + Args: + type_num: Numeric type of the object. + delta_base: Delta base offset or ref, or None for whole objects. + size: Uncompressed object size. + Returns: A header for a packed object. """ header = [] c = (type_num << 4) | (size & 15) @@ -1511,10 +1534,11 @@ def pack_object_header(type_num, delta_base, size): def write_pack_object(f, type, object, sha=None): """Write pack object to a file. - :param f: File to write to - :param type: Numeric type of the object - :param object: Object to write - :return: Tuple with offset at which the object was written, and crc32 + Args: + f: File to write to + type: Numeric type of the object + object: Object to write + Returns: Tuple with offset at which the object was written, and crc32 """ if type in DELTA_TYPES: delta_base, object = object @@ -1534,12 +1558,13 @@ def write_pack_object(f, type, object, sha=None): def write_pack(filename, objects, deltify=None, delta_window_size=None): """Write a new pack data file. - :param filename: Path to the new pack file (without .pack extension) - :param objects: Iterable of (object, path) tuples to write. + Args: + filename: Path to the new pack file (without .pack extension) + objects: Iterable of (object, path) tuples to write. 
Should provide __len__ - :param window_size: Delta window size - :param deltify: Whether to deltify pack objects - :return: Tuple with checksum of pack file and index file + window_size: Delta window size + deltify: Whether to deltify pack objects + Returns: Tuple with checksum of pack file and index file """ with GitFile(filename + '.pack', 'wb') as f: entries, data_sum = write_pack_objects( @@ -1559,9 +1584,10 @@ def write_pack_header(f, num_objects): def deltify_pack_objects(objects, window_size=None): """Generate deltas for pack objects. - :param objects: An iterable of (object, path) tuples to deltify. - :param window_size: Window size; None for default - :return: Iterator over type_num, object id, delta_base, content + Args: + objects: An iterable of (object, path) tuples to deltify. + window_size: Window size; None for default + Returns: Iterator over type_num, object id, delta_base, content delta_base is None for full text entries """ # TODO(jelmer): Use threads @@ -1596,8 +1622,9 @@ def deltify_pack_objects(objects, window_size=None): def pack_objects_to_data(objects): """Create pack data from objects - :param objects: Pack objects - :return: Tuples with (type_num, hexdigest, delta base, object chunks) + Args: + objects: Pack objects + Returns: Tuples with (type_num, hexdigest, delta base, object chunks) """ count = len(objects) return (count, @@ -1608,13 +1635,14 @@ def pack_objects_to_data(objects): def write_pack_objects(f, objects, delta_window_size=None, deltify=None): """Write a new pack data file. - :param f: File to write to - :param objects: Iterable of (object, path) tuples to write. + Args: + f: File to write to + objects: Iterable of (object, path) tuples to write. Should provide __len__ - :param window_size: Sliding window size for searching for deltas; + window_size: Sliding window size for searching for deltas; Set to None for default window size. 
- :param deltify: Whether to deltify objects - :return: Dict mapping id -> (offset, crc32 checksum), pack checksum + deltify: Whether to deltify objects + Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum """ if deltify is None: # PERFORMANCE/TODO(jelmer): This should be enabled but is *much* too @@ -1632,11 +1660,12 @@ def write_pack_objects(f, objects, delta_window_size=None, deltify=None): def write_pack_data(f, num_records, records, progress=None): """Write a new pack data file. - :param f: File to write to - :param num_records: Number of records - :param records: Iterator over type_num, object_id, delta_base, raw - :param progress: Function to report progress to - :return: Dict mapping id -> (offset, crc32 checksum), pack checksum + Args: + f: File to write to + num_records: Number of records + records: Iterator over type_num, object_id, delta_base, raw + progress: Function to report progress to + Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum """ # Write the pack entries = {} @@ -1665,11 +1694,12 @@ def write_pack_data(f, num_records, records, progress=None): def write_pack_index_v1(f, entries, pack_checksum): """Write a new pack index file. - :param f: A file-like object to write to - :param entries: List of tuples with object name (sha), offset_in_pack, + Args: + f: A file-like object to write to + entries: List of tuples with object name (sha), offset_in_pack, and crc32_checksum. - :param pack_checksum: Checksum of the pack file. - :return: The SHA of the written index file + pack_checksum: Checksum of the pack file. + Returns: The SHA of the written index file """ f = SHA1Writer(f) fan_out_table = defaultdict(lambda: 0) @@ -1723,8 +1753,9 @@ def _encode_copy_operation(start, length): def create_delta(base_buf, target_buf): """Use python difflib to work out how to transform base_buf to target_buf. 
- :param base_buf: Base buffer - :param target_buf: Target buffer + Args: + base_buf: Base buffer + target_buf: Target buffer """ assert isinstance(base_buf, bytes) assert isinstance(target_buf, bytes) @@ -1766,8 +1797,9 @@ def create_delta(base_buf, target_buf): def apply_delta(src_buf, delta): """Based on the similar function in git's patch-delta.c. - :param src_buf: Source buffer - :param delta: Delta instructions + Args: + src_buf: Source buffer + delta: Delta instructions """ if not isinstance(src_buf, bytes): src_buf = b''.join(src_buf) @@ -1833,11 +1865,12 @@ def get_delta_header_size(delta, index): def write_pack_index_v2(f, entries, pack_checksum): """Write a new pack index file. - :param f: File-like object to write to - :param entries: List of tuples with object name (sha), offset_in_pack, and + Args: + f: File-like object to write to + entries: List of tuples with object name (sha), offset_in_pack, and crc32_checksum. - :param pack_checksum: Checksum of the pack file. - :return: The SHA of the index file written + pack_checksum: Checksum of the pack file. + Returns: The SHA of the index file written """ f = SHA1Writer(f) f.write(b'\377tOc') # Magic! @@ -1917,7 +1950,7 @@ def data(self): def index(self): """The index being used. - :note: This may be an in-memory index + Note: This may be an in-memory index """ if self._idx is None: self._idx = self._idx_load() @@ -1961,7 +1994,8 @@ def check_length_and_checksum(self): def check(self): """Check the integrity of this pack. - :raise ChecksumMismatch: if a checksum for the index or data is wrong + Raises: + ChecksumMismatch: if a checksum for the index or data is wrong """ self.index.check() self.data.check() @@ -1983,8 +2017,9 @@ def __contains__(self, sha1): def get_raw_unresolved(self, sha1): """Get raw unresolved data for a SHA. 
- :param sha1: SHA to return data for - :return: Tuple with pack object type, delta base (if applicable), + Args: + sha1: SHA to return data for + Returns: Tuple with pack object type, delta base (if applicable), list of data chunks """ offset = self.index.object_index(sha1) @@ -2015,7 +2050,7 @@ def iterobjects(self): def pack_tuples(self): """Provide an iterable for use with write_pack_objects. - :return: Object that can iterate over (object, path) tuples + Returns: Object that can iterate over (object, path) tuples and provides __len__ """ class PackTupleIterable(object): @@ -2034,9 +2069,10 @@ def __iter__(self): def keep(self, msg=None): """Add a .keep file for the pack, preventing git from garbage collecting it. - :param msg: A message written inside the .keep file; can be used later + Args: + msg: A message written inside the .keep file; can be used later to determine whether or not a .keep file is obsolete. - :return: The path of the .keep file, as a string. + Returns: The path of the .keep file, as a string. """ keepfile_name = '%s.keep' % self._basename with GitFile(keepfile_name, 'wb') as keepfile: From aa73abcedb98ac469db645c4ac43ce2c2c6dd45f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 10 Nov 2019 02:36:56 +0000 Subject: [PATCH 18/22] Port remainder of docstrings to Google style. 
--- dulwich/_diff_tree.c | 11 +- dulwich/config.py | 2 +- dulwich/contrib/swift.py | 18 +- dulwich/file.py | 11 +- dulwich/ignore.py | 2 +- dulwich/patch.py | 68 +++-- dulwich/porcelain.py | 379 +++++++++++++----------- dulwich/protocol.py | 47 +-- dulwich/reflog.py | 23 +- dulwich/refs.py | 164 +++++----- dulwich/repo.py | 282 ++++++++++-------- dulwich/server.py | 86 +++--- dulwich/stash.py | 7 +- dulwich/tests/__init__.py | 10 +- dulwich/tests/compat/test_repository.py | 7 +- dulwich/tests/compat/utils.py | 52 ++-- dulwich/tests/test_fastexport.py | 5 +- dulwich/tests/utils.py | 52 ++-- dulwich/walk.py | 49 +-- dulwich/web.py | 14 +- 20 files changed, 737 insertions(+), 552 deletions(-) diff --git a/dulwich/_diff_tree.c b/dulwich/_diff_tree.c index 7c67ea282..3ddc23de9 100644 --- a/dulwich/_diff_tree.c +++ b/dulwich/_diff_tree.c @@ -62,11 +62,12 @@ static void free_objects(PyObject **objs, Py_ssize_t n) /** * Get the entries of a tree, prepending the given path. * - * :param path: The path to prepend, without trailing slashes. - * :param path_len: The length of path. - * :param tree: The Tree object to iterate. - * :param n: Set to the length of result. - * :return: A (C) array of PyObject pointers to TreeEntry objects for each path + * Args: + * path: The path to prepend, without trailing slashes. + * path_len: The length of path. + * tree: The Tree object to iterate. + * n: Set to the length of result. + * Returns: A (C) array of PyObject pointers to TreeEntry objects for each path * in tree. */ static PyObject **tree_entries(char *path, Py_ssize_t path_len, PyObject *tree, diff --git a/dulwich/config.py b/dulwich/config.py index aebd6fd16..d19038f3d 100644 --- a/dulwich/config.py +++ b/dulwich/config.py @@ -175,7 +175,7 @@ def iteritems(self, section): def itersections(self): """Iterate over the sections. 
- :return: Iterator over section tuples + Returns: Iterator over section tuples """ raise NotImplementedError(self.itersections) diff --git a/dulwich/contrib/swift.py b/dulwich/contrib/swift.py index 8436c252f..eb96d2e99 100644 --- a/dulwich/contrib/swift.py +++ b/dulwich/contrib/swift.py @@ -356,7 +356,7 @@ def swift_auth_v2(self): def test_root_exists(self): """Check that Swift container exist - :return: True if exist or None it not + Returns: True if exist or None it not """ ret = self.httpclient.request('HEAD', self.base_path) if ret.status_code == 404: @@ -369,7 +369,8 @@ def test_root_exists(self): def create_root(self): """Create the Swift container - :raise: `SwiftException` if unable to create + Raises: + SwiftException: if unable to create """ if not self.test_root_exists(): ret = self.httpclient.request('PUT', self.base_path) @@ -380,7 +381,7 @@ def create_root(self): def get_container_objects(self): """Retrieve objects list in a container - :return: A list of dict that describe objects + Returns: A list of dict that describe objects or None if container does not exist """ qs = '?format=json' @@ -488,7 +489,8 @@ def del_object(self, name): def del_root(self): """Delete the root container by removing container content - :raise: `SwiftException` if unable to delete + Raises: + SwiftException: if unable to delete """ for obj in self.get_container_objects(): self.del_object(obj['name']) @@ -566,7 +568,7 @@ def seek(self, offset): def read_checksum(self): """Read the checksum from the pack - :return: the checksum bytestring + Returns: the checksum bytestring """ return self.scon.get_object(self.filename, range="-20") @@ -681,7 +683,7 @@ def _iter_loose_objects(self): def iter_shas(self, finder): """An iterator over pack's ObjectStore. 
- :return: a `ObjectStoreIterator` or `GreenThreadsObjectStoreIterator` + Returns: a `ObjectStoreIterator` or `GreenThreadsObjectStoreIterator` instance if gevent is enabled """ shas = iter(finder.next, None) @@ -725,7 +727,7 @@ def _find_parents(commit): def add_pack(self): """Add a new pack to this object store. - :return: Fileobject to write to and a commit function to + Returns: Fileobject to write to and a commit function to call when the pack is finished. """ f = BytesIO() @@ -938,7 +940,7 @@ def __init__(self, root, conf): def _determine_file_mode(self): """Probe the file-system to determine whether permissions can be trusted. - :return: True if permissions can be trusted, False otherwise. + Returns: True if permissions can be trusted, False otherwise. """ return False diff --git a/dulwich/file.py b/dulwich/file.py index 5bcb77253..19d9e2e7f 100644 --- a/dulwich/file.py +++ b/dulwich/file.py @@ -69,9 +69,9 @@ def _fancy_rename(oldname, newname): def GitFile(filename, mode='rb', bufsize=-1): """Create a file object that obeys the git file locking protocol. - :return: a builtin file object or a _GitFile object + Returns: a builtin file object or a _GitFile object - :note: See _GitFile for a description of the file locking protocol. + Note: See _GitFile for a description of the file locking protocol. Only read-only and write-only (binary) modes are supported; r+, w+, and a are not. To read and write from the same file, you can take advantage of @@ -106,7 +106,7 @@ class _GitFile(object): directory, and the lockfile will be renamed to overwrite the original file on close. - :note: You *must* call close() or abort() on a _GitFile for the lock to be + Note: You *must* call close() or abort() on a _GitFile for the lock to be released. Typically this will happen in a finally block. """ @@ -157,11 +157,12 @@ def abort(self): def close(self): """Close this file, saving the lockfile over the original. 
- :note: If this method fails, it will attempt to delete the lockfile. + Note: If this method fails, it will attempt to delete the lockfile. However, it is not guaranteed to do so (e.g. if a filesystem becomes suddenly read-only), which will prevent future writes to this file until the lockfile is removed manually. - :raises OSError: if the original file could not be overwritten. The + Raises: + OSError: if the original file could not be overwritten. The lock file is still closed, so further attempts to write to the same file object will raise ValueError. """ diff --git a/dulwich/ignore.py b/dulwich/ignore.py index a04d29dcf..2bcfecc76 100644 --- a/dulwich/ignore.py +++ b/dulwich/ignore.py @@ -213,7 +213,7 @@ def is_ignored(self, path): For directories, include a trailing slash. - :return: status is None if file is not mentioned, True if it is + Returns: status is None if file is not mentioned, True if it is included, False if it is explicitly excluded. """ status = None diff --git a/dulwich/patch.py b/dulwich/patch.py index 272488eb4..ea1bdfe10 100644 --- a/dulwich/patch.py +++ b/dulwich/patch.py @@ -41,9 +41,11 @@ def write_commit_patch(f, commit, contents, progress, version=None, encoding=None): """Write a individual file patch. - :param commit: Commit object - :param progress: Tuple with current patch number and total. - :return: tuple with filename and contents + Args: + commit: Commit object + progress: Tuple with current patch number and total. + Returns: + tuple with filename and contents """ encoding = encoding or getattr(f, "encoding", "ascii") if isinstance(contents, str): @@ -80,8 +82,9 @@ def write_commit_patch(f, commit, contents, progress, version=None, def get_summary(commit): """Determine the summary line for use in a filename. 
- :param commit: Commit - :return: Summary string + Args: + commit: Commit + Returns: Summary string """ decoded = commit.message.decode(errors='replace') return decoded.splitlines()[0].replace(" ", "-") @@ -153,7 +156,8 @@ def unified_diff(a, b, fromfile='', tofile='', fromfiledate='', def is_binary(content): """See if the first few bytes contain any null characters. - :param content: Bytestring to check for binary content + Args: + content: Bytestring to check for binary content """ return b'\0' in content[:FIRST_FEW_BYTES] @@ -175,14 +179,15 @@ def patch_filename(p, root): def write_object_diff(f, store, old_file, new_file, diff_binary=False): """Write the diff for an object. - :param f: File-like object to write to - :param store: Store to retrieve objects from, if necessary - :param old_file: (path, mode, hexsha) tuple - :param new_file: (path, mode, hexsha) tuple - :param diff_binary: Whether to diff files even if they + Args: + f: File-like object to write to + store: Store to retrieve objects from, if necessary + old_file: (path, mode, hexsha) tuple + new_file: (path, mode, hexsha) tuple + diff_binary: Whether to diff files even if they are considered binary files by is_binary(). - :note: the tuple elements should be None for nonexistant files + Note: the tuple elements should be None for nonexistant files """ (old_path, old_mode, old_id) = old_file (new_path, new_mode, new_id) = new_file @@ -225,9 +230,10 @@ def lines(content): def gen_diff_header(paths, modes, shas): """Write a blob diff header. - :param paths: Tuple with old and new path - :param modes: Tuple with old and new modes - :param shas: Tuple with old and new shas + Args: + paths: Tuple with old and new path + modes: Tuple with old and new modes + shas: Tuple with old and new shas """ (old_path, new_path) = paths (old_mode, new_mode) = modes @@ -257,11 +263,12 @@ def gen_diff_header(paths, modes, shas): def write_blob_diff(f, old_file, new_file): """Write blob diff. 
- :param f: File-like object to write to - :param old_file: (path, mode, hexsha) tuple (None if nonexisting) - :param new_file: (path, mode, hexsha) tuple (None if nonexisting) + Args: + f: File-like object to write to + old_file: (path, mode, hexsha) tuple (None if nonexisting) + new_file: (path, mode, hexsha) tuple (None if nonexisting) - :note: The use of write_object_diff is recommended over this function. + Note: The use of write_object_diff is recommended over this function. """ (old_path, old_mode, old_blob) = old_file (new_path, new_mode, new_blob) = new_file @@ -285,10 +292,11 @@ def lines(blob): def write_tree_diff(f, store, old_tree, new_tree, diff_binary=False): """Write tree diff. - :param f: File-like object to write to. - :param old_tree: Old tree id - :param new_tree: New tree id - :param diff_binary: Whether to diff files even if they + Args: + f: File-like object to write to. + old_tree: Old tree id + new_tree: New tree id + diff_binary: Whether to diff files even if they are considered binary files by is_binary(). """ changes = store.tree_changes(old_tree, new_tree) @@ -300,9 +308,10 @@ def write_tree_diff(f, store, old_tree, new_tree, diff_binary=False): def git_am_patch_split(f, encoding=None): """Parse a git-am-style patch and split it up into bits. - :param f: File-like object to parse - :param encoding: Encoding to use when creating Git objects - :return: Tuple with commit object, diff contents and git version + Args: + f: File-like object to parse + encoding: Encoding to use when creating Git objects + Returns: Tuple with commit object, diff contents and git version """ encoding = encoding or getattr(f, "encoding", "ascii") encoding = encoding or "ascii" @@ -320,9 +329,10 @@ def git_am_patch_split(f, encoding=None): def parse_patch_message(msg, encoding=None): """Extract a Commit object and patch from an e-mail message. 
- :param msg: An email message (email.message.Message) - :param encoding: Encoding to use to encode Git commits - :return: Tuple with commit object, diff contents and git version + Args: + msg: An email message (email.message.Message) + encoding: Encoding to use to encode Git commits + Returns: Tuple with commit object, diff contents and git version """ c = Commit() c.author = msg["from"].encode(encoding) diff --git a/dulwich/porcelain.py b/dulwich/porcelain.py index 210c74fb8..8db8c92f9 100644 --- a/dulwich/porcelain.py +++ b/dulwich/porcelain.py @@ -199,9 +199,10 @@ def path_to_tree_path(repopath, path): """Convert a path to a path usable in an index, e.g. bytes and relative to the repository root. - :param repopath: Repository path, absolute or relative to the cwd - :param path: A path, absolute or relative to the cwd - :return: A path formatted for use in e.g. an index + Args: + repopath: Repository path, absolute or relative to the cwd + path: A path, absolute or relative to the cwd + Returns: A path formatted for use in e.g. an index """ if not isinstance(path, bytes): path = path.encode(sys.getfilesystemencoding()) @@ -219,10 +220,11 @@ def archive(repo, committish=None, outstream=default_bytes_out_stream, errstream=default_bytes_err_stream): """Create an archive. - :param repo: Path of repository for which to generate an archive. - :param committish: Commit SHA1 or ref to use - :param outstream: Output stream (defaults to stdout) - :param errstream: Error stream (defaults to stderr) + Args: + repo: Path of repository for which to generate an archive. + committish: Commit SHA1 or ref to use + outstream: Output stream (defaults to stdout) + errstream: Error stream (defaults to stderr) """ if committish is None: @@ -238,7 +240,8 @@ def archive(repo, committish=None, outstream=default_bytes_out_stream, def update_server_info(repo="."): """Update server info files for a repository. 
- :param repo: path to the repository + Args: + repo: path to the repository """ with open_repo_closing(repo) as r: server_update_server_info(r) @@ -247,9 +250,10 @@ def update_server_info(repo="."): def symbolic_ref(repo, ref_name, force=False): """Set git symbolic ref into HEAD. - :param repo: path to the repository - :param ref_name: short name of the new ref - :param force: force settings without checking if it exists in refs/heads + Args: + repo: path to the repository + ref_name: short name of the new ref + force: force settings without checking if it exists in refs/heads """ with open_repo_closing(repo) as repo_obj: ref_path = _make_branch_ref(ref_name) @@ -261,11 +265,12 @@ def symbolic_ref(repo, ref_name, force=False): def commit(repo=".", message=None, author=None, committer=None, encoding=None): """Create a new commit. - :param repo: Path to repository - :param message: Optional commit message - :param author: Optional author name and email - :param committer: Optional committer name and email - :return: SHA1 of the new commit + Args: + repo: Path to repository + message: Optional commit message + author: Optional author name and email + committer: Optional committer name and email + Returns: SHA1 of the new commit """ # FIXME: Support --all argument # FIXME: Support --signoff argument @@ -284,10 +289,11 @@ def commit(repo=".", message=None, author=None, committer=None, encoding=None): def commit_tree(repo, tree, message=None, author=None, committer=None): """Create a new commit object. 
- :param repo: Path to repository - :param tree: An existing tree object - :param author: Optional author name and email - :param committer: Optional committer name and email + Args: + repo: Path to repository + tree: An existing tree object + author: Optional author name and email + committer: Optional committer name and email """ with open_repo_closing(repo) as r: return r.do_commit( @@ -297,9 +303,10 @@ def commit_tree(repo, tree, message=None, author=None, committer=None): def init(path=".", bare=False): """Create a new git repository. - :param path: Path to repository. - :param bare: Whether to create a bare repository. - :return: A Repo instance + Args: + path: Path to repository. + bare: Whether to create a bare repository. + Returns: A Repo instance """ if not os.path.exists(path): os.mkdir(path) @@ -315,15 +322,16 @@ def clone(source, target=None, bare=False, checkout=None, origin=b"origin", depth=None, **kwargs): """Clone a local or remote git repository. - :param source: Path or URL for source repository - :param target: Path to target repository (optional) - :param bare: Whether or not to create a bare repository - :param checkout: Whether or not to check-out HEAD after cloning - :param errstream: Optional stream to write progress to - :param outstream: Optional stream to write progress to (deprecated) - :param origin: Name of remote from the repository used to clone - :param depth: Depth to fetch at - :return: The new repository + Args: + source: Path or URL for source repository + target: Path to target repository (optional) + bare: Whether or not to create a bare repository + checkout: Whether or not to check-out HEAD after cloning + errstream: Optional stream to write progress to + outstream: Optional stream to write progress to (deprecated) + origin: Name of remote from the repository used to clone + depth: Depth to fetch at + Returns: The new repository """ # TODO(jelmer): This code overlaps quite a bit with Repo.clone if outstream is not None: @@ 
-383,9 +391,10 @@ def clone(source, target=None, bare=False, checkout=None, def add(repo=".", paths=None): """Add files to the staging area. - :param repo: Repository for the files - :param paths: Paths to add. No value passed stages all modified files. - :return: Tuple with set of added files and ignored files + Args: + repo: Repository for the files + paths: Paths to add. No value passed stages all modified files. + Returns: Tuple with set of added files and ignored files """ ignored = set() with open_repo_closing(repo) as r: @@ -426,8 +435,9 @@ def clean(repo=".", target_dir=None): Equivalent to running `git clean -fd` in target_dir. - :param repo: Repository where the files may be tracked - :param target_dir: Directory to clean - current directory if None + Args: + repo: Repository where the files may be tracked + target_dir: Directory to clean - current directory if None """ if target_dir is None: target_dir = os.getcwd() @@ -463,8 +473,9 @@ def clean(repo=".", target_dir=None): def remove(repo=".", paths=None, cached=False): """Remove files from the staging area. - :param repo: Repository for the files - :param paths: Paths to remove + Args: + repo: Repository for the files + paths: Paths to remove """ with open_repo_closing(repo) as r: index = r.open_index() @@ -518,8 +529,9 @@ def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING): def print_commit(commit, decode, outstream=sys.stdout): """Write a human-readable commit log entry. - :param commit: A `Commit` object - :param outstream: A stream file to write to + Args: + commit: A `Commit` object + outstream: A stream file to write to """ outstream.write("-" * 50 + "\n") outstream.write("commit: " + commit.id.decode('ascii') + "\n") @@ -543,9 +555,10 @@ def print_commit(commit, decode, outstream=sys.stdout): def print_tag(tag, decode, outstream=sys.stdout): """Write a human-readable tag. 
- :param tag: A `Tag` object - :param decode: Function for decoding bytes to unicode string - :param outstream: A stream to write to + Args: + tag: A `Tag` object + decode: Function for decoding bytes to unicode string + outstream: A stream to write to """ outstream.write("Tagger: " + decode(tag.tagger) + "\n") time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone) @@ -560,10 +573,11 @@ def print_tag(tag, decode, outstream=sys.stdout): def show_blob(repo, blob, decode, outstream=sys.stdout): """Write a blob to a stream. - :param repo: A `Repo` object - :param blob: A `Blob` object - :param decode: Function for decoding bytes to unicode string - :param outstream: A stream file to write to + Args: + repo: A `Repo` object + blob: A `Blob` object + decode: Function for decoding bytes to unicode string + outstream: A stream file to write to """ outstream.write(decode(blob.data)) @@ -571,10 +585,11 @@ def show_blob(repo, blob, decode, outstream=sys.stdout): def show_commit(repo, commit, decode, outstream=sys.stdout): """Show a commit to a stream. - :param repo: A `Repo` object - :param commit: A `Commit` object - :param decode: Function for decoding bytes to unicode string - :param outstream: Stream to write to + Args: + repo: A `Repo` object + commit: A `Commit` object + decode: Function for decoding bytes to unicode string + outstream: Stream to write to """ print_commit(commit, decode=decode, outstream=outstream) if commit.parents: @@ -595,10 +610,11 @@ def show_commit(repo, commit, decode, outstream=sys.stdout): def show_tree(repo, tree, decode, outstream=sys.stdout): """Print a tree to a stream. 
- :param repo: A `Repo` object - :param tree: A `Tree` object - :param decode: Function for decoding bytes to unicode string - :param outstream: Stream to write to + Args: + repo: A `Repo` object + tree: A `Tree` object + decode: Function for decoding bytes to unicode string + outstream: Stream to write to """ for n in tree: outstream.write(decode(n) + "\n") @@ -607,10 +623,11 @@ def show_tree(repo, tree, decode, outstream=sys.stdout): def show_tag(repo, tag, decode, outstream=sys.stdout): """Print a tag to a stream. - :param repo: A `Repo` object - :param tag: A `Tag` object - :param decode: Function for decoding bytes to unicode string - :param outstream: Stream to write to + Args: + repo: A `Repo` object + tag: A `Tag` object + decode: Function for decoding bytes to unicode string + outstream: Stream to write to """ print_tag(tag, decode, outstream) show_object(repo, repo[tag.object[1]], decode, outstream) @@ -659,12 +676,13 @@ def log(repo=".", paths=None, outstream=sys.stdout, max_entries=None, reverse=False, name_status=False): """Write commit logs. - :param repo: Path to repository - :param paths: Optional set of specific paths to print entries for - :param outstream: Stream to write log output to - :param reverse: Reverse order in which entries are printed - :param name_status: Print name status - :param max_entries: Optional maximum number of entries to display + Args: + repo: Path to repository + paths: Optional set of specific paths to print entries for + outstream: Stream to write log output to + reverse: Reverse order in which entries are printed + name_status: Print name status + max_entries: Optional maximum number of entries to display """ with open_repo_closing(repo) as r: walker = r.get_walker( @@ -683,10 +701,11 @@ def show(repo=".", objects=None, outstream=sys.stdout, default_encoding=DEFAULT_ENCODING): """Print the changes in a commit. 
- :param repo: Path to repository - :param objects: Objects to show (defaults to [HEAD]) - :param outstream: Stream to write to - :param default_encoding: Default encoding to use if none is set in the + Args: + repo: Path to repository + objects: Objects to show (defaults to [HEAD]) + outstream: Stream to write to + default_encoding: Default encoding to use if none is set in the commit """ if objects is None: @@ -708,10 +727,11 @@ def decode(x): def diff_tree(repo, old_tree, new_tree, outstream=sys.stdout): """Compares the content and mode of blobs found via two tree objects. - :param repo: Path to repository - :param old_tree: Id of old tree - :param new_tree: Id of new tree - :param outstream: Stream to write to + Args: + repo: Path to repository + old_tree: Id of old tree + new_tree: Id of new tree + outstream: Stream to write to """ with open_repo_closing(repo) as r: write_tree_diff(outstream, r.object_store, old_tree, new_tree) @@ -720,9 +740,10 @@ def diff_tree(repo, old_tree, new_tree, outstream=sys.stdout): def rev_list(repo, commits, outstream=sys.stdout): """Lists commit objects in reverse chronological order. 
- :param repo: Path to repository - :param commits: Commits over which to iterate - :param outstream: Stream to write to + Args: + repo: Path to repository + commits: Commits over which to iterate + outstream: Stream to write to """ with open_repo_closing(repo) as r: for entry in r.get_walker(include=[r[c].id for c in commits]): @@ -742,15 +763,16 @@ def tag_create( sign=False): """Creates a tag in git via dulwich calls: - :param repo: Path to repository - :param tag: tag string - :param author: tag author (optional, if annotated is set) - :param message: tag message (optional) - :param annotated: whether to create an annotated tag - :param objectish: object the tag should point at, defaults to HEAD - :param tag_time: Optional time for annotated tag - :param tag_timezone: Optional timezone for annotated tag - :param sign: GPG Sign the tag + Args: + repo: Path to repository + tag: tag string + author: tag author (optional, if annotated is set) + message: tag message (optional) + annotated: whether to create an annotated tag + objectish: object the tag should point at, defaults to HEAD + tag_time: Optional time for annotated tag + tag_timezone: Optional timezone for annotated tag + sign: GPG Sign the tag """ with open_repo_closing(repo) as r: @@ -798,8 +820,9 @@ def list_tags(*args, **kwargs): def tag_list(repo, outstream=sys.stdout): """List all tags. - :param repo: Path to repository - :param outstream: Stream to write tags to + Args: + repo: Path to repository + outstream: Stream to write tags to """ with open_repo_closing(repo) as r: tags = sorted(r.refs.as_dict(b"refs/tags")) @@ -809,8 +832,9 @@ def tag_list(repo, outstream=sys.stdout): def tag_delete(repo, name): """Remove a tag. 
-    :param repo: Path to repository
-    :param name: Name of tag to remove
+    Args:
+      repo: Path to repository
+      name: Name of tag to remove
     """
     with open_repo_closing(repo) as r:
         if isinstance(name, bytes):
@@ -826,9 +850,10 @@ def tag_delete(repo, name):
 def reset(repo, mode, treeish="HEAD"):
     """Reset current HEAD to the specified state.
 
-    :param repo: Path to repository
-    :param mode: Mode ("hard", "soft", "mixed")
-    :param treeish: Treeish to reset to
+    Args:
+      repo: Path to repository
+      mode: Mode ("hard", "soft", "mixed")
+      treeish: Treeish to reset to
     """
 
     if mode != "hard":
@@ -844,11 +869,12 @@ def push(repo, remote_location, refspecs,
          errstream=default_bytes_err_stream, **kwargs):
     """Remote push with dulwich via dulwich.client
 
-    :param repo: Path to repository
-    :param remote_location: Location of the remote
-    :param refspecs: Refs to push to remote
-    :param outstream: A stream file to write output
-    :param errstream: A stream file to write errors
+    Args:
+      repo: Path to repository
+      remote_location: Location of the remote
+      refspecs: Refs to push to remote
+      outstream: A stream file to write output
+      errstream: A stream file to write errors
     """
 
     # Open the repo
@@ -891,11 +917,12 @@ def pull(repo, remote_location=None, refspecs=None,
         errstream=default_bytes_err_stream, **kwargs):
     """Pull from remote via dulwich.client
 
-    :param repo: Path to repository
-    :param remote_location: Location of the remote
-    :param refspec: refspecs to fetch
-    :param outstream: A stream file to write to output
-    :param errstream: A stream file to write to errors
+    Args:
+      repo: Path to repository
+      remote_location: Location of the remote
+      refspecs: refspecs to fetch
+      outstream: A stream file to write to output
+      errstream: A stream file to write to errors
     """
     # Open the repo
     with open_repo_closing(repo) as r:
@@ -928,9 +955,10 @@ def determine_wants(remote_refs):
 def status(repo=".", ignored=False):
     """Returns staged, unstaged, and untracked changes relative to the HEAD.
- :param repo: Path to repository or repository object - :param ignored: Whether to include ignored files in `untracked` - :return: GitStatus tuple, + Args: + repo: Path to repository or repository object + ignored: Whether to include ignored files in `untracked` + Returns: GitStatus tuple, staged - dict with lists of staged paths (diff index/HEAD) unstaged - list of unstaged paths (diff index/working-tree) untracked - list of untracked, un-ignored & non-.git paths @@ -959,8 +987,9 @@ def status(repo=".", ignored=False): def _walk_working_dir_paths(frompath, basepath): """Get path, is_dir for files in working dir from frompath - :param frompath: Path to begin walk - :param basepath: Path to compare to + Args: + frompath: Path to begin walk + basepath: Path to compare to """ for dirpath, dirnames, filenames in os.walk(frompath): # Skip .git and below. @@ -984,9 +1013,10 @@ def _walk_working_dir_paths(frompath, basepath): def get_untracked_paths(frompath, basepath, index): """Get untracked paths. + Args: ;param frompath: Path to walk - :param basepath: Path to compare to - :param index: Index to check against + basepath: Path to compare to + index: Index to check against """ for ap, is_dir in _walk_working_dir_paths(frompath, basepath): if not is_dir: @@ -998,8 +1028,9 @@ def get_untracked_paths(frompath, basepath, index): def get_tree_changes(repo): """Return add/delete/modify changes to tree by comparing index to HEAD. - :param repo: repo path or object - :return: dict with lists for each type of change + Args: + repo: repo path or object + Returns: dict with lists for each type of change """ with open_repo_closing(repo) as r: index = r.open_index() @@ -1032,9 +1063,10 @@ def get_tree_changes(repo): def daemon(path=".", address=None, port=None): """Run a daemon serving Git requests over TCP/IP. - :param path: Path to the directory to serve. 
- :param address: Optional address to listen on (defaults to ::) - :param port: Optional port to listen on (defaults to TCP_GIT_PORT) + Args: + path: Path to the directory to serve. + address: Optional address to listen on (defaults to ::) + port: Optional port to listen on (defaults to TCP_GIT_PORT) """ # TODO(jelmer): Support git-daemon-export-ok and --export-all. backend = FileSystemBackend(path) @@ -1045,9 +1077,10 @@ def daemon(path=".", address=None, port=None): def web_daemon(path=".", address=None, port=None): """Run a daemon serving Git requests over HTTP. - :param path: Path to the directory to serve - :param address: Optional address to listen on (defaults to ::) - :param port: Optional port to listen on (defaults to 80) + Args: + path: Path to the directory to serve + address: Optional address to listen on (defaults to ::) + port: Optional port to listen on (defaults to 80) """ from dulwich.web import ( make_wsgi_chain, @@ -1066,9 +1099,10 @@ def web_daemon(path=".", address=None, port=None): def upload_pack(path=".", inf=None, outf=None): """Upload a pack file after negotiating its contents using smart protocol. - :param path: Path to the repository - :param inf: Input stream to communicate with client - :param outf: Output stream to communicate with client + Args: + path: Path to the repository + inf: Input stream to communicate with client + outf: Output stream to communicate with client """ if outf is None: outf = getattr(sys.stdout, 'buffer', sys.stdout) @@ -1090,9 +1124,10 @@ def send_fn(data): def receive_pack(path=".", inf=None, outf=None): """Receive a pack file after negotiating its contents using smart protocol. 
- :param path: Path to the repository - :param inf: Input stream to communicate with client - :param outf: Output stream to communicate with client + Args: + path: Path to the repository + inf: Input stream to communicate with client + outf: Output stream to communicate with client """ if outf is None: outf = getattr(sys.stdout, 'buffer', sys.stdout) @@ -1126,8 +1161,9 @@ def _make_tag_ref(name): def branch_delete(repo, name): """Delete a branch. - :param repo: Path to the repository - :param name: Name of the branch + Args: + repo: Path to the repository + name: Name of the branch """ with open_repo_closing(repo) as r: if isinstance(name, list): @@ -1141,10 +1177,11 @@ def branch_delete(repo, name): def branch_create(repo, name, objectish=None, force=False): """Create a branch. - :param repo: Path to the repository - :param name: Name of the new branch - :param objectish: Target object to point new branch at (defaults to HEAD) - :param force: Force creation of branch, even if it already exists + Args: + repo: Path to the repository + name: Name of the new branch + objectish: Target object to point new branch at (defaults to HEAD) + force: Force creation of branch, even if it already exists """ with open_repo_closing(repo) as r: if objectish is None: @@ -1162,7 +1199,8 @@ def branch_create(repo, name, objectish=None, force=False): def branch_list(repo): """List all branches. - :param repo: Path to the repository + Args: + repo: Path to the repository """ with open_repo_closing(repo) as r: return r.refs.keys(base=LOCAL_BRANCH_PREFIX) @@ -1248,7 +1286,8 @@ def repack(repo): Currently this only packs loose objects. - :param repo: Path to the repository + Args: + repo: Path to the repository """ with open_repo_closing(repo) as r: r.object_store.pack_loose_objects() @@ -1257,10 +1296,11 @@ def repack(repo): def pack_objects(repo, object_ids, packf, idxf, delta_window_size=None): """Pack objects into a file. 
-    :param repo: Path to the repository
-    :param object_ids: List of object ids to write
-    :param packf: File-like object to write to
-    :param idxf: File-like object to write to (can be None)
+    Args:
+      repo: Path to the repository
+      object_ids: List of object ids to write
+      packf: File-like object to write to
+      idxf: File-like object to write to (can be None)
     """
     with open_repo_closing(repo) as r:
         entries, data_sum = write_pack_objects(
@@ -1276,11 +1316,12 @@ def ls_tree(repo, treeish=b"HEAD", outstream=sys.stdout,
             recursive=False, name_only=False):
     """List contents of a tree.
 
-    :param repo: Path to the repository
-    :param tree_ish: Tree id to list
-    :param outstream: Output stream (defaults to stdout)
-    :param recursive: Whether to recursively list files
-    :param name_only: Only print item name
+    Args:
+      repo: Path to the repository
+      treeish: Tree id to list
+      outstream: Output stream (defaults to stdout)
+      recursive: Whether to recursively list files
+      name_only: Only print item name
     """
     def list_tree(store, treeid, base):
         for (name, mode, sha) in store[treeid].iteritems():
@@ -1300,9 +1341,10 @@ def list_tree(store, treeid, base):
 def remote_add(repo, name, url):
     """Add a remote.
 
-    :param repo: Path to the repository
-    :param name: Remote name
-    :param url: Remote URL
+    Args:
+      repo: Path to the repository
+      name: Remote name
+      url: Remote URL
     """
     if not isinstance(name, bytes):
         name = name.encode(DEFAULT_ENCODING)
@@ -1320,10 +1362,11 @@ def remote_add(repo, name, url):
 def check_ignore(repo, paths, no_index=False):
     """Debug gitignore files.
-    :param repo: Path to the repository
-    :param paths: List of paths to check for
-    :param no_index: Don't check index
-    :return: List of ignored files
+    Args:
+      repo: Path to the repository
+      paths: List of paths to check for
+      no_index: Don't check index
+    Returns: List of ignored files
     """
     with open_repo_closing(repo) as r:
         index = r.open_index()
@@ -1342,10 +1385,11 @@ def update_head(repo, target, detached=False, new_branch=None):
 
     Note that this does not actually update the working tree.
 
-    :param repo: Path to the repository
-    :param detach: Create a detached head
-    :param target: Branch or committish to switch to
-    :param new_branch: New branch to create
+    Args:
+      repo: Path to the repository
+      detached: Create a detached head
+      target: Branch or committish to switch to
+      new_branch: New branch to create
     """
     with open_repo_closing(repo) as r:
         if new_branch is not None:
@@ -1367,9 +1411,10 @@ def update_head(repo, target, detached=False, new_branch=None):
 def check_mailmap(repo, contact):
     """Check canonical name and email of contact.
 
-    :param repo: Path to the repository
-    :param contact: Contact name and/or email
-    :return: Canonical contact data
+    Args:
+      repo: Path to the repository
+      contact: Contact name and/or email
+    Returns: Canonical contact data
     """
     with open_repo_closing(repo) as r:
         from dulwich.mailmap import Mailmap
@@ -1386,8 +1431,9 @@ def check_mailmap(repo, contact):
 def fsck(repo):
     """Check a repository.
 
-    :param repo: A path to the repository
-    :return: Iterator over errors/warnings
+    Args:
+      repo: A path to the repository
+    Returns: Iterator over errors/warnings
     """
     with open_repo_closing(repo) as r:
         # TODO(jelmer): check pack files
@@ -1434,8 +1480,9 @@ def ls_files(repo):
 def describe(repo):
     """Describe the repository version.
-    :param projdir: git repository root
-    :returns: a string description of the current git revision
+    Args:
+      repo: git repository root
+    Returns: a string description of the current git revision
 
     Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
     """
@@ -1503,10 +1550,11 @@ def describe(repo):
 def get_object_by_path(repo, path, committish=None):
     """Get an object by path.
 
-    :param repo: A path to the repository
-    :param path: Path to look up
-    :param committish: Commit to look up path in
-    :return: A `ShaFile` object
+    Args:
+      repo: A path to the repository
+      path: Path to look up
+      committish: Commit to look up path in
+    Returns: A `ShaFile` object
     """
     if committish is None:
         committish = "HEAD"
@@ -1525,8 +1573,9 @@ def get_object_by_path(repo, path, committish=None):
 def write_tree(repo):
     """Write a tree object from the index.
 
-    :param repo: Repository for which to write tree
-    :return: tree id for the tree that was written
+    Args:
+      repo: Repository for which to write tree
+    Returns: tree id for the tree that was written
     """
     with open_repo_closing(repo) as r:
         return r.open_index().commit(r.object_store)
diff --git a/dulwich/protocol.py b/dulwich/protocol.py
index df61b34ee..756fe6631 100644
--- a/dulwich/protocol.py
+++ b/dulwich/protocol.py
@@ -144,8 +144,9 @@ def close(self):
 def pkt_line(data):
     """Wrap data in a pkt-line.
 
-    :param data: The data to wrap, as a str or None.
-    :return: The data prefixed with its length in pkt-line format; if data was
+    Args:
+      data: The data to wrap, as a str or None.
+    Returns: The data prefixed with its length in pkt-line format; if data was
         None, returns the flush-pkt ('0000').
     """
     if data is None:
@@ -187,7 +188,7 @@ def read_pkt_line(self):
 
         This method may read from the readahead buffer; see unread_pkt_line.
 
-        :return: The next string from the stream, without the length prefix, or
+        Returns: The next string from the stream, without the length prefix, or
             None for a flush-pkt ('0000').
""" if self._readahead is None: @@ -223,7 +224,7 @@ def eof(self): Note that this refers to the actual stream EOF and not just a flush-pkt. - :return: True if the stream is at EOF, False otherwise. + Returns: True if the stream is at EOF, False otherwise. """ try: next_line = self.read_pkt_line() @@ -238,8 +239,10 @@ def unread_pkt_line(self, data): This method can be used to unread a single pkt-line into a fixed readahead buffer. - :param data: The data to unread, without the length prefix. - :raise ValueError: If more than one pkt-line is unread. + Args: + data: The data to unread, without the length prefix. + Raises: + ValueError: If more than one pkt-line is unread. """ if self._readahead is not None: raise ValueError('Attempted to unread multiple pkt-lines.') @@ -248,7 +251,7 @@ def unread_pkt_line(self, data): def read_pkt_seq(self): """Read a sequence of pkt-lines from the remote git process. - :return: Yields each line of data up to but not including the next + Returns: Yields each line of data up to but not including the next flush-pkt. """ pkt = self.read_pkt_line() @@ -259,7 +262,8 @@ def read_pkt_seq(self): def write_pkt_line(self, line): """Sends a pkt-line to the remote git process. - :param line: A string containing the data to send, without the length + Args: + line: A string containing the data to send, without the length prefix. """ try: @@ -294,8 +298,9 @@ def close(self): def write_sideband(self, channel, blob): """Write multiplexed data to the sideband. - :param channel: An int specifying the channel to write to. - :param blob: A blob of data (as a string) to send on this channel. + Args: + channel: An int specifying the channel to write to. + blob: A blob of data (as a string) to send on this channel. """ # a pktline can be a max of 65520. a sideband line can therefore be # 65520-5 = 65515 @@ -309,8 +314,9 @@ def send_cmd(self, cmd, *args): Only used for the TCP git protocol (git://). - :param cmd: The remote service to access. 
- :param args: List of arguments to send to remove service. + Args: + cmd: The remote service to access. + args: List of arguments to send to remove service. """ self.write_pkt_line(cmd + b" " + b"".join([(a + b"\0") for a in args])) @@ -319,7 +325,7 @@ def read_cmd(self): Only used for the TCP git protocol (git://). - :return: A tuple of (command, [list of arguments]). + Returns: A tuple of (command, [list of arguments]). """ line = self.read_pkt_line() splice_at = line.find(b" ") @@ -439,8 +445,9 @@ def recv(self, size): def extract_capabilities(text): """Extract a capabilities list from a string, if present. - :param text: String to extract from - :return: Tuple with text with capabilities removed and list of capabilities + Args: + text: String to extract from + Returns: Tuple with text with capabilities removed and list of capabilities """ if b"\0" not in text: return text, [] @@ -456,8 +463,9 @@ def extract_want_line_capabilities(text): want obj-id cap1 cap2 ... - :param text: Want line to extract from - :return: Tuple with text with capabilities removed and list of capabilities + Args: + text: Want line to extract from + Returns: Tuple with text with capabilities removed and list of capabilities """ split_text = text.rstrip().split(b" ") if len(split_text) < 3: @@ -485,8 +493,9 @@ class BufferedPktLineWriter(object): def __init__(self, write, bufsize=65515): """Initialize the BufferedPktLineWriter. - :param write: A write callback for the underlying writer. - :param bufsize: The internal buffer size, including length prefixes. + Args: + write: A write callback for the underlying writer. + bufsize: The internal buffer size, including length prefixes. 
""" self._write = write self._bufsize = bufsize diff --git a/dulwich/reflog.py b/dulwich/reflog.py index aec32e605..37a2ff8cd 100644 --- a/dulwich/reflog.py +++ b/dulwich/reflog.py @@ -38,12 +38,13 @@ def format_reflog_line(old_sha, new_sha, committer, timestamp, timezone, message): """Generate a single reflog line. - :param old_sha: Old Commit SHA - :param new_sha: New Commit SHA - :param committer: Committer name and e-mail - :param timestamp: Timestamp - :param timezone: Timezone - :param message: Message + Args: + old_sha: Old Commit SHA + new_sha: New Commit SHA + committer: Committer name and e-mail + timestamp: Timestamp + timezone: Timezone + message: Message """ if old_sha is None: old_sha = ZERO_SHA @@ -55,8 +56,9 @@ def format_reflog_line(old_sha, new_sha, committer, timestamp, timezone, def parse_reflog_line(line): """Parse a reflog line. - :param line: Line to parse - :return: Tuple of (old_sha, new_sha, committer, timestamp, timezone, + Args: + line: Line to parse + Returns: Tuple of (old_sha, new_sha, committer, timestamp, timezone, message) """ (begin, message) = line.split(b'\t', 1) @@ -69,8 +71,9 @@ def parse_reflog_line(line): def read_reflog(f): """Read reflog. - :param f: File-like object - :returns: Iterator over Entry objects + Args: + f: File-like object + Returns: Iterator over Entry objects """ for l in f: yield parse_reflog_line(l) diff --git a/dulwich/refs.py b/dulwich/refs.py index 75f283fb6..d597289de 100644 --- a/dulwich/refs.py +++ b/dulwich/refs.py @@ -51,8 +51,9 @@ def parse_symref_value(contents): """Parse a symref value. 
- :param contents: Contents to parse - :return: Destination + Args: + contents: Contents to parse + Returns: Destination """ if contents.startswith(SYMREF): return contents[len(SYMREF):].rstrip(b'\r\n') @@ -67,8 +68,9 @@ def check_ref_format(refname): [1] http://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html - :param refname: The refname to check - :return: True if refname is valid, False otherwise + Args: + refname: The refname to check + Returns: True if refname is valid, False otherwise """ # These could be combined into one big expression, but are listed # separately to parallel [1]. @@ -111,18 +113,19 @@ def set_symbolic_ref(self, name, other, committer=None, timestamp=None, timezone=None, message=None): """Make a ref point at another ref. - :param name: Name of the ref to set - :param other: Name of the ref to point at - :param message: Optional message + Args: + name: Name of the ref to set + other: Name of the ref to point at + message: Optional message """ raise NotImplementedError(self.set_symbolic_ref) def get_packed_refs(self): """Get contents of the packed-refs file. - :return: Dictionary mapping ref names to SHA1s + Returns: Dictionary mapping ref names to SHA1s - :note: Will return an empty dictionary when no packed-refs file is + Note: Will return an empty dictionary when no packed-refs file is present. """ raise NotImplementedError(self.get_packed_refs) @@ -130,8 +133,9 @@ def get_packed_refs(self): def get_peeled(self, name): """Return the cached peeled value of a ref, if available. - :param name: Name of the ref to peel - :return: The peeled value of the ref. If the ref is known not point to + Args: + name: Name of the ref to peel + Returns: The peeled value of the ref. If the ref is known not point to a tag, this will be the SHA the ref refers to. If the ref may point to a tag, but no cached information is available, None is returned. 
""" @@ -164,8 +168,9 @@ def __iter__(self): def keys(self, base=None): """Refs present in this container. - :param base: An optional base to return refs under. - :return: An unsorted set of valid refs in this container, including + Args: + base: An optional base to return refs under. + Returns: An unsorted set of valid refs in this container, including packed refs. """ if base is not None: @@ -176,8 +181,9 @@ def keys(self, base=None): def subkeys(self, base): """Refs present in this container under a base. - :param base: The base to return refs under. - :return: A set of valid refs in this container under the base; the base + Args: + base: The base to return refs under. + Returns: A set of valid refs in this container under the base; the base prefix is stripped from the ref names returned. """ keys = set() @@ -213,8 +219,10 @@ class needs to be able to touch HEAD. Also, check_ref_format expects refnames without the leading 'refs/', but this class requires that so it cannot touch anything outside the refs dir (or HEAD). - :param name: The name of the reference. - :raises KeyError: if a refname is not HEAD or is otherwise not valid. + Args: + name: The name of the reference. + Raises: + KeyError: if a refname is not HEAD or is otherwise not valid. """ if name in (b'HEAD', b'refs/stash'): return @@ -224,8 +232,9 @@ class needs to be able to touch HEAD. Also, check_ref_format expects def read_ref(self, refname): """Read a reference without following any references. - :param refname: The name of the reference - :return: The contents of the ref file, or None if it does + Args: + refname: The name of the reference + Returns: The contents of the ref file, or None if it does not exist. """ contents = self.read_loose_ref(refname) @@ -236,8 +245,9 @@ def read_ref(self, refname): def read_loose_ref(self, name): """Read a loose reference and return its contents. 
- :param name: the refname to read - :return: The contents of the ref file, or None if it does + Args: + name: the refname to read + Returns: The contents of the ref file, or None if it does not exist. """ raise NotImplementedError(self.read_loose_ref) @@ -245,7 +255,7 @@ def read_loose_ref(self, name): def follow(self, name): """Follow a reference name. - :return: a tuple of (refnames, sha), wheres refnames are the names of + Returns: a tuple of (refnames, sha), wheres refnames are the names of references in the chain """ contents = SYMREF + name @@ -295,21 +305,23 @@ def set_if_equals(self, name, old_ref, new_ref, committer=None, subclass, and can be used to perform an atomic compare-and-swap operation. - :param name: The refname to set. - :param old_ref: The old sha the refname must refer to, or None to set + Args: + name: The refname to set. + old_ref: The old sha the refname must refer to, or None to set unconditionally. - :param new_ref: The new sha the refname will refer to. - :param message: Message for reflog - :return: True if the set was successful, False otherwise. + new_ref: The new sha the refname will refer to. + message: Message for reflog + Returns: True if the set was successful, False otherwise. """ raise NotImplementedError(self.set_if_equals) def add_if_new(self, name, ref): """Add a new reference only if it does not already exist. - :param name: Ref name - :param ref: Ref value - :param message: Message for reflog + Args: + name: Ref name + ref: Ref value + message: Message for reflog """ raise NotImplementedError(self.add_if_new) @@ -319,11 +331,13 @@ def __setitem__(self, name, ref): This method follows all symbolic references if applicable for the subclass. - :note: This method unconditionally overwrites the contents of a + Note: This method unconditionally overwrites the contents of a reference. To update atomically only if the reference has not changed, use set_if_equals(). - :param name: The refname to set. 
- :param ref: The new sha the refname will refer to. + + Args: + name: The refname to set. + ref: The new sha the refname will refer to. """ self.set_if_equals(name, None, ref) @@ -335,11 +349,12 @@ def remove_if_equals(self, name, old_ref, committer=None, the subclass. It can be used to perform an atomic compare-and-delete operation. - :param name: The refname to delete. - :param old_ref: The old sha the refname must refer to, or None to + Args: + name: The refname to delete. + old_ref: The old sha the refname must refer to, or None to delete unconditionally. - :param message: Message for reflog - :return: True if the delete was successful, False otherwise. + message: Message for reflog + Returns: True if the delete was successful, False otherwise. """ raise NotImplementedError(self.remove_if_equals) @@ -349,18 +364,19 @@ def __delitem__(self, name): This method does not follow symbolic references, even if applicable for the subclass. - :note: This method unconditionally deletes the contents of a reference. + Note: This method unconditionally deletes the contents of a reference. To delete atomically only if the reference has not changed, use remove_if_equals(). - :param name: The refname to delete. + Args: + name: The refname to delete. """ self.remove_if_equals(name, None) def get_symrefs(self): """Get a dict with all symrefs in this container. - :return: Dictionary mapping source ref to target ref + Returns: Dictionary mapping source ref to target ref """ ret = {} for src in self.allkeys(): @@ -559,9 +575,9 @@ def refpath(self, name): def get_packed_refs(self): """Get contents of the packed-refs file. - :return: Dictionary mapping ref names to SHA1s + Returns: Dictionary mapping ref names to SHA1s - :note: Will return an empty dictionary when no packed-refs file is + Note: Will return an empty dictionary when no packed-refs file is present. 
""" # TODO: invalidate the cache on repacking @@ -594,8 +610,9 @@ def get_packed_refs(self): def get_peeled(self, name): """Return the cached peeled value of a ref, if available. - :param name: Name of the ref to peel - :return: The peeled value of the ref. If the ref is known not point to + Args: + name: Name of the ref to peel + Returns: The peeled value of the ref. If the ref is known not point to a tag, this will be the SHA the ref refers to. If the ref may point to a tag, but no cached information is available, None is returned. """ @@ -615,10 +632,12 @@ def read_loose_ref(self, name): If the reference file a symbolic reference, only read the first line of the file. Otherwise, only read the first 40 bytes. - :param name: the refname to read, relative to refpath - :return: The contents of the ref file, or None if the file does not + Args: + name: the refname to read, relative to refpath + Returns: The contents of the ref file, or None if the file does not exist. - :raises IOError: if any other error occurs + Raises: + IOError: if any other error occurs """ filename = self.refpath(name) try: @@ -660,9 +679,10 @@ def set_symbolic_ref(self, name, other, committer=None, timestamp=None, timezone=None, message=None): """Make a ref point at another ref. - :param name: Name of the ref to set - :param other: Name of the ref to point at - :param message: Optional message to describe the change + Args: + name: Name of the ref to set + other: Name of the ref to point at + message: Optional message to describe the change """ self._check_refname(name) self._check_refname(other) @@ -687,12 +707,13 @@ def set_if_equals(self, name, old_ref, new_ref, committer=None, This method follows all symbolic references, and can be used to perform an atomic compare-and-swap operation. - :param name: The refname to set. - :param old_ref: The old sha the refname must refer to, or None to set + Args: + name: The refname to set. 
+ old_ref: The old sha the refname must refer to, or None to set unconditionally. - :param new_ref: The new sha the refname will refer to. - :param message: Set message for reflog - :return: True if the set was successful, False otherwise. + new_ref: The new sha the refname will refer to. + message: Set message for reflog + Returns: True if the set was successful, False otherwise. """ self._check_refname(name) try: @@ -742,10 +763,11 @@ def add_if_new(self, name, ref, committer=None, timestamp=None, This method follows symrefs, and only ensures that the last ref in the chain does not exist. - :param name: The refname to set. - :param ref: The new sha the refname will refer to. - :param message: Optional message for reflog - :return: True if the add was successful, False otherwise. + Args: + name: The refname to set. + ref: The new sha the refname will refer to. + message: Optional message for reflog + Returns: True if the add was successful, False otherwise. """ try: realnames, contents = self.follow(name) @@ -779,11 +801,12 @@ def remove_if_equals(self, name, old_ref, committer=None, timestamp=None, This method does not follow symbolic references. It can be used to perform an atomic compare-and-delete operation. - :param name: The refname to delete. - :param old_ref: The old sha the refname must refer to, or None to + Args: + name: The refname to delete. + old_ref: The old sha the refname must refer to, or None to delete unconditionally. - :param message: Optional message - :return: True if the delete was successful, False otherwise. + message: Optional message + Returns: True if the delete was successful, False otherwise. """ self._check_refname(name) filename = self.refpath(name) @@ -850,8 +873,9 @@ def _split_ref_line(line): def read_packed_refs(f): """Read a packed refs file. - :param f: file-like object to read from - :return: Iterator over tuples with SHA1s and ref names. 
+ Args: + f: file-like object to read from + Returns: Iterator over tuples with SHA1s and ref names. """ for l in f: if l.startswith(b'#'): @@ -869,7 +893,8 @@ def read_packed_refs_with_peeled(f): Assumes the "# pack-refs with: peeled" line was already read. Yields tuples with ref names, SHA1s, and peeled SHA1s (or None). - :param f: file-like object to read from, seek'ed to the second line + Args: + f: file-like object to read from, seek'ed to the second line """ last = None for line in f: @@ -897,9 +922,10 @@ def read_packed_refs_with_peeled(f): def write_packed_refs(f, packed_refs, peeled_refs=None): """Write a packed refs file. - :param f: empty file-like object to write to - :param packed_refs: dict of refname to sha of packed refs to write - :param peeled_refs: dict of refname to peeled value of sha + Args: + f: empty file-like object to write to + packed_refs: dict of refname to sha of packed refs to write + peeled_refs: dict of refname to peeled value of sha """ if peeled_refs is None: peeled_refs = {} diff --git a/dulwich/repo.py b/dulwich/repo.py index ccb30f1a2..1e1a4c420 100644 --- a/dulwich/repo.py +++ b/dulwich/repo.py @@ -183,8 +183,10 @@ def get_user_identity(config, kind=None): def check_user_identity(identity): """Verify that a user identity is formatted correctly. 
- :param identity: User identity bytestring - :raise InvalidUserIdentity: Raised when identity is invalid + Args: + identity: User identity bytestring + Raises: + InvalidUserIdentity: Raised when identity is invalid """ try: fst, snd = identity.split(b' <', 1) @@ -197,7 +199,8 @@ def check_user_identity(identity): def parse_graftpoints(graftpoints): """Convert a list of graftpoints into a dict - :param graftpoints: Iterator of graftpoint lines + Args: + graftpoints: Iterator of graftpoint lines Each line is formatted as: []* @@ -282,8 +285,9 @@ def __init__(self, object_store, refs): This shouldn't be called directly, but rather through one of the base classes, such as MemoryRepo or Repo. - :param object_store: Object store to use - :param refs: Refs container to use + Args: + object_store: Object store to use + refs: Refs container to use """ self.object_store = object_store self.refs = refs @@ -294,7 +298,7 @@ def __init__(self, object_store, refs): def _determine_file_mode(self): """Probe the file-system to determine whether permissions can be trusted. - :return: True if permissions can be trusted, False otherwise. + Returns: True if permissions can be trusted, False otherwise. """ raise NotImplementedError(self._determine_file_mode) @@ -323,16 +327,18 @@ def get_named_file(self, path): the control dir in a disk-based Repo, the object returned need not be pointing to a file in that location. - :param path: The path to the file, relative to the control dir. - :return: An open file object, or None if the file does not exist. + Args: + path: The path to the file, relative to the control dir. + Returns: An open file object, or None if the file does not exist. """ raise NotImplementedError(self.get_named_file) def _put_named_file(self, path, contents): """Write a file to the control dir with the given name and contents. - :param path: The path to the file, relative to the control dir. - :param contents: A string to write to the file. 
+ Args: + path: The path to the file, relative to the control dir. + contents: A string to write to the file. """ raise NotImplementedError(self._put_named_file) @@ -343,20 +349,22 @@ def _del_named_file(self, path): def open_index(self): """Open the index for this repository. - :raise NoIndexPresent: If no index is present - :return: The matching `Index` + Raises: + NoIndexPresent: If no index is present + Returns: The matching `Index` """ raise NotImplementedError(self.open_index) def fetch(self, target, determine_wants=None, progress=None, depth=None): """Fetch objects into another repository. - :param target: The target repository - :param determine_wants: Optional function to determine what refs to + Args: + target: The target repository + determine_wants: Optional function to determine what refs to fetch. - :param progress: Optional progress function - :param depth: Optional shallow fetch depth - :return: The local refs + progress: Optional progress function + depth: Optional shallow fetch depth + Returns: The local refs """ if determine_wants is None: determine_wants = target.object_store.determine_wants_all @@ -370,17 +378,18 @@ def fetch_pack_data(self, determine_wants, graph_walker, progress, get_tagged=None, depth=None): """Fetch the pack data required for a set of revisions. - :param determine_wants: Function that takes a dictionary with heads + Args: + determine_wants: Function that takes a dictionary with heads and returns the list of heads to fetch. - :param graph_walker: Object that can iterate over the list of revisions + graph_walker: Object that can iterate over the list of revisions to fetch and has an "ack" method that will be called to acknowledge that a revision is present. - :param progress: Simple progress function that will be called with + progress: Simple progress function that will be called with updated progress strings. 
- :param get_tagged: Function that returns a dict of pointed-to sha -> + get_tagged: Function that returns a dict of pointed-to sha -> tag sha for including tags. - :param depth: Shallow fetch depth - :return: count and iterator over pack data + depth: Shallow fetch depth + Returns: count and iterator over pack data """ # TODO(jelmer): Fetch pack data directly, don't create objects first. objects = self.fetch_objects(determine_wants, graph_walker, progress, @@ -391,17 +400,18 @@ def fetch_objects(self, determine_wants, graph_walker, progress, get_tagged=None, depth=None): """Fetch the missing objects required for a set of revisions. - :param determine_wants: Function that takes a dictionary with heads + Args: + determine_wants: Function that takes a dictionary with heads and returns the list of heads to fetch. - :param graph_walker: Object that can iterate over the list of revisions + graph_walker: Object that can iterate over the list of revisions to fetch and has an "ack" method that will be called to acknowledge that a revision is present. - :param progress: Simple progress function that will be called with + progress: Simple progress function that will be called with updated progress strings. - :param get_tagged: Function that returns a dict of pointed-to sha -> + get_tagged: Function that returns a dict of pointed-to sha -> tag sha for including tags. - :param depth: Shallow fetch depth - :return: iterator over objects, with __len__ implemented + depth: Shallow fetch depth + Returns: iterator over objects, with __len__ implemented """ if depth not in (None, 0): raise NotImplementedError("depth not supported yet") @@ -467,8 +477,9 @@ def get_graph_walker(self, heads=None): A graph walker is used by a remote repository (or proxy) to find out which objects are present in this repository. 
- :param heads: Repository heads to use (optional) - :return: A graph walker object + Args: + heads: Repository heads to use (optional) + Returns: A graph walker object """ if heads is None: heads = [ @@ -480,7 +491,7 @@ def get_graph_walker(self, heads=None): def get_refs(self): """Get dictionary with all refs. - :return: A ``dict`` mapping ref names to SHA1s + Returns: A ``dict`` mapping ref names to SHA1s """ return self.refs.as_dict() @@ -508,9 +519,11 @@ def _get_object(self, sha, cls): def get_object(self, sha): """Retrieve the object with the specified SHA. - :param sha: SHA to retrieve - :return: A ShaFile object - :raise KeyError: when the object can not be found + Args: + sha: SHA to retrieve + Returns: A ShaFile object + Raises: + KeyError: when the object can not be found """ return self.object_store[sha] @@ -520,9 +533,10 @@ def get_parents(self, sha, commit=None): If the specific commit is a graftpoint, the graft parents will be returned instead. - :param sha: SHA of the commit for which to retrieve the parents - :param commit: Optional commit matching the sha - :return: List of parents + Args: + sha: SHA of the commit for which to retrieve the parents + commit: Optional commit matching the sha + Returns: List of parents """ try: @@ -535,14 +549,14 @@ def get_parents(self, sha, commit=None): def get_config(self): """Retrieve the config object. - :return: `ConfigFile` object for the ``.git/config`` file. + Returns: `ConfigFile` object for the ``.git/config`` file. """ raise NotImplementedError(self.get_config) def get_description(self): """Retrieve the description for this repository. - :return: String with the description of the repository + Returns: String with the description of the repository as set by the user. """ raise NotImplementedError(self.get_description) @@ -550,7 +564,8 @@ def get_description(self): def set_description(self, description): """Set the description for this repository. 
- :param description: Text to set as description for this repository. + Args: + description: Text to set as description for this repository. """ raise NotImplementedError(self.set_description) @@ -561,7 +576,7 @@ def get_config_stack(self): itself (.git/config) and the global configuration, which usually lives in ~/.gitconfig. - :return: `Config` instance for this repository + Returns: `Config` instance for this repository """ from dulwich.config import StackedConfig backends = [self.get_config()] + StackedConfig.default_backends() @@ -570,7 +585,7 @@ def get_config_stack(self): def get_shallow(self): """Get the set of shallow commits. - :return: Set of shallow commits. + Returns: Set of shallow commits. """ f = self.get_named_file('shallow') if f is None: @@ -581,8 +596,9 @@ def get_shallow(self): def update_shallow(self, new_shallow, new_unshallow): """Update the list of shallow objects. - :param new_shallow: Newly shallow objects - :param new_unshallow: Newly no longer shallow objects + Args: + new_shallow: Newly shallow objects + new_unshallow: Newly no longer shallow objects """ shallow = self.get_shallow() if new_shallow: @@ -596,8 +612,9 @@ def update_shallow(self, new_shallow, new_unshallow): def get_peeled(self, ref): """Get the peeled value of a ref. - :param ref: The refname to peel. - :return: The fully-peeled SHA1 of a tag object, after peeling all + Args: + ref: The refname to peel. + Returns: The fully-peeled SHA1 of a tag object, after peeling all intermediate tags; if the original ref does not point to a tag, this will equal the original SHA1. """ @@ -609,27 +626,28 @@ def get_peeled(self, ref): def get_walker(self, include=None, *args, **kwargs): """Obtain a walker for this repository. - :param include: Iterable of SHAs of commits to include along with their + Args: + include: Iterable of SHAs of commits to include along with their ancestors. 
Defaults to [HEAD] - :param exclude: Iterable of SHAs of commits to exclude along with their + exclude: Iterable of SHAs of commits to exclude along with their ancestors, overriding includes. - :param order: ORDER_* constant specifying the order of results. + order: ORDER_* constant specifying the order of results. Anything other than ORDER_DATE may result in O(n) memory usage. - :param reverse: If True, reverse the order of output, requiring O(n) + reverse: If True, reverse the order of output, requiring O(n) memory. - :param max_entries: The maximum number of entries to yield, or None for + max_entries: The maximum number of entries to yield, or None for no limit. - :param paths: Iterable of file or subtree paths to show entries for. - :param rename_detector: diff.RenameDetector object for detecting + paths: Iterable of file or subtree paths to show entries for. + rename_detector: diff.RenameDetector object for detecting renames. - :param follow: If True, follow path across renames/copies. Forces a + follow: If True, follow path across renames/copies. Forces a default rename_detector. - :param since: Timestamp to list commits after. - :param until: Timestamp to list commits before. - :param queue_cls: A class to use for a queue of commits, supporting the + since: Timestamp to list commits after. + until: Timestamp to list commits before. + queue_cls: A class to use for a queue of commits, supporting the iterator protocol. The constructor takes a single argument, the Walker. - :return: A `Walker` object + Returns: A `Walker` object """ from dulwich.walk import Walker if include is None: @@ -645,9 +663,11 @@ def get_walker(self, include=None, *args, **kwargs): def __getitem__(self, name): """Retrieve a Git object by SHA1 or ref. 
- :param name: A Git object SHA1 or a ref name - :return: A `ShaFile` object, such as a Commit or Blob - :raise KeyError: when the specified ref or object does not exist + Args: + name: A Git object SHA1 or a ref name + Returns: A `ShaFile` object, such as a Commit or Blob + Raises: + KeyError: when the specified ref or object does not exist """ if not isinstance(name, bytes): raise TypeError("'name' must be bytestring, not %.80s" % @@ -665,7 +685,8 @@ def __getitem__(self, name): def __contains__(self, name): """Check if a specific Git object or ref is present. - :param name: Git object SHA1 or ref name + Args: + name: Git object SHA1 or ref name """ if len(name) in (20, 40): return name in self.object_store or name in self.refs @@ -675,8 +696,9 @@ def __contains__(self, name): def __setitem__(self, name, value): """Set a ref. - :param name: ref name - :param value: Ref value - either a ShaFile object, or a hex sha + Args: + name: ref name + value: Ref value - either a ShaFile object, or a hex sha """ if name.startswith(b"refs/") or name == b'HEAD': if isinstance(value, ShaFile): @@ -691,7 +713,8 @@ def __setitem__(self, name, value): def __delitem__(self, name): """Remove a ref. 
- :param name: Name of the ref to remove + Args: + name: Name of the ref to remove """ if name.startswith(b"refs/") or name == b"HEAD": del self.refs[name] @@ -707,7 +730,8 @@ def _get_user_identity(self, config, kind=None): def _add_graftpoints(self, updated_graftpoints): """Add or modify graftpoints - :param updated_graftpoints: Dict of commit shas to list of parent shas + Args: + updated_graftpoints: Dict of commit shas to list of parent shas """ # Simple validation @@ -720,7 +744,8 @@ def _add_graftpoints(self, updated_graftpoints): def _remove_graftpoints(self, to_remove=[]): """Remove graftpoints - :param to_remove: List of commit shas + Args: + to_remove: List of commit shas """ for sha in to_remove: del self._graftpoints[sha] @@ -739,21 +764,22 @@ def do_commit(self, message=None, committer=None, ref=b'HEAD', merge_heads=None): """Create a new commit. - :param message: Commit message - :param committer: Committer fullname - :param author: Author fullname (defaults to committer) - :param commit_timestamp: Commit timestamp (defaults to now) - :param commit_timezone: Commit timestamp timezone (defaults to GMT) - :param author_timestamp: Author timestamp (defaults to commit + Args: + message: Commit message + committer: Committer fullname + author: Author fullname (defaults to committer) + commit_timestamp: Commit timestamp (defaults to now) + commit_timezone: Commit timestamp timezone (defaults to GMT) + author_timestamp: Author timestamp (defaults to commit timestamp) - :param author_timezone: Author timestamp timezone + author_timezone: Author timestamp timezone (defaults to commit timestamp timezone) - :param tree: SHA1 of the tree root to use (if not specified the + tree: SHA1 of the tree root to use (if not specified the current index will be committed). 
- :param encoding: Encoding - :param ref: Optional ref to commit to (defaults to current branch) - :param merge_heads: Merge heads (defaults to .git/MERGE_HEADS) - :return: New commit SHA1 + encoding: Encoding + ref: Optional ref to commit to (defaults to current branch) + merge_heads: Merge heads (defaults to .git/MERGE_HEADS) + Returns: New commit SHA1 """ import time c = Commit() @@ -860,8 +886,9 @@ def read_gitfile(f): The first line of the file should start with "gitdir: " - :param f: File-like object to read from - :return: A path + Args: + f: File-like object to read from + Returns: A path """ cs = f.read() if not cs.startswith("gitdir: "): @@ -959,7 +986,8 @@ def discover(cls, start='.'): Return a Repo object for the first parent directory that looks like a Git repository. - :param start: The directory to start discovery from (defaults to '.') + Args: + start: The directory to start discovery from (defaults to '.') """ remaining = True path = os.path.abspath(start) @@ -989,7 +1017,7 @@ def commondir(self): def _determine_file_mode(self): """Probe the file-system to determine whether permissions can be trusted. - :return: True if permissions can be trusted, False otherwise. + Returns: True if permissions can be trusted, False otherwise. """ fname = os.path.join(self.path, '.probe-permissions') with open(fname, 'w') as f: @@ -1014,8 +1042,9 @@ def _determine_file_mode(self): def _put_named_file(self, path, contents): """Write a file to the control dir with the given name and contents. - :param path: The path to the file, relative to the control dir. - :param contents: A string to write to the file. + Args: + path: The path to the file, relative to the control dir. + contents: A string to write to the file. 
""" path = path.lstrip(os.path.sep) with GitFile(os.path.join(self.controldir(), path), 'wb') as f: @@ -1036,10 +1065,11 @@ def get_named_file(self, path, basedir=None): the control dir in a disk-based Repo, the object returned need not be pointing to a file in that location. - :param path: The path to the file, relative to the control dir. - :param basedir: Optional argument that specifies an alternative to the + Args: + path: The path to the file, relative to the control dir. + basedir: Optional argument that specifies an alternative to the control dir. - :return: An open file object, or None if the file does not exist. + Returns: An open file object, or None if the file does not exist. """ # TODO(dborowitz): sanitize filenames, since this is used directly by # the dumb web serving code. @@ -1060,8 +1090,9 @@ def index_path(self): def open_index(self): """Open the index for this repository. - :raise NoIndexPresent: If no index is present - :return: The matching `Index` + Raises: + NoIndexPresent: If no index is present + Returns: The matching `Index` """ from dulwich.index import Index if not self.has_index(): @@ -1077,7 +1108,8 @@ def has_index(self): def stage(self, fs_paths): """Stage a set of paths. - :param fs_paths: List of paths, relative to the repository path + Args: + fs_paths: List of paths, relative to the repository path """ root_path_bytes = self.path.encode(sys.getfilesystemencoding()) @@ -1125,12 +1157,13 @@ def clone(self, target_path, mkdir=True, bare=False, origin=b"origin", checkout=None): """Clone this repository. 
- :param target_path: Target path - :param mkdir: Create the target directory - :param bare: Whether to create a bare repository - :param origin: Base name for refs in target repository + Args: + target_path: Target path + mkdir: Create the target directory + bare: Whether to create a bare repository + origin: Base name for refs in target repository cloned from this repository - :return: Created repository as `Repo` + Returns: Created repository as `Repo` """ if not bare: target = self.init(target_path, mkdir=mkdir) @@ -1179,7 +1212,8 @@ def clone(self, target_path, mkdir=True, bare=False, def reset_index(self, tree=None): """Reset the index back to a specific tree. - :param tree: Tree SHA to reset to, None for current HEAD tree. + Args: + tree: Tree SHA to reset to, None for current HEAD tree. """ from dulwich.index import ( build_index_from_tree, @@ -1203,7 +1237,7 @@ def reset_index(self, tree=None): def get_config(self): """Retrieve the config object. - :return: `ConfigFile` object for the ``.git/config`` file. + Returns: `ConfigFile` object for the ``.git/config`` file. """ from dulwich.config import ConfigFile path = os.path.join(self._controldir, 'config') @@ -1219,7 +1253,7 @@ def get_config(self): def get_description(self): """Retrieve the description of this repository. - :return: A string describing the repository or None. + Returns: A string describing the repository or None. """ path = os.path.join(self._controldir, 'description') try: @@ -1236,7 +1270,8 @@ def __repr__(self): def set_description(self, description): """Set the description for this repository. - :param description: Text to set as description for this repository. + Args: + description: Text to set as description for this repository. """ self._put_named_file('description', description) @@ -1255,9 +1290,10 @@ def _init_maybe_bare(cls, path, bare): def init(cls, path, mkdir=False): """Create a new repository. 
- :param path: Path in which to create the repository - :param mkdir: Whether to create the directory - :return: `Repo` instance + Args: + path: Path in which to create the repository + mkdir: Whether to create the directory + Returns: `Repo` instance """ if mkdir: os.mkdir(path) @@ -1272,11 +1308,12 @@ def _init_new_working_directory(cls, path, main_repo, identifier=None, mkdir=False): """Create a new working directory linked to a repository. - :param path: Path in which to create the working tree. - :param main_repo: Main repository to reference - :param identifier: Worktree identifier - :param mkdir: Whether to create the directory - :return: `Repo` instance + Args: + path: Path in which to create the working tree. + main_repo: Main repository to reference + identifier: Worktree identifier + mkdir: Whether to create the directory + Returns: `Repo` instance """ if mkdir: os.mkdir(path) @@ -1315,8 +1352,9 @@ def init_bare(cls, path, mkdir=False): ``path`` should already exist and be an empty directory. - :param path: Path to create bare repository in - :return: a `Repo` instance + Args: + path: Path to create bare repository in + Returns: a `Repo` instance """ if mkdir: os.mkdir(path) @@ -1373,15 +1411,16 @@ def get_description(self): def _determine_file_mode(self): """Probe the file-system to determine whether permissions can be trusted. - :return: True if permissions can be trusted, False otherwise. + Returns: True if permissions can be trusted, False otherwise. """ return sys.platform != 'win32' def _put_named_file(self, path, contents): """Write a file to the control dir with the given name and contents. - :param path: The path to the file, relative to the control dir. - :param contents: A string to write to the file. + Args: + path: The path to the file, relative to the control dir. + contents: A string to write to the file. 
""" self._named_files[path] = contents @@ -1398,8 +1437,9 @@ def get_named_file(self, path, basedir=None): the control dir in a disk-baked Repo, the object returned need not be pointing to a file in that location. - :param path: The path to the file, relative to the control dir. - :return: An open file object, or None if the file does not exist. + Args: + path: The path to the file, relative to the control dir. + Returns: An open file object, or None if the file does not exist. """ contents = self._named_files.get(path, None) if contents is None: @@ -1409,14 +1449,15 @@ def get_named_file(self, path, basedir=None): def open_index(self): """Fail to open index for this repo, since it is bare. - :raise NoIndexPresent: Raised when no index is present + Raises: + NoIndexPresent: Raised when no index is present """ raise NoIndexPresent() def get_config(self): """Retrieve the config object. - :return: `ConfigFile` object. + Returns: `ConfigFile` object. """ return self._config @@ -1424,9 +1465,10 @@ def get_config(self): def init_bare(cls, objects, refs): """Create a new bare repository in memory. - :param objects: Objects for the new repository, + Args: + objects: Objects for the new repository, as iterable - :param refs: Refs as dictionary, mapping names + refs: Refs as dictionary, mapping names to object SHA1s """ ret = cls() diff --git a/dulwich/server.py b/dulwich/server.py index db6ebb7fb..f5af537eb 100644 --- a/dulwich/server.py +++ b/dulwich/server.py @@ -127,9 +127,11 @@ class Backend(object): def open_repository(self, path): """Open the repository at a path. 
- :param path: Path to the repository - :raise NotGitRepository: no git repository was found at path - :return: Instance of BackendRepo + Args: + path: Path to the repository + Raises: + NotGitRepository: no git repository was found at path + Returns: Instance of BackendRepo """ raise NotImplementedError(self.open_repository) @@ -148,15 +150,16 @@ def get_refs(self): """ Get all the refs in the repository - :return: dict of name -> sha + Returns: dict of name -> sha """ raise NotImplementedError def get_peeled(self, name): """Return the cached peeled value of a ref, if available. - :param name: Name of the ref to peel - :return: The peeled value of the ref. If the ref is known not point to + Args: + name: Name of the ref to peel + Returns: The peeled value of the ref. If the ref is known not point to a tag, this will be the SHA the ref refers to. If no cached information about a tag is available, this method may return None, but it should attempt to peel the tag if possible. @@ -168,8 +171,9 @@ def fetch_objects(self, determine_wants, graph_walker, progress, """ Yield the objects required for a list of commits. - :param progress: is a callback to send progress messages to the client - :param get_tagged: Function that returns a dict of pointed-to sha -> + Args: + progress: is a callback to send progress messages to the client + get_tagged: Function that returns a dict of pointed-to sha -> tag sha for including tags. """ raise NotImplementedError @@ -312,11 +316,12 @@ def progress(self, message): def get_tagged(self, refs=None, repo=None): """Get a dict of peeled values of tags to their original tag shas. - :param refs: dict of refname -> sha of possible tags; defaults to all + Args: + refs: dict of refname -> sha of possible tags; defaults to all of the backend's refs. 
- :param repo: optional Repo instance for getting peeled refs; defaults + repo: optional Repo instance for getting peeled refs; defaults to the backend's repo, if available - :return: dict of peeled_sha -> tag_sha, where tag_sha is the sha of a + Returns: dict of peeled_sha -> tag_sha, where tag_sha is the sha of a tag whose peeled value is peeled_sha. """ if not self.has_capability(CAPABILITY_INCLUDE_TAG): @@ -385,18 +390,20 @@ def write(x): def _split_proto_line(line, allowed): """Split a line read from the wire. - :param line: The line read from the wire. - :param allowed: An iterable of command names that should be allowed. + Args: + line: The line read from the wire. + allowed: An iterable of command names that should be allowed. Command names not listed below as possible return values will be ignored. If None, any commands from the possible return values are allowed. - :return: a tuple having one of the following forms: + Returns: a tuple having one of the following forms: ('want', obj_id) ('have', obj_id) ('done', None) (None, None) (for a flush-pkt) - :raise UnexpectedCommandError: if the line cannot be parsed into one of the + Raises: + UnexpectedCommandError: if the line cannot be parsed into one of the allowed return values. """ if not line: @@ -422,11 +429,12 @@ def _split_proto_line(line, allowed): def _find_shallow(store, heads, depth): """Find shallow commits according to a given depth. - :param store: An ObjectStore for looking up objects. - :param heads: Iterable of head SHAs to start walking from. - :param depth: The depth of ancestors to include. A depth of one includes + Args: + store: An ObjectStore for looking up objects. + heads: Iterable of head SHAs to start walking from. + depth: The depth of ancestors to include. A depth of one includes only the heads themselves. 
- :return: A tuple of (shallow, not_shallow), sets of SHAs that should be + Returns: A tuple of (shallow, not_shallow), sets of SHAs that should be considered shallow and unshallow according to the arguments. Note that these sets may overlap if a commit is reachable along multiple paths. """ @@ -484,10 +492,11 @@ def _want_satisfied(store, haves, want, earliest): def _all_wants_satisfied(store, haves, wants): """Check whether all the current wants are satisfied by a set of haves. - :param store: Object store to retrieve objects from - :param haves: A set of commits we know the client has. - :param wants: A set of commits the client wants - :note: Wants are specified with set_wants rather than passed in since + Args: + store: Object store to retrieve objects from + haves: A set of commits we know the client has. + wants: A set of commits the client wants + Note: Wants are specified with set_wants rather than passed in since in the current interface they are determined outside this class. """ haves = set(haves) @@ -546,8 +555,9 @@ def determine_wants(self, heads): walking the graph. Additionally, later code depends on this method consuming everything up to the first 'have' line. - :param heads: a dict of refname->SHA1 to advertise - :return: a list of SHA1s requested by the client + Args: + heads: a dict of refname->SHA1 to advertise + Returns: a list of SHA1s requested by the client """ symrefs = self.get_symrefs() values = set(heads.values()) @@ -638,9 +648,11 @@ def next(self): def read_proto_line(self, allowed): """Read a line from the wire. - :param allowed: An iterable of command names that should be allowed. - :return: A tuple of (command, value); see _split_proto_line. - :raise UnexpectedCommandError: If an error occurred reading the line. + Args: + allowed: An iterable of command names that should be allowed. + Returns: A tuple of (command, value); see _split_proto_line. + Raises: + UnexpectedCommandError: If an error occurred reading the line. 
""" return _split_proto_line(self.proto.read_pkt_line(), allowed) @@ -691,8 +703,9 @@ def set_wants(self, wants): def all_wants_satisfied(self, haves): """Check whether all the current wants are satisfied by a set of haves. - :param haves: A set of commits we know the client has. - :note: Wants are specified with set_wants rather than passed in since + Args: + haves: A set of commits we know the client has. + Note: Wants are specified with set_wants rather than passed in since in the current interface they are determined outside this class. """ return _all_wants_satisfied(self.store, haves, self._wants) @@ -1133,12 +1146,13 @@ def serve_command(handler_cls, argv=sys.argv, backend=None, inf=sys.stdin, This is mostly useful for the implementation of commands used by e.g. git+ssh. - :param handler_cls: `Handler` class to use for the request - :param argv: execv-style command-line arguments. Defaults to sys.argv. - :param backend: `Backend` to use - :param inf: File-like object to read from, defaults to standard input. - :param outf: File-like object to write to, defaults to standard output. - :return: Exit code for use with sys.exit. 0 on success, 1 on failure. + Args: + handler_cls: `Handler` class to use for the request + argv: execv-style command-line arguments. Defaults to sys.argv. + backend: `Backend` to use + inf: File-like object to read from, defaults to standard input. + outf: File-like object to write to, defaults to standard output. + Returns: Exit code for use with sys.exit. 0 on success, 1 on failure. """ if backend is None: backend = FileSystemBackend() diff --git a/dulwich/stash.py b/dulwich/stash.py index 63f2a3641..fdf9e7bcb 100644 --- a/dulwich/stash.py +++ b/dulwich/stash.py @@ -72,9 +72,10 @@ def pop(self, index): def push(self, committer=None, author=None, message=None): """Create a new stash. 
- :param committer: Optional committer name to use - :param author: Optional author name to use - :param message: Optional commit message + Args: + committer: Optional committer name to use + author: Optional author name to use + message: Optional commit message """ # First, create the index commit. commit_kwargs = {} diff --git a/dulwich/tests/__init__.py b/dulwich/tests/__init__.py index 2984efc95..eea22fa3d 100644 --- a/dulwich/tests/__init__.py +++ b/dulwich/tests/__init__.py @@ -64,8 +64,9 @@ class BlackboxTestCase(TestCase): def bin_path(self, name): """Determine the full path of a binary. - :param name: Name of the script - :return: Full path + Args: + name: Name of the script + Returns: Full path """ for d in self.bin_directories: p = os.path.join(d, name) @@ -77,8 +78,9 @@ def bin_path(self, name): def run_command(self, name, args): """Run a Dulwich command. - :param name: Name of the command, as it exists in bin/ - :param args: Arguments to the command + Args: + name: Name of the command, as it exists in bin/ + args: Arguments to the command """ env = dict(os.environ) env["PYTHONPATH"] = os.pathsep.join(sys.path) diff --git a/dulwich/tests/compat/test_repository.py b/dulwich/tests/compat/test_repository.py index 4d50f92ea..3bd4107da 100644 --- a/dulwich/tests/compat/test_repository.py +++ b/dulwich/tests/compat/test_repository.py @@ -133,10 +133,11 @@ class WorkingTreeTestCase(ObjectStoreTestCase): def create_new_worktree(self, repo_dir, branch): """Create a new worktree using git-worktree. - :param repo_dir: The directory of the main working tree. - :param branch: The branch or commit to checkout in the new worktree. + Args: + repo_dir: The directory of the main working tree. + branch: The branch or commit to checkout in the new worktree. - :returns: The path to the new working tree. + Returns: The path to the new working tree. 
""" temp_dir = tempfile.mkdtemp() run_git_or_fail(['worktree', 'add', temp_dir, branch], diff --git a/dulwich/tests/compat/utils.py b/dulwich/tests/compat/utils.py index 6f9f6915b..1f36a2195 100644 --- a/dulwich/tests/compat/utils.py +++ b/dulwich/tests/compat/utils.py @@ -48,9 +48,10 @@ def git_version(git_path=_DEFAULT_GIT): """Attempt to determine the version of git currently installed. - :param git_path: Path to the git executable; defaults to the version in + Args: + git_path: Path to the git executable; defaults to the version in the system path. - :return: A tuple of ints of the form (major, minor, point, sub-point), or + Returns: A tuple of ints of the form (major, minor, point, sub-point), or None if no git installation was found. """ try: @@ -77,12 +78,14 @@ def git_version(git_path=_DEFAULT_GIT): def require_git_version(required_version, git_path=_DEFAULT_GIT): """Require git version >= version, or skip the calling test. - :param required_version: A tuple of ints of the form (major, minor, point, + Args: + required_version: A tuple of ints of the form (major, minor, point, sub-point); ommitted components default to 0. - :param git_path: Path to the git executable; defaults to the version in + git_path: Path to the git executable; defaults to the version in the system path. - :raise ValueError: if the required version tuple has too many parts. - :raise SkipTest: if no suitable git version was found at the given path. + Raises: + ValueError: if the required version tuple has too many parts. + SkipTest: if no suitable git version was found at the given path. """ found_version = git_version(git_path=git_path) if found_version is None: @@ -112,15 +115,17 @@ def run_git(args, git_path=_DEFAULT_GIT, input=None, capture_stdout=False, Input is piped from the input parameter and output is sent to the standard streams, unless capture_stdout is set. - :param args: A list of args to the git command. - :param git_path: Path to to the git executable. 
- :param input: Input data to be sent to stdin. - :param capture_stdout: Whether to capture and return stdout. - :param popen_kwargs: Additional kwargs for subprocess.Popen; + Args: + args: A list of args to the git command. + git_path: Path to to the git executable. + input: Input data to be sent to stdin. + capture_stdout: Whether to capture and return stdout. + popen_kwargs: Additional kwargs for subprocess.Popen; stdin/stdout args are ignored. - :return: A tuple of (returncode, stdout contents). If capture_stdout is + Returns: A tuple of (returncode, stdout contents). If capture_stdout is False, None will be returned as stdout contents. - :raise OSError: if the git executable was not found. + Raises: + OSError: if the git executable was not found. """ env = popen_kwargs.pop('env', {}) @@ -155,9 +160,10 @@ def import_repo_to_dir(name): These are used rather than binary repos for compat tests because they are more compact and human-editable, and we already depend on git. - :param name: The name of the repository export file, relative to + Args: + name: The name of the repository export file, relative to dulwich/tests/data/repos. - :returns: The path to the imported repository. + Returns: The path to the imported repository. """ temp_dir = tempfile.mkdtemp() export_path = os.path.join(_REPOS_DATA_DIR, name) @@ -175,11 +181,12 @@ def check_for_daemon(limit=10, delay=0.1, timeout=0.1, port=TCP_GIT_PORT): Defaults to checking 10 times with a delay of 0.1 sec between tries. - :param limit: Number of attempts before deciding no daemon is running. - :param delay: Delay between connection attempts. - :param timeout: Socket timeout for connection attempts. - :param port: Port on which we expect the daemon to appear. - :returns: A boolean, true if a daemon is running on the specified port, + Args: + limit: Number of attempts before deciding no daemon is running. + delay: Delay between connection attempts. + timeout: Socket timeout for connection attempts. 
+ port: Port on which we expect the daemon to appear. + Returns: A boolean, true if a daemon is running on the specified port, false if not. """ for _ in range(limit): @@ -231,9 +238,10 @@ def assertReposNotEqual(self, repo1, repo2): def import_repo(self, name): """Import a repo from a fast-export file in a temporary directory. - :param name: The name of the repository export file, relative to + Args: + name: The name of the repository export file, relative to dulwich/tests/data/repos. - :returns: An initialized Repo object that lives in a temporary + Returns: An initialized Repo object that lives in a temporary directory. """ path = import_repo_to_dir(name) diff --git a/dulwich/tests/test_fastexport.py b/dulwich/tests/test_fastexport.py index 0f43efd40..16813061b 100644 --- a/dulwich/tests/test_fastexport.py +++ b/dulwich/tests/test_fastexport.py @@ -213,8 +213,9 @@ def simple_commit(self): def make_file_commit(self, file_cmds): """Create a trivial commit with the specified file commands. - :param file_cmds: File commands to run. - :return: The created commit object + Args: + file_cmds: File commands to run. + Returns: The created commit object """ from fastimport import commands cmd = commands.CommitCommand( diff --git a/dulwich/tests/utils.py b/dulwich/tests/utils.py index cfe446b7d..a49119d52 100644 --- a/dulwich/tests/utils.py +++ b/dulwich/tests/utils.py @@ -67,11 +67,12 @@ def open_repo(name, temp_dir=None): accidentally or intentionally modifying those repos in place. Use tear_down_repo to delete any temp files created. - :param name: The name of the repository, relative to + Args: + name: The name of the repository, relative to dulwich/tests/data/repos - :param temp_dir: temporary directory to initialize to. If not provided, a + temp_dir: temporary directory to initialize to. If not provided, a temporary directory will be created. - :returns: An initialized Repo object that lives in a temporary directory. 
+ Returns: An initialized Repo object that lives in a temporary directory. """ if temp_dir is None: temp_dir = tempfile.mkdtemp() @@ -95,8 +96,9 @@ def make_object(cls, **attrs): reassignment, which is not otherwise possible with objects having __slots__. - :param attrs: dict of attributes to set on the new object. - :return: A newly initialized object of type cls. + Args: + attrs: dict of attributes to set on the new object. + Returns: A newly initialized object of type cls. """ class TestObject(cls): @@ -123,8 +125,9 @@ class TestObject(cls): def make_commit(**attrs): """Make a Commit object with a default set of members. - :param attrs: dict of attributes to overwrite from the default values. - :return: A newly initialized Commit object. + Args: + attrs: dict of attributes to overwrite from the default values. + Returns: A newly initialized Commit object. """ default_time = 1262304000 # 2010-01-01 00:00:00 all_attrs = {'author': b'Test Author ', @@ -143,9 +146,10 @@ def make_commit(**attrs): def make_tag(target, **attrs): """Make a Tag object with a default set of values. - :param target: object to be tagged (Commit, Blob, Tree, etc) - :param attrs: dict of attributes to overwrite from the default values. - :return: A newly initialized Tag object. + Args: + target: object to be tagged (Commit, Blob, Tree, etc) + attrs: dict of attributes to overwrite from the default values. + Returns: A newly initialized Tag object. """ target_id = target.id target_type = object_class(target.type_name) @@ -186,9 +190,10 @@ def _do_some_test(self, func_impl): test_foo = functest_builder(_do_some_test, foo_py) test_foo_extension = ext_functest_builder(_do_some_test, _foo_c) - :param method: The method to run. It must must two parameters, self and the + Args: + method: The method to run. It must must two parameters, self and the function implementation to test. - :param func: The function implementation to pass to method. + func: The function implementation to pass to method. 
""" def do_test(self): @@ -202,8 +207,9 @@ def do_test(self): def build_pack(f, objects_spec, store=None): """Write test pack data from a concise spec. - :param f: A file-like object to write the pack to. - :param objects_spec: A list of (type_num, obj). For non-delta types, obj + Args: + f: A file-like object to write the pack to. + objects_spec: A list of (type_num, obj). For non-delta types, obj is the string of that object's data. For delta types, obj is a tuple of (base, data), where: @@ -213,8 +219,8 @@ def build_pack(f, objects_spec, store=None): * data is a string of the full, non-deltified data for that object. Note that offsets/refs and deltas are computed within this function. - :param store: An optional ObjectStore for looking up external refs. - :return: A list of tuples in the order specified by objects_spec: + store: An optional ObjectStore for looking up external refs. + Returns: A list of tuples in the order specified by objects_spec: (offset, type num, data, sha, CRC32) """ sf = SHA1Writer(f) @@ -285,21 +291,23 @@ def build_commit_graph(object_store, commit_spec, trees=None, attrs=None): If not otherwise specified, commits will refer to the empty tree and have commit times increasing in the same order as the commit spec. - :param object_store: An ObjectStore to commit objects to. - :param commit_spec: An iterable of iterables of ints defining the commit + Args: + object_store: An ObjectStore to commit objects to. + commit_spec: An iterable of iterables of ints defining the commit graph. Each entry defines one commit, and entries must be in topological order. The first element of each entry is a commit number, and the remaining elements are its parents. The commit numbers are only meaningful for the call to make_commits; since real commit objects are created, they will get created with real, opaque SHAs. 
- :param trees: An optional dict of commit number -> tree spec for building + trees: An optional dict of commit number -> tree spec for building trees for commits. The tree spec is an iterable of (path, blob, mode) or (path, blob) entries; if mode is omitted, it defaults to the normal file mode (0100644). - :param attrs: A dict of commit number -> (dict of attribute -> value) for + attrs: A dict of commit number -> (dict of attribute -> value) for assigning additional values to the commits. - :return: The list of commit objects created. - :raise ValueError: If an undefined commit identifier is listed as a parent. + Returns: The list of commit objects created. + Raises: + ValueError: If an undefined commit identifier is listed as a parent. """ if trees is None: trees = {} diff --git a/dulwich/walk.py b/dulwich/walk.py index 2f6565e78..40d0a040b 100644 --- a/dulwich/walk.py +++ b/dulwich/walk.py @@ -60,10 +60,11 @@ def __init__(self, walker, commit): def changes(self, path_prefix=None): """Get the tree changes for this entry. - :param path_prefix: Portion of the path in the repository to + Args: + path_prefix: Portion of the path in the repository to use to filter changes. Must be a directory name. Must be a full, valid, path reference (no partial names or wildcards). - :return: For commits with up to one parent, a list of TreeChange + Returns: For commits with up to one parent, a list of TreeChange objects; if the commit has no parents, these will be relative to the empty tree. For merge commits, a list of lists of TreeChange objects; see dulwich.diff.tree_changes_for_merge. @@ -238,26 +239,27 @@ def __init__(self, store, include, exclude=None, order=ORDER_DATE, queue_cls=_CommitTimeQueue): """Constructor. - :param store: ObjectStore instance for looking up objects. - :param include: Iterable of SHAs of commits to include along with their + Args: + store: ObjectStore instance for looking up objects. 
+ include: Iterable of SHAs of commits to include along with their ancestors. - :param exclude: Iterable of SHAs of commits to exclude along with their + exclude: Iterable of SHAs of commits to exclude along with their ancestors, overriding includes. - :param order: ORDER_* constant specifying the order of results. + order: ORDER_* constant specifying the order of results. Anything other than ORDER_DATE may result in O(n) memory usage. - :param reverse: If True, reverse the order of output, requiring O(n) + reverse: If True, reverse the order of output, requiring O(n) memory. - :param max_entries: The maximum number of entries to yield, or None for + max_entries: The maximum number of entries to yield, or None for no limit. - :param paths: Iterable of file or subtree paths to show entries for. - :param rename_detector: diff.RenameDetector object for detecting + paths: Iterable of file or subtree paths to show entries for. + rename_detector: diff.RenameDetector object for detecting renames. - :param follow: If True, follow path across renames/copies. Forces a + follow: If True, follow path across renames/copies. Forces a default rename_detector. - :param since: Timestamp to list commits after. - :param until: Timestamp to list commits before. - :param get_parents: Method to retrieve the parents of a commit - :param queue_cls: A class to use for a queue of commits, supporting the + since: Timestamp to list commits after. + until: Timestamp to list commits before. + get_parents: Method to retrieve the parents of a commit + queue_cls: A class to use for a queue of commits, supporting the iterator protocol. The constructor takes a single argument, the Walker. """ @@ -317,8 +319,9 @@ def _change_matches(self, change): def _should_return(self, entry): """Determine if a walk entry should be returned.. - :param entry: The WalkEntry to consider. - :return: True if the WalkEntry should be returned by this walk, or + Args: + entry: The WalkEntry to consider. 
+ Returns: True if the WalkEntry should be returned by this walk, or False otherwise (e.g. if it doesn't match any requested paths). """ commit = entry.commit @@ -364,9 +367,10 @@ def _next(self): def _reorder(self, results): """Possibly reorder a results iterator. - :param results: An iterator of WalkEntry objects, in the order returned + Args: + results: An iterator of WalkEntry objects, in the order returned from the queue_cls. - :return: An iterator or list of WalkEntry objects, in the order + Returns: An iterator or list of WalkEntry objects, in the order required by the Walker. """ if self.order == ORDER_TOPO: @@ -385,9 +389,10 @@ def _topo_reorder(entries, get_parents=lambda commit: commit.parents): This works best assuming the entries are already in almost-topological order, e.g. in commit time order. - :param entries: An iterable of WalkEntry objects. - :param get_parents: Optional function for getting the parents of a commit. - :return: iterator over WalkEntry objects from entries in FIFO order, except + Args: + entries: An iterable of WalkEntry objects. + get_parents: Optional function for getting the parents of a commit. + Returns: iterator over WalkEntry objects from entries in FIFO order, except where a parent would be yielded before any of its children. """ todo = collections.deque() diff --git a/dulwich/web.py b/dulwich/web.py index 3dc971a7e..3e4469e62 100644 --- a/dulwich/web.py +++ b/dulwich/web.py @@ -89,8 +89,9 @@ def date_time_string(timestamp=None): def url_prefix(mat): """Extract the URL prefix from a regex match. - :param mat: A regex match object. - :returns: The URL prefix, defined as the text before the match in the + Args: + mat: A regex match object. + Returns: The URL prefix, defined as the text before the match in the original string. Normalized to start with one leading slash and end with zero. """ @@ -105,10 +106,11 @@ def get_repo(backend, mat): def send_file(req, f, content_type): """Send a file-like object to the request output. 
- :param req: The HTTPGitRequest object to send output to. - :param f: An open file-like object to send; will be closed. - :param content_type: The MIME type for the file. - :return: Iterator over the contents of the file, as chunks. + Args: + req: The HTTPGitRequest object to send output to. + f: An open file-like object to send; will be closed. + content_type: The MIME type for the file. + Returns: Iterator over the contents of the file, as chunks. """ if f is None: yield req.not_found('File not found') From 8f73bc51e2620820fa1d3aa2c109a3aa3fbe5bc8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 18 Nov 2019 01:48:47 +0000 Subject: [PATCH 19/22] Fix compatibility with Python 3.8. --- .travis.yml | 9 ++++----- NEWS | 3 +++ dulwich/_diff_tree.c | 1 + dulwich/_objects.c | 3 ++- dulwich/_pack.c | 5 +++-- dulwich/tests/compat/test_patch.py | 4 ++-- 6 files changed, 15 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0a9b79261..966730aa1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,15 +27,14 @@ matrix: env: TEST_REQUIRE=fastimport dist: xenial sudo: true + - python: 3.8 + env: TEST_REQUIRE=fastimport + dist: xenial + sudo: true - python: 3.6 env: PURE=true - python: 2.7 env: PURE=true - # flakes checker fails on python 3.8-dev: - #- python: 3.8-dev - # env: TEST_REQUIRE=fastimport - # dist: xenial - # sudo: true install: - travis_retry pip install -U pip coverage codecov flake8 $TEST_REQUIRE diff --git a/NEWS b/NEWS index 1a6ecc4d1..2d55ab468 100644 --- a/NEWS +++ b/NEWS @@ -21,6 +21,9 @@ * Fix regression that added a dependency on C git for the test suite. (Jelmer Vernooij, #720) + * Fix compatibility with Python 3.8 - mostly deprecation warnings. + (Jelmer Vernooij) + 0.19.12 2019-08-13 BUG FIXES diff --git a/dulwich/_diff_tree.c b/dulwich/_diff_tree.c index 3ddc23de9..3481ddd60 100644 --- a/dulwich/_diff_tree.c +++ b/dulwich/_diff_tree.c @@ -18,6 +18,7 @@ * License, Version 2.0. 
*/ +#define PY_SSIZE_T_CLEAN #include #include diff --git a/dulwich/_objects.c b/dulwich/_objects.c index c21fbbb6d..417e189de 100644 --- a/dulwich/_objects.c +++ b/dulwich/_objects.c @@ -18,6 +18,7 @@ * License, Version 2.0. */ +#define PY_SSIZE_T_CLEAN #include #include #include @@ -61,7 +62,7 @@ static PyObject *sha_to_pyhex(const unsigned char *sha) static PyObject *py_parse_tree(PyObject *self, PyObject *args, PyObject *kw) { char *text, *start, *end; - int len, strict; + Py_ssize_t len; int strict; size_t namelen; PyObject *ret, *item, *name, *sha, *py_strict = NULL; static char *kwlist[] = {"text", "strict", NULL}; diff --git a/dulwich/_pack.c b/dulwich/_pack.c index 8fe62caf8..f2ef0649d 100644 --- a/dulwich/_pack.c +++ b/dulwich/_pack.c @@ -18,6 +18,7 @@ * License, Version 2.0. */ +#define PY_SSIZE_T_CLEAN #include #include @@ -205,7 +206,7 @@ static PyObject *py_bisect_find_sha(PyObject *self, PyObject *args) { PyObject *unpack_name; char *sha; - int sha_len; + Py_ssize_t sha_len; int start, end; #if PY_MAJOR_VERSION >= 3 if (!PyArg_ParseTuple(args, "iiy#O", &start, &end, @@ -227,7 +228,7 @@ static PyObject *py_bisect_find_sha(PyObject *self, PyObject *args) while (start <= end) { PyObject *file_sha; - int i = (start + end)/2; + Py_ssize_t i = (start + end)/2; int cmp; file_sha = PyObject_CallFunction(unpack_name, "i", i); if (file_sha == NULL) { diff --git a/dulwich/tests/compat/test_patch.py b/dulwich/tests/compat/test_patch.py index 085013e0a..bf45aeb86 100644 --- a/dulwich/tests/compat/test_patch.py +++ b/dulwich/tests/compat/test_patch.py @@ -100,7 +100,7 @@ def test_patch_apply(self): new_files = set(os.listdir(copy_path)) # Check that we have the exact same files in both repositories - self.assertEquals(original_files, new_files) + self.assertEqual(original_files, new_files) for file in original_files: if file == ".git": @@ -117,4 +117,4 @@ def test_patch_apply(self): with open(copy_file_path, "rb") as copy_file: copy_content = copy_file.read() - 
self.assertEquals(original_content, copy_content) + self.assertEqual(original_content, copy_content) From acdc0ac9022dd9c71554992e8dd441c986d5a622 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 24 Nov 2019 02:01:38 +0000 Subject: [PATCH 20/22] Cleanup directory after clone fails. Fixes #733. --- NEWS | 2 ++ dulwich/porcelain.py | 2 ++ dulwich/tests/test_porcelain.py | 6 ++++++ 3 files changed, 10 insertions(+) diff --git a/NEWS b/NEWS index 2d55ab468..e90c38151 100644 --- a/NEWS +++ b/NEWS @@ -11,6 +11,8 @@ * Add a ``dulwich.porcelain.active_branch`` function. (Jelmer Vernooij) + * Cleanup new directory if clone fails. (Jelmer Vernooij, #733) + 0.19.13 2019-08-19 BUG FIXES diff --git a/dulwich/porcelain.py b/dulwich/porcelain.py index 8db8c92f9..5ad41e8c5 100644 --- a/dulwich/porcelain.py +++ b/dulwich/porcelain.py @@ -65,6 +65,7 @@ import datetime import os import posixpath +import shutil import stat import sys import time @@ -382,6 +383,7 @@ def clone(source, target=None, bare=False, checkout=None, errstream.write(b'Checking out ' + head.id + b'\n') r.reset_index(head.tree) except BaseException: + shutil.rmtree(target) r.close() raise diff --git a/dulwich/tests/test_porcelain.py b/dulwich/tests/test_porcelain.py index 0be96ffc7..896520beb 100644 --- a/dulwich/tests/test_porcelain.py +++ b/dulwich/tests/test_porcelain.py @@ -351,6 +351,12 @@ def test_no_head_no_checkout_outstream_errstream_autofallback(self): self.repo.path, target_path, checkout=True, errstream=errstream) r.close() + def test_source_broken(self): + target_path = tempfile.mkdtemp() + self.assertRaises( + Exception, porcelain.clone, '/nonexistant/repo', target_path) + self.assertFalse(os.path.exists(target_path)) + class InitTests(TestCase): From ed4037327b05632706450a86005a150d142b18f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 30 Nov 2019 17:53:13 +0000 Subject: [PATCH 21/22] Expand ~ in global exclude path. 
--- NEWS | 2 ++ dulwich/ignore.py | 7 +++---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/NEWS b/NEWS index e90c38151..12ca7bd3c 100644 --- a/NEWS +++ b/NEWS @@ -13,6 +13,8 @@ * Cleanup new directory if clone fails. (Jelmer Vernooij, #733) + * Expand "~" in global exclude path. (Jelmer Vernooij) + 0.19.13 2019-08-19 BUG FIXES diff --git a/dulwich/ignore.py b/dulwich/ignore.py index 2bcfecc76..37a0d9ec8 100644 --- a/dulwich/ignore.py +++ b/dulwich/ignore.py @@ -271,9 +271,7 @@ def default_user_ignore_filter_path(config): except KeyError: pass - xdg_config_home = os.environ.get( - "XDG_CONFIG_HOME", os.path.expanduser("~/.config/"), - ) + xdg_config_home = os.environ.get("XDG_CONFIG_HOME", "~/.config/") return os.path.join(xdg_config_home, 'git', 'ignore') @@ -366,7 +364,8 @@ def from_repo(cls, repo): os.path.join(repo.controldir(), 'info', 'exclude'), default_user_ignore_filter_path(repo.get_config_stack())]: try: - global_filters.append(IgnoreFilter.from_path(p)) + global_filters.append( + IgnoreFilter.from_path(os.path.expanduser(p))) except IOError: pass config = repo.get_config_stack() From e68ce7c09efd2cc6cd9762c951dde308237cecad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 30 Nov 2019 18:17:10 +0000 Subject: [PATCH 22/22] Release 0.19.14 --- NEWS | 2 +- dulwich/__init__.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/NEWS b/NEWS index 12ca7bd3c..90cab28c0 100644 --- a/NEWS +++ b/NEWS @@ -1,4 +1,4 @@ -0.19.14 UNRELEASED +0.19.14 2019-11-30 * Strip superfluous <> around email. 
(monnerat) diff --git a/dulwich/__init__.py b/dulwich/__init__.py index 04a25f10c..10721e246 100644 --- a/dulwich/__init__.py +++ b/dulwich/__init__.py @@ -22,4 +22,4 @@ """Python implementation of the Git file formats and protocols.""" -__version__ = (0, 19, 13) +__version__ = (0, 19, 14) diff --git a/setup.py b/setup.py index da6f62c1c..2a75e7fd7 100755 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ import os import sys -dulwich_version_string = '0.19.13' +dulwich_version_string = '0.19.14' include_dirs = [] # Windows MSVC support