From 0f5ca881fe5d84fadd849af7fc4eb9cb885529af Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Wed, 6 Mar 2019 17:11:02 +0100 Subject: [PATCH 01/14] http: Drop `kwargs` parameter for the `download` method --- ipfshttpclient/http.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/ipfshttpclient/http.py b/ipfshttpclient/http.py index 5a29e262..29070004 100644 --- a/ipfshttpclient/http.py +++ b/ipfshttpclient/http.py @@ -298,7 +298,7 @@ def request(self, path, @pass_defaults def download(self, path, args=[], filepath=None, opts={}, - compress=True, timeout=120, offline=False, **kwargs): + compress=True, timeout=120, offline=False): """Makes a request to the IPFS daemon to download a file. Downloads a file or files from IPFS into the current working @@ -335,8 +335,6 @@ def download(self, path, args=[], filepath=None, opts={}, offline : bool Execute request in offline mode, i.e. locally without accessing the network. - kwargs : dict - Additional arguments to pass to :mod:`requests` """ url = self.base + path wd = filepath or '.' @@ -357,7 +355,7 @@ def download(self, path, args=[], filepath=None, opts={}, method = 'get' res = self._do_request(method, url, params=params, stream=True, - timeout=timeout, **kwargs) + timeout=timeout) self._do_raise_for_status(res) From c87940c9b52679d01a89a3b9819d366a5386e433 Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Wed, 6 Mar 2019 17:19:45 +0100 Subject: [PATCH 02/14] docs: Document the universal `offline` parameter --- docs/http_client_ref.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/http_client_ref.md b/docs/http_client_ref.md index fa55ad09..db2bc27e 100644 --- a/docs/http_client_ref.md +++ b/docs/http_client_ref.md @@ -42,6 +42,8 @@ All commands are accessed through the ``ipfshttpclient.Client`` class. 
All methods accept the following parameters in their `kwargs`: + * **offline** (*bool*) – Prevent the daemon from communicating with any remote + IPFS node while performing the requested action? + * **opts** (*dict*) – A mapping of custom IPFS API parameters to be sent along with the regular parameters generated by the client library From 137b84a345dd602d9c1b0045de9facb2fd332545 Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Fri, 10 May 2019 22:00:06 +0200 Subject: [PATCH 03/14] Upgrade documentation to use reCommonMark 0.5 --- docs/conf.py | 6 +----- docs/releasing.md | 8 ++++++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 5b7675f4..4454fa8e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -42,13 +42,9 @@ 'sphinx.ext.napoleon', 'sphinx.ext.coverage', 'sphinx.ext.viewcode', + 'recommonmark' ] -# Use reCommonMark for parsing text documents as MarkDown -source_parsers = { - '.md': 'recommonmark.parser.CommonMarkParser', -} - # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] diff --git a/docs/releasing.md b/docs/releasing.md index a1c735ab..8bf7364d 100644 --- a/docs/releasing.md +++ b/docs/releasing.md @@ -21,8 +21,12 @@ DNF line: `sudo dnf install python3-flit` Sphinx is the standard documentation framework for Python. Recommonmark is an extension that allows Sphinx to process Markdown documentation as if it where reStructuredText.
-APT line: `sudo apt install python3-sphinx python3-recommonmark` -DNF line: `sudo dnf install python3-sphinx python3-recommonmark` + + + +At least reCommonMark 0.5 is required, so install it using PIP: + +`pip3 install recommonmark~=0.5.0` ## Hosting Documentation From 6337f82eab06bc91f85c09fe8b53d4ef3f078dda Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Fri, 10 May 2019 22:01:41 +0200 Subject: [PATCH 04/14] Document the `timeout` parameter --- docs/http_client_ref.md | 2 +- ipfshttpclient/http.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/http_client_ref.md b/docs/http_client_ref.md index db2bc27e..3e8bd6d9 100644 --- a/docs/http_client_ref.md +++ b/docs/http_client_ref.md @@ -56,7 +56,7 @@ All methods accept the following parameters in their `kwargs`: `bytes` then arbitrary bags of bytes will be yielded that together form a stream; finally, if it is of type `dict` then the single dictonary item will be yielded once. - * **timeout** (**float**) – The number of seconds to wait of a daemon reply + * **timeout** ([float](float)) – The number of seconds to wait of a daemon reply before giving up ```eval_rst diff --git a/ipfshttpclient/http.py b/ipfshttpclient/http.py index 29070004..cfda809f 100644 --- a/ipfshttpclient/http.py +++ b/ipfshttpclient/http.py @@ -141,6 +141,13 @@ class HTTPClient(object): The port the IPFS daemon is running at base : str The path prefix for API calls + timeout : Union[numbers.Real, Tuple[numbers.Real, numbers.Real], NoneType] + The default number of seconds to wait when establishing a connection to + the daemon and waiting for returned data before throwing + :exc:`~ipfshttpclient.exceptions.TimeoutError`; if the value is a tuple + its contents will be interpreted as the values for the connection and + receiving phases respectively, otherwise the value will apply to both + phases; if the value is ``None`` then all timeouts will be disabled defaults : dict The default parameters to be passed to 
:meth:`~ipfshttpclient.http.HTTPClient.request` From 10dc93fe7f25ed0fe83d7f1fca308665280a4f9b Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Sun, 12 May 2019 16:30:14 +0200 Subject: [PATCH 05/14] Fix very subtle bug in `multipart.py` When adding a directory from a file descriptor (the default for Python3 on Linux) the top-level added directory would end up being `/` rather than just `` which would cause the directory to be sent twice to the daemon which would then normalize the paths to the same name but treat them as separate entities hence returning an extra empty directory result for the first name. --- ipfshttpclient/multipart.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ipfshttpclient/multipart.py b/ipfshttpclient/multipart.py index 94aa3a35..2ef6c59d 100644 --- a/ipfshttpclient/multipart.py +++ b/ipfshttpclient/multipart.py @@ -482,8 +482,8 @@ def match_short_path(short_path): dirname = os.path.basename(directory) dirname = dirname if isinstance(dirname, str) else os.fsdecode(dirname) else: - dirname = "_" if isinstance(directory, (str, int)) else os.fsencode("_") - assert(type(directory) == type(dirname) or isinstance(directory, int)) + dirname = "_" + assert type(directory) == type(dirname) or isinstance(directory, int) # Identify the unnecessary portion of the relative path truncate = (directory if not isinstance(directory, int) else ".") + sep @@ -508,7 +508,7 @@ def match_short_path(short_path): # remove leading / or \ if it is present if short_path.startswith(os.path.sep): short_path = short_path[len(os.path.sep):] - short_path = os.path.join(dirname, short_path) + short_path = os.path.join(dirname, short_path) if short_path else dirname wildcard_directory = False if os.path.split(short_path)[0] in wildcard_directories: From 1ec6913380e646bb0b6031ec9374379bedfe880d Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Sun, 12 May 2019 16:46:44 +0200 Subject: [PATCH 06/14] Add wrapper for the `requests` and
`urllib3` libraries that allows restricting the address family used during name resolution --- ipfshttpclient/http.py | 2 +- ipfshttpclient/requests_wrapper.py | 269 +++++++++++++++++++++++++++++ 2 files changed, 270 insertions(+), 1 deletion(-) create mode 100644 ipfshttpclient/requests_wrapper.py diff --git a/ipfshttpclient/http.py b/ipfshttpclient/http.py index cfda809f..ea363780 100644 --- a/ipfshttpclient/http.py +++ b/ipfshttpclient/http.py @@ -13,11 +13,11 @@ import tarfile from six.moves import http_client -import requests import six from . import encoding from . import exceptions +from . import requests_wrapper as requests def pass_defaults(func): diff --git a/ipfshttpclient/requests_wrapper.py b/ipfshttpclient/requests_wrapper.py new file mode 100644 index 00000000..1cbcc6e4 --- /dev/null +++ b/ipfshttpclient/requests_wrapper.py @@ -0,0 +1,269 @@ +# -*- encoding: utf-8 -*- +"""Exposes the full ``requests`` HTTP library API, while adding an extra +``family`` parameter to all HTTP request operations that may be used to restrict +the address family used when resolving a domain-name to an IP address. +""" +import socket +try: + import urllib.parse +except ImportError: #PY2 + class urllib: + import urlparse as parse + +import requests +import requests.adapters +import urllib3 +import urllib3.connection +import urllib3.exceptions +import urllib3.poolmanager +import urllib3.util.connection + +AF2NAME = { + int(socket.AF_INET): "ip4", + int(socket.AF_INET6): "ip6", +} +NAME2AF = dict((name, af) for af, name in AF2NAME.items()) + + +# This function is copied from urllib3/util/connection.py (that in turn copied +# it from socket.py in the Python 2.7 standard library test suite) and accepts +# an extra `family` parameter that specifies the allowed address families for +# name resolution. 
+# +# The entire remainder of this file after this only exists to ensure that this +# `family` parameter is exposed all the way up to request's `Session` interface, +# storing it as part of the URL scheme while traversing most of the layers. +def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, socket_options=None, + family=socket.AF_UNSPEC): + host, port = address + if host.startswith('['): + host = host.strip('[]') + err = None + + if not family or family == socket.AF_UNSPEC: + family = urllib3.util.connection.allowed_gai_family() + + for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket.socket(af, socktype, proto) + + # If provided, set socket level options before connecting. + if socket_options is not None: + for opt in socket_options: + sock.setsockopt(*opt) + + if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + except socket.error as e: + err = e + if sock is not None: + sock.close() + sock = None + + if err is not None: + raise err + + raise socket.error("getaddrinfo returns an empty list") + + +# Override the `urllib3` low-level Connection objects that do the actual work +# of speaking HTTP +def _kw_scheme_to_family(kw, base_scheme): + family = socket.AF_UNSPEC + scheme = kw.pop("scheme", None) + if isinstance(scheme, str): + parts = scheme.rsplit("+", 1) + if len(parts) == 2 and parts[0] == base_scheme: + family = NAME2AF.get(parts[1], family) + return family + + +class ConnectionOverrideMixin: + def _new_conn(self): + extra_kw = { + "family": self.family + } + if self.source_address: + extra_kw['source_address'] = self.source_address + + if self.socket_options: + extra_kw['socket_options'] = self.socket_options + + try: + conn = create_connection( + (self._dns_host, self.port), self.timeout, **extra_kw) + except 
socket.timeout as e: + raise urllib3.exceptions.ConnectTimeoutError( + self, "Connection to %s timed out. (connect timeout=%s)" % + (self.host, self.timeout)) + except socket.error as e: + raise urllib3.exceptions.NewConnectionError( + self, "Failed to establish a new connection: %s" % e) + + return conn + + +class HTTPConnection(ConnectionOverrideMixin, urllib3.connection.HTTPConnection): + def __init__(self, *args, **kw): + self.family = _kw_scheme_to_family(kw, "http") + super(HTTPConnection, self).__init__(*args, **kw) + + +class HTTPSConnection(ConnectionOverrideMixin, urllib3.connection.HTTPSConnection): + def __init__(self, *args, **kw): + self.family = _kw_scheme_to_family(kw, "https") + super(HTTPSConnection, self).__init__(*args, **kw) + + +# Override the higher-level `urllib3` ConnectionPool objects that instantiate +# one or more Connection objects and dispatch work between them +class HTTPConnectionPool(urllib3.HTTPConnectionPool): + ConnectionCls = HTTPConnection + +class HTTPSConnectionPool(urllib3.HTTPConnectionPool): + ConnectionCls = HTTPSConnection + + +# Override the highest-level `urllib3` PoolManager to also properly support the +# address family extended scheme values in URLs and pass these scheme values on +# to the individual ConnectionPool objects +class PoolManager(urllib3.PoolManager): + def __init__(self, *args, **kwargs): + super(PoolManager, self).__init__(*args, **kwargs) + + # Additionally to adding our variant of the usual HTTP and HTTPS + # pool classes, also add these for some variants of the default schemes + # that are limited to some specific address family only + self.pool_classes_by_scheme = {} + for scheme, ConnectionPool in (("http", HTTPConnectionPool), ("https", HTTPSConnectionPool)): + self.pool_classes_by_scheme[scheme] = ConnectionPool + for name in AF2NAME.values(): + self.pool_classes_by_scheme["{0}+{1}".format(scheme, name)] = ConnectionPool + self.key_fn_by_scheme["{0}+{1}".format(scheme, name)] = 
self.key_fn_by_scheme[scheme] + + # These next two are only required to ensure that our custom `scheme` values + # will be passed down to the `*ConnectionPool`s and finally to the actual + # `*Connection`s as parameter + def _new_pool(self, scheme, host, port, request_context=None): + # Copied from `urllib3` to *not* surpress the `scheme` parameter + pool_cls = self.pool_classes_by_scheme[scheme] + if request_context is None: + request_context = self.connection_pool_kw.copy() + + if scheme == "http" or scheme.startswith("http+"): + for kw in urllib3.poolmanager.SSL_KEYWORDS: + request_context.pop(kw, None) + + return pool_cls(**request_context) + + def connection_from_pool_key(self, pool_key, request_context=None): + # Copied from `urllib3` so that we continue to ensure that this will + # call `_new_pool` + with self.pools.lock: + pool = self.pools.get(pool_key) + if pool: + return pool + + scheme = request_context['scheme'] + host = request_context['host'] + port = request_context['port'] + pool = self._new_pool(scheme, host, port, request_context=request_context) + self.pools[pool_key] = pool + return pool + + +# Override the lower-level `requests` adapter that invokes the `urllib3` +# PoolManager objects +class HTTPAdapter(requests.adapters.HTTPAdapter): + def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): + # save these values for pickling (copied from `requests`) + self._pool_connections = connections + self._pool_maxsize = maxsize + self._pool_block = block + + self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, + block=block, strict=True, **pool_kwargs) + + +# Override the highest-level `requests` Session object to accept the `family` +# parameter for any request and encode its value as part of the URL scheme +# when passing it down to the adapter +class Session(requests.Session): + def __init__(self, *args, **kwargs): + super(Session, self).__init__(*args, **kwargs) + + # Additionally to mounting our variant of 
the usual HTTP and HTTPS + # adapter, also mount it for some variants of the default schemes that + # are limited to some specific address family only + adapter = HTTPAdapter() + for scheme in ("http", "https"): + self.mount("{0}://".format(scheme), adapter) + for name in AF2NAME.values(): + self.mount("{0}+{1}://".format(scheme, name), adapter) + + def request(self, method, url, *args, **kwargs): + family = kwargs.pop("family", socket.AF_UNSPEC) + if family != socket.AF_UNSPEC: + # Inject provided address family value as extension to scheme + url = urllib.parse.urlparse(url) + url = url._replace(scheme="{0}+{1}".format(url.scheme, AF2NAME[int(family)])) + url = url.geturl() + return super(Session, self).request(method, url, *args, **kwargs) + + +session = Session + + +# Import other `requests` stuff to make the top-level API of this more compatible +from requests import ( + __title__, __description__, __url__, __version__, __build__, __author__, + __author_email__, __license__, __copyright__, __cake__, + + exceptions, utils, packages, codes, + Request, Response, PreparedRequest, + RequestException, Timeout, URLRequired, TooManyRedirects, HTTPError, + ConnectionError, FileModeWarning, ConnectTimeout, ReadTimeout +) + + +# Re-implement the top-level “session-less” API +def request(method, url, **kwargs): + with Session() as session: + return session.request(method=method, url=url, **kwargs) + +def get(url, params=None, **kwargs): + kwargs.setdefault('allow_redirects', True) + return request('get', url, params=params, **kwargs) + + +def options(url, **kwargs): + kwargs.setdefault('allow_redirects', True) + return request('options', url, **kwargs) + + +def head(url, **kwargs): + kwargs.setdefault('allow_redirects', False) + return request('head', url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + return request('post', url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + return request('put', url, data=data, **kwargs) + + 
+def patch(url, data=None, **kwargs): + return request('patch', url, data=data, **kwargs) + + +def delete(url, **kwargs): + return request('delete', url, **kwargs) From 2cdc478567fc6f328afa4e646cc7f598f949773b Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Sun, 12 May 2019 16:57:01 +0200 Subject: [PATCH 07/14] Use MultiAddr, rather than (extended) host + port for describing the daemon location --- ipfshttpclient/__init__.py | 2 +- ipfshttpclient/client/__init__.py | 9 +- ipfshttpclient/client/base.py | 26 +- ipfshttpclient/exceptions.py | 11 + ipfshttpclient/http.py | 581 ++++++++++++++++-------------- pyproject.toml | 1 + requirements.txt | 1 + test/functional/test_other.py | 3 +- test/run-tests.py | 10 +- test/unit/test_http.py | 9 +- 10 files changed, 360 insertions(+), 293 deletions(-) diff --git a/ipfshttpclient/__init__.py b/ipfshttpclient/__init__.py index 102da784..fe719760 100644 --- a/ipfshttpclient/__init__.py +++ b/ipfshttpclient/__init__.py @@ -9,6 +9,6 @@ ################################### from .
import exceptions -from .client import DEFAULT_HOST, DEFAULT_PORT, DEFAULT_BASE +from .client import DEFAULT_ADDR, DEFAULT_BASE from .client import VERSION_MINIMUM, VERSION_MAXIMUM from .client import Client, assert_version, connect diff --git a/ipfshttpclient/client/__init__.py b/ipfshttpclient/client/__init__.py index c679e0b6..eeeb79f4 100644 --- a/ipfshttpclient/client/__init__.py +++ b/ipfshttpclient/client/__init__.py @@ -10,8 +10,9 @@ import os import warnings -DEFAULT_HOST = str(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_HOST", 'localhost')) -DEFAULT_PORT = int(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_PORT", 5001)) +import multiaddr + +DEFAULT_ADDR = multiaddr.Multiaddr(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_ADDR", '/dns/localhost/tcp/5001')) DEFAULT_BASE = str(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_BASE", 'api/v0')) VERSION_MINIMUM = "0.4.3" @@ -64,7 +65,7 @@ def assert_version(version, minimum=VERSION_MINIMUM, maximum=VERSION_MAXIMUM): raise exceptions.VersionMismatch(version, minimum, maximum) -def connect(host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE, +def connect(addr=DEFAULT_ADDR, base=DEFAULT_BASE, chunk_size=multipart.default_chunk_size, **defaults): """Create a new :class:`~ipfshttpclient.Client` instance and connect to the daemon to validate that its version is supported. @@ -87,7 +88,7 @@ def connect(host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE, :class:`~ipfshttpclient.Client` """ # Create client instance - client = Client(host, port, base, chunk_size, **defaults) + client = Client(addr, base, chunk_size, **defaults) # Query version number from daemon and validate it assert_version(client.version()['Version']) diff --git a/ipfshttpclient/client/base.py b/ipfshttpclient/client/base.py index d13f141a..69e4c73b 100644 --- a/ipfshttpclient/client/base.py +++ b/ipfshttpclient/client/base.py @@ -4,7 +4,7 @@ import six -from . import DEFAULT_HOST, DEFAULT_PORT, DEFAULT_BASE +from . 
import DEFAULT_ADDR, DEFAULT_BASE from .. import multipart, http @@ -86,11 +86,19 @@ class ClientBase(object): Parameters ---------- - host : str - Hostname or IP address of the computer running the ``ipfs daemon`` - node (defaults to the local system) - port : int - The API port of the IPFS deamon (usually 5001) + addr : Union[str, multiaddr.Multiaddr] + The `MultiAddr `_ describing the + daemon location, as used in the *API* key of + `go-ipfs *Addresses* section `_ + (defaults to ``/dns/localhost/tcp/5001``) + + Supported formats are currently: + + * ``/{dns,dns4,dns6,ip4,ip6}//tcp/`` (HTTP) + * ``/{dns,dns4,dns6,ip4,ip6}//tcp//http`` (HTTP) + * ``/{dns,dns4,dns6,ip4,ip6}//tcp//https`` (HTTPS) + + Additional forms (proxying) may be supported in the future. base : str Path of the deamon's API (currently always ``api/v0``) chunk_size : int @@ -99,11 +107,11 @@ class ClientBase(object): _clientfactory = http.HTTPClient - def __init__(self, host=DEFAULT_HOST, port=DEFAULT_PORT, - base=DEFAULT_BASE, chunk_size=multipart.default_chunk_size, + def __init__(self, addr=DEFAULT_ADDR, base=DEFAULT_BASE, + chunk_size=multipart.default_chunk_size, **defaults): """Connects to the API port of an IPFS node.""" self.chunk_size = chunk_size - self._client = self._clientfactory(host, port, base, **defaults) \ No newline at end of file + self._client = self._clientfactory(addr, base, **defaults) \ No newline at end of file diff --git a/ipfshttpclient/exceptions.py b/ipfshttpclient/exceptions.py index 16355213..3739679b 100644 --- a/ipfshttpclient/exceptions.py +++ b/ipfshttpclient/exceptions.py @@ -4,6 +4,7 @@ Error +-- VersionMismatch + +-- AddressError +-- EncoderError | +-- EncoderMissingError | +-- EncodingError @@ -17,6 +18,7 @@ +-- TimeoutError """ +import multiaddr.exceptions class Error(Exception): @@ -24,6 +26,15 @@ class Error(Exception): pass +class AddressError(Error, multiaddr.exceptions.Error): + """Raised when the provided daemon location Multiaddr does not match any 
+ of the supported patterns.""" + + def __init__(self, addr): + self.addr = addr + Error.__init__(self, "Unsupported MultiAddr pattern: {0}".format(addr)) + + class VersionMismatch(Error): """Raised when daemon version is not supported by this client version.""" diff --git a/ipfshttpclient/http.py b/ipfshttpclient/http.py index ea363780..a2aa1fdf 100644 --- a/ipfshttpclient/http.py +++ b/ipfshttpclient/http.py @@ -9,10 +9,17 @@ import abc import contextlib import functools -import re import tarfile from six.moves import http_client - +import socket +try: + import urllib.parse +except ImportError: #PY2 + class urllib: + import urlparse as parse + +import multiaddr +from multiaddr.protocols import (P_DNS, P_DNS4, P_DNS6, P_HTTP, P_HTTPS, P_IP4, P_IP6, P_TCP) import six from . import encoding @@ -21,22 +28,22 @@ def pass_defaults(func): - """Decorator that returns a function named wrapper. + """Decorator that returns a function named wrapper. - When invoked, wrapper invokes func with default kwargs appended. + When invoked, wrapper invokes func with default kwargs appended. - Parameters - ---------- - func : callable - The function to append the default kwargs to - """ - @functools.wraps(func) - def wrapper(self, *args, **kwargs): - merged = {} - merged.update(self.defaults) - merged.update(kwargs) - return func(self, *args, **merged) - return wrapper + Parameters + ---------- + func : callable + The function to append the default kwargs to + """ + @functools.wraps(func) + def wrapper(self, *args, **kwargs): + merged = {} + merged.update(self.defaults) + merged.update(kwargs) + return func(self, *args, **merged) + return wrapper def _notify_stream_iter_closed(): @@ -131,255 +138,293 @@ def stream_decode_full(response, parser): class HTTPClient(object): - """An HTTP client for interacting with the IPFS daemon. 
- - Parameters - ---------- - host : str - The host the IPFS daemon is running on - port : int - The port the IPFS daemon is running at - base : str - The path prefix for API calls - timeout : Union[numbers.Real, Tuple[numbers.Real, numbers.Real], NoneType] - The default number of seconds to wait when establishing a connection to - the daemon and waiting for returned data before throwing - :exc:`~ipfshttpclient.exceptions.TimeoutError`; if the value is a tuple - its contents will be interpreted as the values for the connection and - receiving phases respectively, otherwise the value will apply to both - phases; if the value is ``None`` then all timeouts will be disabled - defaults : dict - The default parameters to be passed to - :meth:`~ipfshttpclient.http.HTTPClient.request` - """ - - __metaclass__ = abc.ABCMeta - - def __init__(self, host, port, base, **defaults): - self.host = host - self.port = port - if not re.match('^https?://', host.lower()): - host = 'http://' + host - - self.base = '%s:%s/%s' % (host, port, base) - - self.defaults = defaults - self._session = None - - def _do_request(self, *args, **kwargs): - try: - if self._session: - return self._session.request(*args, **kwargs) - else: - return requests.request(*args, **kwargs) - except (requests.ConnectTimeout, requests.Timeout) as error: - six.raise_from(exceptions.TimeoutError(error), error) - except requests.ConnectionError as error: - six.raise_from(exceptions.ConnectionError(error), error) - except http_client.HTTPException as error: - six.raise_from(exceptions.ProtocolError(error), error) - - def _do_raise_for_status(self, response): - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - content = [] - try: - decoder = encoding.get_encoding("json") - for chunk in response.iter_content(chunk_size=None): - content += list(decoder.parse_partial(chunk)) - content += list(decoder.parse_finalize()) - except exceptions.DecodingError: - pass - - # If we have decoded an 
error response from the server, - # use that as the exception message; otherwise, just pass - # the exception on to the caller. - if len(content) == 1 \ - and isinstance(content[0], dict) \ - and "Message" in content[0]: - msg = content[0]["Message"] - six.raise_from(exceptions.ErrorResponse(msg, error), error) - else: - six.raise_from(exceptions.StatusError(error), error) - - def _request(self, method, url, params, parser, stream=False, files=None, - headers={}, data=None, timeout=120): - # Do HTTP request (synchronously) - res = self._do_request(method, url, params=params, stream=stream, - files=files, headers=headers, data=data, timeout=timeout) - - # Raise exception for response status - # (optionally incorpating the response message, if applicable) - self._do_raise_for_status(res) - - if stream: - # Decode each item as it is read - return StreamDecodeIterator(res, parser) - else: - # Decode received item immediately - return stream_decode_full(res, parser) - - @pass_defaults - def request(self, path, - args=[], files=[], opts={}, stream=False, - decoder=None, headers={}, data=None, timeout=120, - offline=False, method=None, return_result=True): - """Makes an HTTP request to the IPFS daemon. - - This function returns the contents of the HTTP response from the IPFS - daemon. 
- - Raises - ------ - ~ipfshttpclient.exceptions.ErrorResponse - ~ipfshttpclient.exceptions.ConnectionError - ~ipfshttpclient.exceptions.ProtocolError - ~ipfshttpclient.exceptions.StatusError - ~ipfshttpclient.exceptions.TimeoutError - - Parameters - ---------- - path : str - The REST command path to send - args : list - Positional parameters to be sent along with the HTTP request - files : Union[str, io.RawIOBase, collections.abc.Iterable] - The file object(s) or path(s) to stream to the daemon - opts : dict - Query string paramters to be sent along with the HTTP request - decoder : str - The encoder to use to parse the HTTP response - timeout : float - How many seconds to wait for the server to send data - before giving up - - Defaults to 120 - offline : bool - Execute request in offline mode, i.e. locally without accessing - the network. - method : str - The HTTP request method to use. This argument is optional, - and GET or PUT will be used as appropriate by default. - The main use case for this is method='head' to reduce verbosity - and improve performance. - return_result : bool - Defaults to True. If the return is not relevant, such as in gc(), - passing False will return None and avoid downloading results. 
- kwargs : dict - Additional arguments to pass to :mod:`requests` - """ - url = self.base + path - - params = [] - params.append(('stream-channels', 'true')) - if offline: - params.append(('offline', 'true')) - for opt in opts.items(): - params.append(opt) - for arg in args: - params.append(('arg', arg)) - - if (files or data): - method = 'post' - elif not return_result: - method = 'head' - else: - method = 'get' - - # Don't attempt to decode response or stream - # (which would keep an iterator open that will then never be waited for) - if not return_result: - decoder = None - stream = False - - parser = encoding.get_encoding(decoder if decoder else "none") - - ret = self._request(method, url, params, parser, stream, - files, headers, data, timeout=timeout) - - if not return_result: - return None - - return ret - - @pass_defaults - def download(self, path, args=[], filepath=None, opts={}, - compress=True, timeout=120, offline=False): - """Makes a request to the IPFS daemon to download a file. - - Downloads a file or files from IPFS into the current working - directory, or the directory given by ``filepath``. - - Raises - ------ - ~ipfshttpclient.exceptions.ErrorResponse - ~ipfshttpclient.exceptions.ConnectionError - ~ipfshttpclient.exceptions.ProtocolError - ~ipfshttpclient.exceptions.StatusError - ~ipfshttpclient.exceptions.TimeoutError - - Parameters - ---------- - path : str - The REST command path to send - filepath : str - The local path where IPFS will store downloaded files - - Defaults to the current working directory. - args : list - Positional parameters to be sent along with the HTTP request - opts : dict - Query string paramters to be sent along with the HTTP request - compress : bool - Whether the downloaded file should be GZip compressed by the - daemon before being sent to the client - timeout : float - How many seconds to wait for the server to send data - before giving up - - Defaults to 120 - offline : bool - Execute request in offline mode, i.e. 
locally without accessing - the network. - """ - url = self.base + path - wd = filepath or '.' - - params = [] - params.append(('stream-channels', 'true')) - if offline: - params.append(('offline', 'true')) - params.append(('archive', 'true')) - if compress: - params.append(('compress', 'true')) - - for opt in opts.items(): - params.append(opt) - for arg in args: - params.append(('arg', arg)) - - method = 'get' - - res = self._do_request(method, url, params=params, stream=True, - timeout=timeout) - - self._do_raise_for_status(res) - - # try to stream download as a tar file stream - mode = 'r|gz' if compress else 'r|' - - with tarfile.open(fileobj=res.raw, mode=mode) as tf: - tf.extractall(path=wd) - - @contextlib.contextmanager - def session(self): - """A context manager for this client's session. - - This function closes the current session when this client goes out of - scope. - """ - self._session = requests.session() - yield - self._session.close() - self._session = None + """An HTTP client for interacting with the IPFS daemon. 
+ + Parameters + ---------- + addr : Union[str, multiaddr.Multiaddr] + The address where the IPFS daemon may be reached + base : str + The path prefix for API calls + timeout : Union[numbers.Real, Tuple[numbers.Real, numbers.Real], NoneType] + The default number of seconds to wait when establishing a connection to + the daemon and waiting for returned data before throwing + :exc:`~ipfshttpclient.exceptions.TimeoutError`; if the value is a tuple + its contents will be interpreted as the values for the connection and + receiving phases respectively, otherwise the value will apply to both + phases; if the value is ``None`` then all timeouts will be disabled + defaults : dict + The default parameters to be passed to + :meth:`~ipfshttpclient.http.HTTPClient.request` + """ + + __metaclass__ = abc.ABCMeta + + def __init__(self, addr, base, **defaults): + addr = multiaddr.Multiaddr(addr) + addr_iter = iter(addr.items()) + + # Parse the `host`, `family`, `port` & `secure` values from the given + # multiaddr, raising on unsupported `addr` values + try: + # Read host value + proto, host = next(addr_iter) + family = socket.AF_UNSPEC + if proto.code in (P_IP4, P_DNS4): + family = socket.AF_INET + elif proto.code in (P_IP6, P_DNS6): + family = socket.AF_INET6 + elif proto.code != P_DNS: + raise exceptions.AddressError(addr) + + # Read port value + proto, port = next(addr_iter) + if proto.code != P_TCP: + raise exceptions.AddressError(addr) + + # Read application-level protocol name + secure = False + try: + proto, value = next(addr_iter) + except StopIteration: + pass + else: + if proto.code == P_HTTPS: + secure = True + elif proto.code != P_HTTP: + raise exceptions.AddressError(addr) + + # No further values may follow; this also exhausts the iterator + was_final = all(True for _ in addr_iter) + if not was_final: + raise exceptions.AddressError(addr) + except StopIteration: + six.raise_from(exceptions.AddressError(addr), None) + + # Convert the parsed `addr` values to a URL base 
and parameters + # for `requests` + if ":" in host and not host.startswith("["): + host = "[{0}]".format(host) + self.base = urllib.parse.SplitResult( + scheme = "http" if not secure else "https", + netloc = "{0}:{1}".format(host, port), + path = base, + query = "", + fragment = "" + ).geturl() + self._kwargs = { + "family": family + } + + self.defaults = defaults + self._session = None + + def _do_request(self, *args, **kwargs): + for name, value in self._kwargs.items(): + kwargs.setdefault(name, value) + try: + if self._session: + return self._session.request(*args, **kwargs) + else: + return requests.request(*args, **kwargs) + except (requests.ConnectTimeout, requests.Timeout) as error: + six.raise_from(exceptions.TimeoutError(error), error) + except requests.ConnectionError as error: + six.raise_from(exceptions.ConnectionError(error), error) + except http_client.HTTPException as error: + six.raise_from(exceptions.ProtocolError(error), error) + + def _do_raise_for_status(self, response): + try: + response.raise_for_status() + except requests.exceptions.HTTPError as error: + content = [] + try: + decoder = encoding.get_encoding("json") + for chunk in response.iter_content(chunk_size=None): + content += list(decoder.parse_partial(chunk)) + content += list(decoder.parse_finalize()) + except exceptions.DecodingError: + pass + + # If we have decoded an error response from the server, + # use that as the exception message; otherwise, just pass + # the exception on to the caller. 
+ if len(content) == 1 \ + and isinstance(content[0], dict) \ + and "Message" in content[0]: + msg = content[0]["Message"] + six.raise_from(exceptions.ErrorResponse(msg, error), error) + else: + six.raise_from(exceptions.StatusError(error), error) + + def _request(self, method, url, params, parser, stream=False, files=None, + headers={}, data=None, timeout=120): + # Do HTTP request (synchronously) + res = self._do_request(method, url, params=params, stream=stream, + files=files, headers=headers, data=data, timeout=timeout) + + # Raise exception for response status + # (optionally incorpating the response message, if applicable) + self._do_raise_for_status(res) + + if stream: + # Decode each item as it is read + return StreamDecodeIterator(res, parser) + else: + # Decode received item immediately + return stream_decode_full(res, parser) + + @pass_defaults + def request(self, path, + args=[], files=[], opts={}, stream=False, + decoder=None, headers={}, data=None, + timeout=120, offline=False, return_result=True): + """Makes an HTTP request to the IPFS daemon. + + This function returns the contents of the HTTP response from the IPFS + daemon. + + Raises + ------ + ~ipfshttpclient.exceptions.ErrorResponse + ~ipfshttpclient.exceptions.ConnectionError + ~ipfshttpclient.exceptions.ProtocolError + ~ipfshttpclient.exceptions.StatusError + ~ipfshttpclient.exceptions.TimeoutError + + Parameters + ---------- + path : str + The REST command path to send + args : list + Positional parameters to be sent along with the HTTP request + files : Union[str, io.RawIOBase, collections.abc.Iterable] + The file object(s) or path(s) to stream to the daemon + opts : dict + Query string paramters to be sent along with the HTTP request + decoder : str + The encoder to use to parse the HTTP response + timeout : float + How many seconds to wait for the server to send data + before giving up + + Defaults to 120 + offline : bool + Execute request in offline mode, i.e. 
locally without accessing + the network. + return_result : bool + Defaults to True. If the return is not relevant, such as in gc(), + passing False will return None and avoid downloading results. + """ + url = self.base + path + + params = [] + params.append(('stream-channels', 'true')) + if offline: + params.append(('offline', 'true')) + for opt in opts.items(): + params.append(opt) + for arg in args: + params.append(('arg', arg)) + + if (files or data): + method = 'post' + elif not return_result: + method = 'head' + else: + method = 'get' + + # Don't attempt to decode response or stream + # (which would keep an iterator open that will then never be waited for) + if not return_result: + decoder = None + stream = False + + parser = encoding.get_encoding(decoder if decoder else "none") + + ret = self._request(method, url, params, parser, stream, + files, headers, data, timeout=timeout) + + return ret if return_result else None + + @pass_defaults + def download(self, path, args=[], filepath=None, opts={}, + compress=True, timeout=120, offline=False): + """Makes a request to the IPFS daemon to download a file. + + Downloads a file or files from IPFS into the current working + directory, or the directory given by ``filepath``. + + Raises + ------ + ~ipfshttpclient.exceptions.ErrorResponse + ~ipfshttpclient.exceptions.ConnectionError + ~ipfshttpclient.exceptions.ProtocolError + ~ipfshttpclient.exceptions.StatusError + ~ipfshttpclient.exceptions.TimeoutError + + Parameters + ---------- + path : str + The REST command path to send + filepath : str + The local path where IPFS will store downloaded files + + Defaults to the current working directory. 
+ args : list + Positional parameters to be sent along with the HTTP request + opts : dict + Query string paramters to be sent along with the HTTP request + compress : bool + Whether the downloaded file should be GZip compressed by the + daemon before being sent to the client + timeout : float + How many seconds to wait for the server to send data + before giving up + + Defaults to 120 + offline : bool + Execute request in offline mode, i.e. locally without accessing + the network. + """ + url = self.base + path + wd = filepath or '.' + + params = [] + params.append(('stream-channels', 'true')) + if offline: + params.append(('offline', 'true')) + params.append(('archive', 'true')) + if compress: + params.append(('compress', 'true')) + + for opt in opts.items(): + params.append(opt) + for arg in args: + params.append(('arg', arg)) + + method = 'get' + + res = self._do_request(method, url, params=params, stream=True, + timeout=timeout) + + self._do_raise_for_status(res) + + # try to stream download as a tar file stream + mode = 'r|gz' if compress else 'r|' + + with tarfile.open(fileobj=res.raw, mode=mode) as tf: + tf.extractall(path=wd) + + @contextlib.contextmanager + def session(self): + """A context manager for this client's session. + + This function closes the current session when this client goes out of + scope. 
+ """ + self._session = requests.Session() + yield + self._session.close() + self._session = None diff --git a/pyproject.toml b/pyproject.toml index 9023339d..4605b023 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ description-file = "README.md" # Unfortunately these currently need to be duplicated from `requirements.txt` requires-python = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" requires = [ + "multiaddr (>=0.0.7)", "requests (>=2.11)", "six" ] diff --git a/requirements.txt b/requirements.txt index 0908a9fd..e480107b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ +multiaddr>=0.0.7 requests>=2.11 six \ No newline at end of file diff --git a/test/functional/test_other.py b/test/functional/test_other.py index b9b8956d..4773502f 100644 --- a/test/functional/test_other.py +++ b/test/functional/test_other.py @@ -9,8 +9,7 @@ def test_ipfs_node_available(): Dummy test to ensure that running the tests without a daemon produces a failure, since we think it's unlikely that people running tests want this """ - addr = "[{0}]:{1}".format(ipfshttpclient.DEFAULT_HOST, ipfshttpclient.DEFAULT_PORT) - assert conftest.is_available(), "Functional tests require an IPFS node to be available at: " + addr + assert conftest.is_available(), "Functional tests require an IPFS node to be available at: {0}".format(ipfshttpclient.DEFAULT_ADDR) def test_add_json(client, cleanup_pins): diff --git a/test/run-tests.py b/test/run-tests.py index 7803de11..a95eddba 100755 --- a/test/run-tests.py +++ b/test/run-tests.py @@ -37,8 +37,7 @@ def _contextlib_suppress(*exceptions): TEST_PATH = BASE_PATH / "build" / "test-{}".format(ENVNAME) IPFS_PATH = TEST_PATH / "ipfs-path" -HOST = "127.0.0.1" -PORT = random.randrange(40000, 65535) +ADDR = "/ip4/127.0.0.1/tcp/{0}".format(random.randrange(40000, 65535)) ########################### @@ -53,8 +52,7 @@ def _contextlib_suppress(*exceptions): # Export environment variables required for testing os.environ["IPFS_PATH"] = 
str(IPFS_PATH) -os.environ["PY_IPFS_HTTP_CLIENT_DEFAULT_HOST"] = str(HOST) -os.environ["PY_IPFS_HTTP_CLIENT_DEFAULT_PORT"] = str(PORT) +os.environ["PY_IPFS_HTTP_CLIENT_DEFAULT_ADDR"] = str(ADDR) # Make sure the IPFS data directory exists and is empty with contextlib.suppress(OSError): #PY2: Replace with `FileNotFoundError` @@ -66,7 +64,7 @@ def _contextlib_suppress(*exceptions): # Initialize the IPFS data directory subprocess.call(["ipfs", "init"]) subprocess.call(["ipfs", "config", "Addresses.Gateway", ""]) -subprocess.call(["ipfs", "config", "Addresses.API", "/ip4/{}/tcp/{}".format(HOST, PORT)]) +subprocess.call(["ipfs", "config", "Addresses.API", ADDR]) subprocess.call(["ipfs", "config", "--bool", "Experimental.FilestoreEnabled", "true"]) @@ -82,7 +80,7 @@ def _contextlib_suppress(*exceptions): extra_args["universal_newlines"] = True # Spawn IPFS daemon in data directory -print("Starting IPFS daemon on /ip4/{0}/tcp/{1}…".format(HOST, PORT), file=sys.stderr) +print("Starting IPFS daemon on {0}…".format(ADDR), file=sys.stderr) DAEMON = subprocess.Popen( ["ipfs", "daemon", "--enable-pubsub-experiment"], stdout=subprocess.PIPE, diff --git a/test/unit/test_http.py b/test/unit/test_http.py index 16f7fe3f..407f8ab4 100644 --- a/test/unit/test_http.py +++ b/test/unit/test_http.py @@ -34,7 +34,8 @@ def http_server(request): @pytest.fixture def http_client(http_server): return ipfshttpclient.http.HTTPClient( - *(http_server.server_address + (ipfshttpclient.DEFAULT_BASE,)) + "/ip4/{0}/tcp/{1}/http".format(*http_server.server_address), + ipfshttpclient.DEFAULT_BASE ) @@ -162,7 +163,8 @@ def test_failed_download(http_client, http_server): def test_download_timeout(slow_http_server): """Tests that a timed-out download raises a TimeoutError.""" http_client = ipfshttpclient.http.HTTPClient( - *(slow_http_server.server_address + (ipfshttpclient.DEFAULT_BASE,)) + "/ip4/{0}/tcp/{1}/http".format(*slow_http_server.server_address), + ipfshttpclient.DEFAULT_BASE ) with 
pytest.raises(ipfshttpclient.exceptions.TimeoutError): @@ -171,7 +173,8 @@ def test_download_timeout(slow_http_server): def test_request_timeout(slow_http_server): """Tests that a timed-out request raises a TimeoutError.""" http_client = ipfshttpclient.http.HTTPClient( - *(slow_http_server.server_address + (ipfshttpclient.DEFAULT_BASE,)) + "/ip4/{0}/tcp/{1}/http".format(*slow_http_server.server_address), + ipfshttpclient.DEFAULT_BASE ) with pytest.raises(ipfshttpclient.exceptions.TimeoutError): From e6f3bee037c12bb2a1cd6d2107d28d59400dae20 Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Sun, 12 May 2019 17:13:39 +0200 Subject: [PATCH 08/14] Update `multipart.compile_glob` to the Python 3.7 codebase (contains some bugfixes) --- ipfshttpclient/multipart.py | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/ipfshttpclient/multipart.py b/ipfshttpclient/multipart.py index 2ef6c59d..eb502237 100644 --- a/ipfshttpclient/multipart.py +++ b/ipfshttpclient/multipart.py @@ -313,8 +313,9 @@ def _body(self): def glob_compile(pat): """Translate a shell glob PATTERN to a regular expression. - This is almost entirely based on `fnmatch.translate` source-code from the - python 3.5 standard-library. + Source code taken from the `fnmatch.translate` function of the python 3.7 + standard-library with the glob-style modification of making `*` + non-recursive and the adding `**` as recursive matching operator. """ i, n = 0, len(pat) @@ -348,11 +349,30 @@ def glob_compile(pat): if j >= n: res = res + '\\[' else: - stuff = pat[i:j].replace('\\', '\\\\') + stuff = pat[i:j] + if '--' not in stuff: + stuff = stuff.replace('\\', r'\\') + else: + chunks = [] + k = i+2 if pat[i] == '!' else i + 1 + while True: + k = pat.find('-', k, j) + if k < 0: + break + chunks.append(pat[i:k]) + i = k + 1 + k = k + 3 + chunks.append(pat[i:j]) + # Escape backslashes and hyphens for set difference (--). + # Hyphens that create ranges shouldn't be escaped. 
+ stuff = '-'.join(s.replace('\\', r'\\').replace('-', r'\-') + for s in chunks) + # Escape set operations (&&, ~~ and ||). + stuff = re.sub(r'([&~|])', r'\\\1', stuff) i = j + 1 if stuff[0] == '!': stuff = '^' + stuff[1:] - elif stuff[0] == '^': + elif stuff[0] in ('^', '['): stuff = '\\' + stuff res = '%s[%s]' % (res, stuff) else: From c19fda4d963b97e51fcb9ea63bbd8b91591edbe3 Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Sun, 12 May 2019 17:17:22 +0200 Subject: [PATCH 09/14] Bump minimum required tox version to something reasonable --- tox.ini | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 8313b275..1cd971df 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -minversion = 2.0 +minversion = 3.3 envlist = py27, py34, @@ -27,6 +27,8 @@ deps = commands = flake8 {posargs} +#TODO: Migrate away from this file to `pyproject.toml` once `flake8` and `pytest` support parsing it + [testenv:coverage] basepython = python3 deps = From 258efe39adba62d410022ca13720581bce2d0edc Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Sun, 12 May 2019 20:42:51 +0200 Subject: [PATCH 10/14] [docs] Return wrapped class in `SectionProperty`s when invoked from the `Client` class instance --- ipfshttpclient/client/base.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/ipfshttpclient/client/base.py b/ipfshttpclient/client/base.py index 69e4c73b..7a9b21ae 100644 --- a/ipfshttpclient/client/base.py +++ b/ipfshttpclient/client/base.py @@ -47,16 +47,19 @@ def __init__(self, cls): self.__prop_cls__ = cls def __get__(self, client_object, type=None): - try: - return client_object.__prop_objs__[self] - except AttributeError: - client_object.__prop_objs__ = { - self: self.__prop_cls__(client_object) - } - return client_object.__prop_objs__[self] - except KeyError: - client_object.__prop_objs__[self] = self.__prop_cls__(client_object) - return client_object.__prop_objs__[self] + if client_object 
is not None: # We are invoked on object + try: + return client_object.__prop_objs__[self] + except AttributeError: + client_object.__prop_objs__ = { + self: self.__prop_cls__(client_object) + } + return client_object.__prop_objs__[self] + except KeyError: + client_object.__prop_objs__[self] = self.__prop_cls__(client_object) + return client_object.__prop_objs__[self] + else: # We are invoked on class + return self.__prop_cls__ class SectionBase(object): From 345ab54cfd90317ad53c63908bc505d060a9b13c Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Sun, 12 May 2019 21:11:27 +0200 Subject: [PATCH 11/14] [docs] Fix and improve documentation output --- docs/conf.py | 5 ++-- docs/http_client_ref.md | 46 ++++++++++++++----------------- ipfshttpclient/client/__init__.py | 5 +++- ipfshttpclient/client/base.py | 18 +++++------- 4 files changed, 34 insertions(+), 40 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 4454fa8e..a416e0ae 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -310,7 +310,8 @@ # External documentation link mapping intersphinx_mapping = { 'python': ('https://docs.python.org/3', None), - 'cid': ('https://py-cid.readthedocs.io/en/master/', None) + 'cid': ('https://py-cid.readthedocs.io/en/master/', None), + 'multiaddr': ('https://multiaddr.readthedocs.io/en/latest/', None) } # -- Napoleon settings ---------------------------------------------------- @@ -382,7 +383,7 @@ def import_object(self): def format_signature(self): """Hide inheritance signature since it's not applicable helpful for these classes.""" - return "" + return "({0})".format(self.args) if self.args is not None else "" class ClientClassDocumenter(ClientClassDocumenterBase): objtype = "clientclass" diff --git a/docs/http_client_ref.md b/docs/http_client_ref.md index 3e8bd6d9..0690d1d3 100644 --- a/docs/http_client_ref.md +++ b/docs/http_client_ref.md @@ -14,25 +14,23 @@ All commands are accessed through the ``ipfshttpclient.Client`` class. ### Utility Functions ```eval_rst -.. 
data:: ipfshttpclient.DEFAULT_HOST +.. data:: ipfshttpclient.DEFAULT_ADDR - The default hostname that the client library will attempt to connect to. - This may be overwritten on a per-client-instance basis using the ``host`` - parameter of the :func:`~ipfshttpclient.connect` function. - -.. data:: ipfshttpclient.DEFAULT_PORT - - The default port number that the client library will attempt to connect to. - This may be overwritten on a per-client-instance basis using the ``port`` - parameter of the :func:`~ipfshttpclient.connect` function. + The default IPFS API daemon location the client library will attempt to + connect to. By default this will have a value of ``multiaddr.Multiaddr("/dns/localhost/tcp/5001/http")``. + + This may be overwritten on a per-client-instance basis using + the ``addr`` parameter of the :func:`~ipfshttpclient.connect` function. .. data:: ipfshttpclient.DEFAULT_BASE - The default HTTP URL prefix (or “base”) that the client library will use. - This may be overwritten on a per-client-instance basis using the ``base`` - parameter of the :func:`~ipfshttpclient.connect` function. + The default HTTP URL path prefix (or “base”) that the client library will use. + By default this will have a value of ``"api/v0"``. + + This may be overwritten on a per-client-instance basis using the ``base`` + parameter of the :func:`~ipfshttpclient.connect` function. -.. autofunction:: ipfshttpclient.connect(host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE) +.. autofunction:: ipfshttpclient.connect(addr=DEFAULT_ADDR, base=DEFAULT_BASE) .. autofunction:: ipfshttpclient.assert_version @@ -42,27 +40,23 @@ All commands are accessed through the ``ipfshttpclient.Client`` class. All methods accept the following parameters in their `kwargs`: - * **offline** (*bool*) – Prevent the deamon from communicating with any remote - IPFS node while performing the requested action? 
- * **opts** (*dict*) – A mapping of custom IPFS API parameters to be sent along - with the regular parameters generated by the client - library + * **offline** ([*bool*](https://docs.python.org/3/library/functions.html#bool)) – Prevent the deamon from communicating with any remote IPFS node while performing the requested action? + * **opts** ([*dict*](https://docs.python.org/3/library/stdtypes.html#dict)) – A mapping of custom IPFS API parameters to be sent along with the regular parameters generated by the client library * Values specified here will always override their respective counterparts of the client library itself. - * **stream** (*bool*) – Return results incrementally as they arrive? + * **stream** ([*bool*](https://docs.python.org/3/library/functions.html#bool)) – Return results incrementally as they arrive? * Each method called with `stream=True` will return a generator instead of the documented value. If the return type is of type `list` then each item of the given list will be yielded separately; if it is of type `bytes` then arbitrary bags of bytes will be yielded that together form a stream; finally, if it is of type `dict` then the single dictonary item will be yielded once. - * **timeout** ([float](float)) – The number of seconds to wait of a daemon reply - before giving up + * **timeout** ([*float*](https://docs.python.org/3/library/functions.html#float)) – The number of seconds to wait of a daemon reply before giving up ```eval_rst +.. autoclientclass:: ipfshttpclient.Client(addr=DEFAULT_ADDR, base=DEFAULT_BASE) + :members: + :inherited-members: + :undoc-members: -.. 
autoclientclass:: ipfshttpclient.Client - :members: - :inherited-members: - :undoc-members: ``` diff --git a/ipfshttpclient/client/__init__.py b/ipfshttpclient/client/__init__.py index eeeb79f4..116e0b29 100644 --- a/ipfshttpclient/client/__init__.py +++ b/ipfshttpclient/client/__init__.py @@ -12,7 +12,7 @@ import multiaddr -DEFAULT_ADDR = multiaddr.Multiaddr(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_ADDR", '/dns/localhost/tcp/5001')) +DEFAULT_ADDR = multiaddr.Multiaddr(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_ADDR", '/dns/localhost/tcp/5001/http')) DEFAULT_BASE = str(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_BASE", 'api/v0')) VERSION_MINIMUM = "0.4.3" @@ -107,6 +107,9 @@ class Client(files.Base, miscellaneous.Base): as :func:`connect`, to prevent the client from checking for an active and compatible version of the daemon. """ + + __doc__ += base.ClientBase.__doc__ + bitswap = base.SectionProperty(bitswap.Section) block = base.SectionProperty(block.Section) bootstrap = base.SectionProperty(bootstrap.Section) diff --git a/ipfshttpclient/client/base.py b/ipfshttpclient/client/base.py index 7a9b21ae..f693e6cb 100644 --- a/ipfshttpclient/client/base.py +++ b/ipfshttpclient/client/base.py @@ -82,20 +82,15 @@ def chunk_size(self, value): class ClientBase(object): - """A TCP client for interacting with an IPFS daemon. - - A :class:`~ipfshttpclient.Client` instance will not actually establish a - connection to the daemon until at least one of it's methods is called. 
- + """ Parameters ---------- - addr : Union[str, multiaddr.Multiaddr] + addr : Union[bytes, str, multiaddr.Multiaddr] The `MultiAddr `_ describing the - daemon location, as used in the *API* key of - `go-ipfs *Addresses* section `_ - (defaults to ``/dns/localhost/tcp/5001``) + API daemon location, as used in the *API* key of + `go-ipfs Addresses section `_ - Supported formats are currently: + Supported addressing patterns are currently: * ``/{dns,dns4,dns6,ip4,ip6}//tcp/`` (HTTP) * ``/{dns,dns4,dns6,ip4,ip6}//tcp//http`` (HTTP) @@ -103,7 +98,8 @@ class ClientBase(object): Additional forms (proxying) may be supported in the future. base : str - Path of the deamon's API (currently always ``api/v0``) + The HTTP URL path prefix (or “base”) at which the API is exposed on the + API daemon chunk_size : int The size of the chunks to break uploaded files and text content into """ From 1cb96fbf7adf9ec3542edffab4f7c2ccdc2f35c0 Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Sun, 12 May 2019 21:27:52 +0200 Subject: [PATCH 12/14] Drop the long-since deprecated `client.files.file_ls` --- ipfshttpclient/client/files.py | 50 ---------------------------------- 1 file changed, 50 deletions(-) diff --git a/ipfshttpclient/client/files.py b/ipfshttpclient/client/files.py index 56d8d094..facaf5b3 100644 --- a/ipfshttpclient/client/files.py +++ b/ipfshttpclient/client/files.py @@ -310,56 +310,6 @@ def add(self, file, *files, **kwargs): return resp - @base.returns_single_item - def file_ls(self, multihash, **kwargs): - """Lists directory contents for Unix filesystem objects. - - The result contains size information. For files, the child size is the - total size of the file contents. For directories, the child size is the - IPFS link size. - - The path can be a prefixless reference; in this case, it is assumed - that it is an ``/ipfs/`` reference and not ``/ipns/``. - - .. 
code-block:: python - - >>> client.file_ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') - { - 'Arguments': {'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': - 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D'}, - 'Objects': { - 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': { - 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', - 'Size': 0, 'Type': 'Directory', - 'Links': [ - {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', - 'Name': 'Makefile', 'Size': 163, 'Type': 'File'}, - {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', - 'Name': 'example', 'Size': 1463, 'Type': 'File'}, - {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', - 'Name': 'home', 'Size': 3947, 'Type': 'Directory'}, - {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', - 'Name': 'lib', 'Size': 268261, 'Type': 'Directory'}, - {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', - 'Name': 'published-version', 'Size': 47, 'Type': 'File'} - ] - } - } - } - - Parameters - ---------- - multihash : str - The path to the object(s) to list links from - - Returns - ------- - dict - """ - args = (multihash,) - return self._client.request('/file/ls', args, decoder='json', **kwargs) - - def get(self, cid, **kwargs): """Downloads a file, or directory of files from IPFS. From b8b93c1763bc8e76ae0d17cb7061f5437744e785 Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Sun, 12 May 2019 22:05:30 +0200 Subject: [PATCH 13/14] =?UTF-8?q?Spaces=20=E2=86=92=20Tabs=20(except=20for?= =?UTF-8?q?=20alignment)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Most parts of the codebase were already using tabs before this and only needed small adjustments. 
--- ipfshttpclient/client/base.py | 13 +- ipfshttpclient/encoding.py | 572 +++++++++++++------------- ipfshttpclient/exceptions.py | 126 +++--- ipfshttpclient/http.py | 3 +- ipfshttpclient/multipart.py | 2 +- ipfshttpclient/requests_wrapper.py | 32 +- ipfshttpclient/utils.py | 168 ++++---- test/functional/test_files.py | 4 +- test/functional/test_miscellaneous.py | 8 +- test/functional/test_other.py | 4 +- test/functional/test_pin.py | 6 +- test/unit/test_utils.py | 162 ++++---- 12 files changed, 552 insertions(+), 548 deletions(-) diff --git a/ipfshttpclient/client/base.py b/ipfshttpclient/client/base.py index f693e6cb..a8aace3c 100644 --- a/ipfshttpclient/client/base.py +++ b/ipfshttpclient/client/base.py @@ -87,14 +87,15 @@ class ClientBase(object): ---------- addr : Union[bytes, str, multiaddr.Multiaddr] The `MultiAddr `_ describing the - API daemon location, as used in the *API* key of - `go-ipfs Addresses section `_ + API daemon location, as used in the *API* key of `go-ipfs Addresses + section + `_ Supported addressing patterns are currently: - - * ``/{dns,dns4,dns6,ip4,ip6}//tcp/`` (HTTP) - * ``/{dns,dns4,dns6,ip4,ip6}//tcp//http`` (HTTP) - * ``/{dns,dns4,dns6,ip4,ip6}//tcp//https`` (HTTPS) + + * ``/{dns,dns4,dns6,ip4,ip6}//tcp/`` (HTTP) + * ``/{dns,dns4,dns6,ip4,ip6}//tcp//http`` (HTTP) + * ``/{dns,dns4,dns6,ip4,ip6}//tcp//https`` (HTTPS) Additional forms (proxying) may be supported in the future. base : str diff --git a/ipfshttpclient/encoding.py b/ipfshttpclient/encoding.py index 12a02ef3..a1716756 100644 --- a/ipfshttpclient/encoding.py +++ b/ipfshttpclient/encoding.py @@ -3,7 +3,7 @@ .. note:: - The XML and ProtoBuf encoders are currently not functional. + The XML and ProtoBuf encoders are currently not functional. """ from __future__ import absolute_import @@ -18,314 +18,314 @@ class Encoding(object): - """Abstract base for a data parser/encoder interface. 
- """ - __metaclass__ = abc.ABCMeta - - is_stream = False - - @abc.abstractmethod - def parse_partial(self, raw): - """Parses the given data and yields all complete data sets that can - be built from this. - - Raises - ------ - ~ipfshttpclient.exceptions.DecodingError - - Parameters - ---------- - raw : bytes - Data to be parsed - - Returns - ------- - generator - """ - - def parse_finalize(self): - """Finalizes parsing based on remaining buffered data and yields the - remaining data sets. - - Raises - ------ - ~ipfshttpclient.exceptions.DecodingError - - Returns - ------- - generator - """ - return () - - def parse(self, raw): - """Returns a Python object decoded from the bytes of this encoding. - - Raises - ------ - ~ipfshttpclient.exceptions.DecodingError - - Parameters - ---------- - raw : bytes - Data to be parsed - - Returns - ------- - object - """ - results = list(self.parse_partial(raw)) - results.extend(self.parse_finalize()) - return results[0] if len(results) == 1 else results - - @abc.abstractmethod - def encode(self, obj): - """Serialize a raw object into corresponding encoding. - - Raises - ------ - ~ipfshttpclient.exceptions.EncodingError - - Parameters - ---------- - obj : object - Object to be encoded - """ + """Abstract base for a data parser/encoder interface. + """ + __metaclass__ = abc.ABCMeta + + is_stream = False + + @abc.abstractmethod + def parse_partial(self, raw): + """Parses the given data and yields all complete data sets that can + be built from this. + + Raises + ------ + ~ipfshttpclient.exceptions.DecodingError + + Parameters + ---------- + raw : bytes + Data to be parsed + + Returns + ------- + generator + """ + + def parse_finalize(self): + """Finalizes parsing based on remaining buffered data and yields the + remaining data sets. 
+ + Raises + ------ + ~ipfshttpclient.exceptions.DecodingError + + Returns + ------- + generator + """ + return () + + def parse(self, raw): + """Returns a Python object decoded from the bytes of this encoding. + + Raises + ------ + ~ipfshttpclient.exceptions.DecodingError + + Parameters + ---------- + raw : bytes + Data to be parsed + + Returns + ------- + object + """ + results = list(self.parse_partial(raw)) + results.extend(self.parse_finalize()) + return results[0] if len(results) == 1 else results + + @abc.abstractmethod + def encode(self, obj): + """Serialize a raw object into corresponding encoding. + + Raises + ------ + ~ipfshttpclient.exceptions.EncodingError + + Parameters + ---------- + obj : object + Object to be encoded + """ class Dummy(Encoding): - """Dummy parser/encoder that does nothing. - """ - name = "none" - is_stream = True + """Dummy parser/encoder that does nothing. + """ + name = "none" + is_stream = True - def parse_partial(self, raw): - """Yields the data passed into this method. + def parse_partial(self, raw): + """Yields the data passed into this method. - Parameters - ---------- - raw : bytes - Any kind of data + Parameters + ---------- + raw : bytes + Any kind of data - Returns - ------- - generator - """ - yield raw + Returns + ------- + generator + """ + yield raw - def encode(self, obj): - """Returns the bytes representation of the data passed into this - function. + def encode(self, obj): + """Returns the bytes representation of the data passed into this + function. - Parameters - ---------- - obj : object - Any Python object + Parameters + ---------- + obj : object + Any Python object - Returns - ------- - bytes - """ - return six.b(str(obj)) + Returns + ------- + bytes + """ + return six.b(str(obj)) class Json(Encoding): - """JSON parser/encoder that handles concatenated JSON. 
- """ - name = 'json' - - def __init__(self): - self._buffer = [] - self._decoder1 = codecs.getincrementaldecoder('utf-8')() - self._decoder2 = json.JSONDecoder() - self._lasterror = None - - def parse_partial(self, data): - """Incrementally decodes JSON data sets into Python objects. - - Raises - ------ - ~ipfshttpclient.exceptions.DecodingError - - Returns - ------- - generator - """ - try: - # Python 3 requires all JSON data to be a text string - lines = self._decoder1.decode(data, False).split("\n") - - # Add first input line to last buffer line, if applicable, to - # handle cases where the JSON string has been chopped in half - # at the network level due to streaming - if len(self._buffer) > 0 and self._buffer[-1] is not None: - self._buffer[-1] += lines[0] - self._buffer.extend(lines[1:]) - else: - self._buffer.extend(lines) - except UnicodeDecodeError as error: - raise exceptions.DecodingError('json', error) - - # Process data buffer - index = 0 - try: - # Process each line as separate buffer - #PERF: This way the `.lstrip()` call becomes almost always a NOP - # even if it does return a different string it will only - # have to allocate a new buffer for the currently processed - # line. - while index < len(self._buffer): - while self._buffer[index]: - # Make sure buffer does not start with whitespace - #PERF: `.lstrip()` does not reallocate if the string does - # not actually start with whitespace. 
- self._buffer[index] = self._buffer[index].lstrip() - - # Handle case where the remainder of the line contained - # only whitespace - if not self._buffer[index]: - self._buffer[index] = None - continue - - # Try decoding the partial data buffer and return results - # from this - data = self._buffer[index] - for index2 in range(index, len(self._buffer)): - # If decoding doesn't succeed with the currently - # selected buffer (very unlikely with our current - # class of input data) then retry with appending - # any other pending pieces of input data - # This will happen with JSON data that contains - # arbitrary new-lines: "{1:\n2,\n3:4}" - if index2 > index: - data += "\n" + self._buffer[index2] - - try: - (obj, offset) = self._decoder2.raw_decode(data) - except ValueError: - # Treat error as fatal if we have already added - # the final buffer to the input - if (index2 + 1) == len(self._buffer): - raise - else: - index = index2 - break - - # Decoding succeeded – yield result and shorten buffer - yield obj - if offset < len(self._buffer[index]): - self._buffer[index] = self._buffer[index][offset:] - else: - self._buffer[index] = None - index += 1 - except ValueError as error: - # It is unfortunately not possible to reliably detect whether - # parsing ended because of an error *within* the JSON string, or - # an unexpected *end* of the JSON string. - # We therefor have to assume that any error that occurs here - # *might* be related to the JSON parser hitting EOF and therefor - # have to postpone error reporting until `parse_finalize` is - # called. - self._lasterror = error - finally: - # Remove all processed buffers - del self._buffer[0:index] - - def parse_finalize(self): - """Raises errors for incomplete buffered data that could not be parsed - because the end of the input data has been reached. 
- - Raises - ------ - ~ipfshttpclient.exceptions.DecodingError - - Returns - ------- - tuple : Always empty - """ - try: - try: - # Raise exception for remaining bytes in bytes decoder - self._decoder1.decode(b'', True) - except UnicodeDecodeError as error: - raise exceptions.DecodingError('json', error) - - # Late raise errors that looked like they could have been fixed if - # the caller had provided more data - if self._buffer: - raise exceptions.DecodingError('json', self._lasterror) - finally: - # Reset state - self._buffer = [] - self._lasterror = None - self._decoder1.reset() - - return () - - def encode(self, obj): - """Returns ``obj`` serialized as JSON formatted bytes. - - Raises - ------ - ~ipfshttpclient.exceptions.EncodingError - - Parameters - ---------- - obj : Union[str, list, dict, int] - JSON serializable Python object - - Returns - ------- - bytes - """ - try: - result = json.dumps(obj, sort_keys=True, indent=None, - separators=(',', ':'), ensure_ascii=False) - if isinstance(result, six.text_type): - return result.encode("utf-8") - else: - return result - except (UnicodeEncodeError, TypeError) as error: - raise exceptions.EncodingError('json', error) + """JSON parser/encoder that handles concatenated JSON. + """ + name = 'json' + + def __init__(self): + self._buffer = [] + self._decoder1 = codecs.getincrementaldecoder('utf-8')() + self._decoder2 = json.JSONDecoder() + self._lasterror = None + + def parse_partial(self, data): + """Incrementally decodes JSON data sets into Python objects. 
+ + Raises + ------ + ~ipfshttpclient.exceptions.DecodingError + + Returns + ------- + generator + """ + try: + # Python 3 requires all JSON data to be a text string + lines = self._decoder1.decode(data, False).split("\n") + + # Add first input line to last buffer line, if applicable, to + # handle cases where the JSON string has been chopped in half + # at the network level due to streaming + if len(self._buffer) > 0 and self._buffer[-1] is not None: + self._buffer[-1] += lines[0] + self._buffer.extend(lines[1:]) + else: + self._buffer.extend(lines) + except UnicodeDecodeError as error: + raise exceptions.DecodingError('json', error) + + # Process data buffer + index = 0 + try: + # Process each line as separate buffer + #PERF: This way the `.lstrip()` call becomes almost always a NOP + # even if it does return a different string it will only + # have to allocate a new buffer for the currently processed + # line. + while index < len(self._buffer): + while self._buffer[index]: + # Make sure buffer does not start with whitespace + #PERF: `.lstrip()` does not reallocate if the string does + # not actually start with whitespace. 
+ self._buffer[index] = self._buffer[index].lstrip() + + # Handle case where the remainder of the line contained + # only whitespace + if not self._buffer[index]: + self._buffer[index] = None + continue + + # Try decoding the partial data buffer and return results + # from this + data = self._buffer[index] + for index2 in range(index, len(self._buffer)): + # If decoding doesn't succeed with the currently + # selected buffer (very unlikely with our current + # class of input data) then retry with appending + # any other pending pieces of input data + # This will happen with JSON data that contains + # arbitrary new-lines: "{1:\n2,\n3:4}" + if index2 > index: + data += "\n" + self._buffer[index2] + + try: + (obj, offset) = self._decoder2.raw_decode(data) + except ValueError: + # Treat error as fatal if we have already added + # the final buffer to the input + if (index2 + 1) == len(self._buffer): + raise + else: + index = index2 + break + + # Decoding succeeded – yield result and shorten buffer + yield obj + if offset < len(self._buffer[index]): + self._buffer[index] = self._buffer[index][offset:] + else: + self._buffer[index] = None + index += 1 + except ValueError as error: + # It is unfortunately not possible to reliably detect whether + # parsing ended because of an error *within* the JSON string, or + # an unexpected *end* of the JSON string. + # We therefor have to assume that any error that occurs here + # *might* be related to the JSON parser hitting EOF and therefor + # have to postpone error reporting until `parse_finalize` is + # called. + self._lasterror = error + finally: + # Remove all processed buffers + del self._buffer[0:index] + + def parse_finalize(self): + """Raises errors for incomplete buffered data that could not be parsed + because the end of the input data has been reached. 
+ + Raises + ------ + ~ipfshttpclient.exceptions.DecodingError + + Returns + ------- + tuple : Always empty + """ + try: + try: + # Raise exception for remaining bytes in bytes decoder + self._decoder1.decode(b'', True) + except UnicodeDecodeError as error: + raise exceptions.DecodingError('json', error) + + # Late raise errors that looked like they could have been fixed if + # the caller had provided more data + if self._buffer: + raise exceptions.DecodingError('json', self._lasterror) + finally: + # Reset state + self._buffer = [] + self._lasterror = None + self._decoder1.reset() + + return () + + def encode(self, obj): + """Returns ``obj`` serialized as JSON formatted bytes. + + Raises + ------ + ~ipfshttpclient.exceptions.EncodingError + + Parameters + ---------- + obj : Union[str, list, dict, int] + JSON serializable Python object + + Returns + ------- + bytes + """ + try: + result = json.dumps(obj, sort_keys=True, indent=None, + separators=(',', ':'), ensure_ascii=False) + if isinstance(result, six.text_type): + return result.encode("utf-8") + else: + return result + except (UnicodeEncodeError, TypeError) as error: + raise exceptions.EncodingError('json', error) class Protobuf(Encoding): - """Protobuf parser/encoder that handles protobuf.""" - name = 'protobuf' + """Protobuf parser/encoder that handles protobuf.""" + name = 'protobuf' class Xml(Encoding): - """XML parser/encoder that handles XML.""" - name = 'xml' + """XML parser/encoder that handles XML.""" + name = 'xml' # encodings supported by the IPFS api (default is JSON) __encodings = { - Dummy.name: Dummy, - Json.name: Json, - Protobuf.name: Protobuf, - Xml.name: Xml + Dummy.name: Dummy, + Json.name: Json, + Protobuf.name: Protobuf, + Xml.name: Xml } def get_encoding(name): - """ - Returns an Encoder object for the named encoding - - Raises - ------ - ~ipfshttpclient.exceptions.EncoderMissingError - - Parameters - ---------- - name : str - Encoding name. 
Supported options: - - * ``"none"`` - * ``"json"`` - * ``"protobuf"`` - * ``"xml"`` - """ - try: - return __encodings[name.lower()]() - except KeyError: - raise exceptions.EncoderMissingError(name) + """ + Returns an Encoder object for the named encoding + + Raises + ------ + ~ipfshttpclient.exceptions.EncoderMissingError + + Parameters + ---------- + name : str + Encoding name. Supported options: + + * ``"none"`` + * ``"json"`` + * ``"protobuf"`` + * ``"xml"`` + """ + try: + return __encodings[name.lower()]() + except KeyError: + raise exceptions.EncoderMissingError(name) diff --git a/ipfshttpclient/exceptions.py b/ipfshttpclient/exceptions.py index 3739679b..4d25c979 100644 --- a/ipfshttpclient/exceptions.py +++ b/ipfshttpclient/exceptions.py @@ -2,28 +2,28 @@ """ The class hierachy for exceptions is:: - Error - +-- VersionMismatch - +-- AddressError - +-- EncoderError - | +-- EncoderMissingError - | +-- EncodingError - | +-- DecodingError - +-- CommunicationError - +-- ProtocolError - +-- StatusError - +-- ErrorResponse - +-- PartialErrorResponse - +-- ConnectionError - +-- TimeoutError + Error + +-- VersionMismatch + +-- AddressError + +-- EncoderError + | +-- EncoderMissingError + | +-- EncodingError + | +-- DecodingError + +-- CommunicationError + +-- ProtocolError + +-- StatusError + +-- ErrorResponse + +-- PartialErrorResponse + +-- ConnectionError + +-- TimeoutError """ import multiaddr.exceptions class Error(Exception): - """Base class for all exceptions in this module.""" - pass + """Base class for all exceptions in this module.""" + pass class AddressError(Error, multiaddr.exceptions.Error): @@ -36,94 +36,94 @@ def __init__(self, addr): class VersionMismatch(Error): - """Raised when daemon version is not supported by this client version.""" + """Raised when daemon version is not supported by this client version.""" - def __init__(self, current, minimum, maximum): - self.current = current - self.minimum = minimum - self.maximum = maximum + def 
__init__(self, current, minimum, maximum): + self.current = current + self.minimum = minimum + self.maximum = maximum - msg = "Unsupported daemon version '{}' (not in range: {} – {})".format( - current, minimum, maximum - ) - Error.__init__(self, msg) + msg = "Unsupported daemon version '{}' (not in range: {} – {})".format( + current, minimum, maximum + ) + Error.__init__(self, msg) ############### # encoding.py # ############### class EncoderError(Error): - """Base class for all encoding and decoding related errors.""" + """Base class for all encoding and decoding related errors.""" - def __init__(self, message, encoder_name): - self.encoder_name = encoder_name + def __init__(self, message, encoder_name): + self.encoder_name = encoder_name - Error.__init__(self, message) + Error.__init__(self, message) class EncoderMissingError(EncoderError): - """Raised when a requested encoder class does not actually exist.""" + """Raised when a requested encoder class does not actually exist.""" - def __init__(self, encoder_name): - msg = "Unknown encoder: '{}'".format(encoder_name) - EncoderError.__init__(self, msg, encoder_name) + def __init__(self, encoder_name): + msg = "Unknown encoder: '{}'".format(encoder_name) + EncoderError.__init__(self, msg, encoder_name) class EncodingError(EncoderError): - """Raised when encoding a Python object into a byte string has failed - due to some problem with the input data.""" + """Raised when encoding a Python object into a byte string has failed + due to some problem with the input data.""" - def __init__(self, encoder_name, original): - self.original = original + def __init__(self, encoder_name, original): + self.original = original - msg = "Object encoding error: {}".format(original) - EncoderError.__init__(self, msg, encoder_name) + msg = "Object encoding error: {}".format(original) + EncoderError.__init__(self, msg, encoder_name) class DecodingError(EncoderError): - """Raised when decoding a byte string to a Python object has failed 
due to - some problem with the input data.""" + """Raised when decoding a byte string to a Python object has failed due to + some problem with the input data.""" - def __init__(self, encoder_name, original): - self.original = original + def __init__(self, encoder_name, original): + self.original = original - msg = "Object decoding error: {}".format(original) - EncoderError.__init__(self, msg, encoder_name) + msg = "Object decoding error: {}".format(original) + EncoderError.__init__(self, msg, encoder_name) ########### # http.py # ########### class CommunicationError(Error): - """Base class for all network communication related errors.""" + """Base class for all network communication related errors.""" - def __init__(self, original, _message=None): - self.original = original + def __init__(self, original, _message=None): + self.original = original - if _message: - msg = _message - else: - msg = "{}: {}".format(original.__class__.__name__, str(original)) - Error.__init__(self, msg) + if _message: + msg = _message + else: + msg = "{}: {}".format(original.__class__.__name__, str(original)) + Error.__init__(self, msg) class ProtocolError(CommunicationError): - """Raised when parsing the response from the daemon has failed. + """Raised when parsing the response from the daemon has failed. 
- This can most likely occur if the service on the remote end isn't in fact - an IPFS daemon.""" + This can most likely occur if the service on the remote end isn't in fact + an IPFS daemon.""" class StatusError(CommunicationError): - """Raised when the daemon responds with an error to our request.""" + """Raised when the daemon responds with an error to our request.""" class ErrorResponse(StatusError): - """Raised when the daemon has responded with an error message because the - requested operation could not be carried out.""" + """Raised when the daemon has responded with an error message because the + requested operation could not be carried out.""" - def __init__(self, message, original): - StatusError.__init__(self, original, message) + def __init__(self, message, original): + StatusError.__init__(self, original, message) class PartialErrorResponse(ErrorResponse): @@ -139,8 +139,8 @@ def __init__(self, message, original, partial): class ConnectionError(CommunicationError): - """Raised when connecting to the service has failed on the socket layer.""" + """Raised when connecting to the service has failed on the socket layer.""" class TimeoutError(CommunicationError): - """Raised when the daemon didn't respond in time.""" + """Raised when the daemon didn't respond in time.""" diff --git a/ipfshttpclient/http.py b/ipfshttpclient/http.py index a2aa1fdf..bed55601 100644 --- a/ipfshttpclient/http.py +++ b/ipfshttpclient/http.py @@ -262,7 +262,8 @@ def _request(self, method, url, params, parser, stream=False, files=None, headers={}, data=None, timeout=120): # Do HTTP request (synchronously) res = self._do_request(method, url, params=params, stream=stream, - files=files, headers=headers, data=data, timeout=timeout) + files=files, headers=headers, data=data, + timeout=timeout) # Raise exception for response status # (optionally incorpating the response message, if applicable) diff --git a/ipfshttpclient/multipart.py b/ipfshttpclient/multipart.py index eb502237..f4cd0fee 
100644 --- a/ipfshttpclient/multipart.py +++ b/ipfshttpclient/multipart.py @@ -354,7 +354,7 @@ def glob_compile(pat): stuff = stuff.replace('\\', r'\\') else: chunks = [] - k = i+2 if pat[i] == '!' else i + 1 + k = i + 2 if pat[i] == '!' else i + 1 while True: k = pat.find('-', k, j) if k < 0: diff --git a/ipfshttpclient/requests_wrapper.py b/ipfshttpclient/requests_wrapper.py index 1cbcc6e4..b81c8752 100644 --- a/ipfshttpclient/requests_wrapper.py +++ b/ipfshttpclient/requests_wrapper.py @@ -138,9 +138,9 @@ class PoolManager(urllib3.PoolManager): def __init__(self, *args, **kwargs): super(PoolManager, self).__init__(*args, **kwargs) - # Additionally to adding our variant of the usual HTTP and HTTPS - # pool classes, also add these for some variants of the default schemes - # that are limited to some specific address family only + # Additionally to adding our variant of the usual HTTP and HTTPS + # pool classes, also add these for some variants of the default schemes + # that are limited to some specific address family only self.pool_classes_by_scheme = {} for scheme, ConnectionPool in (("http", HTTPConnectionPool), ("https", HTTPSConnectionPool)): self.pool_classes_by_scheme[scheme] = ConnectionPool @@ -152,7 +152,7 @@ def __init__(self, *args, **kwargs): # will be passed down to the `*ConnectionPool`s and finally to the actual # `*Connection`s as parameter def _new_pool(self, scheme, host, port, request_context=None): - # Copied from `urllib3` to *not* surpress the `scheme` parameter + # Copied from `urllib3` to *not* surpress the `scheme` parameter pool_cls = self.pool_classes_by_scheme[scheme] if request_context is None: request_context = self.connection_pool_kw.copy() @@ -235,35 +235,35 @@ def request(self, method, url, *args, **kwargs): # Re-implement the top-level “session-less” API def request(method, url, **kwargs): - with Session() as session: - return session.request(method=method, url=url, **kwargs) + with Session() as session: + return 
session.request(method=method, url=url, **kwargs) def get(url, params=None, **kwargs): - kwargs.setdefault('allow_redirects', True) - return request('get', url, params=params, **kwargs) + kwargs.setdefault('allow_redirects', True) + return request('get', url, params=params, **kwargs) def options(url, **kwargs): - kwargs.setdefault('allow_redirects', True) - return request('options', url, **kwargs) + kwargs.setdefault('allow_redirects', True) + return request('options', url, **kwargs) def head(url, **kwargs): - kwargs.setdefault('allow_redirects', False) - return request('head', url, **kwargs) + kwargs.setdefault('allow_redirects', False) + return request('head', url, **kwargs) def post(url, data=None, json=None, **kwargs): - return request('post', url, data=data, json=json, **kwargs) + return request('post', url, data=data, json=json, **kwargs) def put(url, data=None, **kwargs): - return request('put', url, data=data, **kwargs) + return request('put', url, data=data, **kwargs) def patch(url, data=None, **kwargs): - return request('patch', url, data=data, **kwargs) + return request('patch', url, data=data, **kwargs) def delete(url, **kwargs): - return request('delete', url, **kwargs) + return request('delete', url, **kwargs) diff --git a/ipfshttpclient/utils.py b/ipfshttpclient/utils.py index a7129d04..b82feb64 100644 --- a/ipfshttpclient/utils.py +++ b/ipfshttpclient/utils.py @@ -4,11 +4,11 @@ from __future__ import absolute_import try: - import collections.abc + import collections.abc except ImportError: - #PY2: The relevant classes used to be somewhere else - class collections: - import collections as abc + #PY2: The relevant classes used to be somewhere else + class collections: + import collections as abc import mimetypes import os import six @@ -46,101 +46,101 @@ def convert_path(path): def guess_mimetype(filename): - """Guesses the mimetype of a file based on the given ``filename``. + """Guesses the mimetype of a file based on the given ``filename``. - .. 
code-block:: python + .. code-block:: python - >>> guess_mimetype('example.txt') - 'text/plain' - >>> guess_mimetype('/foo/bar/example') - 'application/octet-stream' + >>> guess_mimetype('example.txt') + 'text/plain' + >>> guess_mimetype('/foo/bar/example') + 'application/octet-stream' - Parameters - ---------- - filename : str - The file name or path for which the mimetype is to be guessed - """ - fn = os.path.basename(filename) - return mimetypes.guess_type(fn)[0] or 'application/octet-stream' + Parameters + ---------- + filename : str + The file name or path for which the mimetype is to be guessed + """ + fn = os.path.basename(filename) + return mimetypes.guess_type(fn)[0] or 'application/octet-stream' def clean_file(file): - """Returns a tuple containing a ``file``-like object and a close indicator. + """Returns a tuple containing a ``file``-like object and a close indicator. - This ensures the given file is opened and keeps track of files that should - be closed after use (files that were not open prior to this function call). + This ensures the given file is opened and keeps track of files that should + be closed after use (files that were not open prior to this function call). 
- Raises - ------ - OSError : Accessing the given file path failed + Raises + ------ + OSError : Accessing the given file path failed - Parameters - ---------- - file : Union[str, bytes, os.PathLike, io.IOBase, int] - A filepath or ``file``-like object that may or may not need to be - opened - """ - if isinstance(file, int): - return os.fdopen(file, 'rb', closefd=False), True - elif not hasattr(file, 'read'): - return open(convert_path(file), 'rb'), True - else: - return file, False + Parameters + ---------- + file : Union[str, bytes, os.PathLike, io.IOBase, int] + A filepath or ``file``-like object that may or may not need to be + opened + """ + if isinstance(file, int): + return os.fdopen(file, 'rb', closefd=False), True + elif not hasattr(file, 'read'): + return open(convert_path(file), 'rb'), True + else: + return file, False def clean_files(files): - """Generates tuples with a ``file``-like object and a close indicator. + """Generates tuples with a ``file``-like object and a close indicator. - This is a generator of tuples, where the first element is the file object - and the second element is a boolean which is True if this module opened the - file (and thus should close it). + This is a generator of tuples, where the first element is the file object + and the second element is a boolean which is True if this module opened the + file (and thus should close it). 
- Raises - ------ - OSError : Accessing the given file path failed + Raises + ------ + OSError : Accessing the given file path failed - Parameters - ---------- - files : Union[str, bytes, os.PathLike, io.IOBase, int, collections.abc.Iterable] - Collection or single instance of a filepath and file-like object - """ - if not isinstance(files, path_types) and not hasattr(files, "read"): - for f in files: - yield clean_file(f) - else: - yield clean_file(files) + Parameters + ---------- + files : Union[str, bytes, os.PathLike, io.IOBase, int, collections.abc.Iterable] + Collection or single instance of a filepath and file-like object + """ + if not isinstance(files, path_types) and not hasattr(files, "read"): + for f in files: + yield clean_file(f) + else: + yield clean_file(files) class return_field(object): - """Decorator that returns the given field of a json response. - - Parameters - ---------- - field : object - The response field to be returned for all invocations - """ - def __init__(self, field): - self.field = field - - def __call__(self, cmd): - """Wraps a command so that only a specified field is returned. - - Parameters - ---------- - cmd : callable - A command that is intended to be wrapped - """ - @wraps(cmd) - def wrapper(*args, **kwargs): - """Returns the specified field of the command invocation. - - Parameters - ---------- - args : list - Positional parameters to pass to the wrapped callable - kwargs : dict - Named parameter to pass to the wrapped callable - """ - res = cmd(*args, **kwargs) - return res[self.field] - return wrapper + """Decorator that returns the given field of a json response. + + Parameters + ---------- + field : object + The response field to be returned for all invocations + """ + def __init__(self, field): + self.field = field + + def __call__(self, cmd): + """Wraps a command so that only a specified field is returned. 
+ + Parameters + ---------- + cmd : callable + A command that is intended to be wrapped + """ + @wraps(cmd) + def wrapper(*args, **kwargs): + """Returns the specified field of the command invocation. + + Parameters + ---------- + args : list + Positional parameters to pass to the wrapped callable + kwargs : dict + Named parameter to pass to the wrapped callable + """ + res = cmd(*args, **kwargs) + return res[self.field] + return wrapper diff --git a/test/functional/test_files.py b/test/functional/test_files.py index b4a62e79..1dc15dc8 100644 --- a/test/functional/test_files.py +++ b/test/functional/test_files.py @@ -123,8 +123,8 @@ def test_add_single_from_str_with_dir(client, cleanup_pins): dir_hash = None for item in res: - if item["Name"] == "": - dir_hash = item["Hash"] + if item["Name"] == "": + dir_hash = item["Hash"] assert dir_hash in client.pin.ls(type="recursive")["Keys"] diff --git a/test/functional/test_miscellaneous.py b/test/functional/test_miscellaneous.py index 132681b8..88de3833 100644 --- a/test/functional/test_miscellaneous.py +++ b/test/functional/test_miscellaneous.py @@ -31,13 +31,13 @@ def check_pid_exists(pid): if os.name == "posix": import errno if pid < 0: - return False + return False try: - os.kill(pid, 0) + os.kill(pid, 0) except OSError as e: - return e.errno == errno.EPERM + return e.errno == errno.EPERM else: - return True + return True else: import ctypes kernel32 = ctypes.windll.kernel32 diff --git a/test/functional/test_other.py b/test/functional/test_other.py index 4773502f..32c6c00e 100644 --- a/test/functional/test_other.py +++ b/test/functional/test_other.py @@ -9,7 +9,9 @@ def test_ipfs_node_available(): Dummy test to ensure that running the tests without a daemon produces a failure, since we think it's unlikely that people running tests want this """ - assert conftest.is_available(), "Functional tests require an IPFS node to be available at: {0}".format(ipfshttpclient.DEFAULT_ADDR) + assert conftest.is_available(), \ + 
"Functional tests require an IPFS node to be available at: {0}" \ + .format(ipfshttpclient.DEFAULT_ADDR) def test_add_json(client, cleanup_pins): diff --git a/test/functional/test_pin.py b/test/functional/test_pin.py index 195190a3..b1fc30c9 100644 --- a/test/functional/test_pin.py +++ b/test/functional/test_pin.py @@ -24,12 +24,12 @@ def resources(client): def is_pinned(client, path): error_msg = None try: - resp = client.pin.ls(path) - assert path.split("/")[-1] in resp["Keys"] + resp = client.pin.ls(path) + assert path.split("/")[-1] in resp["Keys"] except ipfshttpclient.exceptions.ErrorResponse as exc: error_msg = exc.args[0] if "not pinned" in error_msg: - return False + return False raise return True diff --git a/test/unit/test_utils.py b/test/unit/test_utils.py index bee8924a..38f1812d 100644 --- a/test/unit/test_utils.py +++ b/test/unit/test_utils.py @@ -11,95 +11,95 @@ import ipfshttpclient.utils as utils class TestUtils(unittest.TestCase): - """Contains unit tests for utils.py. + """Contains unit tests for utils.py. - Public methods: - test_guess_mimetype -- tests utils.guess_mimetype() - test_ls_dir -- tests utils.ls_dir() - test_clean_file_opened -- tests utils.clean_file() with a stringIO object - test_clean_file_unopened -- tests utils.clean_file() with a filepath - test_clean_files_single -- tests utils.clean_files() with a filepath - test_clean_files_list -- tests utils.clean_files() with a list of files - test_file_size -- tests utils.file_size() - test_return_field_init -- tests utils.return_field.__init__() - test_return_field_call -- tests utils.return_field.__call__() - """ - def test_guess_mimetype(self): - """Tests utils.guess_mimetype(). 
+ Public methods: + test_guess_mimetype -- tests utils.guess_mimetype() + test_ls_dir -- tests utils.ls_dir() + test_clean_file_opened -- tests utils.clean_file() with a stringIO object + test_clean_file_unopened -- tests utils.clean_file() with a filepath + test_clean_files_single -- tests utils.clean_files() with a filepath + test_clean_files_list -- tests utils.clean_files() with a list of files + test_file_size -- tests utils.file_size() + test_return_field_init -- tests utils.return_field.__init__() + test_return_field_call -- tests utils.return_field.__call__() + """ + def test_guess_mimetype(self): + """Tests utils.guess_mimetype(). - Guesses the mimetype of the requirements.txt file - located in the project's root directory. - """ - path = os.path.join(os.path.dirname(__file__), - "..", "..", "requirements.txt") - assert utils.guess_mimetype(path) == "text/plain" + Guesses the mimetype of the requirements.txt file + located in the project's root directory. + """ + path = os.path.join(os.path.dirname(__file__), + "..", "..", "requirements.txt") + assert utils.guess_mimetype(path) == "text/plain" - def test_clean_file_opened(self): - """Tests utils.clean_file() with a stringIO object.""" - string_io = io.StringIO(u'Mary had a little lamb') - f, opened = utils.clean_file(string_io) - assert hasattr(f, 'read') - assert not opened - # Closing stringIO after test assertions. - f.close() + def test_clean_file_opened(self): + """Tests utils.clean_file() with a stringIO object.""" + string_io = io.StringIO(u'Mary had a little lamb') + f, opened = utils.clean_file(string_io) + assert hasattr(f, 'read') + assert not opened + # Closing stringIO after test assertions. + f.close() - def test_clean_file_unopened(self): - """Tests utils.clean_file() with a filepath. + def test_clean_file_unopened(self): + """Tests utils.clean_file() with a filepath. - This test relies on the openability of the file 'fsdfgh' - located in 'test/functional/fake_dir'. 
- """ - path = os.path.join(os.path.dirname(__file__), - "..", "functional", "fake_dir", "fsdfgh") - f, opened = utils.clean_file(path) - assert hasattr(f, 'read') - assert opened - # Closing file after test assertions. - f.close() + This test relies on the openability of the file 'fsdfgh' + located in 'test/functional/fake_dir'. + """ + path = os.path.join(os.path.dirname(__file__), + "..", "functional", "fake_dir", "fsdfgh") + f, opened = utils.clean_file(path) + assert hasattr(f, 'read') + assert opened + # Closing file after test assertions. + f.close() - def test_clean_files_single(self): - """Tests utils.clean_files() with a singular filepath. + def test_clean_files_single(self): + """Tests utils.clean_files() with a singular filepath. - This test relies on the openability of the file 'fsdfgh' - located in 'test/functional/fake_dir'. - """ - path = os.path.join(os.path.dirname(__file__), - "..", "functional", "fake_dir", "fsdfgh") - gen = utils.clean_files(path) - for tup in gen: - assert hasattr(tup[0], 'read') - assert tup[1] - # Closing file after test assertions. - tup[0].close() + This test relies on the openability of the file 'fsdfgh' + located in 'test/functional/fake_dir'. + """ + path = os.path.join(os.path.dirname(__file__), + "..", "functional", "fake_dir", "fsdfgh") + gen = utils.clean_files(path) + for tup in gen: + assert hasattr(tup[0], 'read') + assert tup[1] + # Closing file after test assertions. + tup[0].close() - def test_clean_files_list(self): - """Tests utils.clean_files() with a list of files/stringIO objects.""" - path = os.path.join(os.path.dirname(__file__), - "..", "functional", "fake_dir", "fsdfgh") - string_io = io.StringIO(u'Mary had a little lamb') - files = [path, string_io] - gen = utils.clean_files(files) - for i in range(0, 2): - tup = next(gen) - assert hasattr(tup[0], 'read') - if i == 0: - assert tup[1] - else: - assert not tup[1] - # Closing files/stringIO objects after test assertions. 
- tup[0].close() + def test_clean_files_list(self): + """Tests utils.clean_files() with a list of files/stringIO objects.""" + path = os.path.join(os.path.dirname(__file__), + "..", "functional", "fake_dir", "fsdfgh") + string_io = io.StringIO(u'Mary had a little lamb') + files = [path, string_io] + gen = utils.clean_files(files) + for i in range(0, 2): + tup = next(gen) + assert hasattr(tup[0], 'read') + if i == 0: + assert tup[1] + else: + assert not tup[1] + # Closing files/stringIO objects after test assertions. + tup[0].close() - def test_return_field_init(self): - """Tests utils.return_field.__init__().""" - return_field = utils.return_field('Hash') - assert return_field.field == 'Hash' + def test_return_field_init(self): + """Tests utils.return_field.__init__().""" + return_field = utils.return_field('Hash') + assert return_field.field == 'Hash' - def test_return_field_call(self): - """Tests utils.return_field.__call__().""" - expected_hash = u'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' + def test_return_field_call(self): + """Tests utils.return_field.__call__().""" + expected_hash = u'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' - @utils.return_field('Hash') - def wrapper(string, *args, **kwargs): - resp = {'Hash': expected_hash, 'string': string} - return resp - assert wrapper('Mary had a little lamb') == expected_hash + @utils.return_field('Hash') + def wrapper(string, *args, **kwargs): + resp = {'Hash': expected_hash, 'string': string} + return resp + assert wrapper('Mary had a little lamb') == expected_hash From cb6d1b0ded3d9477bd9c1d7f0275915986dc3946 Mon Sep 17 00:00:00 2001 From: Alexander Schlarb Date: Sun, 12 May 2019 22:17:56 +0200 Subject: [PATCH 14/14] Fix codestyle violations --- ipfshttpclient/requests_wrapper.py | 6 ++++-- tox.ini | 9 ++++++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/ipfshttpclient/requests_wrapper.py b/ipfshttpclient/requests_wrapper.py index b81c8752..e20c6e88 100644 --- 
a/ipfshttpclient/requests_wrapper.py +++ b/ipfshttpclient/requests_wrapper.py @@ -99,7 +99,7 @@ def _new_conn(self): try: conn = create_connection( (self._dns_host, self.port), self.timeout, **extra_kw) - except socket.timeout as e: + except socket.timeout: raise urllib3.exceptions.ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout)) @@ -120,13 +120,14 @@ class HTTPSConnection(ConnectionOverrideMixin, urllib3.connection.HTTPSConnectio def __init__(self, *args, **kw): self.family = _kw_scheme_to_family(kw, "https") super(HTTPSConnection, self).__init__(*args, **kw) - + # Override the higher-level `urllib3` ConnectionPool objects that instantiate # one or more Connection objects and dispatch work between them class HTTPConnectionPool(urllib3.HTTPConnectionPool): ConnectionCls = HTTPConnection + class HTTPSConnectionPool(urllib3.HTTPConnectionPool): ConnectionCls = HTTPSConnection @@ -238,6 +239,7 @@ def request(method, url, **kwargs): with Session() as session: return session.request(method=method, url=url, **kwargs) + def get(url, params=None, **kwargs): kwargs.setdefault('allow_redirects', True) return request('get', url, params=params, **kwargs) diff --git a/tox.ini b/tox.ini index 1cd971df..fbbe0725 100644 --- a/tox.ini +++ b/tox.ini @@ -41,6 +41,7 @@ exclude = .git,.tox,+junk,coverage,dist,doc,*egg,build,tools,test/unit,docs,*__i # E221: Multiple spaces before operator # E241: Multiple spaces after ',': Breaks element alignment collections +# E251: Spaces around '=' on parameter assignment # E262: Inline comment should start with '# ': Breaks tagged comments (ie: '#TODO: ') # E265: Block comment should start with '# ': ^ # E266: Too many leading '#' for block comment: Breaks declaring mega-blocks (ie: '### Section') @@ -48,16 +49,18 @@ exclude = .git,.tox,+junk,coverage,dist,doc,*egg,build,tools,test/unit,docs,*__i # W292: No newline at end of file # W391: Blank line at end of file (sometimes trigged instead 
of the above!?) # F403: `from import *` used; unable to detect undefined names ←– Probably should be fixed… -ignore = E221,E241,E262,E265,E266,E303,W292,W391,F403 +ignore = E221,E241,E251,E262,E265,E266,E303,W292,W391,F403 use-flake8-tabs = true max-line-length = 100 tab-width = 4 # E701: Multiple statements on one line -# - multipart.py: Lots of `yield from` polyfills using `for chunk in X: yield chunk` -# - test_*.py: Aligning `assert … not in …` and `assert … in …` kind of statements +# - multipart.py: Lots of `yield from` polyfills using `for chunk in X: yield chunk` +# - requests_wrapper.py: Lots of symbols exported that we specifically don't use but that make sense in a reusable module +# - test_*.py: Aligning `assert … not in …` and `assert … in …` kind of statements per-file-ignores = ./ipfshttpclient/multipart.py:E701 + ./ipfshttpclient/requests_wrapper.py:E401,E402,F401 ./test/functional/test_*.py:E272 [pytest]