diff --git a/.travis.yml b/.travis.yml
index 851567b4..70318460 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,12 +9,12 @@ matrix:
     env: TOXENV=py35
   - python: "3.6"
     env: TOXENV=py36
-  - python: "3.6"
-    env: TOXENV=codestyle
-    before_install: true
+  - python: "3.7"
+    dist: xenial
+    env: TOXENV=py37
 
 before_install:
-  - wget "https://dist.ipfs.io/go-ipfs/v0.4.14/go-ipfs_v0.4.14_linux-amd64.tar.gz" -O /tmp/ipfs.tar.gz
+  - wget "https://dist.ipfs.io/go-ipfs/v0.4.18/go-ipfs_v0.4.18_linux-amd64.tar.gz" -O /tmp/ipfs.tar.gz
   - mkdir -p $HOME/bin
   - pushd . && cd $HOME/bin && tar -xzvf /tmp/ipfs.tar.gz && popd
   - export PATH="$HOME/bin/go-ipfs:$PATH"
@@ -22,4 +22,4 @@ before_install:
 
 install:
   - pip install tox
-script: tox
+script: tox
\ No newline at end of file
diff --git a/README.md b/README.md
index a63c03dd..c7916d84 100644
--- a/README.md
+++ b/README.md
@@ -4,18 +4,14 @@
 [![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](https://webchat.freenode.net/?channels=%23ipfs)
 [![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme)
 [![](https://img.shields.io/pypi/v/ipfsapi.svg?style=flat-square)](https://pypi.python.org/pypi/ipfsapi)
-[![Build Status](https://travis-ci.org/ipfs/py-ipfs-api.svg?branch=master)](https://travis-ci.org/ipfs/py-ipfs-api)
 
 ![Python IPFS HTTP Client Library](https://ipfs.io/ipfs/QmQJ68PFMDdAsgCZvA1UVzzn18asVcf7HVvCDgpjiSCAse)
 
-Check out [the client API reference](https://ipfs.io/ipns/QmZ86ow1byeyhNRJEatWxGPJKcnQKG7s51MtbHdxxUddTH/Software/Python/ipfsapi/) for the full command reference.
+## Deprecation Notice
 
-**Important:** The `py-ipfs-api` PIP package and Python module have both been renamed to `ipfsapi` (no dash, lower-case `a`).
-The legacy `ipfs-api`/`ipfsApi` package/module will only work for IPFS 0.3.x and Python 2 and is deprecated. [Please upgrade](#important-changes-from-ipfsapi-02x)!
+**Important:** The `ipfsapi` PIP package and Python module have both been renamed to `ipfshttpclient` and this library has been converted into a thin wrapper around that other library. Only critical bug-fixes will be accepted for this package. Please see the [migration notes on the new package](https://github.com/ipfs/py-ipfs-http-client/blob/master/README.md#important-changes-from-ipfsapi-04x) for details on how to proceed.
 
-**Note:** This library constantly has to change to stay compatible with the IPFS HTTP API.
-Currently, this library is tested against [go-ipfs v0.4.10](https://github.com/ipfs/go-ipfs/releases/tag/v0.4.10).
-You may experience compatibility issues when attempting to use it with other versions of go-ipfs.
+*The remainder of this README remains as a historical curiosity and will not be updated anymore.*
 
 ## Table of Contents
 
diff --git a/ipfsapi/__init__.py b/ipfsapi/__init__.py
index 45cd4f5d..87578998 100644
--- a/ipfsapi/__init__.py
+++ b/ipfsapi/__init__.py
@@ -2,6 +2,15 @@
 
 from __future__ import absolute_import
 
+import warnings
+warnings.warn(
+    "The `ipfsapi` library is deprecated and will stop receiving updates on "
+    "2019-12-31! If you are on Python 3.5+, please enable and fix all "
+    "Python deprecation warnings (CPython flag `-Wd`) and switch to the new "
+    "`ipfshttpclient` library name. 
Python 2.7 and 3.4 will not be supported " + "by the new library, so please upgrade.", FutureWarning, stacklevel=2 +) + from .version import __version__ ########################### diff --git a/ipfsapi/client.py b/ipfsapi/client.py deleted file mode 100644 index 4a517f5b..00000000 --- a/ipfsapi/client.py +++ /dev/null @@ -1,2409 +0,0 @@ -# -*- coding: utf-8 -*- -"""IPFS API Bindings for Python. - -Classes: - - * Client – a TCP client for interacting with an IPFS daemon -""" -from __future__ import absolute_import - -import os -import warnings - -from . import http, multipart, utils, exceptions, encoding - -DEFAULT_HOST = str(os.environ.get("PY_IPFSAPI_DEFAULT_HOST", 'localhost')) -DEFAULT_PORT = int(os.environ.get("PY_IPFSAPI_DEFAULT_PORT", 5001)) -DEFAULT_BASE = str(os.environ.get("PY_IPFSAPI_DEFAULT_BASE", 'api/v0')) - -VERSION_MINIMUM = "0.4.3" -VERSION_MAXIMUM = "0.5.0" - - -def assert_version(version, minimum=VERSION_MINIMUM, maximum=VERSION_MAXIMUM): - """Make sure that the given daemon version is supported by this client - version. - - Raises - ------ - ~ipfsapi.exceptions.VersionMismatch - - Parameters - ---------- - version : str - The version of an IPFS daemon. - minimum : str - The minimal IPFS version to allow. - maximum : str - The maximum IPFS version to allow. - """ - # Convert version strings to integer tuples - version = list(map(int, version.split('-', 1)[0].split('.'))) - minimum = list(map(int, minimum.split('-', 1)[0].split('.'))) - maximum = list(map(int, maximum.split('-', 1)[0].split('.'))) - - if minimum > version or version >= maximum: - raise exceptions.VersionMismatch(version, minimum, maximum) - - -def connect(host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE, - chunk_size=multipart.default_chunk_size, **defaults): - """Create a new :class:`~ipfsapi.Client` instance and connect to the - daemon to validate that its version is supported. - - Raises - ------ - ~ipfsapi.exceptions.VersionMismatch - ~ipfsapi.exceptions.ErrorResponse - ~ipfsapi.exceptions.ConnectionError - ~ipfsapi.exceptions.ProtocolError - ~ipfsapi.exceptions.StatusError - ~ipfsapi.exceptions.TimeoutError - - - All parameters are identical to those passed to the constructor of the - :class:`~ipfsapi.Client` class. - - Returns - ------- - ~ipfsapi.Client - """ - # Create client instance - client = Client(host, port, base, chunk_size, **defaults) - - # Query version number from daemon and validate it - assert_version(client.version()['Version']) - - return client - - -class SubChannel: - """ - Wrapper for a pubsub subscription object that allows for easy - closing of subscriptions. - """ - - def __init__(self, sub): - self.__sub = sub - - def read_message(self): - return next(self.__sub) - - def __iter__(self): - return self.__sub - - def close(self): - self.__sub.close() - - def __enter__(self): - return self - - def __exit__(self, *a): - self.close() - - -class Client(object): - """A TCP client for interacting with an IPFS daemon. - - A :class:`~ipfsapi.Client` instance will not actually establish a - connection to the daemon until at least one of it's methods is called. 
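For reference, the version gate that the deleted `connect()` applies through `assert_version()` is a half-open range check over integer tuples. A minimal sketch of that comparison (the `version_tuple` helper name is illustrative, not part of the module's API):

```python
def version_tuple(version):
    # Drop any pre-release suffix ("0.4.18-rc1" -> "0.4.18"), then split into ints.
    return tuple(int(part) for part in version.split("-", 1)[0].split("."))

# A daemon is accepted iff VERSION_MINIMUM <= version < VERSION_MAXIMUM.
MINIMUM, MAXIMUM = version_tuple("0.4.3"), version_tuple("0.5.0")
print(MINIMUM <= version_tuple("0.4.18-rc1") < MAXIMUM)  # True
print(MINIMUM <= version_tuple("0.5.0") < MAXIMUM)       # False: the upper bound is exclusive
```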
- - Parameters - ---------- - host : str - Hostname or IP address of the computer running the ``ipfs daemon`` - node (defaults to the local system) - port : int - The API port of the IPFS deamon (usually 5001) - base : str - Path of the deamon's API (currently always ``api/v0``) - chunk_size : int - The size of the chunks to break uploaded files and text content into - """ - - _clientfactory = http.HTTPClient - - def __init__(self, host=DEFAULT_HOST, port=DEFAULT_PORT, - base=DEFAULT_BASE, chunk_size=multipart.default_chunk_size, - **defaults): - """Connects to the API port of an IPFS node.""" - - self.chunk_size = chunk_size - - self._client = self._clientfactory(host, port, base, **defaults) - - def add(self, files, recursive=False, pattern='**', *args, **kwargs): - """Add a file, or directory of files to IPFS. - - .. code-block:: python - - >>> with io.open('nurseryrhyme.txt', 'w', encoding='utf-8') as f: - ... numbytes = f.write('Mary had a little lamb') - >>> c.add('nurseryrhyme.txt') - {'Hash': 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab', - 'Name': 'nurseryrhyme.txt'} - - Parameters - ---------- - files : str - A filepath to either a file or directory - recursive : bool - Controls if files in subdirectories are added or not - pattern : str | list - Single `*glob* `_ - pattern or list of *glob* patterns and compiled regular expressions - to match the names of the filepaths to keep - trickle : bool - Use trickle-dag format (optimized for streaming) when generating - the dag; see `the FAQ ` for - more information (Default: ``False``) - only_hash : bool - Only chunk and hash, but do not write to disk (Default: ``False``) - wrap_with_directory : bool - Wrap files with a directory object to preserve their filename - (Default: ``False``) - chunker : str - The chunking algorithm to use - pin : bool - Pin this object when adding (Default: ``True``) - - Returns - ------- - dict: File name and hash of the added file node - """ - #PY2: No support for kw-only parameters after glob parameters - opts = { - "trickle": kwargs.pop("trickle", False), - "only-hash": kwargs.pop("only_hash", False), - "wrap-with-directory": kwargs.pop("wrap_with_directory", False), - "pin": kwargs.pop("pin", True) - } - if "chunker" in kwargs: - opts["chunker"] = kwargs.pop("chunker") - kwargs.setdefault("opts", opts) - - body, headers = multipart.stream_filesystem_node( - files, recursive, pattern, self.chunk_size - ) - return self._client.request('/add', decoder='json', - data=body, headers=headers, **kwargs) - - def get(self, multihash, **kwargs): - """Downloads a file, or directory of files from IPFS. - - Files are placed in the current working directory. - - Parameters - ---------- - multihash : str - The path to the IPFS object(s) to be outputted - """ - args = (multihash,) - return self._client.download('/get', args, **kwargs) - - def cat(self, multihash, offset=0, length=-1, **kwargs): - r"""Retrieves the contents of a file identified by hash. - - .. code-block:: python - - >>> c.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') - Traceback (most recent call last): - ... 
- ipfsapi.exceptions.Error: this dag node is a directory - >>> c.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX') - b'\n\n\n\nipfs example viewer</…' - - Parameters - ---------- - multihash : str - The path to the IPFS object(s) to be retrieved - offset : int - Byte offset to begin reading from - length : int - Maximum number of bytes to read(-1 for all) - - Returns - ------- - str : File contents - """ - opts = {} - if offset != 0: - opts['offset'] = offset - if length != -1: - opts['length'] = length - args = (multihash,) - return self._client.request('/cat', args, opts=opts, **kwargs) - - def ls(self, multihash, **kwargs): - """Returns a list of objects linked to by the given hash. - - .. code-block:: python - - >>> c.ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') - {'Objects': [ - {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', - 'Links': [ - {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', - 'Name': 'Makefile', 'Size': 174, 'Type': 2}, - … - {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', - 'Name': 'published-version', 'Size': 55, 'Type': 2} - ]} - ]} - - Parameters - ---------- - multihash : str - The path to the IPFS object(s) to list links from - - Returns - ------- - dict : Directory information and contents - """ - args = (multihash,) - return self._client.request('/ls', args, decoder='json', **kwargs) - - def refs(self, multihash, **kwargs): - """Returns a list of hashes of objects referenced by the given hash. - - .. code-block:: python - - >>> c.refs('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') - [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, - … - {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] - - Parameters - ---------- - multihash : str - Path to the object(s) to list refs from - - Returns - ------- - list - """ - args = (multihash,) - return self._client.request('/refs', args, decoder='json', **kwargs) - - def refs_local(self, **kwargs): - """Displays the hashes of all local objects. - - .. code-block:: python - - >>> c.refs_local() - [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, - … - {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] - - Returns - ------- - list - """ - return self._client.request('/refs/local', decoder='json', **kwargs) - - def block_stat(self, multihash, **kwargs): - """Returns a dict with the size of the block with the given hash. - - .. code-block:: python - - >>> c.block_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') - {'Key': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', - 'Size': 258} - - Parameters - ---------- - multihash : str - The base58 multihash of an existing block to stat - - Returns - ------- - dict : Information about the requested block - """ - args = (multihash,) - return self._client.request('/block/stat', args, - decoder='json', **kwargs) - - def block_get(self, multihash, **kwargs): - r"""Returns the raw contents of a block. - - .. code-block:: python - - >>> c.block_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') - b'\x121\n"\x12 \xdaW>\x14\xe5\xc1\xf6\xe4\x92\xd1 … \n\x02\x08\x01' - - Parameters - ---------- - multihash : str - The base58 multihash of an existing block to get - - Returns - ------- - str : Value of the requested block - """ - args = (multihash,) - return self._client.request('/block/get', args, **kwargs) - - def block_put(self, file, **kwargs): - """Stores the contents of the given file object as an IPFS block. - - .. 
code-block:: python - - >>> c.block_put(io.BytesIO(b'Mary had a little lamb')) - {'Key': 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', - 'Size': 22} - - Parameters - ---------- - file : io.RawIOBase - The data to be stored as an IPFS block - - Returns - ------- - dict : Information about the new block - - See :meth:`~ipfsapi.Client.block_stat` - """ - body, headers = multipart.stream_files(file, self.chunk_size) - return self._client.request('/block/put', decoder='json', - data=body, headers=headers, **kwargs) - - def bitswap_wantlist(self, peer=None, **kwargs): - """Returns blocks currently on the bitswap wantlist. - - .. code-block:: python - - >>> c.bitswap_wantlist() - {'Keys': [ - 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', - 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', - 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' - ]} - - Parameters - ---------- - peer : str - Peer to show wantlist for. - - Returns - ------- - dict : List of wanted blocks - """ - args = (peer,) - return self._client.request('/bitswap/wantlist', args, - decoder='json', **kwargs) - - def bitswap_stat(self, **kwargs): - """Returns some diagnostic information from the bitswap agent. - - .. code-block:: python - - >>> c.bitswap_stat() - {'BlocksReceived': 96, - 'DupBlksReceived': 73, - 'DupDataReceived': 2560601, - 'ProviderBufLen': 0, - 'Peers': [ - 'QmNZFQRxt9RMNm2VVtuV2Qx7q69bcMWRVXmr5CEkJEgJJP', - 'QmNfCubGpwYZAQxX8LQDsYgB48C4GbfZHuYdexpX9mbNyT', - 'QmNfnZ8SCs3jAtNPc8kf3WJqJqSoX7wsX7VqkLdEYMao4u', - … - ], - 'Wantlist': [ - 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', - 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', - 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' - ] - } - - Returns - ------- - dict : Statistics, peers and wanted blocks - """ - return self._client.request('/bitswap/stat', decoder='json', **kwargs) - - def bitswap_unwant(self, key, **kwargs): - """ - Remove a given block from wantlist. - - Parameters - ---------- - key : str - Key to remove from wantlist. - """ - args = (key,) - return self._client.request('/bitswap/unwant', args, **kwargs) - - def object_data(self, multihash, **kwargs): - r"""Returns the raw bytes in an IPFS object. - - .. code-block:: python - - >>> c.object_data('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') - b'\x08\x01' - - Parameters - ---------- - multihash : str - Key of the object to retrieve, in base58-encoded multihash format - - Returns - ------- - str : Raw object data - """ - args = (multihash,) - return self._client.request('/object/data', args, **kwargs) - - def object_new(self, template=None, **kwargs): - """Creates a new object from an IPFS template. - - By default this creates and returns a new empty merkledag node, but you - may pass an optional template argument to create a preformatted node. - - .. code-block:: python - - >>> c.object_new() - {'Hash': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'} - - Parameters - ---------- - template : str - Blueprints from which to construct the new object. Possible values: - - * ``"unixfs-dir"`` - * ``None`` - - Returns - ------- - dict : Object hash - """ - args = (template,) if template is not None else () - return self._client.request('/object/new', args, - decoder='json', **kwargs) - - def object_links(self, multihash, **kwargs): - """Returns the links pointed to by the specified object. - - .. 
code-block:: python - - >>> c.object_links('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDx … ca7D') - {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', - 'Links': [ - {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', - 'Name': 'Makefile', 'Size': 174}, - {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', - 'Name': 'example', 'Size': 1474}, - {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', - 'Name': 'home', 'Size': 3947}, - {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', - 'Name': 'lib', 'Size': 268261}, - {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', - 'Name': 'published-version', 'Size': 55}]} - - Parameters - ---------- - multihash : str - Key of the object to retrieve, in base58-encoded multihash format - - Returns - ------- - dict : Object hash and merkedag links - """ - args = (multihash,) - return self._client.request('/object/links', args, - decoder='json', **kwargs) - - def object_get(self, multihash, **kwargs): - """Get and serialize the DAG node named by multihash. - - .. code-block:: python - - >>> c.object_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') - {'Data': '\x08\x01', - 'Links': [ - {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', - 'Name': 'Makefile', 'Size': 174}, - {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', - 'Name': 'example', 'Size': 1474}, - {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', - 'Name': 'home', 'Size': 3947}, - {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', - 'Name': 'lib', 'Size': 268261}, - {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', - 'Name': 'published-version', 'Size': 55}]} - - Parameters - ---------- - multihash : str - Key of the object to retrieve, in base58-encoded multihash format - - Returns - ------- - dict : Object data and links - """ - args = (multihash,) - return self._client.request('/object/get', args, - decoder='json', **kwargs) - - def object_put(self, file, **kwargs): - """Stores input as a DAG object and returns its key. - - .. code-block:: python - - >>> c.object_put(io.BytesIO(b''' - ... { - ... "Data": "another", - ... "Links": [ { - ... "Name": "some link", - ... "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCV … R39V", - ... "Size": 8 - ... } ] - ... }''')) - {'Hash': 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', - 'Links': [ - {'Hash': 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', - 'Size': 8, 'Name': 'some link'} - ] - } - - Parameters - ---------- - file : io.RawIOBase - (JSON) object from which the DAG object will be created - - Returns - ------- - dict : Hash and links of the created DAG object - - See :meth:`~ipfsapi.Object.object_links` - """ - body, headers = multipart.stream_files(file, self.chunk_size) - return self._client.request('/object/put', decoder='json', - data=body, headers=headers, **kwargs) - - def object_stat(self, multihash, **kwargs): - """Get stats for the DAG node named by multihash. - - .. 
code-block:: python - - >>> c.object_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') - {'LinksSize': 256, 'NumLinks': 5, - 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', - 'BlockSize': 258, 'CumulativeSize': 274169, 'DataSize': 2} - - Parameters - ---------- - multihash : str - Key of the object to retrieve, in base58-encoded multihash format - - Returns - ------- - dict - """ - args = (multihash,) - return self._client.request('/object/stat', args, - decoder='json', **kwargs) - - def object_patch_append_data(self, multihash, new_data, **kwargs): - """Creates a new merkledag object based on an existing one. - - The new object will have the provided data appended to it, - and will thus have a new Hash. - - .. code-block:: python - - >>> c.object_patch_append_data("QmZZmY … fTqm", io.BytesIO(b"bla")) - {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'} - - Parameters - ---------- - multihash : str - The hash of an ipfs object to modify - new_data : io.RawIOBase - The data to append to the object's data section - - Returns - ------- - dict : Hash of new object - """ - args = (multihash,) - body, headers = multipart.stream_files(new_data, self.chunk_size) - return self._client.request('/object/patch/append-data', args, - decoder='json', - data=body, headers=headers, **kwargs) - - def object_patch_add_link(self, root, name, ref, create=False, **kwargs): - """Creates a new merkledag object based on an existing one. - - The new object will have a link to the provided object. - - .. code-block:: python - - >>> c.object_patch_add_link( - ... 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2', - ... 'Johnny', - ... 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2' - ... ) - {'Hash': 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k'} - - Parameters - ---------- - root : str - IPFS hash for the object being modified - name : str - name for the new link - ref : str - IPFS hash for the object being linked to - create : bool - Create intermediary nodes - - Returns - ------- - dict : Hash of new object - """ - kwargs.setdefault("opts", {"create": create}) - - args = ((root, name, ref),) - return self._client.request('/object/patch/add-link', args, - decoder='json', **kwargs) - - def object_patch_rm_link(self, root, link, **kwargs): - """Creates a new merkledag object based on an existing one. - - The new object will lack a link to the specified object. - - .. code-block:: python - - >>> c.object_patch_rm_link( - ... 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k', - ... 'Johnny' - ... ) - {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'} - - Parameters - ---------- - root : str - IPFS hash of the object to modify - link : str - name of the link to remove - - Returns - ------- - dict : Hash of new object - """ - args = ((root, link),) - return self._client.request('/object/patch/rm-link', args, - decoder='json', **kwargs) - - def object_patch_set_data(self, root, data, **kwargs): - """Creates a new merkledag object based on an existing one. - - The new object will have the same links as the old object but - with the provided data instead of the old object's data contents. - - .. code-block:: python - - >>> c.object_patch_set_data( - ... 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k', - ... io.BytesIO(b'bla') - ... 
) - {'Hash': 'QmSw3k2qkv4ZPsbu9DVEJaTMszAQWNgM1FTFYpfZeNQWrd'} - - Parameters - ---------- - root : str - IPFS hash of the object to modify - data : io.RawIOBase - The new data to store in root - - Returns - ------- - dict : Hash of new object - """ - args = (root,) - body, headers = multipart.stream_files(data, self.chunk_size) - return self._client.request('/object/patch/set-data', args, - decoder='json', - data=body, headers=headers, **kwargs) - - def file_ls(self, multihash, **kwargs): - """Lists directory contents for Unix filesystem objects. - - The result contains size information. For files, the child size is the - total size of the file contents. For directories, the child size is the - IPFS link size. - - The path can be a prefixless reference; in this case, it is assumed - that it is an ``/ipfs/`` reference and not ``/ipns/``. - - .. code-block:: python - - >>> c.file_ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') - {'Arguments': {'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': - 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D'}, - 'Objects': { - 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': { - 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', - 'Size': 0, 'Type': 'Directory', - 'Links': [ - {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', - 'Name': 'Makefile', 'Size': 163, 'Type': 'File'}, - {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', - 'Name': 'example', 'Size': 1463, 'Type': 'File'}, - {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', - 'Name': 'home', 'Size': 3947, 'Type': 'Directory'}, - {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', - 'Name': 'lib', 'Size': 268261, 'Type': 'Directory'}, - {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', - 'Name': 'published-version', - 'Size': 47, 'Type': 'File'} - ] - } - }} - - Parameters - ---------- - multihash : str - The path to the object(s) to list links from - - Returns - ------- - dict - """ - args = (multihash,) - return self._client.request('/file/ls', args, decoder='json', **kwargs) - - def resolve(self, name, recursive=False, **kwargs): - """Accepts an identifier and resolves it to the referenced item. - - There are a number of mutable name protocols that can link among - themselves and into IPNS. For example IPNS references can (currently) - point at an IPFS object, and DNS links can point at other DNS links, - IPNS entries, or IPFS objects. This command accepts any of these - identifiers. - - .. code-block:: python - - >>> c.resolve("/ipfs/QmTkzDwWqPbnAh5YiV5VwcTLnGdw … ca7D/Makefile") - {'Path': '/ipfs/Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV'} - >>> c.resolve("/ipns/ipfs.io") - {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} - - Parameters - ---------- - name : str - The name to resolve - recursive : bool - Resolve until the result is an IPFS name - - Returns - ------- - dict : IPFS path of resource - """ - kwargs.setdefault("opts", {"recursive": recursive}) - - args = (name,) - return self._client.request('/resolve', args, decoder='json', **kwargs) - - def key_list(self, **kwargs): - """Returns a list of generated public keys that can be used with name_publish - - .. code-block:: python - - >>> c.key_list() - [{'Name': 'self', - 'Id': 'QmQf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'}, - {'Name': 'example_key_name', - 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} - ] - - Returns - ------- - list : List of dictionaries with Names and Ids of public keys. 
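The `resolve()` and `key_list()` methods above compose naturally. A minimal usage sketch, assuming a local daemon on the default port (the DNS name is only an example):

```python
import ipfsapi

c = ipfsapi.connect()  # localhost:5001 by default

# Collapse a mutable /ipns/ (or DNS) reference to the immutable /ipfs/ path behind it.
print(c.resolve("/ipns/ipfs.io", recursive=True)["Path"])

# Enumerate the keypairs this node could publish IPNS names under.
print(c.key_list())
```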
- """ - return self._client.request('/key/list', decoder='json', **kwargs) - - def key_gen(self, key_name, type, size=2048, **kwargs): - """Adds a new public key that can be used for name_publish. - - .. code-block:: python - - >>> c.key_gen('example_key_name') - {'Name': 'example_key_name', - 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} - - Parameters - ---------- - key_name : str - Name of the new Key to be generated. Used to reference the Keys. - type : str - Type of key to generate. The current possible keys types are: - - * ``"rsa"`` - * ``"ed25519"`` - size : int - Bitsize of key to generate - - Returns - ------- - dict : Key name and Key Id - """ - - opts = {"type": type, "size": size} - kwargs.setdefault("opts", opts) - args = (key_name,) - - return self._client.request('/key/gen', args, - decoder='json', **kwargs) - - def key_rm(self, key_name, *key_names, **kwargs): - """Remove a keypair - - .. code-block:: python - - >>> c.key_rm("bla") - {"Keys": [ - {"Name": "bla", - "Id": "QmfJpR6paB6h891y7SYXGe6gapyNgepBeAYMbyejWA4FWA"} - ]} - - Parameters - ---------- - key_name : str - Name of the key(s) to remove. - - Returns - ------- - dict : List of key names and IDs that have been removed - """ - args = (key_name,) + key_names - return self._client.request('/key/rm', args, decoder='json', **kwargs) - - def key_rename(self, key_name, new_key_name, **kwargs): - """Rename a keypair - - .. code-block:: python - - >>> c.key_rename("bla", "personal") - {"Was": "bla", - "Now": "personal", - "Id": "QmeyrRNxXaasZaoDXcCZgryoBCga9shaHQ4suHAYXbNZF3", - "Overwrite": False} - - Parameters - ---------- - key_name : str - Current name of the key to rename - new_key_name : str - New name of the key - - Returns - ------- - dict : List of key names and IDs that have been removed - """ - args = (key_name, new_key_name) - return self._client.request('/key/rename', args, decoder='json', - **kwargs) - - def name_publish(self, ipfs_path, resolve=True, lifetime="24h", ttl=None, - key=None, **kwargs): - """Publishes an object to IPNS. - - IPNS is a PKI namespace, where names are the hashes of public keys, and - the private key enables publishing new (signed) values. In publish, the - default value of *name* is your own identity public key. - - .. code-block:: python - - >>> c.name_publish('/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZK … GZ5d') - {'Value': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d', - 'Name': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc'} - - Parameters - ---------- - ipfs_path : str - IPFS path of the object to be published - resolve : bool - Resolve given path before publishing - lifetime : str - Time duration that the record will be valid for - - Accepts durations such as ``"300s"``, ``"1.5h"`` or ``"2h45m"``. - Valid units are: - - * ``"ns"`` - * ``"us"`` (or ``"µs"``) - * ``"ms"`` - * ``"s"`` - * ``"m"`` - * ``"h"`` - ttl : int - Time duration this record should be cached for - key : string - Name of the key to be used, as listed by 'ipfs key list'. - - Returns - ------- - dict : IPNS hash and the IPFS path it points at - """ - opts = {"lifetime": lifetime, "resolve": resolve} - if ttl: - opts["ttl"] = ttl - if key: - opts["key"] = key - kwargs.setdefault("opts", opts) - - args = (ipfs_path,) - return self._client.request('/name/publish', args, - decoder='json', **kwargs) - - def name_resolve(self, name=None, recursive=False, - nocache=False, **kwargs): - """Gets the value currently published at an IPNS name. 
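Taken together with `key_gen()` above, publishing under a dedicated key is a two-step affair. A sketch using the hashes from the surrounding docstrings (illustrative values only):

```python
import ipfsapi

c = ipfsapi.connect()

# Generate a named RSA keypair, then publish an IPFS path under that name.
c.key_gen("example_key_name", type="rsa", size=2048)
result = c.name_publish("/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d",
                        key="example_key_name", lifetime="24h")
print(result["Name"], "now points at", result["Value"])
```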
- - IPNS is a PKI namespace, where names are the hashes of public keys, and - the private key enables publishing new (signed) values. In resolve, the - default value of ``name`` is your own identity public key. - - .. code-block:: python - - >>> c.name_resolve() - {'Path': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d'} - - Parameters - ---------- - name : str - The IPNS name to resolve (defaults to the connected node) - recursive : bool - Resolve until the result is not an IPFS name (default: false) - nocache : bool - Do not use cached entries (default: false) - - Returns - ------- - dict : The IPFS path the IPNS hash points at - """ - kwargs.setdefault("opts", {"recursive": recursive, - "nocache": nocache}) - args = (name,) if name is not None else () - return self._client.request('/name/resolve', args, - decoder='json', **kwargs) - - def dns(self, domain_name, recursive=False, **kwargs): - """Resolves DNS links to the referenced object. - - Multihashes are hard to remember, but domain names are usually easy to - remember. To create memorable aliases for multihashes, DNS TXT records - can point to other DNS links, IPFS objects, IPNS keys, etc. - This command resolves those links to the referenced object. - - For example, with this DNS TXT record:: - - >>> import dns.resolver - >>> a = dns.resolver.query("ipfs.io", "TXT") - >>> a.response.answer[0].items[0].to_text() - '"dnslink=/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n"' - - The resolver will give:: - - >>> c.dns("ipfs.io") - {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} - - Parameters - ---------- - domain_name : str - The domain-name name to resolve - recursive : bool - Resolve until the name is not a DNS link - - Returns - ------- - dict : Resource were a DNS entry points to - """ - kwargs.setdefault("opts", {"recursive": recursive}) - - args = (domain_name,) - return self._client.request('/dns', args, decoder='json', **kwargs) - - def pin_add(self, path, *paths, **kwargs): - """Pins objects to local storage. - - Stores an IPFS object(s) from a given path locally to disk. - - .. code-block:: python - - >>> c.pin_add("QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d") - {'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']} - - Parameters - ---------- - path : str - Path to object(s) to be pinned - recursive : bool - Recursively unpin the object linked to by the specified object(s) - - Returns - ------- - dict : List of IPFS objects that have been pinned - """ - #PY2: No support for kw-only parameters after glob parameters - if "recursive" in kwargs: - kwargs.setdefault("opts", {"recursive": kwargs.pop("recursive")}) - - args = (path,) + paths - return self._client.request('/pin/add', args, decoder='json', **kwargs) - - def pin_rm(self, path, *paths, **kwargs): - """Removes a pinned object from local storage. - - Removes the pin from the given object allowing it to be garbage - collected if needed. - - .. 
code-block:: python - - >>> c.pin_rm('QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d') - {'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']} - - Parameters - ---------- - path : str - Path to object(s) to be unpinned - recursive : bool - Recursively unpin the object linked to by the specified object(s) - - Returns - ------- - dict : List of IPFS objects that have been unpinned - """ - #PY2: No support for kw-only parameters after glob parameters - if "recursive" in kwargs: - kwargs.setdefault("opts", {"recursive": kwargs["recursive"]}) - del kwargs["recursive"] - - args = (path,) + paths - return self._client.request('/pin/rm', args, decoder='json', **kwargs) - - def pin_ls(self, type="all", **kwargs): - """Lists objects pinned to local storage. - - By default, all pinned objects are returned, but the ``type`` flag or - arguments can restrict that to a specific pin type or to some specific - objects respectively. - - .. code-block:: python - - >>> c.pin_ls() - {'Keys': { - 'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'}, - 'QmNPZUCeSN5458Uwny8mXSWubjjr6J … kP5e': {'Type': 'recursive'}, - 'QmNg5zWpRMxzRAVg7FTQ3tUxVbKj8E … gHPz': {'Type': 'indirect'}, - … - 'QmNiuVapnYCrLjxyweHeuk6Xdqfvts … wCCe': {'Type': 'indirect'}}} - - Parameters - ---------- - type : "str" - The type of pinned keys to list. Can be: - - * ``"direct"`` - * ``"indirect"`` - * ``"recursive"`` - * ``"all"`` - - Returns - ------- - dict : Hashes of pinned IPFS objects and why they are pinned - """ - kwargs.setdefault("opts", {"type": type}) - - return self._client.request('/pin/ls', decoder='json', **kwargs) - - def pin_update(self, from_path, to_path, **kwargs): - """Replaces one pin with another. - - Updates one pin to another, making sure that all objects in the new pin - are local. Then removes the old pin. This is an optimized version of - using first using :meth:`~ipfsapi.Client.pin_add` to add a new pin - for an object and then using :meth:`~ipfsapi.Client.pin_rm` to remove - the pin for the old object. - - .. code-block:: python - - >>> c.pin_update("QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P", - ... "QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH") - {"Pins": ["/ipfs/QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P", - "/ipfs/QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH"]} - - Parameters - ---------- - from_path : str - Path to the old object - to_path : str - Path to the new object to be pinned - unpin : bool - Should the pin of the old object be removed? (Default: ``True``) - - Returns - ------- - dict : List of IPFS objects affected by the pinning operation - """ - #PY2: No support for kw-only parameters after glob parameters - if "unpin" in kwargs: - kwargs.setdefault("opts", {"unpin": kwargs["unpin"]}) - del kwargs["unpin"] - - args = (from_path, to_path) - return self._client.request('/pin/update', args, decoder='json', - **kwargs) - - def pin_verify(self, path, *paths, **kwargs): - """Verify that recursive pins are complete. - - Scan the repo for pinned object graphs and check their integrity. - Issues will be reported back with a helpful human-readable error - message to aid in error recovery. This is useful to help recover - from datastore corruptions (such as when accidentally deleting - files added using the filestore backend). - - This function returns an iterator needs to be closed using a context - manager (``with``-statement) or using the ``.close()`` method. - - .. code-block:: python - - >>> with c.pin_verify("QmN…TTZ", verbose=True) as pin_verify_iter: - ... 
for item in pin_verify_iter: - ... print(item) - ... - {"Cid":"QmVkNdzCBukBRdpyFiKPyL2R15qPExMr9rV9RFV2kf9eeV","Ok":True} - {"Cid":"QmbPzQruAEFjUU3gQfupns6b8USr8VrD9H71GrqGDXQSxm","Ok":True} - {"Cid":"Qmcns1nUvbeWiecdGDPw8JxWeUfxCV8JKhTfgzs3F8JM4P","Ok":True} - … - - Parameters - ---------- - path : str - Path to object(s) to be checked - verbose : bool - Also report status of items that were OK? (Default: ``False``) - - Returns - ------- - iterable - """ - #PY2: No support for kw-only parameters after glob parameters - if "verbose" in kwargs: - kwargs.setdefault("opts", {"verbose": kwargs["verbose"]}) - del kwargs["verbose"] - - args = (path,) + paths - return self._client.request('/pin/verify', args, decoder='json', - stream=True, **kwargs) - - def repo_gc(self, **kwargs): - """Removes stored objects that are not pinned from the repo. - - .. code-block:: python - - >>> c.repo_gc() - [{'Key': 'QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQuwaHG2mpW2'}, - {'Key': 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k'}, - {'Key': 'QmRVBnxUCsD57ic5FksKYadtyUbMsyo9KYQKKELajqAp4q'}, - … - {'Key': 'QmYp4TeCurXrhsxnzt5wqLqqUz8ZRg5zsc7GuUrUSDtwzP'}] - - Performs a garbage collection sweep of the local set of - stored objects and remove ones that are not pinned in order - to reclaim hard disk space. Returns the hashes of all collected - objects. - - Returns - ------- - dict : List of IPFS objects that have been removed - """ - return self._client.request('/repo/gc', decoder='json', **kwargs) - - def repo_stat(self, **kwargs): - """Displays the repo's status. - - Returns the number of objects in the repo and the repo's size, - version, and path. - - .. code-block:: python - - >>> c.repo_stat() - {'NumObjects': 354, - 'RepoPath': '…/.local/share/ipfs', - 'Version': 'fs-repo@4', - 'RepoSize': 13789310} - - Returns - ------- - dict : General information about the IPFS file repository - - +------------+-------------------------------------------------+ - | NumObjects | Number of objects in the local repo. | - +------------+-------------------------------------------------+ - | RepoPath | The path to the repo being currently used. | - +------------+-------------------------------------------------+ - | RepoSize | Size in bytes that the repo is currently using. | - +------------+-------------------------------------------------+ - | Version | The repo version. | - +------------+-------------------------------------------------+ - """ - return self._client.request('/repo/stat', decoder='json', **kwargs) - - def id(self, peer=None, **kwargs): - """Shows IPFS Node ID info. - - Returns the PublicKey, ProtocolVersion, ID, AgentVersion and - Addresses of the connected daemon or some other node. - - .. 
code-block:: python - - >>> c.id() - {'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc', - 'PublicKey': 'CAASpgIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggE … BAAE=', - 'AgentVersion': 'go-libp2p/3.3.4', - 'ProtocolVersion': 'ipfs/0.1.0', - 'Addresses': [ - '/ip4/127.0.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYo … E9Uc', - '/ip4/10.1.0.172/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc', - '/ip4/172.18.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc', - '/ip6/::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYoDEyB97 … E9Uc', - '/ip6/fccc:7904:b05b:a579:957b:deef:f066:cad9/tcp/400 … E9Uc', - '/ip6/fd56:1966:efd8::212/tcp/4001/ipfs/QmVgNoP89mzpg … E9Uc', - '/ip6/fd56:1966:efd8:0:def1:34d0:773:48f/tcp/4001/ipf … E9Uc', - '/ip6/2001:db8:1::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc', - '/ip4/77.116.233.54/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc', - '/ip4/77.116.233.54/tcp/10842/ipfs/QmVgNoP89mzpgEAAqK … E9Uc']} - - Parameters - ---------- - peer : str - Peer.ID of the node to look up (local node if ``None``) - - Returns - ------- - dict : Information about the IPFS node - """ - args = (peer,) if peer is not None else () - return self._client.request('/id', args, decoder='json', **kwargs) - - def bootstrap(self, **kwargs): - """Compatiblity alias for :meth:`~ipfsapi.Client.bootstrap_list`.""" - self.bootstrap_list(**kwargs) - - def bootstrap_list(self, **kwargs): - """Returns the addresses of peers used during initial discovery of the - IPFS network. - - Peers are output in the format ``<multiaddr>/<peerID>``. - - .. code-block:: python - - >>> c.bootstrap_list() - {'Peers': [ - '/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYER … uvuJ', - '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRa … ca9z', - '/ip4/104.236.179.241/tcp/4001/ipfs/QmSoLPppuBtQSGwKD … KrGM', - … - '/ip4/178.62.61.185/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3p … QBU3']} - - Returns - ------- - dict : List of known bootstrap peers - """ - return self._client.request('/bootstrap', decoder='json', **kwargs) - - def bootstrap_add(self, peer, *peers, **kwargs): - """Adds peers to the bootstrap list. - - Parameters - ---------- - peer : str - IPFS MultiAddr of a peer to add to the list - - Returns - ------- - dict - """ - args = (peer,) + peers - return self._client.request('/bootstrap/add', args, - decoder='json', **kwargs) - - def bootstrap_rm(self, peer, *peers, **kwargs): - """Removes peers from the bootstrap list. - - Parameters - ---------- - peer : str - IPFS MultiAddr of a peer to remove from the list - - Returns - ------- - dict - """ - args = (peer,) + peers - return self._client.request('/bootstrap/rm', args, - decoder='json', **kwargs) - - def swarm_peers(self, **kwargs): - """Returns the addresses & IDs of currently connected peers. - - .. code-block:: python - - >>> c.swarm_peers() - {'Strings': [ - '/ip4/101.201.40.124/tcp/40001/ipfs/QmZDYAhmMDtnoC6XZ … kPZc', - '/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYER … uvuJ', - '/ip4/104.223.59.174/tcp/4001/ipfs/QmeWdgoZezpdHz1PX8 … 1jB6', - … - '/ip6/fce3: … :f140/tcp/43901/ipfs/QmSoLnSGccFuZQJzRa … ca9z']} - - Returns - ------- - dict : List of multiaddrs of currently connected peers - """ - return self._client.request('/swarm/peers', decoder='json', **kwargs) - - def swarm_addrs(self, **kwargs): - """Returns the addresses of currently connected peers by peer id. - - .. 
code-block:: python - - >>> pprint(c.swarm_addrs()) - {'Addrs': { - 'QmNMVHJTSZHTWMWBbmBrQgkA1hZPWYuVJx2DpSGESWW6Kn': [ - '/ip4/10.1.0.1/tcp/4001', - '/ip4/127.0.0.1/tcp/4001', - '/ip4/51.254.25.16/tcp/4001', - '/ip6/2001:41d0:b:587:3cae:6eff:fe40:94d8/tcp/4001', - '/ip6/2001:470:7812:1045::1/tcp/4001', - '/ip6/::1/tcp/4001', - '/ip6/fc02:2735:e595:bb70:8ffc:5293:8af8:c4b7/tcp/4001', - '/ip6/fd00:7374:6172:100::1/tcp/4001', - '/ip6/fd20:f8be:a41:0:c495:aff:fe7e:44ee/tcp/4001', - '/ip6/fd20:f8be:a41::953/tcp/4001'], - 'QmNQsK1Tnhe2Uh2t9s49MJjrz7wgPHj4VyrZzjRe8dj7KQ': [ - '/ip4/10.16.0.5/tcp/4001', - '/ip4/127.0.0.1/tcp/4001', - '/ip4/172.17.0.1/tcp/4001', - '/ip4/178.62.107.36/tcp/4001', - '/ip6/::1/tcp/4001'], - … - }} - - Returns - ------- - dict : Multiaddrs of peers by peer id - """ - return self._client.request('/swarm/addrs', decoder='json', **kwargs) - - def swarm_connect(self, address, *addresses, **kwargs): - """Opens a connection to a given address. - - This will open a new direct connection to a peer address. The address - format is an IPFS multiaddr:: - - /ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ - - .. code-block:: python - - >>> c.swarm_connect("/ip4/104.131.131.82/tcp/4001/ipfs/Qma … uvuJ") - {'Strings': ['connect QmaCpDMGvV2BGHeYERUEnRQAwe3 … uvuJ success']} - - Parameters - ---------- - address : str - Address of peer to connect to - - Returns - ------- - dict : Textual connection status report - """ - args = (address,) + addresses - return self._client.request('/swarm/connect', args, - decoder='json', **kwargs) - - def swarm_disconnect(self, address, *addresses, **kwargs): - """Closes the connection to a given address. - - This will close a connection to a peer address. The address format is - an IPFS multiaddr:: - - /ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ - - The disconnect is not permanent; if IPFS needs to talk to that address - later, it will reconnect. - - .. code-block:: python - - >>> c.swarm_disconnect("/ip4/104.131.131.82/tcp/4001/ipfs/Qm … uJ") - {'Strings': ['disconnect QmaCpDMGvV2BGHeYERUEnRQA … uvuJ success']} - - Parameters - ---------- - address : str - Address of peer to disconnect from - - Returns - ------- - dict : Textual connection status report - """ - args = (address,) + addresses - return self._client.request('/swarm/disconnect', args, - decoder='json', **kwargs) - - def swarm_filters_add(self, address, *addresses, **kwargs): - """Adds a given multiaddr filter to the filter list. - - This will add an address filter to the daemons swarm. Filters applied - this way will not persist daemon reboots, to achieve that, add your - filters to the configuration file. - - .. code-block:: python - - >>> c.swarm_filters_add("/ip4/192.168.0.0/ipcidr/16") - {'Strings': ['/ip4/192.168.0.0/ipcidr/16']} - - Parameters - ---------- - address : str - Multiaddr to filter - - Returns - ------- - dict : List of swarm filters added - """ - args = (address,) + addresses - return self._client.request('/swarm/filters/add', args, - decoder='json', **kwargs) - - def swarm_filters_rm(self, address, *addresses, **kwargs): - """Removes a given multiaddr filter from the filter list. - - This will remove an address filter from the daemons swarm. Filters - removed this way will not persist daemon reboots, to achieve that, - remove your filters from the configuration file. - - .. 
code-block:: python - - >>> c.swarm_filters_rm("/ip4/192.168.0.0/ipcidr/16") - {'Strings': ['/ip4/192.168.0.0/ipcidr/16']} - - Parameters - ---------- - address : str - Multiaddr filter to remove - - Returns - ------- - dict : List of swarm filters removed - """ - args = (address,) + addresses - return self._client.request('/swarm/filters/rm', args, - decoder='json', **kwargs) - - def dht_query(self, peer_id, *peer_ids, **kwargs): - """Finds the closest Peer IDs to a given Peer ID by querying the DHT. - - .. code-block:: python - - >>> c.dht_query("/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDM … uvuJ") - [{'ID': 'QmPkFbxAQ7DeKD5VGSh9HQrdS574pyNzDmxJeGrRJxoucF', - 'Extra': '', 'Type': 2, 'Responses': None}, - {'ID': 'QmR1MhHVLJSLt9ZthsNNhudb1ny1WdhY4FPW21ZYFWec4f', - 'Extra': '', 'Type': 2, 'Responses': None}, - {'ID': 'Qmcwx1K5aVme45ab6NYWb52K2TFBeABgCLccC7ntUeDsAs', - 'Extra': '', 'Type': 2, 'Responses': None}, - … - {'ID': 'QmYYy8L3YD1nsF4xtt4xmsc14yqvAAnKksjo3F3iZs5jPv', - 'Extra': '', 'Type': 1, 'Responses': []}] - - Parameters - ---------- - peer_id : str - The peerID to run the query against - - Returns - ------- - dict : List of peers IDs - """ - args = (peer_id,) + peer_ids - return self._client.request('/dht/query', args, - decoder='json', **kwargs) - - def dht_findprovs(self, multihash, *multihashes, **kwargs): - """Finds peers in the DHT that can provide a specific value. - - .. code-block:: python - - >>> c.dht_findprovs("QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQu … mpW2") - [{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', - 'Extra': '', 'Type': 6, 'Responses': None}, - {'ID': 'QmaK6Aj5WXkfnWGoWq7V8pGUYzcHPZp4jKQ5JtmRvSzQGk', - 'Extra': '', 'Type': 6, 'Responses': None}, - {'ID': 'QmdUdLu8dNvr4MVW1iWXxKoQrbG6y1vAVWPdkeGK4xppds', - 'Extra': '', 'Type': 6, 'Responses': None}, - … - {'ID': '', 'Extra': '', 'Type': 4, 'Responses': [ - {'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97Mk … E9Uc', 'Addrs': None} - ]}, - {'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', - 'Extra': '', 'Type': 1, 'Responses': [ - {'ID': 'QmSHXfsmN3ZduwFDjeqBn1C8b1tcLkxK6yd … waXw', 'Addrs': [ - '/ip4/127.0.0.1/tcp/4001', - '/ip4/172.17.0.8/tcp/4001', - '/ip6/::1/tcp/4001', - '/ip4/52.32.109.74/tcp/1028' - ]} - ]}] - - Parameters - ---------- - multihash : str - The DHT key to find providers for - - Returns - ------- - dict : List of provider Peer IDs - """ - args = (multihash,) + multihashes - return self._client.request('/dht/findprovs', args, - decoder='json', **kwargs) - - def dht_findpeer(self, peer_id, *peer_ids, **kwargs): - """Queries the DHT for all of the associated multiaddresses. - - .. 
code-block:: python - - >>> c.dht_findpeer("QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZN … MTLZ") - [{'ID': 'QmfVGMFrwW6AV6fTWmD6eocaTybffqAvkVLXQEFrYdk6yc', - 'Extra': '', 'Type': 6, 'Responses': None}, - {'ID': 'QmTKiUdjbRjeN9yPhNhG1X38YNuBdjeiV9JXYWzCAJ4mj5', - 'Extra': '', 'Type': 6, 'Responses': None}, - {'ID': 'QmTGkgHSsULk8p3AKTAqKixxidZQXFyF7mCURcutPqrwjQ', - 'Extra': '', 'Type': 6, 'Responses': None}, - … - {'ID': '', 'Extra': '', 'Type': 2, - 'Responses': [ - {'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', - 'Addrs': [ - '/ip4/10.9.8.1/tcp/4001', - '/ip6/::1/tcp/4001', - '/ip4/164.132.197.107/tcp/4001', - '/ip4/127.0.0.1/tcp/4001']} - ]}] - - Parameters - ---------- - peer_id : str - The ID of the peer to search for - - Returns - ------- - dict : List of multiaddrs - """ - args = (peer_id,) + peer_ids - return self._client.request('/dht/findpeer', args, - decoder='json', **kwargs) - - def dht_get(self, key, *keys, **kwargs): - """Queries the DHT for its best value related to given key. - - There may be several different values for a given key stored in the - DHT; in this context *best* means the record that is most desirable. - There is no one metric for *best*: it depends entirely on the key type. - For IPNS, *best* is the record that is both valid and has the highest - sequence number (freshest). Different key types may specify other rules - for they consider to be the *best*. - - Parameters - ---------- - key : str - One or more keys whose values should be looked up - - Returns - ------- - str - """ - args = (key,) + keys - res = self._client.request('/dht/get', args, decoder='json', **kwargs) - - if isinstance(res, dict) and "Extra" in res: - return res["Extra"] - else: - for r in res: - if "Extra" in r and len(r["Extra"]) > 0: - return r["Extra"] - raise exceptions.Error("empty response from DHT") - - def dht_put(self, key, value, **kwargs): - """Writes a key/value pair to the DHT. - - Given a key of the form ``/foo/bar`` and a value of any form, this will - write that value to the DHT with that key. - - Keys have two parts: a keytype (foo) and the key name (bar). IPNS uses - the ``/ipns/`` keytype, and expects the key name to be a Peer ID. IPNS - entries are formatted with a special strucutre. - - You may only use keytypes that are supported in your ``ipfs`` binary: - ``go-ipfs`` currently only supports the ``/ipns/`` keytype. Unless you - have a relatively deep understanding of the key's internal structure, - you likely want to be using the :meth:`~ipfsapi.Client.name_publish` - instead. - - Value is arbitrary text. - - .. code-block:: python - - >>> c.dht_put("QmVgNoP89mzpgEAAqK8owYoDEyB97Mkc … E9Uc", "test123") - [{'ID': 'QmfLy2aqbhU1RqZnGQyqHSovV8tDufLUaPfN1LNtg5CvDZ', - 'Extra': '', 'Type': 5, 'Responses': None}, - {'ID': 'QmZ5qTkNvvZ5eFq9T4dcCEK7kX8L7iysYEpvQmij9vokGE', - 'Extra': '', 'Type': 5, 'Responses': None}, - {'ID': 'QmYqa6QHCbe6eKiiW6YoThU5yBy8c3eQzpiuW22SgVWSB8', - 'Extra': '', 'Type': 6, 'Responses': None}, - … - {'ID': 'QmP6TAKVDCziLmx9NV8QGekwtf7ZMuJnmbeHMjcfoZbRMd', - 'Extra': '', 'Type': 1, 'Responses': []}] - - Parameters - ---------- - key : str - A unique identifier - value : str - Abitrary text to associate with the input (2048 bytes or less) - - Returns - ------- - list - """ - args = (key, value) - return self._client.request('/dht/put', args, decoder='json', **kwargs) - - def ping(self, peer, *peers, **kwargs): - """Provides round-trip latency information for the routing system. 
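As the `dht_put()` docstring notes, go-ipfs only supports the `/ipns/` keytype, and `dht_get()` already unwraps the best record's `Extra` field. A sketch of a direct record lookup, assuming the peer has actually published an IPNS record (otherwise `ipfsapi.exceptions.Error` is raised; the peer ID is illustrative):

```python
import ipfsapi

c = ipfsapi.connect()

# Fetch the freshest valid IPNS record for a peer straight from the DHT.
peer_id = "QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc"
print(c.dht_get("/ipns/" + peer_id))
```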
- - Finds nodes via the routing system, sends pings, waits for pongs, - and prints out round-trip latency information. - - .. code-block:: python - - >>> c.ping("QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n") - [{'Success': True, 'Time': 0, - 'Text': 'Looking up peer QmTzQ1JRkWErjk39mryYw2WVaphAZN … c15n'}, - {'Success': False, 'Time': 0, - 'Text': 'Peer lookup error: routing: not found'}] - - Parameters - ---------- - peer : str - ID of peer to be pinged - count : int - Number of ping messages to send (Default: ``10``) - - Returns - ------- - list : Progress reports from the ping - """ - #PY2: No support for kw-only parameters after glob parameters - if "count" in kwargs: - kwargs.setdefault("opts", {"count": kwargs["count"]}) - del kwargs["count"] - - args = (peer,) + peers - return self._client.request('/ping', args, decoder='json', **kwargs) - - def config(self, key, value=None, **kwargs): - """Controls configuration variables. - - .. code-block:: python - - >>> c.config("Addresses.Gateway") - {'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'} - >>> c.config("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081") - {'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'} - - Parameters - ---------- - key : str - The key of the configuration entry (e.g. "Addresses.API") - value : dict - The value to set the configuration entry to - - Returns - ------- - dict : Requested/updated key and its (new) value - """ - args = (key, value) - return self._client.request('/config', args, decoder='json', **kwargs) - - def config_show(self, **kwargs): - """Returns a dict containing the server's configuration. - - .. warning:: - - The configuration file contains private key data that must be - handled with care. - - .. code-block:: python - - >>> config = c.config_show() - >>> config['Addresses'] - {'API': '/ip4/127.0.0.1/tcp/5001', - 'Gateway': '/ip4/127.0.0.1/tcp/8080', - 'Swarm': ['/ip4/0.0.0.0/tcp/4001', '/ip6/::/tcp/4001']}, - >>> config['Discovery'] - {'MDNS': {'Enabled': True, 'Interval': 10}} - - Returns - ------- - dict : The entire IPFS daemon configuration - """ - return self._client.request('/config/show', decoder='json', **kwargs) - - def config_replace(self, *args, **kwargs): - """Replaces the existing config with a user-defined config. - - Make sure to back up the config file first if neccessary, as this - operation can't be undone. - """ - return self._client.request('/config/replace', args, - decoder='json', **kwargs) - - def log_level(self, subsystem, level, **kwargs): - r"""Changes the logging output of a running daemon. - - .. code-block:: python - - >>> c.log_level("path", "info") - {'Message': "Changed log level of 'path' to 'info'\n"} - - Parameters - ---------- - subsystem : str - The subsystem logging identifier (Use ``"all"`` for all subsystems) - level : str - The desired logging level. Must be one of: - - * ``"debug"`` - * ``"info"`` - * ``"warning"`` - * ``"error"`` - * ``"fatal"`` - * ``"panic"`` - - Returns - ------- - dict : Status message - """ - args = (subsystem, level) - return self._client.request('/log/level', args, - decoder='json', **kwargs) - - def log_ls(self, **kwargs): - """Lists the logging subsystems of a running daemon. - - .. 
code-block:: python - - >>> c.log_ls() - {'Strings': [ - 'github.com/ipfs/go-libp2p/p2p/host', 'net/identify', - 'merkledag', 'providers', 'routing/record', 'chunk', 'mfs', - 'ipns-repub', 'flatfs', 'ping', 'mockrouter', 'dagio', - 'cmds/files', 'blockset', 'engine', 'mocknet', 'config', - 'commands/http', 'cmd/ipfs', 'command', 'conn', 'gc', - 'peerstore', 'core', 'coreunix', 'fsrepo', 'core/server', - 'boguskey', 'github.com/ipfs/go-libp2p/p2p/host/routed', - 'diagnostics', 'namesys', 'fuse/ipfs', 'node', 'secio', - 'core/commands', 'supernode', 'mdns', 'path', 'table', - 'swarm2', 'peerqueue', 'mount', 'fuse/ipns', 'blockstore', - 'github.com/ipfs/go-libp2p/p2p/host/basic', 'lock', 'nat', - 'importer', 'corerepo', 'dht.pb', 'pin', 'bitswap_network', - 'github.com/ipfs/go-libp2p/p2p/protocol/relay', 'peer', - 'transport', 'dht', 'offlinerouting', 'tarfmt', 'eventlog', - 'ipfsaddr', 'github.com/ipfs/go-libp2p/p2p/net/swarm/addr', - 'bitswap', 'reprovider', 'supernode/proxy', 'crypto', 'tour', - 'commands/cli', 'blockservice']} - - Returns - ------- - dict : List of daemon logging subsystems - """ - return self._client.request('/log/ls', decoder='json', **kwargs) - - def log_tail(self, **kwargs): - r"""Reads log outputs as they are written. - - This function returns an iterator needs to be closed using a context - manager (``with``-statement) or using the ``.close()`` method. - - .. code-block:: python - - >>> with c.log_tail() as log_tail_iter: - ... for item in log_tail_iter: - ... print(item) - ... - {"event":"updatePeer","system":"dht", - "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", - "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", - "time":"2016-08-22T13:25:27.43353297Z"} - {"event":"handleAddProviderBegin","system":"dht", - "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", - "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", - "time":"2016-08-22T13:25:27.433642581Z"} - {"event":"handleAddProvider","system":"dht","duration":91704, - "key":"QmNT9Tejg6t57Vs8XM2TVJXCwevWiGsZh3kB4HQXUZRK1o", - "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", - "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", - "time":"2016-08-22T13:25:27.433747513Z"} - {"event":"updatePeer","system":"dht", - "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", - "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", - "time":"2016-08-22T13:25:27.435843012Z"} - … - - Returns - ------- - iterable - """ - return self._client.request('/log/tail', decoder='json', - stream=True, **kwargs) - - def version(self, **kwargs): - """Returns the software version of the currently connected node. - - .. code-block:: python - - >>> c.version() - {'Version': '0.4.3-rc2', 'Repo': '4', 'Commit': '', - 'System': 'amd64/linux', 'Golang': 'go1.6.2'} - - Returns - ------- - dict : Daemon and system version information - """ - return self._client.request('/version', decoder='json', **kwargs) - - def files_cp(self, source, dest, **kwargs): - """Copies files within the MFS. - - Due to the nature of IPFS this will not actually involve any of the - file's content being copied. - - .. 
-        .. code-block:: python
-
-            >>> c.files_ls("/")
-            {'Entries': [
-                {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0},
-                {'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0}
-            ]}
-            >>> c.files_cp("/test", "/bla")
-            ''
-            >>> c.files_ls("/")
-            {'Entries': [
-                {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0},
-                {'Size': 0, 'Hash': '', 'Name': 'bla', 'Type': 0},
-                {'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0}
-            ]}
-
-        Parameters
-        ----------
-        source : str
-            Filepath within the MFS to copy from
-        dest : str
-            Destination filepath within the MFS to which the file will be
-            copied
-        """
-        args = (source, dest)
-        return self._client.request('/files/cp', args, **kwargs)
-
-    def files_ls(self, path, **kwargs):
-        """Lists contents of a directory in the MFS.
-
-        .. code-block:: python
-
-            >>> c.files_ls("/")
-            {'Entries': [
-                {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}
-            ]}
-
-        Parameters
-        ----------
-        path : str
-            Filepath within the MFS
-
-        Returns
-        -------
-        dict : Directory entries
-        """
-        args = (path,)
-        return self._client.request('/files/ls', args,
-                                    decoder='json', **kwargs)
-
-    def files_mkdir(self, path, parents=False, **kwargs):
-        """Creates a directory within the MFS.
-
-        .. code-block:: python
-
-            >>> c.files_mkdir("/test")
-            b''
-
-        Parameters
-        ----------
-        path : str
-            Filepath within the MFS
-        parents : bool
-            Create parent directories as needed and do not raise an exception
-            if the requested directory already exists
-        """
-        kwargs.setdefault("opts", {"parents": parents})
-
-        args = (path,)
-        return self._client.request('/files/mkdir', args, **kwargs)
-
-    def files_stat(self, path, **kwargs):
-        """Returns basic ``stat`` information for an MFS file
-        (including its hash).
-
-        .. code-block:: python
-
-            >>> c.files_stat("/test")
-            {'Hash': 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn',
-             'Size': 0, 'CumulativeSize': 4, 'Type': 'directory', 'Blocks': 0}
-
-        Parameters
-        ----------
-        path : str
-            Filepath within the MFS
-
-        Returns
-        -------
-        dict : MFS file information
-        """
-        args = (path,)
-        return self._client.request('/files/stat', args,
-                                    decoder='json', **kwargs)
-
-    def files_rm(self, path, recursive=False, **kwargs):
-        """Removes a file from the MFS.
-
-        .. code-block:: python
-
-            >>> c.files_rm("/bla/file")
-            b''
-
-        Parameters
-        ----------
-        path : str
-            Filepath within the MFS
-        recursive : bool
-            Recursively remove directories?
-        """
-        kwargs.setdefault("opts", {"recursive": recursive})
-
-        args = (path,)
-        return self._client.request('/files/rm', args, **kwargs)
-
-    def files_read(self, path, offset=0, count=None, **kwargs):
-        """Reads a file stored in the MFS.
-
-        .. code-block:: python
-
-            >>> c.files_read("/bla/file")
-            b'hi'
-
-        Parameters
-        ----------
-        path : str
-            Filepath within the MFS
-        offset : int
-            Byte offset at which to begin reading
-        count : int
-            Maximum number of bytes to read
-
-        Returns
-        -------
-        bytes : MFS file contents
-        """
-        opts = {"offset": offset}
-        if count is not None:
-            opts["count"] = count
-        kwargs.setdefault("opts", opts)
-
-        args = (path,)
-        return self._client.request('/files/read', args, **kwargs)
-
-    def files_write(self, path, file, offset=0, create=False, truncate=False,
-                    count=None, **kwargs):
-        """Writes to a mutable file in the MFS.
-
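-        An added sketch (hypothetical file contents, not from the original
-        docs): because writes happen at a byte ``offset``, an existing file
-        can be patched in place without rewriting the rest of it:
-
-        .. code-block:: python
-
-            >>> c.files_write("/test/file", io.BytesIO(b"Hi"), offset=0)
-            b''
-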
-        .. code-block:: python
-
-            >>> c.files_write("/test/file", io.BytesIO(b"hi"), create=True)
-            b''
-
-        Parameters
-        ----------
-        path : str
-            Filepath within the MFS
-        file : io.RawIOBase
-            IO stream object with data that should be written
-        offset : int
-            Byte offset at which to begin writing
-        create : bool
-            Create the file if it does not exist
-        truncate : bool
-            Truncate the file to size zero before writing
-        count : int
-            Maximum number of bytes to read from the source ``file``
-        """
-        opts = {"offset": offset, "create": create, "truncate": truncate}
-        if count is not None:
-            opts["count"] = count
-        kwargs.setdefault("opts", opts)
-
-        args = (path,)
-        body, headers = multipart.stream_files(file, self.chunk_size)
-        return self._client.request('/files/write', args,
-                                    data=body, headers=headers, **kwargs)
-
-    def files_mv(self, source, dest, **kwargs):
-        """Moves files and directories within the MFS.
-
-        .. code-block:: python
-
-            >>> c.files_mv("/test/file", "/bla/file")
-            b''
-
-        Parameters
-        ----------
-        source : str
-            Existing filepath within the MFS
-        dest : str
-            Destination to which the file will be moved in the MFS
-        """
-        args = (source, dest)
-        return self._client.request('/files/mv', args, **kwargs)
-
-    def shutdown(self):
-        """Stop the connected IPFS daemon instance.
-
-        Sending any further requests after this will fail with
-        ``ipfsapi.exceptions.ConnectionError``, until you start another IPFS
-        daemon instance.
-        """
-        try:
-            return self._client.request('/shutdown')
-        except exceptions.ConnectionError:
-            # Sometimes the daemon kills the connection before sending a
-            # response causing an incorrect `ConnectionError` to bubble
-            pass
-
-    ###########
-    # HELPERS #
-    ###########
-
-    @utils.return_field('Hash')
-    def add_bytes(self, data, **kwargs):
-        """Adds a set of bytes as a file to IPFS.
-
-        .. code-block:: python
-
-            >>> c.add_bytes(b"Mary had a little lamb")
-            'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab'
-
-        Also accepts and will stream generator objects.
-
-        Parameters
-        ----------
-        data : bytes
-            Content to be added as a file
-
-        Returns
-        -------
-        str : Hash of the added IPFS object
-        """
-        body, headers = multipart.stream_bytes(data, self.chunk_size)
-        return self._client.request('/add', decoder='json',
-                                    data=body, headers=headers, **kwargs)
-
-    @utils.return_field('Hash')
-    def add_str(self, string, **kwargs):
-        """Adds a Python string as a file to IPFS.
-
-        .. code-block:: python
-
-            >>> c.add_str(u"Mary had a little lamb")
-            'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab'
-
-        Also accepts and will stream generator objects.
-
-        Parameters
-        ----------
-        string : str
-            Content to be added as a file
-
-        Returns
-        -------
-        str : Hash of the added IPFS object
-        """
-        body, headers = multipart.stream_text(string, self.chunk_size)
-        return self._client.request('/add', decoder='json',
-                                    data=body, headers=headers, **kwargs)
-
-    def add_json(self, json_obj, **kwargs):
-        """Adds a json-serializable Python dict as a json file to IPFS.
-
-        .. code-block:: python
-
-            >>> c.add_json({'one': 1, 'two': 2, 'three': 3})
-            'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob'
-
-        Parameters
-        ----------
-        json_obj : dict
-            A json-serializable Python dictionary
-
-        Returns
-        -------
-        str : Hash of the added IPFS object
-        """
-        return self.add_bytes(encoding.Json().encode(json_obj), **kwargs)
-
-    def get_json(self, multihash, **kwargs):
-        """Loads a json object from IPFS.
-
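-        An added note: this is the inverse of
-        :meth:`~ipfsapi.Client.add_json`, so a round-trip returns an equal
-        object (sketch, using a connected client ``c``):
-
-        .. code-block:: python
-
-            >>> c.get_json(c.add_json({'one': 1}))
-            {'one': 1}
-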
-        .. code-block:: python
-
-            >>> c.get_json('QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob')
-            {'one': 1, 'two': 2, 'three': 3}
-
-        Parameters
-        ----------
-        multihash : str
-            Multihash of the IPFS object to load
-
-        Returns
-        -------
-        object : Deserialized IPFS JSON object value
-        """
-        return self.cat(multihash, decoder='json', **kwargs)
-
-    def add_pyobj(self, py_obj, **kwargs):
-        """Adds a picklable Python object as a file to IPFS.
-
-        .. deprecated:: 0.4.2
-           The ``*_pyobj`` APIs allow for arbitrary code execution if abused.
-           Either switch to :meth:`~ipfsapi.Client.add_json` or use
-           ``client.add_bytes(pickle.dumps(py_obj))`` instead.
-
-        Please see :meth:`~ipfsapi.Client.get_pyobj` for the
-        **security risks** of using these methods!
-
-        .. code-block:: python
-
-            >>> c.add_pyobj([0, 1.0, 2j, '3', 4e5])
-            'QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji'
-
-        Parameters
-        ----------
-        py_obj : object
-            A picklable Python object
-
-        Returns
-        -------
-        str : Hash of the added IPFS object
-        """
-        warnings.warn("Using `*_pyobj` on untrusted data is a security risk",
-                      DeprecationWarning)
-        return self.add_bytes(encoding.Pickle().encode(py_obj), **kwargs)
-
-    def get_pyobj(self, multihash, **kwargs):
-        """Loads a pickled Python object from IPFS.
-
-        .. deprecated:: 0.4.2
-           The ``*_pyobj`` APIs allow for arbitrary code execution if abused.
-           Either switch to :meth:`~ipfsapi.Client.get_json` or use
-           ``pickle.loads(client.cat(multihash))`` instead.
-
-        .. caution::
-
-            The pickle module is not intended to be secure against erroneous
-            or maliciously constructed data. Never unpickle data received
-            from an untrusted or unauthenticated source.
-
-            Please **read**
-            `this article <https://www.cs.uic.edu/%7Es/musings/pickle/>`_ to
-            understand the security risks of using this method!
-
-        .. code-block:: python
-
-            >>> c.get_pyobj('QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji')
-            [0, 1.0, 2j, '3', 400000.0]
-
-        Parameters
-        ----------
-        multihash : str
-            Multihash of the IPFS object to load
-
-        Returns
-        -------
-        object : Deserialized IPFS Python object
-        """
-        warnings.warn("Using `*_pyobj` on untrusted data is a security risk",
-                      DeprecationWarning)
-        return self.cat(multihash, decoder='pickle', **kwargs)
-
-    def pubsub_ls(self, **kwargs):
-        """Lists subscribed topics by name
-
-        This method returns data that contains a list of
-        all topics the user is subscribed to. In order
-        to subscribe to a topic pubsub_sub must be called.
-
-        .. code-block:: python
-
-            # subscribe to a channel
-            >>> with c.pubsub_sub("hello") as sub:
-            ...     c.pubsub_ls()
-            {
-                'Strings' : ["hello"]
-            }
-
-        Returns
-        -------
-        dict : Dictionary with the key "Strings" whose value is an array of
-               topics we are subscribed to
-        """
-        return self._client.request('/pubsub/ls', decoder='json', **kwargs)
-
-    def pubsub_peers(self, topic=None, **kwargs):
-        """List the peers we are pubsubbing with.
-
-        Lists the IDs of other IPFS users who we
-        are connected to via some topic. Without specifying
-        a topic, IPFS peers from all subscribed topics
-        will be returned in the data. If a topic is specified
-        only the IPFS IDs of the peers from the specified
-        topic will be returned in the data.
-
-        .. code-block:: python
-
-            >>> c.pubsub_peers()
-            {'Strings':
-                [
-                    'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
-                    'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA',
-                    ...
-                    'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a'
-                ]
-            }
-
-            ## with a topic
-
-            # subscribe to a channel
-            >>> with c.pubsub_sub('hello') as sub:
-            ...     c.pubsub_peers(topic='hello')
-            {'Strings':
-                [
-                    'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
-                    ...
-                    # other peers connected to the same channel
-                ]
-            }
-
-        Parameters
-        ----------
-        topic : str
-            The topic to list connected peers of
-            (defaults to None which lists peers for all topics)
-
-        Returns
-        -------
-        dict : Dictionary with the key "Strings" whose value is a list of the
-               IDs of IPFS peers we're pubsubbing with
-        """
-        args = (topic,) if topic is not None else ()
-        return self._client.request('/pubsub/peers', args,
-                                    decoder='json', **kwargs)
-
-    def pubsub_pub(self, topic, payload, **kwargs):
-        """Publish a message to a given pubsub topic
-
-        Publishing will publish the given payload (string) to
-        everyone currently subscribed to the given topic.
-
-        All data (including the id of the publisher) is automatically
-        base64 encoded when published.
-
-        .. code-block:: python
-
-            # publishes the message 'message' to the topic 'hello'
-            >>> c.pubsub_pub('hello', 'message')
-            []
-
-        Parameters
-        ----------
-        topic : str
-            Topic to publish to
-        payload : str
-            Data to be published to the given topic
-
-        Returns
-        -------
-        list : empty list
-        """
-        args = (topic, payload)
-        return self._client.request('/pubsub/pub', args,
-                                    decoder='json', **kwargs)
-
-    def pubsub_sub(self, topic, discover=False, **kwargs):
-        """Subscribe to messages on a given topic
-
-        Subscribing to a topic in IPFS means anytime
-        a message is published to a topic, the subscribers
-        will be notified of the publication.
-
-        The connection with the pubsub topic is opened and read.
-        The Subscription returned should be used inside a context
-        manager to ensure that it is closed properly and not left
-        hanging.
-
-        .. code-block:: python
-
-            >>> with c.pubsub_sub('testing') as sub:
-            ...     # publish a message 'hello' to the topic 'testing'
-            ...     c.pubsub_pub('testing', 'hello')
-            ...     for message in sub:
-            ...         print(message)
-            ...         # Stop reading the subscription after
-            ...         # we receive one publication
-            ...         break
-            {'from': '<base64encoded IPFS id>',
-             'data': 'aGVsbG8=',
-             'topicIDs': ['testing']}
-
-            # NOTE: in order to receive published data
-            # you must already be subscribed to the topic at publication
-            # time.
-
-        Parameters
-        ----------
-        topic : str
-            Name of a topic to subscribe to
-
-        discover : bool
-            Try to discover other peers subscribed to the same topic
-            (defaults to False)
-
-        Returns
-        -------
-        Generator wrapped in a context manager that maintains a connection
-        stream to the given topic.
-        """
-        args = (topic, discover)
-        return SubChannel(self._client.request('/pubsub/sub', args,
-                                               stream=True, decoder='json'))
diff --git a/ipfsapi/client/__init__.py b/ipfsapi/client/__init__.py
new file mode 100644
index 00000000..63a327fc
--- /dev/null
+++ b/ipfsapi/client/__init__.py
@@ -0,0 +1,309 @@
+# -*- coding: utf-8 -*-
+"""IPFS API Bindings for Python.
+
+Classes:
+
+ * Client – a TCP client for interacting with an IPFS daemon
+"""
+from __future__ import absolute_import
+
+import functools
+import inspect
+import os
+import re
+import warnings
+try:  #PY3
+    import urllib.parse
+except ImportError:  #PY2
+    class urllib:
+        import urlparse as parse
+
+import ipfshttpclient
+import netaddr
+
+DEFAULT_HOST = str(os.environ.get("PY_IPFSAPI_DEFAULT_HOST", 'localhost'))
+DEFAULT_PORT = int(os.environ.get("PY_IPFSAPI_DEFAULT_PORT", 5001))
+DEFAULT_BASE = str(os.environ.get("PY_IPFSAPI_DEFAULT_BASE", 'api/v0'))
+
+VERSION_MINIMUM = "0.4.3"
+VERSION_MAXIMUM = "0.5.0"
+
+from .. 
import exceptions, encoding + +from . import base + + +def assert_version(version, minimum=VERSION_MINIMUM, maximum=VERSION_MAXIMUM): + """Make sure that the given daemon version is supported by this client + version. + + Raises + ------ + ~ipfsapi.exceptions.VersionMismatch + + Parameters + ---------- + version : str + The version of an IPFS daemon. + minimum : str + The minimal IPFS version to allow. + maximum : str + The maximum IPFS version to allow. + """ + # Convert version strings to integer tuples + version = list(map(int, version.split('-', 1)[0].split('.'))) + minimum = list(map(int, minimum.split('-', 1)[0].split('.'))) + maximum = list(map(int, maximum.split('-', 1)[0].split('.'))) + + if minimum > version or version >= maximum: + raise exceptions.VersionMismatch(version, minimum, maximum) + + +def connect(host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE, + chunk_size=4096, **defaults): + """Create a new :class:`~ipfsapi.Client` instance and connect to the + daemon to validate that its version is supported. + + Raises + ------ + ~ipfsapi.exceptions.VersionMismatch + ~ipfsapi.exceptions.ErrorResponse + ~ipfsapi.exceptions.ConnectionError + ~ipfsapi.exceptions.ProtocolError + ~ipfsapi.exceptions.StatusError + ~ipfsapi.exceptions.TimeoutError + + + All parameters are identical to those passed to the constructor of the + :class:`~ipfsapi.Client` class. + + Returns + ------- + ~ipfsapi.Client + """ + # Create client instance + client = Client(host, port, base, chunk_size, **defaults) + + # Query version number from daemon and validate it + assert_version(client.version()['Version']) + + return client + + +class Client(ipfshttpclient.Client): + # Aliases for previous method names + key_gen = base.DeprecatedMethodProperty("key", "gen") + key_list = base.DeprecatedMethodProperty("key", "list") + key_rename = base.DeprecatedMethodProperty("key", "rename") + key_rm = base.DeprecatedMethodProperty("key", "rm") + + block_get = base.DeprecatedMethodProperty("block", "get") + block_put = base.DeprecatedMethodProperty("block", "put") + block_stat = base.DeprecatedMethodProperty("block", "stat") + + files_cp = base.DeprecatedMethodProperty("files", "cp") + files_ls = base.DeprecatedMethodProperty("files", "ls") + files_mkdir = base.DeprecatedMethodProperty("files", "mkdir") + files_stat = base.DeprecatedMethodProperty("files", "stat") + files_rm = base.DeprecatedMethodProperty("files", "rm") + files_read = base.DeprecatedMethodProperty("files", "read") + files_write = base.DeprecatedMethodProperty("files", "write") + files_mv = base.DeprecatedMethodProperty("files", "mv") + + object_data = base.DeprecatedMethodProperty("object", "data") + object_get = base.DeprecatedMethodProperty("object", "get") + object_links = base.DeprecatedMethodProperty("object", "links") + object_new = base.DeprecatedMethodProperty("object", "new") + object_put = base.DeprecatedMethodProperty("object", "put") + object_stat = base.DeprecatedMethodProperty("object", "stat") + object_patch_add_link = base.DeprecatedMethodProperty("object", "patch", "add_link") + object_patch_append_data = base.DeprecatedMethodProperty("object", "patch", "append_data") + object_patch_rm_link = base.DeprecatedMethodProperty("object", "patch", "rm_link") + object_patch_set_data = base.DeprecatedMethodProperty("object", "patch", "set_data") + + pin_add = base.DeprecatedMethodProperty("pin", "add") + pin_ls = base.DeprecatedMethodProperty("pin", "ls") + pin_rm = base.DeprecatedMethodProperty("pin", "rm") + pin_update = 
base.DeprecatedMethodProperty("pin", "update") + pin_verify = base.DeprecatedMethodProperty("pin", "verify") + + refs = base.DeprecatedMethodProperty("unstable", "refs") + refs_local = base.DeprecatedMethodProperty("unstable", "refs", "local") + + bootstrap_add = base.DeprecatedMethodProperty("bootstrap", "add") + bootstrap_list = base.DeprecatedMethodProperty("bootstrap", "list") + bootstrap_rm = base.DeprecatedMethodProperty("bootstrap", "rm") + + bitswap_stat = base.DeprecatedMethodProperty("bitswap", "stat") + bitswap_wantlist = base.DeprecatedMethodProperty("bitswap", "wantlist") + + dht_findpeer = base.DeprecatedMethodProperty("dht", "findpeer") + dht_findprovs = base.DeprecatedMethodProperty("dht", "findproves") + dht_get = base.DeprecatedMethodProperty("dht", "get") + dht_put = base.DeprecatedMethodProperty("dht", "put") + dht_query = base.DeprecatedMethodProperty("dht", "query") + + pubsub_ls = base.DeprecatedMethodProperty("pubsub", "ls") + pubsub_peers = base.DeprecatedMethodProperty("pubsub", "peers") + pubsub_pub = base.DeprecatedMethodProperty("pubsub", "publish") + pubsub_sub = base.DeprecatedMethodProperty("pubsub", "subscribe") + + swarm_addrs = base.DeprecatedMethodProperty("swarm", "addrs") + swarm_connect = base.DeprecatedMethodProperty("swarm", "connect") + swarm_disconnect = base.DeprecatedMethodProperty("swarm", "disconnect") + swarm_peers = base.DeprecatedMethodProperty("swarm", "peers") + swarm_filters_add = base.DeprecatedMethodProperty("swarm", "filters", "add") + swarm_filters_rm = base.DeprecatedMethodProperty("swarm", "filters", "rm") + + name_publish = base.DeprecatedMethodProperty("name", "publish") + name_resolve = base.DeprecatedMethodProperty("name", "resolve") + + repo_gc = base.DeprecatedMethodProperty("repo", "gc") + repo_stat = base.DeprecatedMethodProperty("repo", "stat") + + config = base.DeprecatedMethodProperty("config", "set") + config_show = base.DeprecatedMethodProperty("config", "get") + config_replace = base.DeprecatedMethodProperty("config", "replace") + + log_level = base.DeprecatedMethodProperty("unstable", "log", "level") + log_ls = base.DeprecatedMethodProperty("unstable", "log", "ls") + log_tail = base.DeprecatedMethodProperty("unstable", "log", "tail") + + shutdown = base.DeprecatedMethodProperty("stop") + + + def __init__(self, host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE, + chunk_size=4096, **defaults): + # Assemble and parse the URL these parameters are supposed to represent + if not re.match('^https?://', host.lower()): + host = 'http://' + host + url = urllib.parse.urlsplit('%s:%s/%s' % (host, port, base)) + + # Detect whether `host` is a (DNS) hostname or an IP address + host_type = "dns" + try: + host_type = "ip{0}".format(netaddr.IPAddress(url.hostname).version) + except netaddr.AddrFormatError: + pass + + addr = "/{0}/{1}/tcp/{2}/{3}".format(host_type, url.hostname, url.port, url.scheme) + super(Client, self).__init__(addr, base, chunk_size, timeout=None, **defaults) + + + def __getattribute__(self, name): + value = super(Client, self).__getattribute__(name) + if inspect.ismethod(value): + @functools.wraps(value) + def wrapper(*args, **kwargs): + # Rewrite changed named parameter names + if "multihash" in kwargs: + kwargs["cid"] = kwargs.pop("multihash") + if "multihashes" in kwargs: + kwargs["cids"] = kwargs.pop("multihashes") + + try: + return value(*args, **kwargs) + # Partial error responses used to incorrectly just return + # the parts that were successfully received followed by the + # (undetected) error frame 
+ except exceptions.PartialErrorResponse as error: + return error.partial + [{"Type": "error", "Message": str(error)}] + return wrapper + return value + + + def add(self, files, recursive=False, pattern='**', *args, **kwargs): + # Signature changed to: add(self, *files, recursive=False, pattern='**', **kwargs) + if not isinstance(files, (list, tuple)): + files = (files,) + return super(Client, self).add(*files, recursive=recursive, pattern=pattern, **kwargs) + + + # Dropped API methods + def bitswap_unwant(self, key, **kwargs): + """Deprecated method: Do not use anymore""" + warnings.warn( + "IPFS API function “bitswap_unwant” support has been dropped " + "from go-ipfs", FutureWarning + ) + + args = (key,) + return self._client.request('/bitswap/unwant', args, **kwargs) + + + def file_ls(self, multihash, **kwargs): + """Deprecated method: Replace usages with the similar “client.ls”""" + warnings.warn( + "IPFS API function “file_ls” support is highly deprecated and will " + "be removed soon from go-ipfs, use plain “ls” instead", FutureWarning + ) + + args = (multihash,) + return self._client.request('/file/ls', args, decoder='json', **kwargs) + + + # Dropped utility methods + def add_pyobj(self, py_obj, **kwargs): + """Adds a picklable Python object as a file to IPFS. + + .. deprecated:: 0.4.2 + The ``*_pyobj`` APIs allow for arbitrary code execution if abused. + Either switch to :meth:`~ipfsapi.Client.add_json` or use + ``client.add_bytes(pickle.dumps(py_obj))`` instead. + + Please see :meth:`~ipfsapi.Client.get_pyobj` for the + **security risks** of using these methods! + + .. code-block:: python + + >>> c.add_pyobj([0, 1.0, 2j, '3', 4e5]) + 'QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji' + + Parameters + ---------- + py_obj : object + A picklable Python object + + Returns + ------- + str : Hash of the added IPFS object + """ + warnings.warn("Using `*_pyobj` on untrusted data is a security risk", + DeprecationWarning) + return self.add_bytes(encoding.Pickle().encode(py_obj), **kwargs) + + def get_pyobj(self, multihash, **kwargs): + """Loads a pickled Python object from IPFS. + + .. deprecated:: 0.4.2 + The ``*_pyobj`` APIs allow for arbitrary code execution if abused. + Either switch to :meth:`~ipfsapi.Client.get_json` or use + ``pickle.loads(client.cat(multihash))`` instead. + + .. caution:: + + The pickle module is not intended to be secure against erroneous or + maliciously constructed data. Never unpickle data received from an + untrusted or unauthenticated source. + + Please **read** + `this article <https://www.cs.uic.edu/%7Es/musings/pickle/>`_ to + understand the security risks of using this method! + + .. code-block:: python + + >>> c.get_pyobj('QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji') + [0, 1.0, 2j, '3', 400000.0] + + Parameters + ---------- + multihash : str + Multihash of the IPFS object to load + + Returns + ------- + object : Deserialized IPFS Python object + """ + warnings.warn("Using `*_pyobj` on untrusted data is a security risk", + DeprecationWarning) + return encoding.Pickle().parse(self.cat(multihash, **kwargs)) diff --git a/ipfsapi/client/base.py b/ipfsapi/client/base.py new file mode 100644 index 00000000..69f7fb20 --- /dev/null +++ b/ipfsapi/client/base.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import warnings + +from . 
import DEFAULT_HOST, DEFAULT_PORT, DEFAULT_BASE
+
+
+class DeprecatedMethodProperty(object):
+    def __init__(self, *path, **kwargs):
+        #PY2: No support for kw-only parameters after glob parameters
+        prefix = kwargs.pop("prefix", [])
+        strip = kwargs.pop("strip", 0)
+        assert not kwargs
+
+        self.props = path
+        self.path = tuple(prefix) + (path[:-strip] if strip > 0 else tuple(path))
+        self.warned = False
+
+        self.__help__ = "Deprecated method: Please use “client.{0}” instead".format(
+            ".".join(self.path)
+        )
+
+    def __get__(self, obj, type=None):
+        if not self.warned:
+            message = "IPFS API function “{0}” has been renamed to “{1}”".format(
+                "_".join(self.path), ".".join(self.path)
+            )
+            warnings.warn(message, FutureWarning)
+            self.warned = True
+
+        for name in self.props:
+            obj = getattr(obj, name)
+        return obj
diff --git a/ipfsapi/exceptions.py b/ipfsapi/exceptions.py
index d7f2af44..2c167dcb 100644
--- a/ipfsapi/exceptions.py
+++ b/ipfsapi/exceptions.py
@@ -16,107 +16,19 @@
      +-- TimeoutError
 
 """
-
-
-class Error(Exception):
-    """Base class for all exceptions in this module."""
-    pass
-
-
-class VersionMismatch(Error):
-    """Raised when daemon version is not supported by this client version."""
-
-    def __init__(self, current, minimum, maximum):
-        self.current = current
-        self.minimum = minimum
-        self.maximum = maximum
-
-        msg = "Unsupported daemon version '{}' (not in range: {} – {})".format(
-            current, minimum, maximum
-        )
-        Error.__init__(self, msg)
-
-
-###############
-# encoding.py #
-###############
-class EncoderError(Error):
-    """Base class for all encoding and decoding related errors."""
-
-    def __init__(self, message, encoder_name):
-        self.encoder_name = encoder_name
-
-        Error.__init__(self, message)
-
-
-class EncoderMissingError(EncoderError):
-    """Raised when a requested encoder class does not actually exist."""
-
-    def __init__(self, encoder_name):
-        msg = "Unknown encoder: '{}'".format(encoder_name)
-        EncoderError.__init__(self, msg, encoder_name)
-
-
-class EncodingError(EncoderError):
-    """Raised when encoding a Python object into a byte string has failed
-    due to some problem with the input data."""
-
-    def __init__(self, encoder_name, original):
-        self.original = original
-
-        msg = "Object encoding error: {}".format(original)
-        EncoderError.__init__(self, msg, encoder_name)
-
-
-class DecodingError(EncoderError):
-    """Raised when decoding a byte string to a Python object has failed due to
-    some problem with the input data."""
-
-    def __init__(self, encoder_name, original):
-        self.original = original
-
-        msg = "Object decoding error: {}".format(original)
-        EncoderError.__init__(self, msg, encoder_name)
-
-
-###########
-# http.py #
-###########
-class CommunicationError(Error):
-    """Base class for all network communication related errors."""
-
-    def __init__(self, original, _message=None):
-        self.original = original
-
-        if _message:
-            msg = _message
-        else:
-            msg = "{}: {}".format(original.__class__.__name__, str(original))
-        Error.__init__(self, msg)
-
-
-class ProtocolError(CommunicationError):
-    """Raised when parsing the response from the daemon has failed.
- - This can most likely occur if the service on the remote end isn't in fact - an IPFS daemon.""" - - -class StatusError(CommunicationError): - """Raised when the daemon responds with an error to our request.""" - - -class ErrorResponse(StatusError): - """Raised when the daemon has responded with an error message because the - requested operation could not be carried out.""" - - def __init__(self, message, original): - StatusError.__init__(self, original, message) - - -class ConnectionError(CommunicationError): - """Raised when connecting to the service has failed on the socket layer.""" - - -class TimeoutError(CommunicationError): - """Raised when the daemon didn't respond in time.""" +# Delegate list of exceptions to `ipfshttpclient` +from ipfshttpclient.exceptions import * +__all__ = [ + "Error", + "VersionMismatch", + "EncoderError", + "EncoderMissingError", + "EncodingError", + "DecodingError", + "CommunicationError", + "ProtocolError", + "StatusError", + "ErrorResponse", + "ConnectionError", + "TimeoutError" +] \ No newline at end of file diff --git a/ipfsapi/http.py b/ipfsapi/http.py deleted file mode 100644 index c954a9a9..00000000 --- a/ipfsapi/http.py +++ /dev/null @@ -1,320 +0,0 @@ -# -*- encoding: utf-8 -*- -"""HTTP client for api requests. - -This is pluggable into the IPFS Api client and will hopefully be supplemented -by an asynchronous version. -""" -from __future__ import absolute_import - -import abc -import contextlib -import functools -import re -import tarfile -from six.moves import http_client - -import requests -import six - -from . import encoding -from . import exceptions - - -def pass_defaults(func): - """Decorator that returns a function named wrapper. - - When invoked, wrapper invokes func with default kwargs appended. - - Parameters - ---------- - func : callable - The function to append the default kwargs to - """ - @functools.wraps(func) - def wrapper(self, *args, **kwargs): - merged = {} - merged.update(self.defaults) - merged.update(kwargs) - return func(self, *args, **merged) - return wrapper - - -def _notify_stream_iter_closed(): - pass # Mocked by unit tests to determine check for proper closing - - -class StreamDecodeIterator(object): - """ - Wrapper around `Iterable` that allows the iterable to be used in a - context manager (`with`-statement) allowing for easy cleanup. 
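-
-    A minimal usage sketch (an addition; assumes ``res`` is a streaming
-    ``requests`` response and ``parser`` an ``ipfsapi.encoding.Json()``
-    instance with the ``parse_partial``/``parse_finalize`` API used below):
-
-    .. code-block:: python
-
-        with StreamDecodeIterator(res, parser) as items:
-            for obj in items:
-                print(obj)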
- """ - def __init__(self, response, parser): - self._response = response - self._parser = parser - self._response_iter = response.iter_content(chunk_size=None) - self._parser_iter = None - - def __iter__(self): - return self - - def __next__(self): - while True: - # Try reading for current parser iterator - if self._parser_iter is not None: - try: - return next(self._parser_iter) - except StopIteration: - self._parser_iter = None - - # Forward exception to caller if we do not expect any - # further data - if self._response_iter is None: - raise - - try: - data = next(self._response_iter) - - # Create new parser iterator using the newly recieved data - self._parser_iter = iter(self._parser.parse_partial(data)) - except StopIteration: - # No more data to receive – destroy response iterator and - # iterate over the final fragments returned by the parser - self._response_iter = None - self._parser_iter = iter(self._parser.parse_finalize()) - - #PY2: Old iterator syntax - def next(self): - return self.__next__() - - def __enter__(self): - return self - - def __exit__(self, *a): - self.close() - - def close(self): - # Clean up any open iterators first - if self._response_iter is not None: - self._response_iter.close() - if self._parser_iter is not None: - self._parser_iter.close() - self._response_iter = None - self._parser_iter = None - - # Clean up response object and parser - if self._response is not None: - self._response.close() - self._response = None - self._parser = None - - _notify_stream_iter_closed() - - -def stream_decode_full(response, parser): - with StreamDecodeIterator(response, parser) as response_iter: - result = list(response_iter) - if len(result) == 0: - return b'' - if len(result) == 1: - return result[0] - else: - return result - - -class HTTPClient(object): - """An HTTP client for interacting with the IPFS daemon. - - Parameters - ---------- - host : str - The host the IPFS daemon is running on - port : int - The port the IPFS daemon is running at - base : str - The path prefix for API calls - defaults : dict - The default parameters to be passed to - :meth:`~ipfsapi.http.HTTPClient.request` - """ - - __metaclass__ = abc.ABCMeta - - def __init__(self, host, port, base, **defaults): - self.host = host - self.port = port - if not re.match('^https?://', host.lower()): - host = 'http://' + host - - self.base = '%s:%s/%s' % (host, port, base) - - self.defaults = defaults - self._session = None - - def _do_request(self, *args, **kwargs): - try: - if self._session: - return self._session.request(*args, **kwargs) - else: - return requests.request(*args, **kwargs) - except requests.ConnectionError as error: - six.raise_from(exceptions.ConnectionError(error), error) - except http_client.HTTPException as error: - six.raise_from(exceptions.ProtocolError(error), error) - except requests.Timeout as error: - six.raise_from(exceptions.TimeoutError(error), error) - - def _do_raise_for_status(self, response, content=None): - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - # If we have decoded an error response from the server, - # use that as the exception message; otherwise, just pass - # the exception on to the caller. 
-            if isinstance(content, dict) and 'Message' in content:
-                msg = content['Message']
-                six.raise_from(exceptions.ErrorResponse(msg, error), error)
-            else:
-                six.raise_from(exceptions.StatusError(error), error)
-
-    def _request(self, method, url, params, parser, stream=False, files=None,
-                 headers={}, data=None):
-        # Do HTTP request (synchronously)
-        res = self._do_request(method, url, params=params, stream=stream,
-                               files=files, headers=headers, data=data)
-
-        if stream:
-            # Raise exceptions for response status
-            self._do_raise_for_status(res)
-
-            # Decode each item as it is read
-            return StreamDecodeIterator(res, parser)
-        else:
-            # First decode received item
-            ret = stream_decode_full(res, parser)
-
-            # Raise exception for response status
-            # (optionally incorporating the response message, if applicable)
-            self._do_raise_for_status(res, ret)
-
-            return ret
-
-    @pass_defaults
-    def request(self, path,
-                args=[], files=[], opts={}, stream=False,
-                decoder=None, headers={}, data=None):
-        """Makes an HTTP request to the IPFS daemon.
-
-        This function returns the contents of the HTTP response from the IPFS
-        daemon.
-
-        Raises
-        ------
-        ~ipfsapi.exceptions.ErrorResponse
-        ~ipfsapi.exceptions.ConnectionError
-        ~ipfsapi.exceptions.ProtocolError
-        ~ipfsapi.exceptions.StatusError
-        ~ipfsapi.exceptions.TimeoutError
-
-        Parameters
-        ----------
-        path : str
-            The REST command path to send
-        args : list
-            Positional parameters to be sent along with the HTTP request
-        files : :class:`io.RawIOBase` | :obj:`str` | :obj:`list`
-            The file object(s) or path(s) to stream to the daemon
-        opts : dict
-            Query string parameters to be sent along with the HTTP request
-        decoder : str
-            The decoder to use to parse the HTTP response
-        kwargs : dict
-            Additional arguments to pass to :mod:`requests`
-        """
-        url = self.base + path
-
-        params = []
-        params.append(('stream-channels', 'true'))
-        for opt in opts.items():
-            params.append(opt)
-        for arg in args:
-            params.append(('arg', arg))
-
-        method = 'post' if (files or data) else 'get'
-
-        parser = encoding.get_encoding(decoder if decoder else "none")
-
-        return self._request(method, url, params, parser, stream,
-                             files, headers, data)
-
-    @pass_defaults
-    def download(self, path, args=[], filepath=None, opts={},
-                 compress=True, **kwargs):
-        """Makes a request to the IPFS daemon to download a file.
-
-        Downloads a file or files from IPFS into the current working
-        directory, or the directory given by ``filepath``.
-
-        Raises
-        ------
-        ~ipfsapi.exceptions.ErrorResponse
-        ~ipfsapi.exceptions.ConnectionError
-        ~ipfsapi.exceptions.ProtocolError
-        ~ipfsapi.exceptions.StatusError
-        ~ipfsapi.exceptions.TimeoutError
-
-        Parameters
-        ----------
-        path : str
-            The REST command path to send
-        filepath : str
-            The local path where IPFS will store downloaded files
-
-            Defaults to the current working directory.
-        args : list
-            Positional parameters to be sent along with the HTTP request
-        opts : dict
-            Query string parameters to be sent along with the HTTP request
-        compress : bool
-            Whether the downloaded file should be GZip compressed by the
-            daemon before being sent to the client
-        kwargs : dict
-            Additional arguments to pass to :mod:`requests`
-        """
-        url = self.base + path
-        wd = filepath or '.'
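-        # Added note: the daemon replies with a TAR archive (gzip-compressed
-        # when ``compress`` is set); that is what the ``archive`` and
-        # ``compress`` parameters below request, and what the ``tarfile``
-        # extraction at the end unpacks into ``wd``.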
-
-        params = []
-        params.append(('stream-channels', 'true'))
-        params.append(('archive', 'true'))
-        if compress:
-            params.append(('compress', 'true'))
-
-        for opt in opts.items():
-            params.append(opt)
-        for arg in args:
-            params.append(('arg', arg))
-
-        method = 'get'
-
-        res = self._do_request(method, url, params=params, stream=True,
-                               **kwargs)
-
-        self._do_raise_for_status(res)
-
-        # try to stream download as a tar file stream
-        mode = 'r|gz' if compress else 'r|'
-
-        with tarfile.open(fileobj=res.raw, mode=mode) as tf:
-            tf.extractall(path=wd)
-
-    @contextlib.contextmanager
-    def session(self):
-        """A context manager for this client's session.
-
-        This function closes the current session when this client goes out of
-        scope.
-        """
-        self._session = requests.session()
-        yield
-        self._session.close()
-        self._session = None
diff --git a/ipfsapi/multipart.py b/ipfsapi/multipart.py
deleted file mode 100644
index ed732156..00000000
--- a/ipfsapi/multipart.py
+++ /dev/null
@@ -1,692 +0,0 @@
-"""HTTP :mimetype:`multipart/*`-encoded file streaming.
-"""
-from __future__ import absolute_import
-
-import re
-import requests
-import io
-import os
-from inspect import isgenerator
-from uuid import uuid4
-
-import six
-
-from six.moves.urllib.parse import quote
-
-from . import utils
-
-if six.PY3:
-    from builtins import memoryview as buffer
-
-
-CRLF = b'\r\n'
-
-default_chunk_size = 4096
-
-
-def content_disposition(fn, disptype='file'):
-    """Returns a dict containing the MIME content-disposition header for a file.
-
-    .. code-block:: python
-
-        >>> content_disposition('example.txt')
-        {'Content-Disposition': 'file; filename="example.txt"'}
-
-        >>> content_disposition('example.txt', 'attachment')
-        {'Content-Disposition': 'attachment; filename="example.txt"'}
-
-    Parameters
-    ----------
-    fn : str
-        Filename to retrieve the MIME content-disposition for
-    disptype : str
-        The disposition type to use for the file
-    """
-    disp = '%s; filename="%s"' % (
-        disptype,
-        quote(fn, safe='')
-    )
-    return {'Content-Disposition': disp}
-
-
-def content_type(fn):
-    """Returns a dict with the content-type header for a file.
-
-    Guesses the mimetype for a filename and returns a dict
-    containing the content-type header.
-
-    .. code-block:: python
-
-        >>> content_type('example.txt')
-        {'Content-Type': 'text/plain'}
-
-        >>> content_type('example.jpeg')
-        {'Content-Type': 'image/jpeg'}
-
-        >>> content_type('example')
-        {'Content-Type': 'application/octet-stream'}
-
-    Parameters
-    ----------
-    fn : str
-        Filename to guess the content-type for
-    """
-    return {'Content-Type': utils.guess_mimetype(fn)}
-
-
-def multipart_content_type(boundary, subtype='mixed'):
-    """Creates a MIME multipart header with the given configuration.
-
-    Returns a dict containing a MIME multipart header with the given
-    boundary.
-
-    .. code-block:: python
-
-        >>> multipart_content_type('8K5rNKlLQVyreRNncxOTeg')
-        {'Content-Type': 'multipart/mixed; boundary="8K5rNKlLQVyreRNncxOTeg"'}
-
-        >>> multipart_content_type('8K5rNKlLQVyreRNncxOTeg', 'alt')
-        {'Content-Type': 'multipart/alt; boundary="8K5rNKlLQVyreRNncxOTeg"'}
-
-    Parameters
-    ----------
-    boundary : str
-        The content delimiter to put into the header
-    subtype : str
-        The subtype in :mimetype:`multipart/*`-domain to put into the header
-    """
-    ctype = 'multipart/%s; boundary="%s"' % (
-        subtype,
-        boundary
-    )
-    return {'Content-Type': ctype}
-
-
-class BodyGenerator(object):
-    """Generators for creating the body of a :mimetype:`multipart/*`
-    HTTP request.
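-
-    An added sketch of the generator protocol (my example, assuming a file
-    named ``example.txt``; every piece is produced lazily as bytes):
-
-    .. code-block:: python
-
-        gen = BodyGenerator("example.txt")
-        head = b"".join(gen.write_headers())           # envelope header lines
-        part = b"".join(gen.file_open("example.txt"))  # boundary + file headers
-        tail = b"".join(gen.close())                   # closing boundary marker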
- - Parameters - ---------- - name : str - The filename of the file(s)/content being encoded - disptype : str - The ``Content-Disposition`` of the content - subtype : str - The :mimetype:`multipart/*`-subtype of the content - boundary : str - An identifier used as a delimiter for the content's body - """ - - def __init__(self, name, disptype='file', subtype='mixed', boundary=None): - # If the boundary is unspecified, make a random one - if boundary is None: - boundary = self._make_boundary() - self.boundary = boundary - - headers = content_disposition(name, disptype=disptype) - headers.update(multipart_content_type(boundary, subtype=subtype)) - self.headers = headers - - def _make_boundary(self): - """Returns a random hexadecimal string (UUID 4). - - The HTTP multipart request body spec requires a boundary string to - separate different content chunks within a request, and this is - usually a random string. Using a UUID is an easy way to generate - a random string of appropriate length as this content separator. - """ - return uuid4().hex - - def _write_headers(self, headers): - """Yields the HTTP header text for some content. - - Parameters - ---------- - headers : dict - The headers to yield - """ - if headers: - for name in sorted(headers.keys()): - yield name.encode("ascii") - yield b': ' - yield headers[name].encode("ascii") - yield CRLF - yield CRLF - - def write_headers(self): - """Yields the HTTP header text for the content.""" - for c in self._write_headers(self.headers): - yield c - - def open(self, **kwargs): - """Yields the body section for the content. - """ - yield b'--' - yield self.boundary.encode() - yield CRLF - - def file_open(self, fn): - """Yields the opening text of a file section in multipart HTTP. - - Parameters - ---------- - fn : str - Filename for the file being opened and added to the HTTP body - """ - yield b'--' - yield self.boundary.encode() - yield CRLF - headers = content_disposition(fn) - headers.update(content_type(fn)) - for c in self._write_headers(headers): - yield c - - def file_close(self): - """Yields the end text of a file section in HTTP multipart encoding.""" - yield CRLF - - def close(self): - """Yields the ends of the content area in a HTTP multipart body.""" - yield b'--' - yield self.boundary.encode() - yield b'--' - yield CRLF - - -class BufferedGenerator(object): - """Generator that encodes multipart/form-data. - - An abstract buffered generator class which encodes - :mimetype:`multipart/form-data`. - - Parameters - ---------- - name : str - The name of the file to encode - chunk_size : int - The maximum size that any single file chunk may have in bytes - """ - - def __init__(self, name, chunk_size=default_chunk_size): - self.chunk_size = chunk_size - self._internal = bytearray(chunk_size) - self.buf = buffer(self._internal) - - self.name = name - self.envelope = BodyGenerator(self.name, - disptype='form-data', - subtype='form-data') - self.headers = self.envelope.headers - - def file_chunks(self, fp): - """Yields chunks of a file. - - Parameters - ---------- - fp : io.RawIOBase - The file to break into chunks - (must be an open file or have the ``readinto`` method) - """ - fsize = utils.file_size(fp) - offset = 0 - if hasattr(fp, 'readinto'): - while offset < fsize: - nb = fp.readinto(self._internal) - yield self.buf[:nb] - offset += nb - else: - while offset < fsize: - nb = min(self.chunk_size, fsize - offset) - yield fp.read(nb) - offset += nb - - def gen_chunks(self, gen): - """Generates byte chunks of a given size. 
-
-        Takes a bytes generator and yields chunks of a maximum of
-        ``chunk_size`` bytes.
-
-        Parameters
-        ----------
-        gen : generator
-            The bytes generator that produces the bytes
-        """
-        for data in gen:
-            size = len(data)
-            if size < self.chunk_size:
-                yield data
-            else:
-                mv = buffer(data)
-                offset = 0
-                while offset < size:
-                    nb = min(self.chunk_size, size - offset)
-                    yield mv[offset:offset + nb]
-                    offset += nb
-
-    def body(self, *args, **kwargs):
-        """Returns the body of the buffered file.
-
-        .. note:: This function is not actually implemented.
-        """
-        raise NotImplementedError
-
-    def close(self):
-        """Yields the closing text of a multipart envelope."""
-        for chunk in self.gen_chunks(self.envelope.close()):
-            yield chunk
-
-
-class FileStream(BufferedGenerator):
-    """Generator that encodes multiple files into HTTP multipart.
-
-    A buffered generator that encodes an array of files as
-    :mimetype:`multipart/form-data`. This is a concrete implementation of
-    :class:`~ipfsapi.multipart.BufferedGenerator`.
-
-    Parameters
-    ----------
-    files : str | io.RawIOBase | list
-        The file(s) to encode
-    chunk_size : int
-        The maximum size that any single file chunk may have in bytes
-    """
-
-    def __init__(self, files, chunk_size=default_chunk_size):
-        BufferedGenerator.__init__(self, 'files', chunk_size=chunk_size)
-
-        self.files = utils.clean_files(files)
-
-    def body(self):
-        """Yields the body of the buffered file."""
-        for fp, need_close in self.files:
-            try:
-                name = os.path.basename(fp.name)
-            except AttributeError:
-                name = ''
-            for chunk in self.gen_chunks(self.envelope.file_open(name)):
-                yield chunk
-            for chunk in self.file_chunks(fp):
-                yield chunk
-            for chunk in self.gen_chunks(self.envelope.file_close()):
-                yield chunk
-            if need_close:
-                fp.close()
-        for chunk in self.close():
-            yield chunk
-
-
-def glob_compile(pat):
-    """Translate a shell glob PATTERN to a regular expression.
-
-    This is almost entirely based on `fnmatch.translate` source-code from the
-    python 3.5 standard-library.
-    """
-
-    i, n = 0, len(pat)
-    res = ''
-    while i < n:
-        c = pat[i]
-        i = i + 1
-        if c == '/' and len(pat) > (i + 2) and pat[i:(i + 3)] == '**/':
-            # Special-case for "any number of sub-directories" operator since
-            # may also expand to no entries:
-            # Otherwise `a/**/b` would expand to `a[/].*[/]b` which wouldn't
-            # match the immediate sub-directories of `a`, like `a/b`.
-            i = i + 3
-            res = res + '[/]([^/]*[/])*'
-        elif c == '*':
-            if len(pat) > i and pat[i] == '*':
-                i = i + 1
-                res = res + '.*'
-            else:
-                res = res + '[^/]*'
-        elif c == '?':
-            res = res + '[^/]'
-        elif c == '[':
-            j = i
-            if j < n and pat[j] == '!':
-                j = j + 1
-            if j < n and pat[j] == ']':
-                j = j + 1
-            while j < n and pat[j] != ']':
-                j = j + 1
-            if j >= n:
-                res = res + '\\['
-            else:
-                stuff = pat[i:j].replace('\\', '\\\\')
-                i = j + 1
-                if stuff[0] == '!':
-                    stuff = '^' + stuff[1:]
-                elif stuff[0] == '^':
-                    stuff = '\\' + stuff
-                res = '%s[%s]' % (res, stuff)
-        else:
-            res = res + re.escape(c)
-    return re.compile('^' + res + '\Z(?ms)' + '$')
-
-
-class DirectoryStream(BufferedGenerator):
-    """Generator that encodes a directory into HTTP multipart.
-
-    A buffered generator that encodes an array of files as
-    :mimetype:`multipart/form-data`. This is a concrete implementation of
-    :class:`~ipfsapi.multipart.BufferedGenerator`.
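-
-    An added sketch of the pattern semantics (my example; ``patterns`` are
-    compiled via :func:`glob_compile`, where ``**`` may span any number of
-    intermediate directories while ``*`` stops at ``/``):
-
-    .. code-block:: python
-
-        >>> bool(glob_compile('a/**/b').match('a/x/y/b'))
-        True
-        >>> bool(glob_compile('*.py').match('pkg/mod.py'))
-        False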
-
-    Parameters
-    ----------
-    directory : str
-        The filepath of the directory to encode
-    patterns : str | list
-        A single glob pattern or a list of several glob patterns and
-        compiled regular expressions used to determine which filepaths to match
-    chunk_size : int
-        The maximum size that any single file chunk may have in bytes
-    """
-
-    def __init__(self,
-                 directory,
-                 recursive=False,
-                 patterns='**',
-                 chunk_size=default_chunk_size):
-        BufferedGenerator.__init__(self, directory, chunk_size=chunk_size)
-
-        self.patterns = []
-        patterns = [patterns] if isinstance(patterns, str) else patterns
-        for pattern in patterns:
-            if isinstance(pattern, str):
-                self.patterns.append(glob_compile(pattern))
-            else:
-                self.patterns.append(pattern)
-
-        self.directory = os.path.normpath(directory)
-        self.recursive = recursive
-        self._request = self._prepare()
-        self.headers = self._request.headers
-
-    def body(self):
-        """Returns the HTTP body for this directory upload request."""
-        return self._request.body
-
-    def headers(self):
-        """Returns the HTTP headers for this directory upload request."""
-        return self._request.headers
-
-    def _prepare(self):
-        """Pre-formats the multipart HTTP request to transmit the directory."""
-        names = []
-
-        added_directories = set()
-
-        def add_directory(short_path):
-            # Do not continue if this directory has already been added
-            if short_path in added_directories:
-                return
-
-            # Scan for first super-directory that has already been added
-            dir_base = short_path
-            dir_parts = []
-            while dir_base:
-                dir_base, dir_name = os.path.split(dir_base)
-                dir_parts.append(dir_name)
-                if dir_base in added_directories:
-                    break
-
-            # Add missing intermediate directory nodes in the right order
-            while dir_parts:
-                dir_base = os.path.join(dir_base, dir_parts.pop())
-
-                # Create an empty, fake file to represent the directory
-                mock_file = io.StringIO()
-                mock_file.write(u'')
-                # Add this directory to those that will be sent
-                names.append(('files',
-                              (dir_base.replace(os.sep, '/'), mock_file,
-                               'application/x-directory')))
-                # Remember that this directory has already been sent
-                added_directories.add(dir_base)
-
-        def add_file(short_path, full_path):
-            try:
-                # Add the file itself to the upload list
-                names.append(('files', (short_path.replace(os.sep, '/'),
-                                        open(full_path, 'rb'),
-                                        'application/octet-stream')))
-            except OSError:
-                # File might have disappeared between `os.walk()` and `open()`
-                pass
-
-        def match_short_path(short_path):
-            # Remove initial path component so that all files are based in
-            # the target directory itself (not one level above)
-            if os.sep in short_path:
-                path = short_path.split(os.sep, 1)[1]
-            else:
-                return False
-
-            # Convert all path separators to POSIX style
-            path = path.replace(os.sep, '/')
-
-            # Match the simplified path against the patterns
-            for pattern in self.patterns:
-                if pattern.match(path):
-                    return True
-            return False
-
-        # Identify the unnecessary portion of the relative path
-        truncate = os.path.dirname(self.directory)
-        # Traverse the filesystem downward from the target directory's uri
-        # Errors: `os.walk()` will simply return an empty generator if the
-        # target directory does not exist.
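-        # (An added note: a nonexistent directory therefore silently
-        # contributes nothing instead of raising here.)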
-        wildcard_directories = set()
-        for curr_dir, _, files in os.walk(self.directory):
-            # find the path relative to the directory being added
-            if len(truncate) > 0:
-                _, _, short_path = curr_dir.partition(truncate)
-            else:
-                short_path = curr_dir
-            # remove leading / or \ if it is present
-            if short_path.startswith(os.sep):
-                short_path = short_path[1:]
-
-            wildcard_directory = False
-            if os.path.split(short_path)[0] in wildcard_directories:
-                # Parent directory has matched a pattern, all sub-nodes should
-                # be added too
-                wildcard_directories.add(short_path)
-                wildcard_directory = True
-            else:
-                # Check if directory path matches one of the patterns
-                if match_short_path(short_path):
-                    # Directory matched pattern and it should therefore
-                    # be added along with all of its contents
-                    wildcard_directories.add(short_path)
-                    wildcard_directory = True
-
-            # Always add directories within wildcard directories - even if
-            # they are empty
-            if wildcard_directory:
-                add_directory(short_path)
-
-            # Iterate across the files in the current directory
-            for filename in files:
-                # Find the filename relative to the directory being added
-                short_name = os.path.join(short_path, filename)
-                filepath = os.path.join(curr_dir, filename)
-
-                if wildcard_directory:
-                    # Always add files in wildcard directories
-                    add_file(short_name, filepath)
-                else:
-                    # Add file (and all missing intermediary directories)
-                    # if it matches one of the patterns
-                    if match_short_path(short_name):
-                        add_directory(short_path)
-                        add_file(short_name, filepath)
-
-        # Prepare a throw-away POST request just to compute the encoded
-        # multipart body and headers (it is never actually sent anywhere)
-        req = requests.Request("POST", 'http://localhost', files=names)
-        prep = req.prepare()
-        return prep
-
-
-class BytesStream(BufferedGenerator):
-    """A buffered generator that encodes bytes as
-    :mimetype:`multipart/form-data`.
-
-    Parameters
-    ----------
-    data : bytes
-        The binary data to stream to the daemon
-    chunk_size : int
-        The maximum size of a single data chunk
-    """
-
-    def __init__(self, data, chunk_size=default_chunk_size):
-        BufferedGenerator.__init__(self, 'bytes', chunk_size=chunk_size)
-
-        self.data = data if isgenerator(data) else (data,)
-
-    def body(self):
-        """Yields the encoded body."""
-        for chunk in self.gen_chunks(self.envelope.file_open(self.name)):
-            yield chunk
-        for chunk in self.gen_chunks(self.data):
-            yield chunk
-        for chunk in self.gen_chunks(self.envelope.file_close()):
-            yield chunk
-        for chunk in self.close():
-            yield chunk
-
-
-def stream_files(files, chunk_size=default_chunk_size):
-    """Gets a buffered generator for streaming files.
-
-    Returns a buffered generator which encodes a file or list of files as
-    :mimetype:`multipart/form-data` with the corresponding headers.
-
-    Parameters
-    ----------
-    files : str
-        The file(s) to stream
-    chunk_size : int
-        Maximum size of each stream chunk
-    """
-    stream = FileStream(files, chunk_size=chunk_size)
-
-    return stream.body(), stream.headers
-
-
-def stream_directory(directory,
-                     recursive=False,
-                     patterns='**',
-                     chunk_size=default_chunk_size):
-    """Gets a buffered generator for streaming directories.
-
-    Returns a buffered generator which encodes a directory as
-    :mimetype:`multipart/form-data` with the corresponding headers.
-
-    Parameters
-    ----------
-    directory : str
-        The filepath of the directory to stream
-    recursive : bool
-        Stream all content within the directory recursively?
-    patterns : str | list
-        Single *glob* pattern or list of *glob* patterns and compiled
-        regular expressions to match the names of the filepaths to keep
-    chunk_size : int
-        Maximum size of each stream chunk
-    """
-    stream = DirectoryStream(directory,
-                             recursive=recursive,
-                             patterns=patterns,
-                             chunk_size=chunk_size)
-
-    return stream.body(), stream.headers
-
-
-def stream_filesystem_node(path,
-                           recursive=False,
-                           patterns='**',
-                           chunk_size=default_chunk_size):
-    """Gets a buffered generator for streaming either files or directories.
-
-    Returns a buffered generator which encodes the file or directory at the
-    given path as :mimetype:`multipart/form-data` with the corresponding
-    headers.
-
-    Parameters
-    ----------
-    path : str
-        The filepath of the directory or file to stream
-    recursive : bool
-        Stream all content within the directory recursively?
-    patterns : str | list
-        Single *glob* pattern or list of *glob* patterns and compiled
-        regular expressions to match the names of the filepaths to keep
-    chunk_size : int
-        Maximum size of each stream chunk
-    """
-    is_dir = isinstance(path, six.string_types) and os.path.isdir(path)
-    if recursive or is_dir:
-        return stream_directory(path, recursive, patterns, chunk_size)
-    else:
-        return stream_files(path, chunk_size)
-
-
-def stream_bytes(data, chunk_size=default_chunk_size):
-    """Gets a buffered generator for streaming binary data.
-
-    Returns a buffered generator which encodes binary data as
-    :mimetype:`multipart/form-data` with the corresponding headers.
-
-    Parameters
-    ----------
-    data : bytes
-        The data bytes to stream
-    chunk_size : int
-        The maximum size of each stream chunk
-
-    Returns
-    -------
-    (generator, dict)
-    """
-    stream = BytesStream(data, chunk_size=chunk_size)
-
-    return stream.body(), stream.headers
-
-
-def stream_text(text, chunk_size=default_chunk_size):
-    """Gets a buffered generator for streaming text.
-
-    Returns a buffered generator which encodes a string as
-    :mimetype:`multipart/form-data` with the corresponding headers.
-
-    Parameters
-    ----------
-    text : str
-        The text to stream
-    chunk_size : int
-        The maximum size of each stream chunk
-
-    Returns
-    -------
-    (generator, dict)
-    """
-    if isgenerator(text):
-        def binary_stream():
-            for item in text:
-                if six.PY2 and isinstance(item, six.binary_type):
-                    #PY2: Allow binary strings under Python 2 since
-                    #     Python 2 code is not expected to always get the
-                    #     distinction between text and binary strings right.
-                    yield item
-                else:
-                    yield item.encode("utf-8")
-        data = binary_stream()
-    elif six.PY2 and isinstance(text, six.binary_type):
-        #PY2: See above.
-        data = text
-    else:
-        data = text.encode("utf-8")
-
-    return stream_bytes(data, chunk_size)
diff --git a/ipfsapi/utils.py b/ipfsapi/utils.py
deleted file mode 100644
index 9f995417..00000000
--- a/ipfsapi/utils.py
+++ /dev/null
@@ -1,152 +0,0 @@
-"""A module to handle generic operations.
-"""
-
-from __future__ import absolute_import
-
-import mimetypes
-import os
-from functools import wraps
-
-import six
-
-
-def guess_mimetype(filename):
-    """Guesses the mimetype of a file based on the given ``filename``.
-
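-    An added sketch (assuming only the standard ``mimetypes`` database):
-    compound extensions resolve to the base type, since ``guess_type()[0]``
-    ignores encoding suffixes such as ``.gz``:
-
-    .. code-block:: python
-
-        >>> guess_mimetype('archive.tar.gz')
-        'application/x-tar'
-
-    .. 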
code-block:: python - - >>> guess_mimetype('example.txt') - 'text/plain' - >>> guess_mimetype('/foo/bar/example') - 'application/octet-stream' - - Parameters - ---------- - filename : str - The file name or path for which the mimetype is to be guessed - """ - fn = os.path.basename(filename) - return mimetypes.guess_type(fn)[0] or 'application/octet-stream' - - -def ls_dir(dirname): - """Returns files and subdirectories within a given directory. - - Returns a pair of lists, containing the names of directories and files - in ``dirname``. - - Raises - ------ - OSError : Accessing the given directory path failed - - Parameters - ---------- - dirname : str - The path of the directory to be listed - """ - ls = os.listdir(dirname) - files = [p for p in ls if os.path.isfile(os.path.join(dirname, p))] - dirs = [p for p in ls if os.path.isdir(os.path.join(dirname, p))] - return files, dirs - - -def clean_file(file): - """Returns a tuple containing a ``file``-like object and a close indicator. - - This ensures the given file is opened and keeps track of files that should - be closed after use (files that were not open prior to this function call). - - Raises - ------ - OSError : Accessing the given file path failed - - Parameters - ---------- - file : str | io.IOBase - A filepath or ``file``-like object that may or may not need to be - opened - """ - if not hasattr(file, 'read'): - return open(file, 'rb'), True - else: - return file, False - - -def clean_files(files): - """Generates tuples with a ``file``-like object and a close indicator. - - This is a generator of tuples, where the first element is the file object - and the second element is a boolean which is True if this module opened the - file (and thus should close it). - - Raises - ------ - OSError : Accessing the given file path failed - - Parameters - ---------- - files : list | io.IOBase | str - Collection or single instance of a filepath and file-like object - """ - if isinstance(files, (list, tuple)): - for f in files: - yield clean_file(f) - else: - yield clean_file(files) - - -def file_size(f): - """Returns the size of a file in bytes. - - Raises - ------ - OSError : Accessing the given file path failed - - Parameters - ---------- - f : io.IOBase | str - The file path or object for which the size should be determined - """ - if isinstance(f, (six.string_types, six.text_type)): - return os.path.getsize(f) - else: - cur = f.tell() - f.seek(0, 2) - size = f.tell() - f.seek(cur) - return size - - -class return_field(object): - """Decorator that returns the given field of a json response. - - Parameters - ---------- - field : object - The response field to be returned for all invocations - """ - def __init__(self, field): - self.field = field - - def __call__(self, cmd): - """Wraps a command so that only a specified field is returned. - - Parameters - ---------- - cmd : callable - A command that is intended to be wrapped - """ - @wraps(cmd) - def wrapper(*args, **kwargs): - """Returns the specified field of the command invocation. - - Parameters - ---------- - args : list - Positional parameters to pass to the wrapped callable - kwargs : dict - Named parameter to pass to the wrapped callable - """ - res = cmd(*args, **kwargs) - return res[self.field] - return wrapper diff --git a/ipfsapi/version.py b/ipfsapi/version.py index 897e3a8f..5535bb70 100644 --- a/ipfsapi/version.py +++ b/ipfsapi/version.py @@ -8,4 +8,4 @@ # `0.4.1` and so on. When IPFS `0.5.0` is released, the first client version # to support it will also be released as `0.5.0`. 
-__version__ = "0.4.3" +__version__ = "0.4.4" diff --git a/pyproject.toml b/pyproject.toml index 863c07ee..fe09c897 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ module = "ipfsapi" author = "py-ipfs-api team" author-email = "" -home-page = "https://github.com/ipfs/py-ipfs-api" +home-page = "https://github.com/ipfs/py-ipfs-http-client/tree/py-ipfs-api" keywords = "ipfs storage distribution development" license = "MIT License" description-file = "README.md" @@ -15,7 +15,8 @@ description-file = "README.md" # Unfortunately these currently need to be duplicated from `requirements.txt` requires-python = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" requires = [ - "requests (>=2.11)", + "ipfshttpclient>=0.4.10,<0.5.0", + "netaddr", "six" ] @@ -46,5 +47,5 @@ classifiers = [ ] [tool.flit.metadata.urls] -Documentation = "https://ipfs.io/ipns/QmZ86ow1byeyhNRJEatWxGPJKcnQKG7s51MtbHdxxUddTH/Software/Python/ipfsapi/" +Documentation = "https://ipfs.io/ipns/12D3KooWEqnTdgqHnkkwarSrJjeMP2ZJiADWLYADaNvUb6SQNyPF/docs/" diff --git a/requirements-codestyle.txt b/requirements-codestyle.txt index 39304807..0b15ce2a 100644 --- a/requirements-codestyle.txt +++ b/requirements-codestyle.txt @@ -1 +1,2 @@ flake8 +flake8-expandtab~=0.3 \ No newline at end of file diff --git a/requirements-testing.txt b/requirements-testing.txt index 5f9f7b8f..ea705620 100644 --- a/requirements-testing.txt +++ b/requirements-testing.txt @@ -1,6 +1,3 @@ -httmock pathlib ; python_version < "3.4" pytest -pytest-cov -pytest-mock pytest-ordering diff --git a/requirements.txt b/requirements.txt index 0908a9fd..103d1821 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ -requests>=2.11 +ipfshttpclient>=0.4.10,<0.5.0 +netaddr six \ No newline at end of file diff --git a/test/functional/tests.py b/test/functional/tests.py index adc1f77a..7c9a0bac 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -360,7 +360,7 @@ def test_get_path(self): try: test_hash = self.fake[8]['Hash'] + '/fsdfgh' - self.api.get(test_hash) + self.api.get(multihash=test_hash) assert 'fsdfgh' in os.listdir(os.getcwd()) os.remove('fsdfgh') @@ -622,7 +622,7 @@ def setUp(self): def test_block_stat(self): expected_keys = ['Key', 'Size'] - res = self.api.block_stat(self.multihash) + res = self.api.block_stat(multihash=self.multihash) for key in expected_keys: self.assertTrue(key in res) @@ -833,15 +833,6 @@ def test_bitswap_stat(self): result = self.api.bitswap_stat() self.assertTrue(result and type(result) is dict and 'Wantlist' in result) - def test_bitswap_unwant(self): - """ - Cannot ensure what is present in the wantlist prior to execution, so just ensure - something comes back. 
- """ - - result = self.api.bitswap_unwant(key='QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V') - self.assertTrue(result is not None) - @skipIfOffline() class IpfsApiPubSubTest(unittest.TestCase): diff --git a/test/run-tests.py b/test/run-tests.py index b2e379d3..c19da8a1 100755 --- a/test/run-tests.py +++ b/test/run-tests.py @@ -103,16 +103,12 @@ def _contextlib_suppress(*exceptions): os.environ["CI"] = "true" # Make sure all required py.test plugins are loaded - os.environ["PYTEST_PLUGINS"] = ",".join(["pytest_cov", "pytest_ordering"]) + os.environ["PYTEST_PLUGINS"] = ",".join(["pytest_ordering"]) # Launch py.test in-process import pytest PYTEST_CODE = pytest.main([ - "--verbose", - "--cov=ipfsapi", - "--cov-report=term", - "--cov-report=html:{}".format(str(TEST_PATH / "cov_html")), - "--cov-report=xml:{}".format(str(TEST_PATH / "cov.xml")) + "--verbose" ] + sys.argv[1:]) finally: # Make sure daemon was terminated during the tests diff --git a/test/unit/test_encoding.py b/test/unit/test_encoding.py index eabcd61a..267a57ec 100644 --- a/test/unit/test_encoding.py +++ b/test/unit/test_encoding.py @@ -10,7 +10,6 @@ import pytest import six -from httmock import urlmatch, HTTMock import ipfsapi.encoding import ipfsapi.exceptions diff --git a/test/unit/test_http.py b/test/unit/test_http.py deleted file mode 100644 index 6992e513..00000000 --- a/test/unit/test_http.py +++ /dev/null @@ -1,236 +0,0 @@ -"""Test cases for http.py. - -These tests are designed to mock http responses from the IPFS daemon. They -are used to determine if the functions in http.py are operating correctly. - -Classes: -TestHttp -- A TCP client for interacting with an IPFS daemon -""" - -import unittest -import json -import tarfile -import os - -from httmock import urlmatch, HTTMock -import pytest -import requests -try: - from unittest import mock -except ImportError: - import mock - -import ipfsapi.http -import ipfsapi.exceptions - - -@urlmatch(netloc='localhost:5001', path=r'.*/okay') -def return_okay(url, request): - """Defines an endpoint for successful http requests. - - This endpoint will listen at http://localhost:5001/*/okay for incoming - requests and will always respond with a 200 status code and a Message of - "okay". - - Keyword arguments: - url -- the url of the incoming request - request -- the request that is being responded to - """ - return { - 'status_code': 200, - 'content': 'okay'.encode('utf-8'), - } - - -@urlmatch(netloc='localhost:5001', path=r'.*/fail') -def return_fail(url, request): - """Defines an endpoint for failed http requests. - - This endpoint will listen at http://localhost:5001/*/fail for incoming - requests and will always respond with a 500 status code and a Message of - "fail". - - Keyword arguments: - url -- the url of the incoming request - request -- the request that is being responded to - """ - return { - 'status_code': 500, - 'content': 'fail'.encode('utf-8'), - } - - -@urlmatch(netloc='localhost:5001', path=r'.*/apiokay') -def api_okay(url, request): - """Defines an endpoint for successful api requests. - - This endpoint will listen at http://localhost:5001/*/apiokay for incoming - requests and will always respond with a 200 status code and a json encoded - Message of "okay". 
diff --git a/test/unit/test_http.py b/test/unit/test_http.py
deleted file mode 100644
index 6992e513..00000000
--- a/test/unit/test_http.py
+++ /dev/null
@@ -1,236 +0,0 @@
-"""Test cases for http.py.
-
-These tests are designed to mock http responses from the IPFS daemon. They
-are used to determine if the functions in http.py are operating correctly.
-
-Classes:
-TestHttp -- tests the HTTPClient class against mocked daemon responses
-"""
-
-import unittest
-import json
-import tarfile
-import os
-
-from httmock import urlmatch, HTTMock
-import pytest
-import requests
-try:
-    from unittest import mock
-except ImportError:
-    import mock
-
-import ipfsapi.http
-import ipfsapi.exceptions
-
-
-@urlmatch(netloc='localhost:5001', path=r'.*/okay')
-def return_okay(url, request):
-    """Defines an endpoint for successful http requests.
-
-    This endpoint will listen at http://localhost:5001/*/okay for incoming
-    requests and will always respond with a 200 status code and a Message of
-    "okay".
-
-    Keyword arguments:
-    url -- the url of the incoming request
-    request -- the request that is being responded to
-    """
-    return {
-        'status_code': 200,
-        'content': 'okay'.encode('utf-8'),
-    }
-
-
-@urlmatch(netloc='localhost:5001', path=r'.*/fail')
-def return_fail(url, request):
-    """Defines an endpoint for failed http requests.
-
-    This endpoint will listen at http://localhost:5001/*/fail for incoming
-    requests and will always respond with a 500 status code and a Message of
-    "fail".
-
-    Keyword arguments:
-    url -- the url of the incoming request
-    request -- the request that is being responded to
-    """
-    return {
-        'status_code': 500,
-        'content': 'fail'.encode('utf-8'),
-    }
-
-
-@urlmatch(netloc='localhost:5001', path=r'.*/apiokay')
-def api_okay(url, request):
-    """Defines an endpoint for successful api requests.
-
-    This endpoint will listen at http://localhost:5001/*/apiokay for incoming
-    requests and will always respond with a 200 status code and a json encoded
-    Message of "okay".
-
-    Keyword arguments:
-    url -- the url of the incoming request
-    request -- the request that is being responded to
-    """
-    return {
-        'status_code': 200,
-        'content': json.dumps({
-            'Message': 'okay'}).encode('utf-8')
-    }
-
-
-@urlmatch(netloc='localhost:5001', path=r'.*/apifail')
-def api_fail(url, request):
-    """Defines an endpoint for failed api requests.
-
-    This endpoint will listen at http://localhost:5001/*/apifail for incoming
-    requests and will always respond with a 500 status code and a json encoded
-    Message of "Someone set us up the bomb".
-
-    Keyword arguments:
-    url -- the url of the incoming request
-    request -- the request that is being responded to
-    """
-    return {
-        'status_code': 500,
-        'content': json.dumps({
-            'Message': 'Someone set us up the bomb'}).encode('utf-8')
-    }
-
-
-@urlmatch(netloc='localhost:5001', path=r'.*/cat')
-def api_cat(url, request):
-    """Defines an endpoint for a request to cat a file.
-
-    This endpoint will listen at http://localhost:5001/*/cat for incoming
-    requests and will always respond with a 200 status code and a json encoded
-    Message of "do not parse".
-
-    Keyword arguments:
-    url -- the url of the incoming request
-    request -- the request that is being responded to
-    """
-    return {
-        'status_code': 200,
-        'content': json.dumps({
-            'Message': 'do not parse'}).encode('utf-8')
-    }
-
-
-class TestHttp(unittest.TestCase):
-    """A series of tests to test the functionality of http.py.
-
-    Public methods:
-    setUp -- creates an instance of HTTPClient to test against
-    test_successful_request -- tests that a successful http request returns the
-                               proper message
-    test_generic_failure -- tests that a failed http request raises an HTTPError
-    test_api_failure -- tests that an api failure raises an ipfsApiError
-    test_stream -- tests that the stream flag being set returns the raw response
-    test_cat -- tests that paths ending in /cat are not parsed
-    test_default_decoder -- tests that the default encoding is set to json
-    test_explicit_decoder -- tests that an explicit decoder is handled correctly
-    test_unsupported_decoder -- tests that unsupported encodings raise an
-                                EncodingException
-    test_failed_decoder -- tests that a failed encoding parse returns response
-                           text
-    test_failed_download -- tests that a failed download raises an HTTPError
-    test_session -- tests that a session is established and then closed
-    """
-    def setUp(self):
-        """Creates an instance of HTTPClient to test against."""
-        self.client = ipfsapi.http.HTTPClient(
-            'localhost',
-            5001,
-            'api/v0')
-
-    def test_successful_request(self):
-        """Tests that a successful http request returns the proper message."""
-        with HTTMock(return_okay):
-            res = self.client.request('/okay')
-            assert res == b'okay'
-
-    def test_generic_failure(self):
-        """Tests that a failed http request raises an HTTPError."""
-        with HTTMock(return_fail):
-            with pytest.raises(ipfsapi.exceptions.StatusError):
-                self.client.request('/fail')
-
-    def test_api_failure(self):
-        """Tests that an api failure raises an ipfsApiError."""
-        with HTTMock(api_fail):
-            with pytest.raises(ipfsapi.exceptions.Error):
-                self.client.request('/apifail')
-
-    def test_stream(self):
-        """Tests that the stream flag being set returns the raw response."""
-        with HTTMock(return_okay):
-            res = self.client.request('/okay', stream=True)
-            assert next(res) == b'okay'
-
-    def test_cat(self):
-        """Tests that paths ending in /cat are not parsed."""
-        with HTTMock(api_cat):
-            res = self.client.request('/cat')
-            assert res == b'{"Message": "do not parse"}'
parse"}' - - def test_default_decoder(self): - """Tests that the default encoding is set to json.""" - with HTTMock(api_okay): - res = self.client.request('/apiokay') - assert res == b'{"Message": "okay"}' - - def test_explicit_decoder(self): - """Tests that an explicit decoder is handled correctly.""" - with HTTMock(api_okay): - res = self.client.request('/apiokay', - decoder='json') - assert res['Message'] == 'okay' - - def test_unsupported_decoder(self): - """Tests that unsupported encodings raise an exception.""" - with HTTMock(api_fail): - with pytest.raises(ipfsapi.exceptions.EncoderMissingError): - self.client.request('/apifail', decoder='xyz') - - def test_failed_decoder(self): - """Tests that a failed encoding parse raises an exception.""" - with HTTMock(return_okay): - with pytest.raises(ipfsapi.exceptions.DecodingError): - self.client.request('/okay', decoder='json') - - """TODO: Test successful download - Need to determine correct way to mock an http request that returns a tar - file. tarfile.open expects the tar to be in the form of an octal escaped - string, but internal functionality keeps resulting in hexidecimal. - """ - - def test_failed_download(self): - """Tests that a failed download raises an HTTPError.""" - with HTTMock(return_fail): - with pytest.raises(ipfsapi.exceptions.StatusError): - self.client.download('/fail') - - def test_session(self): - """Tests that a session is established and then closed.""" - with HTTMock(return_okay): - with self.client.session(): - res = self.client.request('/okay') - assert res == b'okay' - assert self.client._session is None - -def test_stream_close(mocker): - client = ipfsapi.http.HTTPClient("localhost", 5001, "api/v0") - mocker.patch("ipfsapi.http._notify_stream_iter_closed") - with HTTMock(return_okay): - with client.request("/okay", stream=True) as response_iter: - assert ipfsapi.http._notify_stream_iter_closed.call_count == 0 - assert ipfsapi.http._notify_stream_iter_closed.call_count == 1 - - response_iter = client.request("/okay", stream=True) - assert ipfsapi.http._notify_stream_iter_closed.call_count == 1 - response_iter.close() - assert ipfsapi.http._notify_stream_iter_closed.call_count == 2 - - client.request("/okay") - assert ipfsapi.http._notify_stream_iter_closed.call_count == 3 diff --git a/test/unit/test_multipart.py b/test/unit/test_multipart.py deleted file mode 100644 index 7cf7af32..00000000 --- a/test/unit/test_multipart.py +++ /dev/null @@ -1,402 +0,0 @@ -"""Test the file multipart.py - -Classes: -TestContentHelpers -- test the three content-header helper functions -TestBodyGenerator -- test the BodyGenerator helper class -TestBufferedGenerator -- test the BufferedGenerator helper class -TestFileStream -- test the FileStream generator class -TestDirectoryStream -- test the DirectoryStream generator class -TestTextStream -- test the TextStream generator class -TestStreamHelpers -- unimplemented -""" - -import unittest -import os -import re - -import pytest -import six - -import ipfsapi.multipart - - -class TestContentHelpers(unittest.TestCase): - """Tests the functionality of the three content-oriented helper functions. 
diff --git a/test/unit/test_multipart.py b/test/unit/test_multipart.py
deleted file mode 100644
index 7cf7af32..00000000
--- a/test/unit/test_multipart.py
+++ /dev/null
@@ -1,402 +0,0 @@
-"""Test the file multipart.py
-
-Classes:
-TestContentHelpers -- test the three content-header helper functions
-TestBodyGenerator -- test the BodyGenerator helper class
-TestBufferedGenerator -- test the BufferedGenerator helper class
-TestFileStream -- test the FileStream generator class
-TestDirectoryStream -- test the DirectoryStream generator class
-TestTextStream -- test the TextStream generator class
-TestStreamHelpers -- unimplemented
-"""
-
-import unittest
-import os
-import re
-
-import pytest
-import six
-
-import ipfsapi.multipart
-
-
-class TestContentHelpers(unittest.TestCase):
-    """Tests the functionality of the three content-oriented helper functions.
-
-    Public methods:
-    test_content_disposition -- check the content_disposition defaults
-    test_content_disposition_with_type -- check that content_disposition
-                                          handles given disposition type
-    test_content_type -- check the content_type guessing functionality
-    test_multipart_content_type -- check multipart_content_type functionality
-    """
-
-    def test_content_disposition(self):
-        """Check that content_disposition defaults properly"""
-        expected = {'Content-Disposition': 'file; filename="example.txt"'}
-        actual = ipfsapi.multipart.content_disposition('example.txt')
-        assert expected == actual
-
-    def test_content_disposition_with_type(self):
-        """Check that content_disposition handles given disposition type"""
-        expected = {'Content-Disposition':
-                    'attachment; filename="example.txt"'}
-        actual = ipfsapi.multipart.content_disposition('example.txt',
-                                                       'attachment')
-        assert expected == actual
-
-    def test_content_type(self):
-        """Check the content_type guessing functionality."""
-        actual = ipfsapi.multipart.content_type('example.txt')
-        expected = {'Content-Type': 'text/plain'}
-        assert expected == actual
-
-        actual = ipfsapi.multipart.content_type('example.jpeg')
-        expected = {'Content-Type': 'image/jpeg'}
-        assert expected == actual
-
-        actual = ipfsapi.multipart.content_type('example')
-        expected = {'Content-Type': 'application/octet-stream'}
-        assert expected == actual
-
-    def test_multipart_content_type(self):
-        """Check multipart_content_type functionality."""
-        actual = ipfsapi.multipart.multipart_content_type(
-            '8K5rNKlLQVyreRNncxOTeg')
-        expected = {'Content-Type':
-                    'multipart/mixed; boundary="8K5rNKlLQVyreRNncxOTeg"'}
-        assert expected == actual
-
-        actual = ipfsapi.multipart.multipart_content_type(
-            '8K5rNKlLQVyreRNncxOTeg', 'alt')
-        expected = {'Content-Type':
-                    'multipart/alt; boundary="8K5rNKlLQVyreRNncxOTeg"'}
-        assert expected == actual
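Taken together, these helpers produce the standard multipart header dicts; a quick snippet mirroring the exact expectations asserted above:

```python
import ipfsapi.multipart as multipart

multipart.content_disposition('example.txt')
# -> {'Content-Disposition': 'file; filename="example.txt"'}

multipart.content_type('example.jpeg')
# -> {'Content-Type': 'image/jpeg'}

multipart.multipart_content_type('8K5rNKlLQVyreRNncxOTeg')
# -> {'Content-Type': 'multipart/mixed; boundary="8K5rNKlLQVyreRNncxOTeg"'}
```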
-
-
-class TestBodyGenerator(unittest.TestCase):
-    """Tests the functionality of the BodyGenerator class.
-
-    Public methods:
-    test_init_defaults -- tests the constructor and its behavior with only the
-                          required argument
-    test_init_with_all -- tests the constructor when all arguments are set
-                          explicitly
-    test_write_headers -- tests write_headers function against example output
-    test_open -- tests open function against example output
-    test_file_open -- test file_open function against example output
-    test_file_close -- test file_close function against example output
-    test_close -- test close function against example output
-    """
-
-    def test_init_defaults(self):
-        """Test the __init__ function for default parameter values."""
-        name = "test_name"
-        expected_disposition = 'file; filename="test_name"'
-        expected_type = 'multipart/mixed; boundary="\S*"'
-        expected_boundary_pattern = '\S*'
-        generator = ipfsapi.multipart.BodyGenerator(name)
-        assert generator.headers['Content-Disposition'] == expected_disposition
-        assert re.search(expected_type, generator.headers['Content-Type'])
-        assert re.search(expected_boundary_pattern, generator.boundary)
-
-    def test_init_with_all(self):
-        """Test the __init__ function for explicitly set parameter values."""
-        name = "test_name"
-        disptype = "test_disp"
-        subtype = "test_subtype"
-        boundary = "test_boundary"
-        generator = ipfsapi.multipart.BodyGenerator(name, disptype,
-                                                    subtype, boundary)
-        assert generator.headers == {
-            'Content-Disposition': 'test_disp; filename="test_name"',
-            'Content-Type':
-                'multipart/test_subtype; boundary="test_boundary"'}
-        assert generator.boundary == boundary
-
-    def test_write_headers(self):
-        """Test the write_headers function against sample output."""
-        expected = 'Content-Disposition: test_disp; filename="test_name"' \
-                   + '\r\nContent-Type: multipart/test_subtype; ' \
-                   + 'boundary="test_boundary"\r\n\r\n'
-        name = "test_name"
-        disptype = "test_disp"
-        subtype = "test_subtype"
-        boundary = "test_boundary"
-        generator = ipfsapi.multipart.BodyGenerator(name, disptype,
-                                                    subtype, boundary)
-        headers = ""
-        for chunk in generator.write_headers():
-            if type(chunk) is not str:
-                chunk = chunk.decode()
-            headers += chunk
-        assert headers == expected
-
-    def test_open(self):
-        """Test the open function against sample output."""
-        expected = '--test_boundary\r\n'
-        name = "test_name"
-        disptype = "test_disp"
-        subtype = "test_subtype"
-        boundary = "test_boundary"
-        generator = ipfsapi.multipart.BodyGenerator(name, disptype,
-                                                    subtype, boundary)
-        headers = ""
-        for chunk in generator.open():
-            if type(chunk) is not str:
-                chunk = chunk.decode()
-            headers += chunk
-        assert headers == expected
-
-    def test_file_open(self):
-        """Test the file_open function against sample output."""
-        expected = '--test_boundary\r\nContent-Disposition: file; '\
-                   + 'filename="test_name"\r\nContent-Type: '\
-                   + 'application/octet-stream\r\n\r\n'
-        name = "test_name"
-        disptype = "test_disp"
-        subtype = "test_subtype"
-        boundary = "test_boundary"
-        generator = ipfsapi.multipart.BodyGenerator(name, disptype,
-                                                    subtype, boundary)
-        headers = ""
-        for chunk in generator.file_open(name):
-            if type(chunk) is not str:
-                chunk = chunk.decode()
-            headers += chunk
-        assert headers == expected
-
-    def test_file_close(self):
-        """Test the file_close function against sample output."""
-        expected = '\r\n'
-        name = "test_name"
-        disptype = "test_disp"
-        subtype = "test_subtype"
-        boundary = "test_boundary"
-        generator = ipfsapi.multipart.BodyGenerator(name, disptype,
-                                                    subtype, boundary)
-        headers = ""
-        for chunk in generator.file_close():
-            if type(chunk) is not str:
-                chunk = chunk.decode()
-            headers += chunk
-        assert headers == expected
-
-    def test_close(self):
-        """Test the close function against sample output."""
-        expected = '--test_boundary--\r\n'
-        name = "test_name"
-        disptype = "test_disp"
-        subtype = "test_subtype"
-        boundary = "test_boundary"
-        generator = ipfsapi.multipart.BodyGenerator(name, disptype,
-                                                    subtype, boundary)
-        headers = ""
-        for chunk in generator.close():
-            if type(chunk) is not str:
-                chunk = chunk.decode()
-            headers += chunk
-        assert headers == expected
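A hedged sketch of the multipart framing the `BodyGenerator` methods emit, stitched together from the expected strings in the tests above (payload bytes are illustrative):

```python
import ipfsapi.multipart

gen = ipfsapi.multipart.BodyGenerator('example.txt', 'file',
                                      'mixed', 'test_boundary')

body = b''
# The generator only frames the payload; the file contents themselves
# are injected between file_open() and file_close()
for part in (gen.open(), gen.file_open('example.txt'),
             [b'file contents here'], gen.file_close(), gen.close()):
    for chunk in part:
        body += chunk if isinstance(chunk, bytes) else chunk.encode()

# body now reads roughly:
#   --test_boundary\r\n<headers>\r\n\r\nfile contents here\r\n--test_boundary--\r\n
```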
-
-
-def _generate_test_chunks(chunk_size, iterations):
-    """Generates strings of chunk_size length until out of iterations."""
-    for i in range(iterations):
-        output = b""
-        for j in range(chunk_size):
-            output += b"z"
-        yield output
-
-
-class TestBufferedGenerator(unittest.TestCase):
-    """Test the BufferedGenerator class.
-
-    Public methods:
-    test_init -- test the default arguments of the constructor
-    test_file_chunks -- test the file_chunks function against example output
-    test_gen_chunks -- test the gen_chunks function against example output
-    test_body -- verify that body is unimplemented
-    test_close -- test the close function against example output
-    """
-
-    def test_init(self):
-        """Test the __init__ function for default parameter values."""
-        name = "test_name"
-        instance = ipfsapi.multipart.BufferedGenerator(name)
-        assert instance.name == name
-
-    def test_file_chunks(self):
-        """Test the file_chunks function against example output.
-
-        Warning: This test depends on the contents of
-        test/functional/fake_dir/fsdfgh
-        Changing that file could break the test.
-        """
-        name = "fsdfgh"
-        chunk_size = 2
-        path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
-                            "functional", "fake_dir", "fsdfgh")
-        instance = ipfsapi.multipart.BufferedGenerator(name, chunk_size)
-        expected = 'dsadsad\n'
-        output = ""
-        open_file = open(path)
-        for emitted in instance.file_chunks(open_file):
-            if type(emitted) is not str:
-                emitted = emitted.decode()
-            assert len(emitted) <= chunk_size
-            output += emitted
-        open_file.close()
-        assert output == expected
-
-    def test_gen_chunks(self):
-        """Test the gen_chunks function against example output."""
-        name = "fsdfgh"
-        chunk_size = 2
-        instance = ipfsapi.multipart.BufferedGenerator(name, chunk_size)
-        for i in instance.gen_chunks(_generate_test_chunks(5, 5)):
-            assert len(i) <= chunk_size
-
-    def test_body(self):
-        """Ensure that body raises a NotImplementedError exception."""
-        instance = ipfsapi.multipart.BufferedGenerator("name")
-        with pytest.raises(NotImplementedError):
-            instance.body()
-
-    def test_close(self):
-        """Test the close function against example output."""
-        name = "fsdfgh"
-        chunk_size = 2
-        instance = ipfsapi.multipart.BufferedGenerator(name, chunk_size)
-        expected = '--\S+--\r\n'
-        actual = ''
-        for i in instance.close():
-            if type(i) is not str and type(i) is not memoryview:
-                i = i.decode()
-            elif six.PY3 and type(i) is memoryview:
-                i = i.tobytes().decode()
-            assert len(i) <= chunk_size
-            actual += i
-
-        assert re.search(expected, actual)
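For reference, a small sketch of the chunk re-slicing behaviour these tests pin down (constructor arguments follow the positional form used above):

```python
import ipfsapi.multipart

# chunk_size of 2: everything the generator emits is at most 2 bytes long
buf = ipfsapi.multipart.BufferedGenerator("example", 2)


def source():
    yield b"zzzzz"  # 5 bytes in, re-emitted as <=2-byte pieces


for piece in buf.gen_chunks(source()):
    assert len(piece) <= 2
```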
- """ - # Get OS-agnostic path to test files - path = os.path.join(os.path.dirname(os.path.dirname(__file__)), - "functional", "fake_dir") - # Collect absolute paths to all test files - filenames_list = [] - for (dirpath, _, filenames) in os.walk(path): - temp_list = [os.path.join(dirpath, name) for name in filenames] - filenames_list.extend(temp_list) - # Convert absolute paths to relative - relative_paths_list = [os.path.relpath(cur_path, os.getcwd()) - for cur_path in filenames_list] - - instance = ipfsapi.multipart.FileStream(relative_paths_list) - - expected = "(--\S+\r\nContent-Disposition: file; filename=\"\S+\""\ - + "\r\nContent-Type: application/\S+\r\n"\ - + "\r\n(.|\n)*\r\n)+--\S+--\r\n" - actual = "" - for i in instance.body(): - if type(i) is not str and type(i) is not memoryview: - i = i.decode() - elif six.PY3 and type(i) is memoryview: - i = i.tobytes().decode() - actual += i - assert re.search(expected, actual) - - -class TestDirectoryStream(unittest.TestCase): - """Test the DirectoryStream class. - - Public methods: - test_body -- check that the HTTP body for the directory is correct - test_body_recursive -- check body structure when recursive directory - is uploaded - """ - - def test_body(self): - """Check the multipart HTTP body for the streamed directory.""" - # Get OS-agnostic path to test files - path = os.path.join(os.path.dirname(os.path.dirname(__file__)), - "functional", "fake_dir") - instance = ipfsapi.multipart.DirectoryStream(path) - expected = b"^(--\S+\r\nContent-Disposition: form-data; name=\"\S+\"; filename=\"\S+\""\ - + b"\r\nContent-Type: application/\S+\r\n\r\n(.|\n)*"\ - + b"\r\n)+--\S+--\r\n$" - actual = instance.body() - """ - for i in instance.body(): - if type(i) is not str and type(i) is not memoryview: - i = i.decode() - elif six.PY3 and type(i) is memoryview: - i = i.tobytes().decode() - actual += i - """ - assert re.search(expected, actual) - - -class TestTextStream(unittest.TestCase): - """Test the TextStream class. - - Public methods: - test_body -- check that the HTTP body for the text is correct - """ - - def test_body(self): - """Check the multipart HTTP body for the streamed directory.""" - # Get OS-agnostic path to test files - text = "Here is some text for this test." - instance = ipfsapi.multipart.BytesStream(text) - expected = "(--\S+\r\nContent-Disposition: file; filename=\"\S+\""\ - + "\r\nContent-Type: application/\S+\r\n"\ - + "\r\n(.|\n)*\r\n)+--\S+--\r\n" - actual = "" - for i in instance.body(): - if type(i) is not str and type(i) is not memoryview: - i = i.decode() - elif six.PY3 and type(i) is memoryview: - i = i.tobytes().decode() - actual += i - assert re.search(expected, actual) - - -class TestStreamHelpers(unittest.TestCase): - """Test stream_files, stream_directory, and stream_text. - - TODO: These functions are just wrappers around other, - already-tested functions. Maybe they should be tested, - but it is unclear how. - - Public Methods: - test_stream_files -- unimplemented - test_stream_directory -- unimplemented - test_stream_text -- unimplemented - """ - - def test_stream_files(self): - """Test the stream_files function.""" - pass - - def test_stream_directory(self): - """Test the stream_directory function.""" - pass - - def test_stream_text(self): - """Test the stream_text function.""" - pass diff --git a/test/unit/test_utils.py b/test/unit/test_utils.py deleted file mode 100644 index bfbc5e80..00000000 --- a/test/unit/test_utils.py +++ /dev/null @@ -1,137 +0,0 @@ -"""Tox unit tests for utils.py. 
diff --git a/test/unit/test_utils.py b/test/unit/test_utils.py
deleted file mode 100644
index bfbc5e80..00000000
--- a/test/unit/test_utils.py
+++ /dev/null
@@ -1,137 +0,0 @@
-"""Tox unit tests for utils.py.
-
-Classes:
-TestUtils -- defines a set of unit tests for utils.py
-"""
-
-import io
-import json
-import os
-import pickle
-import unittest
-
-import ipfsapi.utils as utils
-
-class TestUtils(unittest.TestCase):
-    """Contains unit tests for utils.py.
-
-    Public methods:
-    test_guess_mimetype -- tests utils.guess_mimetype()
-    test_ls_dir -- tests utils.ls_dir()
-    test_clean_file_opened -- tests utils.clean_file() with a stringIO object
-    test_clean_file_unopened -- tests utils.clean_file() with a filepath
-    test_clean_files_single -- tests utils.clean_files() with a filepath
-    test_clean_files_list -- tests utils.clean_files() with a list of files
-    test_file_size -- tests utils.file_size()
-    test_return_field_init -- tests utils.return_field.__init__()
-    test_return_field_call -- tests utils.return_field.__call__()
-    """
-    def test_guess_mimetype(self):
-        """Tests utils.guess_mimetype().
-
-        Guesses the mimetype of the requirements.txt file
-        located in the project's root directory.
-        """
-        path = os.path.join(os.path.dirname(__file__),
-                            "..", "..", "requirements.txt")
-        assert utils.guess_mimetype(path) == "text/plain"
-
-    def test_ls_dir(self):
-        """Tests utils.ls_dir()
-
-        This test is dependent on the contents of the directory 'fake_dir'
-        located in 'test/functional' not being modified.
-        """
-        path = os.path.join(os.path.dirname(__file__),
-                            "..", "functional", "fake_dir")
-        dirs = ['test2', 'test3']
-        files = ['fsdfgh', 'popoiopiu']
-        contents = (files, dirs)
-
-        # Sort items before comparing as the ordering of files returned by
-        # the file system is not stable
-        result = utils.ls_dir(path)
-        result[0].sort()
-        result[1].sort()
-
-        assert result == contents
-
-    def test_clean_file_opened(self):
-        """Tests utils.clean_file() with a stringIO object."""
-        string_io = io.StringIO(u'Mary had a little lamb')
-        f, opened = utils.clean_file(string_io)
-        assert hasattr(f, 'read')
-        assert not opened
-        # Closing stringIO after test assertions.
-        f.close()
-
-    def test_clean_file_unopened(self):
-        """Tests utils.clean_file() with a filepath.
-
-        This test relies on the openability of the file 'fsdfgh'
-        located in 'test/functional/fake_dir'.
-        """
-        path = os.path.join(os.path.dirname(__file__),
-                            "..", "functional", "fake_dir", "fsdfgh")
-        f, opened = utils.clean_file(path)
-        assert hasattr(f, 'read')
-        assert opened
-        # Closing file after test assertions.
-        f.close()
-
-    def test_clean_files_single(self):
-        """Tests utils.clean_files() with a singular filepath.
-
-        This test relies on the openability of the file 'fsdfgh'
-        located in 'test/functional/fake_dir'.
-        """
-        path = os.path.join(os.path.dirname(__file__),
-                            "..", "functional", "fake_dir", "fsdfgh")
-        gen = utils.clean_files(path)
-        for tup in gen:
-            assert hasattr(tup[0], 'read')
-            assert tup[1]
-            # Closing file after test assertions.
-            tup[0].close()
-
-    def test_clean_files_list(self):
-        """Tests utils.clean_files() with a list of files/stringIO objects."""
-        path = os.path.join(os.path.dirname(__file__),
-                            "..", "functional", "fake_dir", "fsdfgh")
-        string_io = io.StringIO(u'Mary had a little lamb')
-        files = [path, string_io]
-        gen = utils.clean_files(files)
-        for i in range(0, 2):
-            tup = next(gen)
-            assert hasattr(tup[0], 'read')
-            if i == 0:
-                assert tup[1]
-            else:
-                assert not tup[1]
-            # Closing files/stringIO objects after test assertions.
-            tup[0].close()
-
-    def test_file_size(self):
-        """Tests utils.file_size().
-
-        This test relies on the content size of the file 'fsdfgh'
-        located in 'test/functional/fake_dir' not being modified.
-        """
-        path = os.path.join(os.path.dirname(__file__),
-                            "..", "functional", "fake_dir", "fsdfgh")
-        assert utils.file_size(path) == 8
-
-    def test_return_field_init(self):
-        """Tests utils.return_field.__init__()."""
-        return_field = utils.return_field('Hash')
-        assert return_field.field == 'Hash'
-
-    def test_return_field_call(self):
-        """Tests utils.return_field.__call__()."""
-        expected_hash = u'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab'
-
-        @utils.return_field('Hash')
-        def wrapper(string, *args, **kwargs):
-            resp = {'Hash': expected_hash, 'string': string}
-            return resp
-        assert wrapper('Mary had a little lamb') == expected_hash
diff --git a/tox.ini b/tox.ini
index 46219f41..b8c97106 100644
--- a/tox.ini
+++ b/tox.ini
@@ -4,7 +4,8 @@ envlist =
     py27,
    py34,
    py35,
-    py36
+    py36,
+    py37
 
 # Tox' sdist feature presumes that `./setup.py sdist` is available
 # Disable this feature until PEP-517 is implemented by both tox and flit.
@@ -27,8 +28,19 @@ commands =
     flake8 {posargs}
 
 [flake8]
-ignore = E222,E221,F403,E265
-exclude = .venv,.git,.tox,+junk,dist,doc,*egg,build,tools,test,docs,*__init__.py
+exclude = .git,.tox,+junk,dist,doc,*egg,build,tools,test,docs,*__init__.py
+
+# E221: Multiple spaces before operator
+# E222: Multiple spaces after operator
+# E262: Inline comment should start with '# ': Breaks tagged comments (ie: '#TODO: ')
+# E265: Block comment should start with '# ': ^
+# E303: More than 2 consecutive newlines
+# W292: No newline at end of file
+# W391: Blank line at end of file (sometimes triggered instead of the above!?)
+# F403: `from <module> import *` used; unable to detect undefined names ←– Probably should be fixed…
+ignore = E221,E222,E262,E265,E303,W292,W391,F403
+max-line-length = 100
+tab-width = 4
 
 [pytest]
 python_files =
@@ -36,8 +48,7 @@
     *_test.py
     tests.py
 addopts =
-    --doctest-modules
-    --ignore ipfsapi/client.py
+#    --doctest-modules / Totally useless since it cannot properly check the `client` package
     ipfsapi
     test/unit
     test/functional
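The long ignore list exists mostly to keep the project's tagged-comment style legal; an illustrative snippet of what would otherwise be flagged:

```python
# Illustrative only: comment styles kept legal by the E262/E265 ignores.
result = 6 * 7  #TODO: cache this value ('#TODO:' lacks the space E262 wants)
#XXX: block comments tagged like this would trip E265 otherwise
print(result)
```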