diff --git a/README.md b/README.md
index a63c03dd..056ea217 100644
--- a/README.md
+++ b/README.md
@@ -1,16 +1,16 @@
-# py-ipfs-api
+# py-ipfs-http-client
 
 [![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](https://ipfs.io/)
 [![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](https://webchat.freenode.net/?channels=%23ipfs)
 [![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme)
-[![](https://img.shields.io/pypi/v/ipfsapi.svg?style=flat-square)](https://pypi.python.org/pypi/ipfsapi)
-[![Build Status](https://travis-ci.org/ipfs/py-ipfs-api.svg?branch=master)](https://travis-ci.org/ipfs/py-ipfs-api)
+[![](https://img.shields.io/pypi/v/ipfshttpclient.svg?style=flat-square)](https://pypi.python.org/pypi/ipfshttpclient)
+[![Build Status](https://travis-ci.org/ipfs/py-ipfs-http-client.svg?branch=master)](https://travis-ci.org/ipfs/py-ipfs-http-client)
 
 ![Python IPFS HTTP Client Library](https://ipfs.io/ipfs/QmQJ68PFMDdAsgCZvA1UVzzn18asVcf7HVvCDgpjiSCAse)
 
-Check out [the client API reference](https://ipfs.io/ipns/QmZ86ow1byeyhNRJEatWxGPJKcnQKG7s51MtbHdxxUddTH/Software/Python/ipfsapi/) for the full command reference.
+Check out [the HTTP Client reference](https://ipfs.io/ipns/QmZ86ow1byeyhNRJEatWxGPJKcnQKG7s51MtbHdxxUddTH/Software/Python/ipfshttpclient/) for the full command reference.
 
-**Important:** The `py-ipfs-api` PIP package and Python module have both been renamed to `ipfsapi` (no dash, lower-case `a`).
+**Important:** The `ipfsapi` PIP package and Python module have both been renamed to `ipfshttpclient`.
 The legacy `ipfs-api`/`ipfsApi` package/module will only work for IPFS 0.3.x and Python 2 and is deprecated. [Please upgrade](#important-changes-from-ipfsapi-02x)!
 
 **Note:** This library constantly has to change to stay compatible with the IPFS HTTP API.
@@ -35,7 +35,7 @@ You may experience compatibility issues when attempting to use it with other ver
 Install with pip:
 
 ```sh
-pip install ipfsapi
+pip install ipfshttpclient
 ```
 
 ## Usage
@@ -43,19 +43,19 @@ pip install ipfshttpclient
 Basic use-case (requires a running instance of IPFS daemon):
 
 ```py
->>> import ipfsapi
->>> api = ipfsapi.connect('127.0.0.1', 5001)
->>> res = api.add('test.txt')
+>>> import ipfshttpclient
+>>> http_client = ipfshttpclient.connect('127.0.0.1', 5001)
+>>> res = http_client.add('test.txt')
 >>> res
 {'Hash': 'QmWxS5aNTFEc9XbMX1ASvLET1zrqEaTssqt33rVZQCQb22', 'Name': 'test.txt'}
->>> api.cat(res['Hash'])
+>>> http_client.cat(res['Hash'])
 'fdsafkljdskafjaksdjf\n'
 ```
 
 Administrative functions:
 
 ```py
->>> api.id()
+>>> http_client.id()
 {'Addresses': ['/ip4/127.0.0.1/tcp/4001/ipfs/QmS2C4MjZsv2iP1UDMMLCYqJ4WeJw8n3vXx1VKxW1UbqHS',
                '/ip6/::1/tcp/4001/ipfs/QmS2C4MjZsv2iP1UDMMLCYqJ4WeJw8n3vXx1VKxW1UbqHS'],
 'AgentVersion': 'go-ipfs/0.4.10',
@@ -67,7 +67,7 @@ Administrative functions:
 Pass in API options:
 
 ```py
->>> api.pin_ls(type='all')
+>>> http_client.pin_ls(type='all')
 {'Keys': {'QmNMELyizsfFdNZW3yKTi1SE2pErifwDTXx6vvQBfwcJbU': {'Count': 1,
                                                              'Type': 'indirect'},
           'QmNQ1h6o1xJARvYzwmySPsuv9L5XfzS4WTvJSTAWwYRSd8': {'Count': 1,
@@ -78,7 +78,7 @@ Pass in API options:
 Add a directory and match against a filename pattern:
 
 ```py
->>> api.add('photos', match='*.jpg')
+>>> http_client.add('photos', match='*.jpg')
 [{'Hash': 'QmcqBstfu5AWpXUqbucwimmWdJbu89qqYmE3WXVktvaXhX',
   'Name': 'photos/photo1.jpg'},
 {'Hash': 'QmSbmgg7kYwkSNzGLvWELnw1KthvTAMszN5TNg3XQ799Fu',
@@ -90,7 +90,7 @@ Add a directory and match against a filename pattern:
 Or add a directory recursively:
 
 ```py
->>> api.add('fake_dir', recursive=True)
+>>> http_client.add('fake_dir', recursive=True)
 [{'Hash': 'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX',
   'Name': 'fake_dir/fsdfgh'},
 {'Hash': 'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ',
@@ -105,9 +105,9 @@ This module also contains some helper functions for adding strings and JSON to I
 
 ```py
 >>> lst = [1, 77, 'lol']
->>> client.add_json(lst)
+>>> http_client.add_json(lst)
 'QmQ4R5cCUYBWiJpNL7mFe4LDrwD6qBr5Re17BoRAY9VNpd'
->>> client.get_json(_)
+>>> http_client.get_json(_)
 [1, 77, 'lol']
 ```
 
@@ -137,7 +137,7 @@ The `ipfs` [command-line Client documentation](https://ipfs.io/docs/commands/) m
 
 ## Featured Projects
 
-Projects that currently use py-ipfs-api. If your project isn't here, feel free to submit a PR to add it!
+Projects that currently use py-ipfs-http-client. If your project isn't here, feel free to submit a PR to add it!
 
 - [git-remote-ipfs](https://github.com/larsks/git-remote-ipfs) allows users to push and pull git repositories from the IPFS network.
 - [InterPlanetary Wayback](https://github.com/oduwsdl/ipwb) interfaces web archive ([WARC](https://www.iso.org/standard/44717.html)) files for distributed indexing and replay using IPFS.
@@ -151,7 +151,7 @@ or if you just want to discuss IPFS and python.
 
 ### Bug reports
 
-You can submit bug reports using the [GitHub issue tracker](https://github.com/ipfs/python-ipfs-api/issues).
+You can submit bug reports using the [GitHub issue tracker](https://github.com/ipfs/py-ipfs-http-client/issues).
### Pull requests diff --git a/docs/Makefile b/docs/Makefile index af4e928d..7ae700d8 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -91,9 +91,9 @@ qthelp: @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PythonIPFSAPI.qhcp" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PythonIPFSHTTPClient.qhcp" @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PythonIPFSAPI.qhc" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PythonIPFSHTTPClient.qhc" .PHONY: applehelp applehelp: @@ -110,8 +110,8 @@ devhelp: @echo @echo "Build finished." @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/PythonIPFSAPI" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PythonIPFSAPI" + @echo "# mkdir -p $$HOME/.local/share/devhelp/PythonIPFSHTTPClient" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PythonIPFSHTTPClient" @echo "# devhelp" .PHONY: epub diff --git a/docs/api_ref.md b/docs/api_ref.md deleted file mode 100644 index 0c33c913..00000000 --- a/docs/api_ref.md +++ /dev/null @@ -1,31 +0,0 @@ -Client API Reference --------------------- - -All commands are accessed through the ``ipfsapi.Client`` class. - -### Exceptions - -```eval_rst -.. automodule:: ipfsapi.exceptions - :members: -``` - - - -### The API Client - -All methods accept the following parameters in their ``kwargs``: - - * **opts** (*dict*) – A dictonary of custom parameters to be sent with the - HTTP request - -```eval_rst -.. autofunction:: ipfsapi.connect - -.. autofunction:: ipfsapi.assert_version - -.. autoclass:: ipfsapi.Client - :members: - :show-inheritance: - -``` diff --git a/docs/conf.py b/docs/conf.py index a502027c..98738feb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- # -# Python IPFS API documentation build configuration file, created by +# Python IPFS HTTP Client documentation build configuration file, created by # sphinx-quickstart on Wed Aug 17 20:41:53 2016. # # This file is execfile()d with the current directory set to its @@ -22,7 +22,7 @@ sys.path.insert(0, os.path.abspath('..')) # Make current version number as `__version__` available -with open(os.path.join(sys.path[0], 'ipfsapi', 'version.py')) as file: +with open(os.path.join(sys.path[0], 'ipfshttpclient', 'version.py')) as file: exec(file.read()) # -- General configuration ------------------------------------------------ @@ -63,9 +63,9 @@ master_doc = 'index' # General information about the project. -project = 'Python IPFS API' -copyright = '2016, py-ipfs-api team' -author = 'py-ipfs-api team' +project = 'Python IPFS HTTP Client' +copyright = '2016, py-ipfs-http-client team' +author = 'py-ipfs-http-client team' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -99,32 +99,31 @@ # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). 
-#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True - # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -134,140 +133,140 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. # " v documentation" by default. -#html_title = 'Python IPFS API v0.2.4' +# html_title = 'Python IPFS HTTP Client v0.2.4' # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (relative to this directory) to use as a favicon of # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['_static'] +# html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. # The empty string is equivalent to '%b %d, %Y'. -#html_last_updated_fmt = None +# html_last_updated_fmt = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 
-#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # 'ja' uses this config value. # 'zh' user can custom change `jieba` dictionary path. -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'py-ipfs-api' +htmlhelp_basename = 'py-ipfs-http-client' # -- Options for LaTeX output --------------------------------------------- latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', + # Latex figure (float) alignment + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'PythonIPFSAPI.tex', 'Python IPFS API Documentation', - 'py-ipfs-api team', 'manual'), + (master_doc, 'PythonIPFSHTTPClient.tex', 'Python IPFS HTTP Client Documentation', + 'py-ipfs-http-client team', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- @@ -275,12 +274,12 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - (master_doc, 'py-ipfs-api', 'Python IPFS API Documentation', + (master_doc, 'py-ipfs-http-client', 'Python IPFS HTTP Client Documentation', [author], 1) ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -289,22 +288,22 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'py-ipfs-api', 'Python IPFS API Documentation', - author, 'py-ipfs-api', 'One line description of project.', + (master_doc, 'py-ipfs-http-client', 'Python IPFS HTTP Client Documentation', + author, 'py-ipfs-http-client', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False # -- AutoDoc settings ----------------------------------------------------- @@ -314,7 +313,7 @@ # External documentation link mapping intersphinx_mapping = { - 'python': ('https://docs.python.org/3', None) + 'python': ('https://docs.python.org/3', None) } # -- Napoleon settings ---------------------------------------------------- @@ -333,7 +332,7 @@ # Use the .. admonition:: directive for the Example and Examples sections. # False to use the .. rubric:: directive instead. -# One may look better than the other depending on what HTML theme is used. +# One may look better than the other depending on what HTML theme is used. napoleon_use_admonition_for_examples = False # Use the .. admonition:: directive for Notes sections. @@ -357,11 +356,10 @@ napoleon_use_rtype = False - # app setup hook for reCommonMark's AutoStructify def setup(app): - from recommonmark.transform import AutoStructify - app.add_config_value('recommonmark_config', { - 'auto_toc_tree_section': 'Contents', - }, True) - app.add_transform(AutoStructify) + from recommonmark.transform import AutoStructify + app.add_config_value('recommonmark_config', { + 'auto_toc_tree_section': 'Contents', + }, True) + app.add_transform(AutoStructify) diff --git a/docs/http_client_ref.md b/docs/http_client_ref.md new file mode 100644 index 00000000..bb0a4d00 --- /dev/null +++ b/docs/http_client_ref.md @@ -0,0 +1,31 @@ +HTTP Client Reference +-------------------- + +All commands are accessed through the ``ipfshttpclient.Client`` class. + +### Exceptions + +```eval_rst +.. automodule:: ipfshttpclient.exceptions + :members: +``` + + + +### The HTTP Client + +All methods accept the following parameters in their ``kwargs``: + + * **opts** (*dict*) – A dictionary of custom parameters to be sent with the + HTTP request + +```eval_rst +.. autofunction:: ipfshttpclient.connect + +.. autofunction:: ipfshttpclient.assert_version + +.. autoclass:: ipfshttpclient.Client + :members: + :show-inheritance: + +``` diff --git a/docs/index.md b/docs/index.md index 0d9d151b..a4e896b5 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,11 +1,11 @@ -Python IPFS API's documentation! +Python IPFS HTTP Client's documentation! 
 ================================
 
 Contents
 --------
 
-* [Client API Reference](api_ref.md)
-* [Internal API Reference](internal_ref.md)
+* [HTTP Client Reference](http_client_ref.md)
+* [Internal HTTP Client Reference](internal_ref.md)
 
 Indices and tables
 ------------------
diff --git a/docs/internal_ref.md b/docs/internal_ref.md
index ab78cbe9..b9e67558 100644
--- a/docs/internal_ref.md
+++ b/docs/internal_ref.md
@@ -1,10 +1,10 @@
-Internal API Reference
+Internal HTTP Client Reference
 ----------------------
 
 ### `encoding`
 
 ```eval_rst
-.. automodule:: ipfsapi.encoding
+.. automodule:: ipfshttpclient.encoding
    :members:
    :show-inheritance:
 
@@ -13,7 +13,7 @@ Internal API Reference
 ### `http`
 
 ```eval_rst
-.. automodule:: ipfsapi.http
+.. automodule:: ipfshttpclient.http
    :members:
    :show-inheritance:
 
@@ -22,7 +22,7 @@ Internal API Reference
 ### `multipart`
 
 ```eval_rst
-.. automodule:: ipfsapi.multipart
+.. automodule:: ipfshttpclient.multipart
    :members:
    :show-inheritance:
 
@@ -31,7 +31,7 @@ Internal API Reference
 ### `utils`
 
 ```eval_rst
-.. automodule:: ipfsapi.utils
+.. automodule:: ipfshttpclient.utils
    :members:
    :show-inheritance:
 
diff --git a/docs/releasing.md b/docs/releasing.md
index c5dff746..a1c735ab 100644
--- a/docs/releasing.md
+++ b/docs/releasing.md
@@ -48,8 +48,8 @@ You can download it at:
 
 ## Update the source code
 
- 1. Make a GIT commit incrementing the version number in `ipfsapi/version.py`:
-    `git commit -m "Release version 0.4.X" ipfsapi/version.py`)
+ 1. Make a GIT commit incrementing the version number in `ipfshttpclient/version.py`:
+    `git commit -m "Release version 0.4.X" ipfshttpclient/version.py`)
  2. Tag the GIT commit with the version number using an annotated and signed tag:
     `git tag --sign -m "Release version 0.4.X" 0.4.X`
  3. Push the new version
@@ -64,4 +64,4 @@ Run: `make -C docs/ html`
 
 ## Publish the documentation
 
-Make sure an IPFS daemon is running and run: `ipfs-file-publish /Software/Python/ipfsapi/ docs/build/html/`
+Make sure an IPFS daemon is running and run: `ipfs-file-publish /Software/Python/ipfshttpclient/ docs/build/html/`
diff --git a/ipfsapi/__init__.py b/ipfshttpclient/__init__.py
similarity index 64%
rename from ipfsapi/__init__.py
rename to ipfshttpclient/__init__.py
index 45cd4f5d..102da784 100644
--- a/ipfsapi/__init__.py
+++ b/ipfshttpclient/__init__.py
@@ -1,12 +1,12 @@
-"""Python IPFS API client library"""
+"""Python IPFS HTTP CLIENT library"""
 
 from __future__ import absolute_import
 
 from .version import __version__
 
-###########################
-# Import stable API parts #
-###########################
+###################################
+# Import stable HTTP CLIENT parts #
+###################################
 from . import exceptions
 
 from .client import DEFAULT_HOST, DEFAULT_PORT, DEFAULT_BASE
diff --git a/ipfsapi/client.py b/ipfshttpclient/client.py
similarity index 98%
rename from ipfsapi/client.py
rename to ipfshttpclient/client.py
index 4a517f5b..3f8ed965 100644
--- a/ipfsapi/client.py
+++ b/ipfshttpclient/client.py
@@ -12,9 +12,9 @@ from .
import http, multipart, utils, exceptions, encoding -DEFAULT_HOST = str(os.environ.get("PY_IPFSAPI_DEFAULT_HOST", 'localhost')) -DEFAULT_PORT = int(os.environ.get("PY_IPFSAPI_DEFAULT_PORT", 5001)) -DEFAULT_BASE = str(os.environ.get("PY_IPFSAPI_DEFAULT_BASE", 'api/v0')) +DEFAULT_HOST = str(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_HOST", 'localhost')) +DEFAULT_PORT = int(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_PORT", 5001)) +DEFAULT_BASE = str(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_BASE", 'api/v0')) VERSION_MINIMUM = "0.4.3" VERSION_MAXIMUM = "0.5.0" @@ -26,7 +26,7 @@ def assert_version(version, minimum=VERSION_MINIMUM, maximum=VERSION_MAXIMUM): Raises ------ - ~ipfsapi.exceptions.VersionMismatch + ~ipfshttpclient.exceptions.VersionMismatch Parameters ---------- @@ -48,25 +48,25 @@ def assert_version(version, minimum=VERSION_MINIMUM, maximum=VERSION_MAXIMUM): def connect(host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE, chunk_size=multipart.default_chunk_size, **defaults): - """Create a new :class:`~ipfsapi.Client` instance and connect to the + """Create a new :class:`~ipfshttpclient.Client` instance and connect to the daemon to validate that its version is supported. Raises ------ - ~ipfsapi.exceptions.VersionMismatch - ~ipfsapi.exceptions.ErrorResponse - ~ipfsapi.exceptions.ConnectionError - ~ipfsapi.exceptions.ProtocolError - ~ipfsapi.exceptions.StatusError - ~ipfsapi.exceptions.TimeoutError + ~ipfshttpclient.exceptions.VersionMismatch + ~ipfshttpclient.exceptions.ErrorResponse + ~ipfshttpclient.exceptions.ConnectionError + ~ipfshttpclient.exceptions.ProtocolError + ~ipfshttpclient.exceptions.StatusError + ~ipfshttpclient.exceptions.TimeoutError All parameters are identical to those passed to the constructor of the - :class:`~ipfsapi.Client` class. + :class:`~ipfshttpclient.Client` class. Returns ------- - ~ipfsapi.Client + ~ipfshttpclient.Client """ # Create client instance client = Client(host, port, base, chunk_size, **defaults) @@ -105,7 +105,7 @@ def __exit__(self, *a): class Client(object): """A TCP client for interacting with an IPFS daemon. - A :class:`~ipfsapi.Client` instance will not actually establish a + A :class:`~ipfshttpclient.Client` instance will not actually establish a connection to the daemon until at least one of it's methods is called. Parameters @@ -209,7 +209,7 @@ def cat(self, multihash, offset=0, length=-1, **kwargs): >>> c.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') Traceback (most recent call last): ... - ipfsapi.exceptions.Error: this dag node is a directory + ipfshttpclient.exceptions.Error: this dag node is a directory >>> c.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX') b'\n\n\n\nipfs example viewer</…' @@ -361,7 +361,7 @@ def block_put(self, file, **kwargs): ------- dict : Information about the new block - See :meth:`~ipfsapi.Client.block_stat` + See :meth:`~ipfshttpclient.Client.block_stat` """ body, headers = multipart.stream_files(file, self.chunk_size) return self._client.request('/block/put', decoder='json', @@ -574,7 +574,7 @@ def object_put(self, file, **kwargs): ------- dict : Hash and links of the created DAG object - See :meth:`~ipfsapi.Object.object_links` + See :meth:`~ipfshttpclient.Object.object_links` """ body, headers = multipart.stream_files(file, self.chunk_size) return self._client.request('/object/put', decoder='json', @@ -1122,8 +1122,8 @@ def pin_update(self, from_path, to_path, **kwargs): Updates one pin to another, making sure that all objects in the new pin are local. Then removes the old pin. 
This is an optimized version of - using first using :meth:`~ipfsapi.Client.pin_add` to add a new pin - for an object and then using :meth:`~ipfsapi.Client.pin_rm` to remove + using first using :meth:`~ipfshttpclient.Client.pin_add` to add a new pin + for an object and then using :meth:`~ipfshttpclient.Client.pin_rm` to remove the pin for the old object. .. code-block:: python @@ -1289,7 +1289,7 @@ def id(self, peer=None, **kwargs): return self._client.request('/id', args, decoder='json', **kwargs) def bootstrap(self, **kwargs): - """Compatiblity alias for :meth:`~ipfsapi.Client.bootstrap_list`.""" + """Compatiblity alias for :meth:`~ipfshttpclient.Client.bootstrap_list`.""" self.bootstrap_list(**kwargs) def bootstrap_list(self, **kwargs): @@ -1650,7 +1650,7 @@ def dht_put(self, key, value, **kwargs): You may only use keytypes that are supported in your ``ipfs`` binary: ``go-ipfs`` currently only supports the ``/ipns/`` keytype. Unless you have a relatively deep understanding of the key's internal structure, - you likely want to be using the :meth:`~ipfsapi.Client.name_publish` + you likely want to be using the :meth:`~ipfshttpclient.Client.name_publish` instead. Value is arbitrary text. @@ -2091,7 +2091,7 @@ def shutdown(self): """Stop the connected IPFS daemon instance. Sending any further requests after this will fail with - ``ipfsapi.exceptions.ConnectionError``, until you start another IPFS + ``ipfshttpclient.exceptions.ConnectionError``, until you start another IPFS daemon instance. """ try: @@ -2196,10 +2196,10 @@ def add_pyobj(self, py_obj, **kwargs): .. deprecated:: 0.4.2 The ``*_pyobj`` APIs allow for arbitrary code execution if abused. - Either switch to :meth:`~ipfsapi.Client.add_json` or use + Either switch to :meth:`~ipfshttpclient.Client.add_json` or use ``client.add_bytes(pickle.dumps(py_obj))`` instead. - Please see :meth:`~ipfsapi.Client.get_pyobj` for the + Please see :meth:`~ipfshttpclient.Client.get_pyobj` for the **security risks** of using these methods! .. code-block:: python @@ -2225,7 +2225,7 @@ def get_pyobj(self, multihash, **kwargs): .. deprecated:: 0.4.2 The ``*_pyobj`` APIs allow for arbitrary code execution if abused. - Either switch to :meth:`~ipfsapi.Client.get_json` or use + Either switch to :meth:`~ipfshttpclient.Client.get_json` or use ``pickle.loads(client.cat(multihash))`` instead. .. 
caution:: diff --git a/ipfsapi/encoding.py b/ipfshttpclient/encoding.py similarity index 95% rename from ipfsapi/encoding.py rename to ipfshttpclient/encoding.py index c6e2d8f0..2f2f2aa7 100644 --- a/ipfsapi/encoding.py +++ b/ipfshttpclient/encoding.py @@ -31,7 +31,7 @@ def parse_partial(self, raw): Raises ------ - ~ipfsapi.exceptions.DecodingError + ~ipfshttpclient.exceptions.DecodingError Parameters ---------- @@ -49,7 +49,7 @@ def parse_finalize(self): Raises ------ - ~ipfsapi.exceptions.DecodingError + ~ipfshttpclient.exceptions.DecodingError Returns ------- @@ -62,7 +62,7 @@ def parse(self, raw): Raises ------ - ~ipfsapi.exceptions.DecodingError + ~ipfshttpclient.exceptions.DecodingError Parameters ---------- @@ -83,7 +83,7 @@ def encode(self, obj): Raises ------ - ~ipfsapi.exceptions.EncodingError + ~ipfshttpclient.exceptions.EncodingError Parameters ---------- @@ -143,7 +143,7 @@ def parse_partial(self, data): Raises ------ - ~ipfsapi.exceptions.DecodingError + ~ipfshttpclient.exceptions.DecodingError Returns ------- @@ -235,7 +235,7 @@ def parse_finalize(self): Raises ------ - ~ipfsapi.exceptions.DecodingError + ~ipfshttpclient.exceptions.DecodingError Returns ------- @@ -265,7 +265,7 @@ def encode(self, obj): Raises ------ - ~ipfsapi.exceptions.EncodingError + ~ipfshttpclient.exceptions.EncodingError Parameters ---------- @@ -321,7 +321,7 @@ def parse_finalize(self): Raises ------ - ~ipfsapi.exceptions.DecodingError + ~ipfshttpclient.exceptions.DecodingError Returns ------- @@ -344,7 +344,7 @@ def parse(self, raw): Raises ------ - ~ipfsapi.exceptions.DecodingError + ~ipfshttpclient.exceptions.DecodingError Parameters ---------- @@ -362,7 +362,7 @@ def encode(self, obj): Raises ------ - ~ipfsapi.exceptions.EncodingError + ~ipfshttpclient.exceptions.EncodingError Parameters ---------- @@ -405,7 +405,7 @@ def get_encoding(name): Raises ------ - ~ipfsapi.exceptions.EncoderMissingError + ~ipfshttpclient.exceptions.EncoderMissingError Parameters ---------- diff --git a/ipfsapi/exceptions.py b/ipfshttpclient/exceptions.py similarity index 100% rename from ipfsapi/exceptions.py rename to ipfshttpclient/exceptions.py diff --git a/ipfsapi/http.py b/ipfshttpclient/http.py similarity index 94% rename from ipfsapi/http.py rename to ipfshttpclient/http.py index c954a9a9..2d5f0ddf 100644 --- a/ipfsapi/http.py +++ b/ipfshttpclient/http.py @@ -134,7 +134,7 @@ class HTTPClient(object): The path prefix for API calls defaults : dict The default parameters to be passed to - :meth:`~ipfsapi.http.HTTPClient.request` + :meth:`~ipfshttpclient.http.HTTPClient.request` """ __metaclass__ = abc.ABCMeta @@ -209,11 +209,11 @@ def request(self, path, Raises ------ - ~ipfsapi.exceptions.ErrorResponse - ~ipfsapi.exceptions.ConnectionError - ~ipfsapi.exceptions.ProtocolError - ~ipfsapi.exceptions.StatusError - ~ipfsapi.exceptions.TimeoutError + ~ipfshttpclient.exceptions.ErrorResponse + ~ipfshttpclient.exceptions.ConnectionError + ~ipfshttpclient.exceptions.ProtocolError + ~ipfshttpclient.exceptions.StatusError + ~ipfshttpclient.exceptions.TimeoutError Parameters ---------- @@ -256,11 +256,11 @@ def download(self, path, args=[], filepath=None, opts={}, Raises ------ - ~ipfsapi.exceptions.ErrorResponse - ~ipfsapi.exceptions.ConnectionError - ~ipfsapi.exceptions.ProtocolError - ~ipfsapi.exceptions.StatusError - ~ipfsapi.exceptions.TimeoutError + ~ipfshttpclient.exceptions.ErrorResponse + ~ipfshttpclient.exceptions.ConnectionError + ~ipfshttpclient.exceptions.ProtocolError + ~ipfshttpclient.exceptions.StatusError + 
~ipfshttpclient.exceptions.TimeoutError
 
 	Parameters
 	----------
diff --git a/ipfsapi/multipart.py b/ipfshttpclient/multipart.py
similarity index 99%
rename from ipfsapi/multipart.py
rename to ipfshttpclient/multipart.py
index ed732156..22d6db9b 100644
--- a/ipfsapi/multipart.py
+++ b/ipfshttpclient/multipart.py
@@ -282,7 +282,7 @@ class FileStream(BufferedGenerator):
 	A buffered generator that encodes an array of files as
 	:mimetype:`multipart/form-data`.
 	This is a concrete implementation of
-	:class:`~ipfsapi.multipart.BufferedGenerator`.
+	:class:`~ipfshttpclient.multipart.BufferedGenerator`.
 
 	Parameters
 	----------
@@ -371,7 +371,7 @@ class DirectoryStream(BufferedGenerator):
 	A buffered generator that encodes an array of files as
 	:mimetype:`multipart/form-data`.
 	This is a concrete implementation of
-	:class:`~ipfsapi.multipart.BufferedGenerator`.
+	:class:`~ipfshttpclient.multipart.BufferedGenerator`.
 
 	Parameters
 	----------
diff --git a/ipfsapi/utils.py b/ipfshttpclient/utils.py
similarity index 100%
rename from ipfsapi/utils.py
rename to ipfshttpclient/utils.py
diff --git a/ipfsapi/version.py b/ipfshttpclient/version.py
similarity index 87%
rename from ipfsapi/version.py
rename to ipfshttpclient/version.py
index 897e3a8f..fdf563be 100644
--- a/ipfsapi/version.py
+++ b/ipfshttpclient/version.py
@@ -1,7 +1,7 @@
 # _Versioning scheme:_
 # The major and minor version of each release correspond to the supported
 # IPFS daemon version. The revision number will be updated whenever we make
-# a new release for the `py-ipfs-api` client for that daemon version.
+# a new release for the `py-ipfs-http-client` for that daemon version.
 #
 # Example: The first client version to support the `0.4.x`-series of the IPFS
 # HTTP API will have version `0.4.0`, the second version will have version
diff --git a/pyproject.toml b/pyproject.toml
index 863c07ee..9023339d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,11 +3,11 @@ requires = ["flit"]
 build-backend = "flit.buildapi"
 
 [tool.flit.metadata]
-module = "ipfsapi"
+module = "ipfshttpclient"
 
-author = "py-ipfs-api team"
+author = "py-ipfs-http-client team"
 author-email = ""
-home-page = "https://github.com/ipfs/py-ipfs-api"
+home-page = "https://github.com/ipfs/py-ipfs-http-client"
 keywords = "ipfs storage distribution development"
 license = "MIT License"
 description-file = "README.md"
@@ -21,20 +21,20 @@ requires = [
 
 classifiers = [
 	"Development Status :: 3 - Alpha",
-	
+
 	# Indicate who your project is intended for
 	"Intended Audience :: Developers",
 	"Intended Audience :: Information Technology",
 	"Intended Audience :: Science/Research",
-	
+
 	"Topic :: Internet",
 	"Topic :: Scientific/Engineering",
 	"Topic :: System :: Filesystems",
 	"Topic :: System :: Networking",
-	
+
 	# Pick your license as you wish (should match "license" above)
 	"License :: OSI Approved :: MIT License",
-	
+
 	# Specify the Python versions you support here. In particular, ensure
 	# that you indicate whether you support Python 2, Python 3 or both.
 	"Programming Language :: Python :: 2",
@@ -46,5 +46,5 @@ classifiers = [
 ]
 
 [tool.flit.metadata.urls]
-Documentation = "https://ipfs.io/ipns/QmZ86ow1byeyhNRJEatWxGPJKcnQKG7s51MtbHdxxUddTH/Software/Python/ipfsapi/"
+Documentation = "https://ipfs.io/ipns/QmZ86ow1byeyhNRJEatWxGPJKcnQKG7s51MtbHdxxUddTH/Software/Python/ipfshttpclient/"
 
diff --git a/renaming_status.txt b/renaming_status.txt
new file mode 100644
index 00000000..2ce1ef57
--- /dev/null
+++ b/renaming_status.txt
@@ -0,0 +1,83 @@
+PROPOSED GITHUB REPO RENAME: py-ipfs-api -> py-ipfs-http-client
+PROPOSED PIP MODULE RENAME: ipfshttpclient
+PROPOSED PROJECT RENAME: Python IPFS API -> Python IPFS HTTP Client
+External Changes to be made:
+(See the section under "FOR README.md")
+
+
+THESE MAY REFER TO THE ACTUAL API
+ipfshttpclient/encoding.py Ln 392
+ipfshttpclient/http.py Ln 2,4,134
+ipfshttpclient/version.py Ln 7
+
+THESE MAY REFER TO THE ACTUAL API
+ipfshttpclient/client.py All occurrences
+test/run-tests.py Ln 68,102
+
+
+FOR README.md:
+* I have made changes in the original file itself
+* External changes to be done by the project maintainers (I have already made the changes in the file, on the respective lines):
+  https://img.shields.io/pypi/v/ipfsapi.svg?style=flat-square -> https://img.shields.io/pypi/v/ipfshttpclient.svg?style=flat-square line 6
+  https://pypi.python.org/pypi/ipfsapi -> https://pypi.python.org/pypi/ipfshttpclient line 6
+  https://travis-ci.org/ipfs/py-ipfs-api.svg?branch=master -> https://travis-ci.org/ipfs/py-ipfs-http-client.svg?branch=master line 7
+  https://travis-ci.org/ipfs/py-ipfs-api -> https://travis-ci.org/ipfs/py-ipfs-http-client line 7
+  https://ipfs.io/ipns/QmZ86ow1byeyhNRJEatWxGPJKcnQKG7s51MtbHdxxUddTH/Software/Python/ipfsapi/ ->
+  https://ipfs.io/ipns/QmZ86ow1byeyhNRJEatWxGPJKcnQKG7s51MtbHdxxUddTH/Software/Python/ipfshttpclient/ line 11,118
+  The PIP package and Python module: ipfshttpclient
+  The Github repo: py-ipfs-api -> py-ipfs-http-client
+  PROJECT RENAME: Python IPFS API -> Python IPFS HTTP Client
+* Changes: All the above changes
+  py-ipfs-api -> py-ipfs-http-client (Github repo name)
+  ipfsapi -> ipfshttpclient (pip module)
+  api -> http_client (code)
+  client -> http_client (code)
+  client API -> HTTP Client (text)
+* Doubtful : Line 14
+  Line 67
+  Line 25, 122-136
+
+
+FOR docs/conf.py:
+* I have made changes in the original file itself
+* Changes: API -> HTTP Client
+  PythonIPFSAPI -> PythonIPFSHTTPClient
+  py-ipfs-api -> py-ipfs-http-client
+
+
+FOR pyproject.toml:
+* I have made changes in the original file itself
+* Changes: module = "ipfsapi" -> module = "ipfshttpclient" Line 6
+  py-ipfs-api -> py-ipfs-http-client
+  https://ipfs.io/ipns/QmZ86ow1byeyhNRJEatWxGPJKcnQKG7s51MtbHdxxUddTH/Software/Python/ipfsapi/ ->
+  https://ipfs.io/ipns/QmZ86ow1byeyhNRJEatWxGPJKcnQKG7s51MtbHdxxUddTH/Software/Python/ipfshttpclient/
+* Doubtful: Line 3
+
+
+RENAMING
+PY_IPFSAPI -> PY_IPFS_HTTP_CLIENT
+ipfsapi -> ipfshttpclient
+IpfsApiTest -> IpfsHttpClientTest
+api -> http_client
+IpfsApiLogTest -> IpfsHttpClientLogTest
+IpfsApiPinTest -> IpfsHttpClientPinTest
+IpfsApiMFSTest -> IpfsHttpClientMFSTest
+IpfsApiRepoTest -> IpfsHttpClientRepoTest
+IpfsApiKeyTest -> IpfsHttpClientKeyTest
+IpfsApiObjectTest -> IpfsHttpClientObjectTest
+IpfsApiBitswapTest -> IpfsHttpClientBitswapTest
+IpfsApiPubSubTest -> IpfsHttpClientPubSubTest
+IpfsApiShutdownTest -> IpfsHttpClientShutdownTest
+Client API -> HTTP Client
+API Client -> HTTP Client
+API -> HTTP Client
+PythonIPFSAPI -> PythonIPFSHTTPClient
+apiokay -> http_client_okay
+apifail -> http_client_fail
+api_cat -> http_client_cat
+test_api_failure -> test_http_client_failure
+api/v0 -> http_client/v0
+ipfsApiError -> ipfsHTTPClientError
+py-ipfs-api -> py-ipfs-http-client
+client -> http_client
+Python IPFS API -> Python IPFS HTTP Client
\ No newline at end of file
diff --git a/test/functional/tests.py b/test/functional/tests.py
index adc1f77a..f50720a4 100644
--- a/test/functional/tests.py
+++ b/test/functional/tests.py
@@ -11,7 +11,7 @@
 
 import pytest
 
-import ipfsapi
+import ipfshttpclient
 
 
 __is_available = None
@@ -23,12 +23,12 @@ def is_available():
 
 	if not isinstance(__is_available, bool):
 		try:
-			ipfsapi.connect()
-		except ipfsapi.exceptions.Error as error:
+			ipfshttpclient.connect()
+		except ipfshttpclient.exceptions.Error as error:
 			__is_available = False
 
 			# Make sure version incompatiblity is displayed to the user
-			if isinstance(error, ipfsapi.exceptions.VersionMismatch):
+			if isinstance(error, ipfshttpclient.exceptions.VersionMismatch):
 				raise
 		else:
 			__is_available = True
@@ -44,12 +44,12 @@ def skipIfOffline():
 
 def skipUnlessCI():
 	have_ci = os.environ.get("CI", "false") == "true"
-	have_pid = os.environ.get("PY_IPFSAPI_TEST_DAEMON_PID", "").isdigit()
+	have_pid = os.environ.get("PY_IPFS_HTTP_CLIENT_TEST_DAEMON_PID", "").isdigit()
 	return unittest.skipUnless(have_ci and have_pid, "CI-only test")
 
 
 def test_ipfs_node_available():
-	addr = "[{0}]:{1}".format(ipfsapi.DEFAULT_HOST, ipfsapi.DEFAULT_PORT)
+	addr = "[{0}]:{1}".format(ipfshttpclient.DEFAULT_HOST, ipfshttpclient.DEFAULT_PORT)
 	assert is_available(), "Functional tests require an IPFS node to be available at: " + addr
 
 
@@ -59,20 +59,20 @@ def test_ipfs_node_available():
 class AssertVersionTest(unittest.TestCase):
 	def test_assert_version(self):
 		# Minimum required version
-		ipfsapi.assert_version("0.1.0", "0.1.0", "0.2.0")
+		ipfshttpclient.assert_version("0.1.0", "0.1.0", "0.2.0")
 
 		# Too high version
-		with self.assertRaises(ipfsapi.exceptions.VersionMismatch):
-			ipfsapi.assert_version("0.2.0", "0.1.0", "0.2.0")
+		with self.assertRaises(ipfshttpclient.exceptions.VersionMismatch):
+			ipfshttpclient.assert_version("0.2.0", "0.1.0", "0.2.0")
 
 		# Too low version
-		with self.assertRaises(ipfsapi.exceptions.VersionMismatch):
-			ipfsapi.assert_version("0.0.5", "0.1.0", "0.2.0")
+		with self.assertRaises(ipfshttpclient.exceptions.VersionMismatch):
+			ipfshttpclient.assert_version("0.0.5", "0.1.0", "0.2.0")
 
 
 @skipIfOffline()
-class IpfsApiTest(unittest.TestCase):
+class IpfsHttpClientTest(unittest.TestCase):
 
-	api = ipfsapi.Client()
+	http_client = ipfshttpclient.Client()
 
 	fake = [{'Hash': u'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX',
 	         'Name': 'fake_dir/fsdfgh'},
@@ -191,15 +191,15 @@ def setUp(self):
 		# Makes all of the diff visible if the hashes change for some reason
 		self.maxDiff = None
 
-		self.pinned = set(self.api.pin_ls(type="recursive")["Keys"])
+		self.pinned = set(self.http_client.pin_ls(type="recursive")["Keys"])
 
 	def tearDown(self):
 		os.chdir(self._olddir)
 
 	def _clean_up_pins(self):
-		for multihash in self.api.pin_ls(type="recursive")["Keys"]:
+		for multihash in self.http_client.pin_ls(type="recursive")["Keys"]:
 			if multihash not in self.pinned:
-				self.api.pin_rm(multihash)
+				self.http_client.pin_rm(multihash)
 
 	@staticmethod
 	def _sort_by_key(items, key="Name"):
@@ -211,36 +211,36 @@ def _sort_by_key(items, key="Name"):
 
 	def test_version(self):
 		expected = ['Repo', 'Commit', 'Version']
-		resp_version = self.api.version()
+		resp_version = self.http_client.version()
 		for key in expected:
 			assert key in
resp_version def test_id(self): expected = ['PublicKey', 'ProtocolVersion', 'ID', 'AgentVersion', 'Addresses'] - resp_id = self.api.id() + resp_id = self.http_client.id() for key in expected: assert key in resp_id def test_add_single_from_str(self): - res = self.api.add(self.fake_file, pin=False) + res = self.http_client.add(self.fake_file, pin=False) assert self.fake_file_only_res == res - assert res["Hash"] not in self.api.pin_ls(type="recursive") - assert res["Hash"] in list(map(lambda i: i["Ref"], self.api.refs_local())) + assert res["Hash"] not in self.http_client.pin_ls(type="recursive") + assert res["Hash"] in list(map(lambda i: i["Ref"], self.http_client.refs_local())) def test_add_single_from_fp(self): with open(self.fake_file, 'rb') as fp: - res = self.api.add(fp, pin=False) + res = self.http_client.add(fp, pin=False) assert self.fake_file_only_res == res - assert res["Hash"] not in self.api.pin_ls(type="recursive") - assert res["Hash"] in list(map(lambda i: i["Ref"], self.api.refs_local())) + assert res["Hash"] not in self.http_client.pin_ls(type="recursive") + assert res["Hash"] in list(map(lambda i: i["Ref"], self.http_client.refs_local())) def test_add_single_from_str_with_dir(self): - res = self.api.add(self.fake_file, wrap_with_directory=True) + res = self.http_client.add(self.fake_file, wrap_with_directory=True) try: assert self.fake_file_dir_res == res @@ -249,22 +249,22 @@ def test_add_single_from_str_with_dir(self): for item in res: if item["Name"] == "": dir_hash = item["Hash"] - assert dir_hash in self.api.pin_ls(type="recursive")["Keys"] + assert dir_hash in self.http_client.pin_ls(type="recursive")["Keys"] finally: self._clean_up_pins() def test_only_hash_file(self): - self.api.repo_gc() + self.http_client.repo_gc() - res = self.api.add(self.fake_file, only_hash=True) + res = self.http_client.add(self.fake_file, only_hash=True) assert self.fake_file_only_res == res - assert res["Hash"] not in self.api.pin_ls(type="recursive") - assert res["Hash"] not in list(map(lambda i: i["Ref"], self.api.refs_local())) + assert res["Hash"] not in self.http_client.pin_ls(type="recursive") + assert res["Hash"] not in list(map(lambda i: i["Ref"], self.http_client.refs_local())) def test_add_multiple_from_list(self): - res = self.api.add([self.fake_file, self.fake_file2]) + res = self.http_client.add([self.fake_file, self.fake_file2]) try: assert self.fake_files_res == res @@ -272,7 +272,7 @@ def test_add_multiple_from_list(self): self._clean_up_pins() def test_add_multiple_from_dirname(self): - res = self.api.add(self.fake_dir_test2) + res = self.http_client.add(self.fake_dir_test2) try: assert self._sort_by_key(self.fake_dir_res) == self._sort_by_key(res) @@ -280,7 +280,7 @@ def test_add_multiple_from_dirname(self): self._clean_up_pins() def test_add_filepattern_from_dirname(self): - res = self.api.add(self.fake_dir, pattern=self.pattern) + res = self.http_client.add(self.fake_dir, pattern=self.pattern) try: assert self._sort_by_key(self.fake_dir_fnpattern_res) == self._sort_by_key(res) @@ -289,7 +289,7 @@ def test_add_filepattern_from_dirname(self): def test_add_filepattern_subdir_wildcard(self): - res = self.api.add(self.fake_dir, pattern=self.pattern2) + res = self.http_client.add(self.fake_dir, pattern=self.pattern2) try: assert self._sort_by_key(self.fake_dir_fnpattern2_res) == self._sort_by_key(res) @@ -297,7 +297,7 @@ def test_add_filepattern_subdir_wildcard(self): self._clean_up_pins() def test_add_recursive(self): - res = self.api.add(self.fake_dir, recursive=True) + res = 
self.http_client.add(self.fake_dir, recursive=True) try: assert self._sort_by_key(self.fake_dir_recursive_res) == self._sort_by_key(res) @@ -306,33 +306,33 @@ def test_add_recursive(self): def test_add_json(self): data = {'Action': 'Open', 'Type': 'PR', 'Name': 'IPFS', 'Pubkey': 7} - res = self.api.add_json(data) + res = self.http_client.add_json(data) try: - assert data == self.api.get_json(res) + assert data == self.http_client.get_json(res) # have to test the string added to IPFS, deserializing JSON will not # test order of keys - assert '{"Action":"Open","Name":"IPFS","Pubkey":7,"Type":"PR"}' == self.api.cat(res).decode('utf-8') + assert '{"Action":"Open","Name":"IPFS","Pubkey":7,"Type":"PR"}' == self.http_client.cat(res).decode('utf-8') finally: self._clean_up_pins() def test_add_get_pyobject(self): data = [-1, 3.14, u'Hän€', b'23' ] - res = self.api.add_pyobj(data) + res = self.http_client.add_pyobj(data) try: - assert data == self.api.get_pyobj(res) + assert data == self.http_client.get_pyobj(res) finally: self._clean_up_pins() def test_get_file(self): - self.api.add(self.fake_file) + self.http_client.add(self.fake_file) try: test_hash = self.fake[0]['Hash'] - self.api.get(test_hash) + self.http_client.get(test_hash) assert test_hash in os.listdir(os.getcwd()) os.remove(test_hash) @@ -341,12 +341,12 @@ def test_get_file(self): self._clean_up_pins() def test_get_dir(self): - self.api.add(self.fake_dir, recursive=True) + self.http_client.add(self.fake_dir, recursive=True) try: test_hash = self.fake[8]['Hash'] - self.api.get(test_hash) + self.http_client.get(test_hash) assert test_hash in os.listdir(os.getcwd()) shutil.rmtree(test_hash) @@ -355,12 +355,12 @@ def test_get_dir(self): self._clean_up_pins() def test_get_path(self): - self.api.add(self.fake_file) + self.http_client.add(self.fake_file) try: test_hash = self.fake[8]['Hash'] + '/fsdfgh' - self.api.get(test_hash) + self.http_client.get(test_hash) assert 'fsdfgh' in os.listdir(os.getcwd()) os.remove('fsdfgh') @@ -369,41 +369,41 @@ def test_get_path(self): self._clean_up_pins() def test_refs(self): - self.api.add(self.fake_dir, recursive=True) + self.http_client.add(self.fake_dir, recursive=True) try: - refs = self.api.refs(self.fake[8]['Hash']) + refs = self.http_client.refs(self.fake[8]['Hash']) assert self._sort_by_key(self.refs_res, "Ref") == self._sort_by_key(refs, "Ref") finally: self._clean_up_pins() def test_cat_single_file_str(self): - self.api.add(self.fake_file) + self.http_client.add(self.fake_file) try: - content = self.api.cat('QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX') + content = self.http_client.cat('QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX') assert content == b"dsadsad\n" finally: self._clean_up_pins() def test_cat_file_block(self): - self.api.add(self.fake_file) + self.http_client.add(self.fake_file) content = b"dsadsad\n" try: for offset in range(len(content)): for length in range(len(content)): - block = self.api.cat('QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX', offset=offset, length=length) + block = self.http_client.cat('QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX', offset=offset, length=length) assert block == content[offset:offset+length] finally: self._clean_up_pins() @skipIfOffline() -class IpfsApiLogTest(unittest.TestCase): +class IpfsHttpClientLogTest(unittest.TestCase): def setUp(self): - self.api = ipfsapi.Client() + self.http_client = ipfshttpclient.Client() def test_log_ls_level(self): """ @@ -412,19 +412,19 @@ def test_log_ls_level(self): levels are the same as before the test 
was run. """ # Retrieves the list of logging subsystems for a running daemon. - resp_ls = self.api.log_ls() + resp_ls = self.http_client.log_ls() # The response should be a dictionary with only one key ('Strings'). self.assertTrue('Strings' in resp_ls) # Sets the logging level to 'error' for the first subsystem found. sub = resp_ls['Strings'][0] - resp_level = self.api.log_level(sub, 'error') + resp_level = self.http_client.log_level(sub, 'error') self.assertEqual(resp_level['Message'], "Changed log level of \'%s\' to 'error'\n" % sub) def test_log_tail(self): # Gets the response object. - tail = self.api.log_tail() + tail = self.http_client.log_tail() # The log should have been parsed into a dictionary object with # various keys depending on the event that occured. @@ -432,13 +432,13 @@ def test_log_tail(self): @skipIfOffline() -class IpfsApiPinTest(unittest.TestCase): +class IpfsHttpClientPinTest(unittest.TestCase): def setUp(self): - self.api = ipfsapi.Client() + self.http_client = ipfshttpclient.Client() # Add resources to be pinned. - self.resource = self.api.add_str('Mary had a little lamb') - resp_add = self.api.add('test/functional/fake_dir', recursive=True) + self.resource = self.http_client.add_str('Mary had a little lamb') + resp_add = self.http_client.add('test/functional/fake_dir', recursive=True) self.fake_dir_hashes = [el['Hash'] for el in resp_add if 'Hash' in el] for resp in resp_add: if resp["Name"] == "fake_dir": @@ -448,107 +448,107 @@ def setUp(self): def test_pin_ls_add_rm_single(self): # Get pinned objects at start. - pins_begin = self.api.pin_ls()['Keys'] + pins_begin = self.http_client.pin_ls()['Keys'] # Unpin the resource if already pinned. if self.resource in pins_begin.keys(): - self.api.pin_rm(self.resource) + self.http_client.pin_rm(self.resource) # No matter what, the resource should not be pinned at this point. - self.assertNotIn(self.resource, self.api.pin_ls()['Keys']) + self.assertNotIn(self.resource, self.http_client.pin_ls()['Keys']) for option in [True, False]: # Pin the resource. - resp_add = self.api.pin_add(self.resource, recursive=option) - pins_afer_add = self.api.pin_ls()['Keys'] + resp_add = self.http_client.pin_add(self.resource, recursive=option) + pins_afer_add = self.http_client.pin_ls()['Keys'] self.assertEqual(resp_add['Pins'], [self.resource]) self.assertTrue(self.resource in pins_afer_add) self.assertEqual(pins_afer_add[self.resource]['Type'] == 'recursive', option) # Unpin the resource. - resp_rm = self.api.pin_rm(self.resource) - pins_afer_rm = self.api.pin_ls()['Keys'] + resp_rm = self.http_client.pin_rm(self.resource) + pins_afer_rm = self.http_client.pin_ls()['Keys'] self.assertEqual(resp_rm['Pins'], [self.resource]) self.assertFalse(self.resource in pins_afer_rm) # Get pinned objects at end. - pins_end = self.api.pin_ls()['Keys'] + pins_end = self.http_client.pin_ls()['Keys'] # Compare pinned items from start to finish of test. self.assertFalse(self.resource in pins_end.keys()) def test_pin_ls_add_rm_directory(self): # Remove fake_dir if it had previously been pinned. - if self.fake_dir_hash in self.api.pin_ls(type="recursive")['Keys'].keys(): - self.api.pin_rm(self.fake_dir_hash) + if self.fake_dir_hash in self.http_client.pin_ls(type="recursive")['Keys'].keys(): + self.http_client.pin_rm(self.fake_dir_hash) # Make sure I removed it - assert self.fake_dir_hash not in self.api.pin_ls()['Keys'].keys() + assert self.fake_dir_hash not in self.http_client.pin_ls()['Keys'].keys() # Add 'fake_dir' recursively. 
- self.api.pin_add(self.fake_dir_hash) + self.http_client.pin_add(self.fake_dir_hash) # Make sure all appear on the list of pinned objects. - pins_after_add = self.api.pin_ls()['Keys'].keys() + pins_after_add = self.http_client.pin_ls()['Keys'].keys() for el in self.fake_dir_hashes: assert el in pins_after_add # Clean up. - self.api.pin_rm(self.fake_dir_hash) - pins_end = self.api.pin_ls(type="recursive")['Keys'].keys() + self.http_client.pin_rm(self.fake_dir_hash) + pins_end = self.http_client.pin_ls(type="recursive")['Keys'].keys() assert self.fake_dir_hash not in pins_end def test_pin_add_update_verify_rm(self): # Get pinned objects at start. - pins_begin = self.api.pin_ls(type="recursive")['Keys'].keys() + pins_begin = self.http_client.pin_ls(type="recursive")['Keys'].keys() # Remove fake_dir and demo resource if it had previously been pinned. if self.fake_dir_hash in pins_begin: - self.api.pin_rm(self.fake_dir_hash) + self.http_client.pin_rm(self.fake_dir_hash) if self.fake_dir_test2_hash in pins_begin: - self.api.pin_rm(self.fake_dir_test2_hash) + self.http_client.pin_rm(self.fake_dir_test2_hash) # Ensure that none of the above are pinned anymore. - pins_after_rm = self.api.pin_ls(type="recursive")['Keys'].keys() + pins_after_rm = self.http_client.pin_ls(type="recursive")['Keys'].keys() assert self.fake_dir_hash not in pins_after_rm assert self.fake_dir_test2_hash not in pins_after_rm # Add pin for sub-directory - self.api.pin_add(self.fake_dir_test2_hash) + self.http_client.pin_add(self.fake_dir_test2_hash) # Replace it by pin for the entire fake dir - self.api.pin_update(self.fake_dir_test2_hash, self.fake_dir_hash) + self.http_client.pin_update(self.fake_dir_test2_hash, self.fake_dir_hash) # Ensure that the sub-directory is not pinned directly anymore - pins_after_update = self.api.pin_ls(type="recursive")["Keys"].keys() + pins_after_update = self.http_client.pin_ls(type="recursive")["Keys"].keys() assert self.fake_dir_test2_hash not in pins_after_update assert self.fake_dir_hash in pins_after_update # Now add a pin to the sub-directory from the parent directory - self.api.pin_update(self.fake_dir_hash, self.fake_dir_test2_hash, unpin=False) + self.http_client.pin_update(self.fake_dir_hash, self.fake_dir_test2_hash, unpin=False) # Check integrity of all directory content hashes and whether all # directory contents have been processed in doing this hashes = [] - for result in self.api.pin_verify(self.fake_dir_hash, verbose=True): + for result in self.http_client.pin_verify(self.fake_dir_hash, verbose=True): assert result["Ok"] hashes.append(result["Cid"]) assert self.fake_dir_hash in hashes # Ensure that both directories are now recursively pinned - pins_after_update2 = self.api.pin_ls(type="recursive")["Keys"].keys() + pins_after_update2 = self.http_client.pin_ls(type="recursive")["Keys"].keys() assert self.fake_dir_test2_hash in pins_after_update2 assert self.fake_dir_hash in pins_after_update2 # Clean up - self.api.pin_rm(self.fake_dir_hash, self.fake_dir_test2_hash) + self.http_client.pin_rm(self.fake_dir_hash, self.fake_dir_test2_hash) @skipIfOffline() -class IpfsApiMFSTest(unittest.TestCase): +class IpfsHttpClientMFSTest(unittest.TestCase): test_files = { 'test_file1': { @@ -564,7 +564,7 @@ class IpfsApiMFSTest(unittest.TestCase): test_directory_path = '/test_dir' def setUp(self): - self.api = ipfsapi.Client() + self.http_client = ipfshttpclient.Client() self._olddir = os.getcwd() os.chdir(HERE) @@ -576,91 +576,91 @@ def test_file_write_stat_read_delete(self): filepath = "/" 
+ filename # Create target file - self.api.files_write(filepath, desc[u'Name'], create=True) + self.http_client.files_write(filepath, desc[u'Name'], create=True) # Verify stat information of file - stat = self.api.files_stat(filepath) + stat = self.http_client.files_stat(filepath) self.assertEqual(sorted(desc[u'Stat'].items()), sorted(stat.items())) # Read back (and compare file contents) with open(desc[u'Name'], 'rb') as file: - content = self.api.files_read(filepath) + content = self.http_client.files_read(filepath) self.assertEqual(content, file.read()) # Remove file - self.api.files_rm(filepath) + self.http_client.files_rm(filepath) def test_dir_make_fill_list_delete(self): - self.api.files_mkdir(self.test_directory_path) + self.http_client.files_mkdir(self.test_directory_path) for filename, desc in self.test_files.items(): # Create target file in directory - self.api.files_write( + self.http_client.files_write( self.test_directory_path + "/" + filename, desc[u'Name'], create=True ) # Verify directory contents - contents = self.api.files_ls(self.test_directory_path)[u'Entries'] + contents = self.http_client.files_ls(self.test_directory_path)[u'Entries'] filenames1 = list(map(lambda d: d[u'Name'], contents)) filenames2 = list(self.test_files.keys()) self.assertEqual(filenames1, filenames2) # Remove directory - self.api.files_rm(self.test_directory_path, recursive=True) + self.http_client.files_rm(self.test_directory_path, recursive=True) - with self.assertRaises(ipfsapi.exceptions.Error): - self.api.files_stat(self.test_directory_path) + with self.assertRaises(ipfshttpclient.exceptions.Error): + self.http_client.files_stat(self.test_directory_path) skipIfOffline() class TestBlockFunctions(unittest.TestCase): def setUp(self): - self.api = ipfsapi.Client() + self.http_client = ipfshttpclient.Client() self.multihash = 'QmYA2fn8cMbVWo4v95RwcwJVyQsNtnEwHerfWR8UNtEwoE' self.content_size = 248 def test_block_stat(self): expected_keys = ['Key', 'Size'] - res = self.api.block_stat(self.multihash) + res = self.http_client.block_stat(self.multihash) for key in expected_keys: self.assertTrue(key in res) def test_block_get(self): - self.assertEqual(len(self.api.block_get(self.multihash)), self.content_size) + self.assertEqual(len(self.http_client.block_get(self.multihash)), self.content_size) def test_block_put(self): path = os.path.join(os.path.dirname(os.path.dirname(__file__)), "functional", "fake_dir", "fsdfgh") expected_block_multihash = 'QmPevo2B1pwvDyuZyJbWVfhwkaGPee3f1kX36wFmqx1yna' expected_keys = ['Key', 'Size'] - res = self.api.block_put(path) + res = self.http_client.block_put(path) for key in expected_keys: self.assertTrue(key in res) self.assertEqual(res['Key'], expected_block_multihash) @skipIfOffline() -class IpfsApiRepoTest(unittest.TestCase): +class IpfsHttpClientRepoTest(unittest.TestCase): def setUp(self): - self.api = ipfsapi.Client() + self.http_client = ipfshttpclient.Client() def test_repo_stat(self): # Verify that the correct key-value pairs are returned - stat = self.api.repo_stat() + stat = self.http_client.repo_stat() self.assertEqual(sorted(stat.keys()), [u'NumObjects', u'RepoPath', u'RepoSize', u'StorageMax', u'Version']) def test_repo_gc(self): # Add and unpin an object to be garbage collected - garbage = self.api.add_str('Test String') - self.api.pin_rm(garbage) + garbage = self.http_client.add_str('Test String') + self.http_client.pin_rm(garbage) # Collect the garbage object with object count before and after - orig_objs = self.api.repo_stat()['NumObjects'] - gc = 
self.api.repo_gc() - cur_objs = self.api.repo_stat()['NumObjects'] + orig_objs = self.http_client.repo_stat()['NumObjects'] + gc = self.http_client.repo_gc() + cur_objs = self.http_client.repo_stat()['NumObjects'] # Verify the garbage object was collected self.assertGreater(orig_objs, cur_objs) @@ -669,68 +669,68 @@ def test_repo_gc(self): @skipIfOffline() -class IpfsApiKeyTest(unittest.TestCase): +class IpfsHttpClientKeyTest(unittest.TestCase): def setUp(self): - self.api = ipfsapi.Client() + self.http_client = ipfshttpclient.Client() def test_key_add_list_rename_rm(self): # Remove keys if they already exist - key_list = list(map(lambda k: k["Name"], self.api.key_list()["Keys"])) - if "ipfsapi-test-rsa" in key_list: - self.api.key_rm("ipfsapi-test-rsa") - if "ipfsapi-test-ed" in key_list: - self.api.key_rm("ipfsapi-test-ed") + key_list = list(map(lambda k: k["Name"], self.http_client.key_list()["Keys"])) + if "ipfshttpclient-test-rsa" in key_list: + self.http_client.key_rm("ipfshttpclient-test-rsa") + if "ipfshttpclient-test-ed" in key_list: + self.http_client.key_rm("ipfshttpclient-test-ed") # Add new RSA and ED25519 key - key1 = self.api.key_gen("ipfsapi-test-rsa", "rsa")["Name"] - key2 = self.api.key_gen("ipfsapi-test-ed", "ed25519")["Name"] + key1 = self.http_client.key_gen("ipfshttpclient-test-rsa", "rsa")["Name"] + key2 = self.http_client.key_gen("ipfshttpclient-test-ed", "ed25519")["Name"] # Validate the keys exist now - key_list = list(map(lambda k: k["Name"], self.api.key_list()["Keys"])) + key_list = list(map(lambda k: k["Name"], self.http_client.key_list()["Keys"])) assert key1 in key_list assert key2 in key_list # Rename the EC key - key2_new = self.api.key_rename(key2, "ipfsapi-test-ed2")["Now"] + key2_new = self.http_client.key_rename(key2, "ipfshttpclient-test-ed2")["Now"] # Validate that the key was successfully renamed - key_list = list(map(lambda k: k["Name"], self.api.key_list()["Keys"])) + key_list = list(map(lambda k: k["Name"], self.http_client.key_list()["Keys"])) assert key1 in key_list assert key2 not in key_list assert key2_new in key_list # Drop both keys with one request - self.api.key_rm(key1, key2_new) + self.http_client.key_rm(key1, key2_new) # Validate that the keys are gone again - key_list = list(map(lambda k: k["Name"], self.api.key_list()["Keys"])) + key_list = list(map(lambda k: k["Name"], self.http_client.key_list()["Keys"])) assert key1 not in key_list assert key2_new not in key_list @skipIfOffline() -class IpfsApiObjectTest(unittest.TestCase): +class IpfsHttpClientObjectTest(unittest.TestCase): def setUp(self): - self.api = ipfsapi.Client() + self.http_client = ipfshttpclient.Client() self._olddir = os.getcwd() os.chdir(HERE) # Add a resource to get the stats for. 
- self.resource = self.api.add_str('Mary had a little lamb') + self.resource = self.http_client.add_str('Mary had a little lamb') def tearDown(self): os.chdir(self._olddir) def test_object_new(self): expected_keys = ['Hash'] - res = self.api.object_new() + res = self.http_client.object_new() for key in expected_keys: self.assertTrue(key in res) def test_object_stat(self): expected = ['Hash', 'CumulativeSize', 'DataSize', 'NumLinks', 'LinksSize', 'BlockSize'] - resp_stat = self.api.object_stat(self.resource) + resp_stat = self.http_client.object_stat(self.resource) for key in expected: self.assertTrue(key in resp_stat) @@ -742,16 +742,16 @@ def test_object_put_get(self): "fake_json", "links.json") # Put the json objects on the DAG - no_links = self.api.object_put(path_no_links) - links = self.api.object_put(path_links) + no_links = self.http_client.object_put(path_no_links) + links = self.http_client.object_put(path_links) # Verify the correct content was put self.assertEqual(no_links['Hash'], 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V') self.assertEqual(links['Hash'], 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') # Get the objects from the DAG - get_no_links = self.api.object_get('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V') - get_links = self.api.object_get('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') + get_no_links = self.http_client.object_get('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V') + get_links = self.http_client.object_get('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') # Verify the objects we put have been gotten self.assertEqual(get_no_links['Data'], 'abc') @@ -764,8 +764,8 @@ def test_object_links(self): "fake_json", "links.json") # Put json object on the DAG and get its links - self.api.object_put(path_links) - links = self.api.object_links('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') + self.http_client.object_put(path_links) + links = self.http_client.object_links('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') # Verify the correct link has been gotten self.assertEqual(links['Links'][0]['Hash'], 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V') @@ -776,8 +776,8 @@ def test_object_data(self): "fake_json", "links.json") # Put json objects on the DAG and get its data - self.api.object_put(path_links) - data = self.api.object_data('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') + self.http_client.object_put(path_links) + data = self.http_client.object_data('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') # Verify the correct bytes have been gotten self.assertEqual(data, b'another') @@ -786,7 +786,7 @@ def test_object_patch_append_data(self): """Warning, this test depends on the contents of test/functional/fake_dir/fsdfgh """ - result = self.api.object_patch_append_data( + result = self.http_client.object_patch_append_data( 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', 'fake_dir/fsdfgh') self.assertEqual(result, {'Hash': 'QmcUsyoGVxWoQgYKgmLaDBGm8J3eHWfchMh3oDUD5FrrtN'}) @@ -795,7 +795,7 @@ def test_object_patch_add_link(self): """Warning, this test depends on the contents of test/functional/fake_dir/fsdfgh """ - result = self.api.object_patch_add_link( + result = self.http_client.object_patch_add_link( 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', 'self', 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') self.assertEqual(result, @@ -805,7 +805,7 @@ def test_object_patch_rm_link(self): """Warning, this test depends on the contents of test/functional/fake_dir/fsdfgh """ - result = self.api.object_patch_rm_link( + result = 
self.http_client.object_patch_rm_link( 'QmbWSr7YXBLcF23VVb7yPvUuogUPn46GD7gXftXC6mmsNM', 'self') self.assertEqual(result, {'Hash': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'}) @@ -814,23 +814,23 @@ def test_object_patch_set_data(self): """Warning, this test depends on the contents of test/functional/fake_dir/popoiopiu """ - result = self.api.object_patch_set_data( + result = self.http_client.object_patch_set_data( 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', 'fake_dir/popoiopiu') self.assertEqual(result, {'Hash': 'QmV4QR7MCBj5VTi6ddHmXPyjWGzbaKEtX2mx7axA5PA13G'}) @skipIfOffline() -class IpfsApiBitswapTest(unittest.TestCase): +class IpfsHttpClientBitswapTest(unittest.TestCase): def setUp(self): - self.api = ipfsapi.Client() + self.http_client = ipfshttpclient.Client() def test_bitswap_wantlist(self): - result = self.api.bitswap_wantlist(peer='QmdkJZUWnVkEc6yfptVu4LWY8nHkEnGwsxqQ233QSGj8UP') + result = self.http_client.bitswap_wantlist(peer='QmdkJZUWnVkEc6yfptVu4LWY8nHkEnGwsxqQ233QSGj8UP') self.assertTrue(result and type(result) is dict and 'Keys' in result) def test_bitswap_stat(self): - result = self.api.bitswap_stat() + result = self.http_client.bitswap_stat() self.assertTrue(result and type(result) is dict and 'Wantlist' in result) def test_bitswap_unwant(self): @@ -839,14 +839,14 @@ def test_bitswap_unwant(self): something comes back. """ - result = self.api.bitswap_unwant(key='QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V') + result = self.http_client.bitswap_unwant(key='QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V') self.assertTrue(result is not None) @skipIfOffline() -class IpfsApiPubSubTest(unittest.TestCase): +class IpfsHttpClientPubSubTest(unittest.TestCase): def setUp(self): - self.api = ipfsapi.Client() + self.http_client = ipfshttpclient.Client() def createTestChannel(self): """ @@ -874,13 +874,13 @@ def test_pubsub_pub_sub(self): # get the subscription stream - with self.api.pubsub_sub(topic) as sub: + with self.http_client.pubsub_sub(topic) as sub: # make sure something was actually returned from the subscription assert sub is not None # publish a message to topic - self.api.pubsub_pub(topic, message) + self.http_client.pubsub_pub(topic, message) # get the message sub_data = sub.read_message() @@ -901,12 +901,12 @@ def test_pubsub_ls(self): expected_return = { 'Strings': [topic] } # subscribe to the topic testing - sub = self.api.pubsub_sub(topic) + sub = self.http_client.pubsub_sub(topic) channels = None try: # grab the channels we're subscribed to - channels = self.api.pubsub_ls() + channels = self.http_client.pubsub_ls() finally: sub.close() @@ -918,7 +918,7 @@ def test_pubsub_peers(self): on who we're connected to. 
We may not even have any peers """ - peers = self.api.pubsub_peers() + peers = self.http_client.pubsub_peers() expected_return = { 'Strings': [] @@ -937,10 +937,10 @@ def test_pubsub_peers(self): @skipIfOffline() @skipUnlessCI() @pytest.mark.last -class IpfsApiShutdownTest(unittest.TestCase): +class IpfsHttpClientShutdownTest(unittest.TestCase): def setUp(self): - self.api = ipfsapi.Client() - self.pid = int(os.environ["PY_IPFSAPI_TEST_DAEMON_PID"]) + self.http_client = ipfshttpclient.Client() + self.pid = int(os.environ["PY_IPFS_HTTP_CLIENT_TEST_DAEMON_PID"]) @staticmethod def _pid_exists(pid): @@ -1000,7 +1000,7 @@ def test_daemon_shutdown(self): assert self._is_ipfs_running() # Send stop request - self.api.shutdown() + self.http_client.shutdown() # Wait for daemon process to disappear for _ in range(10000): diff --git a/test/run-tests.py b/test/run-tests.py index b2e379d3..1fbff48e 100755 --- a/test/run-tests.py +++ b/test/run-tests.py @@ -52,8 +52,8 @@ def _contextlib_suppress(*exceptions): # Export environment variables required for testing os.environ["IPFS_PATH"] = str(IPFS_PATH) -os.environ["PY_IPFSAPI_DEFAULT_HOST"] = str(HOST) -os.environ["PY_IPFSAPI_DEFAULT_PORT"] = str(PORT) +os.environ["PY_IPFS_HTTP_CLIENT_DEFAULT_HOST"] = str(HOST) +os.environ["PY_IPFS_HTTP_CLIENT_DEFAULT_PORT"] = str(PORT) # Make sure the IPFS data directory exists and is empty with contextlib.suppress(OSError): #PY2: Replace with `FileNotFoundError` @@ -74,7 +74,7 @@ def _contextlib_suppress(*exceptions): # Spawn IPFS daemon in data directory DAEMON = subprocess.Popen(["ipfs", "daemon", "--enable-pubsub-experiment"]) -os.environ["PY_IPFSAPI_TEST_DAEMON_PID"] = str(DAEMON.pid) +os.environ["PY_IPFS_HTTP_CLIENT_TEST_DAEMON_PID"] = str(DAEMON.pid) # Collect the exit code of `DAEMON` when `SIGCHLD` is received # (otherwise the shutdown test fails to recognize that the daemon process is dead) @@ -83,11 +83,11 @@ def _contextlib_suppress(*exceptions): signal.signal(signal.SIGCHLD, lambda *a: DAEMON.poll()) # Wait for daemon to start up -import ipfsapi +import ipfshttpclient while True: try: - ipfsapi.connect(HOST, PORT) - except ipfsapi.exceptions.ConnectionError: + ipfshttpclient.connect(HOST, PORT) + except ipfshttpclient.exceptions.ConnectionError: time.sleep(0.05) else: break @@ -109,7 +109,7 @@ def _contextlib_suppress(*exceptions): import pytest PYTEST_CODE = pytest.main([ "--verbose", - "--cov=ipfsapi", + "--cov=ipfshttpclient", "--cov-report=term", "--cov-report=html:{}".format(str(TEST_PATH / "cov_html")), "--cov-report=xml:{}".format(str(TEST_PATH / "cov.xml")) diff --git a/test/unit/test_encoding.py b/test/unit/test_encoding.py index eabcd61a..15b72ee2 100644 --- a/test/unit/test_encoding.py +++ b/test/unit/test_encoding.py @@ -12,8 +12,8 @@ import six from httmock import urlmatch, HTTMock -import ipfsapi.encoding -import ipfsapi.exceptions +import ipfshttpclient.encoding +import ipfshttpclient.exceptions class TestEncoding(unittest.TestCase): @@ -31,8 +31,8 @@ class TestEncoding(unittest.TestCase): """ def setUp(self): """create a Json encoder""" - self.encoder_json = ipfsapi.encoding.Json() - self.encoder_pickle = ipfsapi.encoding.Pickle() + self.encoder_json = ipfshttpclient.encoding.Json() + self.encoder_pickle = ipfshttpclient.encoding.Pickle() def test_json_parse(self): """Asserts parsed key/value json matches expected output.""" @@ -59,7 +59,7 @@ def test_json_parse_partial(self): assert list(self.encoder_json.parse_finalize()) == [] # String containing broken UTF-8 - with 
pytest.raises(ipfsapi.exceptions.DecodingError): + with pytest.raises(ipfshttpclient.exceptions.DecodingError): list(self.encoder_json.parse_partial(b'{"hello": "\xc3ber world!"}')) assert list(self.encoder_json.parse_finalize()) == [] @@ -77,11 +77,11 @@ def test_json_with_newlines(self): def test_json_parse_incomplete(self): """Tests if feeding the JSON parse incomplete data correctly produces an error.""" list(self.encoder_json.parse_partial(b'{"bla":')) - with pytest.raises(ipfsapi.exceptions.DecodingError): + with pytest.raises(ipfshttpclient.exceptions.DecodingError): self.encoder_json.parse_finalize() list(self.encoder_json.parse_partial(b'{"\xc3')) # Incomplete UTF-8 sequence - with pytest.raises(ipfsapi.exceptions.DecodingError): + with pytest.raises(ipfshttpclient.exceptions.DecodingError): self.encoder_json.parse_finalize() def test_json_parse_chained(self): @@ -129,10 +129,10 @@ def test_parse_pickle(self): def test_get_encoder_by_name(self): """Tests the process of obtaining an Encoder object given the named encoding.""" - encoder = ipfsapi.encoding.get_encoding('json') + encoder = ipfshttpclient.encoding.get_encoding('json') assert encoder.name == 'json' def test_get_invalid_encoder(self): """Tests the exception handling given an invalid named encoding.""" - with pytest.raises(ipfsapi.exceptions.EncoderMissingError): - ipfsapi.encoding.get_encoding('fake') + with pytest.raises(ipfshttpclient.exceptions.EncoderMissingError): + ipfshttpclient.encoding.get_encoding('fake') diff --git a/test/unit/test_http.py b/test/unit/test_http.py index 6992e513..d0ccde65 100644 --- a/test/unit/test_http.py +++ b/test/unit/test_http.py @@ -20,8 +20,8 @@ except ImportError: import mock -import ipfsapi.http -import ipfsapi.exceptions +import ipfshttpclient.http +import ipfshttpclient.exceptions @urlmatch(netloc='localhost:5001', path=r'.*/okay') @@ -60,11 +60,11 @@ def return_fail(url, request): } -@urlmatch(netloc='localhost:5001', path=r'.*/apiokay') -def api_okay(url, request): - """Defines an endpoint for successful api requests. +@urlmatch(netloc='localhost:5001', path=r'.*/http_client_okay') +def http_client_okay(url, request): + """Defines an endpoint for successful http client requests. - This endpoint will listen at http://localhost:5001/*/apiokay for incoming + This endpoint will listen at http://localhost:5001/*/http_client_okay for incoming requests and will always respond with a 200 status code and a json encoded Message of "okay". @@ -79,11 +79,11 @@ def api_okay(url, request): } -@urlmatch(netloc='localhost:5001', path=r'.*/apifail') -def api_fail(url, request): - """Defines an endpoint for failed api requests. +@urlmatch(netloc='localhost:5001', path=r'.*/http_client_fail') +def http_client_fail(url, request): + """Defines an endpoint for failed http client requests. - This endpoint will listen at http://localhost:5001/*/apifail for incoming + This endpoint will listen at http://localhost:5001/*/http_client_fail for incoming requests and will always respond with a 500 status code and a json encoded Message of "Someone set us up the bomb". @@ -99,7 +99,7 @@ def api_fail(url, request): @urlmatch(netloc='localhost:5001', path=r'.*/cat') -def api_cat(url, request): +def http_client_cat(url, request): """Defines an endpoint for a request to cat a file. 
This endpoint will listen at http://localhost:5001/*/cat for incoming @@ -125,7 +125,7 @@ class TestHttp(unittest.TestCase): test_successful_request -- tests that a successful http request returns the proper message test_generic_failure -- tests that a failed http request raises an HTTPError - test_api_failure -- tests that an api failure raises an ispfApiError + test_http_client_failure -- tests that an http client failure raises an ipfsHTTPClientError test_stream -- tests that the stream flag being set returns the raw response test_cat -- tests that paths ending in /cat are not parsed test_default_decoder -- tests that the default encoding is set to json @@ -139,7 +139,7 @@ class TestHttp(unittest.TestCase): """ def setUp(self): """Creates an instance of HTTPClient to test against.""" - self.client = ipfsapi.http.HTTPClient( + self.client = ipfshttpclient.http.HTTPClient( 'localhost', 5001, 'api/v0') @@ -153,14 +153,14 @@ def test_successful_request(self): def test_generic_failure(self): """Tests that a failed http request raises an HTTPError.""" with HTTMock(return_fail): - with pytest.raises(ipfsapi.exceptions.StatusError): + with pytest.raises(ipfshttpclient.exceptions.StatusError): self.client.request('/fail') - def test_api_failure(self): - """Tests that an api failure raises an ispfApiError.""" - with HTTMock(api_fail): - with pytest.raises(ipfsapi.exceptions.Error): - self.client.request('/apifail') + def test_http_client_failure(self): + """Tests that an http client failure raises an ipfsHTTPClientError.""" + with HTTMock(http_client_fail): + with pytest.raises(ipfshttpclient.exceptions.Error): + self.client.request('/http_client_fail') def test_stream(self): """Tests that the stream flag being set returns the raw response.""" @@ -170,45 +170,45 @@ def test_stream(self): def test_cat(self): """Tests that paths ending in /cat are not parsed.""" - with HTTMock(api_cat): + with HTTMock(http_client_cat): res = self.client.request('/cat') assert res == b'{"Message": "do not parse"}' def test_default_decoder(self): """Tests that the default encoding is set to json.""" - with HTTMock(api_okay): - res = self.client.request('/apiokay') + with HTTMock(http_client_okay): + res = self.client.request('/http_client_okay') assert res == b'{"Message": "okay"}' def test_explicit_decoder(self): """Tests that an explicit decoder is handled correctly.""" - with HTTMock(api_okay): - res = self.client.request('/apiokay', + with HTTMock(http_client_okay): + res = self.client.request('/http_client_okay', decoder='json') assert res['Message'] == 'okay' def test_unsupported_decoder(self): """Tests that unsupported encodings raise an exception.""" - with HTTMock(api_fail): - with pytest.raises(ipfsapi.exceptions.EncoderMissingError): - self.client.request('/apifail', decoder='xyz') + with HTTMock(http_client_fail): + with pytest.raises(ipfshttpclient.exceptions.EncoderMissingError): + self.client.request('/http_client_fail', decoder='xyz') def test_failed_decoder(self): """Tests that a failed encoding parse raises an exception.""" with HTTMock(return_okay): - with pytest.raises(ipfsapi.exceptions.DecodingError): + with pytest.raises(ipfshttpclient.exceptions.DecodingError): self.client.request('/okay', decoder='json') """TODO: Test successful download Need to determine correct way to mock an http request that returns a tar file. tarfile.open expects the tar to be in the form of an octal escaped - string, but internal functionality keeps resulting in hexidecimal. 
+ string, but internal functionality keeps resulting in hexadecimal. """ def test_failed_download(self): """Tests that a failed download raises an HTTPError.""" with HTTMock(return_fail): - with pytest.raises(ipfsapi.exceptions.StatusError): + with pytest.raises(ipfshttpclient.exceptions.StatusError): self.client.download('/fail') def test_session(self): @@ -219,18 +219,19 @@ def test_session(self): assert res == b'okay' assert self.client._session is None + def test_stream_close(mocker): - client = ipfsapi.http.HTTPClient("localhost", 5001, "api/v0") - mocker.patch("ipfsapi.http._notify_stream_iter_closed") + client = ipfshttpclient.http.HTTPClient("localhost", 5001, "api/v0") + mocker.patch("ipfshttpclient.http._notify_stream_iter_closed") with HTTMock(return_okay): with client.request("/okay", stream=True) as response_iter: - assert ipfsapi.http._notify_stream_iter_closed.call_count == 0 - assert ipfsapi.http._notify_stream_iter_closed.call_count == 1 - + assert ipfshttpclient.http._notify_stream_iter_closed.call_count == 0 + assert ipfshttpclient.http._notify_stream_iter_closed.call_count == 1 + response_iter = client.request("/okay", stream=True) - assert ipfsapi.http._notify_stream_iter_closed.call_count == 1 + assert ipfshttpclient.http._notify_stream_iter_closed.call_count == 1 response_iter.close() - assert ipfsapi.http._notify_stream_iter_closed.call_count == 2 - + assert ipfshttpclient.http._notify_stream_iter_closed.call_count == 2 + client.request("/okay") - assert ipfsapi.http._notify_stream_iter_closed.call_count == 3 + assert ipfshttpclient.http._notify_stream_iter_closed.call_count == 3 diff --git a/test/unit/test_multipart.py b/test/unit/test_multipart.py index 7cf7af32..9e4c6ebf 100644 --- a/test/unit/test_multipart.py +++ b/test/unit/test_multipart.py @@ -17,7 +17,7 @@ import pytest import six -import ipfsapi.multipart +import ipfshttpclient.multipart class TestContentHelpers(unittest.TestCase): @@ -34,40 +34,40 @@ class TestContentHelpers(unittest.TestCase): def test_content_disposition(self): """Check that content_disposition defaults properly""" expected = {'Content-Disposition': 'file; filename="example.txt"'} - actual = ipfsapi.multipart.content_disposition('example.txt') + actual = ipfshttpclient.multipart.content_disposition('example.txt') assert expected == actual def test_content_disposition_with_type(self): """Check that content_disposition handles given disposition type""" expected = {'Content-Disposition': 'attachment; filename="example.txt"'} - actual = ipfsapi.multipart.content_disposition('example.txt', + actual = ipfshttpclient.multipart.content_disposition('example.txt', 'attachment') assert expected == actual def test_content_type(self): """Check the content_type guessing functionality.""" - actual = ipfsapi.multipart.content_type('example.txt') + actual = ipfshttpclient.multipart.content_type('example.txt') expected = {'Content-Type': 'text/plain'} assert expected == actual - actual = ipfsapi.multipart.content_type('example.jpeg') + actual = ipfshttpclient.multipart.content_type('example.jpeg') expected = {'Content-Type': 'image/jpeg'} assert expected == actual - actual = ipfsapi.multipart.content_type('example') + actual = ipfshttpclient.multipart.content_type('example') expected = {'Content-Type': 'application/octet-stream'} assert expected == actual def test_multipart_content_type(self): """Check test_multipart_content_type functionality.""" - actual = ipfsapi.multipart.multipart_content_type( + actual = 
ipfshttpclient.multipart.multipart_content_type( '8K5rNKlLQVyreRNncxOTeg') expected = {'Content-Type': 'multipart/mixed; boundary="8K5rNKlLQVyreRNncxOTeg"'} assert expected == actual - actual = ipfsapi.multipart.multipart_content_type( + actual = ipfshttpclient.multipart.multipart_content_type( '8K5rNKlLQVyreRNncxOTeg', 'alt') expected = {'Content-Type': 'multipart/alt; boundary="8K5rNKlLQVyreRNncxOTeg"'} @@ -95,7 +95,7 @@ def test_init_defaults(self): expected_disposition = 'file; filename="test_name"' expected_type = 'multipart/mixed; boundary="\S*"' expected_boundary_pattern = '\S*' - generator = ipfsapi.multipart.BodyGenerator(name) + generator = ipfshttpclient.multipart.BodyGenerator(name) assert generator.headers['Content-Disposition'] == expected_disposition assert re.search(expected_type, generator.headers['Content-Type']) assert re.search(expected_boundary_pattern, generator.boundary) @@ -106,8 +106,8 @@ def test_init_with_all(self): disptype = "test_disp" subtype = "test_subtype" boundary = "test_boundary" - generator = ipfsapi.multipart.BodyGenerator(name, disptype, - subtype, boundary) + generator = ipfshttpclient.multipart.BodyGenerator(name, disptype, + subtype, boundary) assert generator.headers == { 'Content-Disposition': 'test_disp; filename="test_name"', 'Content-Type': @@ -123,8 +123,8 @@ def test_write_headers(self): disptype = "test_disp" subtype = "test_subtype" boundary = "test_boundary" - generator = ipfsapi.multipart.BodyGenerator(name, disptype, - subtype, boundary) + generator = ipfshttpclient.multipart.BodyGenerator(name, disptype, + subtype, boundary) headers = "" for chunk in generator.write_headers(): if type(chunk) is not str: @@ -139,8 +139,8 @@ def test_open(self): disptype = "test_disp" subtype = "test_subtype" boundary = "test_boundary" - generator = ipfsapi.multipart.BodyGenerator(name, disptype, - subtype, boundary) + generator = ipfshttpclient.multipart.BodyGenerator(name, disptype, + subtype, boundary) headers = "" for chunk in generator.open(): if type(chunk) is not str: @@ -157,8 +157,8 @@ def test_file_open(self): disptype = "test_disp" subtype = "test_subtype" boundary = "test_boundary" - generator = ipfsapi.multipart.BodyGenerator(name, disptype, - subtype, boundary) + generator = ipfshttpclient.multipart.BodyGenerator(name, disptype, + subtype, boundary) headers = "" for chunk in generator.file_open(name): if type(chunk) is not str: @@ -173,8 +173,8 @@ def test_file_close(self): disptype = "test_disp" subtype = "test_subtype" boundary = "test_boundary" - generator = ipfsapi.multipart.BodyGenerator(name, disptype, - subtype, boundary) + generator = ipfshttpclient.multipart.BodyGenerator(name, disptype, + subtype, boundary) headers = "" for chunk in generator.file_close(): if type(chunk) is not str: @@ -189,8 +189,8 @@ def test_close(self): disptype = "test_disp" subtype = "test_subtype" boundary = "test_boundary" - generator = ipfsapi.multipart.BodyGenerator(name, disptype, - subtype, boundary) + generator = ipfshttpclient.multipart.BodyGenerator(name, disptype, + subtype, boundary) headers = "" for chunk in generator.close(): if type(chunk) is not str: @@ -222,7 +222,7 @@ class TestBufferedGenerator(unittest.TestCase): def test_init(self): """Test the __init__ function for default parameter values.""" name = "test_name" - instance = ipfsapi.multipart.BufferedGenerator(name) + instance = ipfshttpclient.multipart.BufferedGenerator(name) assert instance.name == name def test_file_chunks(self): @@ -236,7 +236,7 @@ def test_file_chunks(self): 
chunk_size = 2 path = os.path.join(os.path.dirname(os.path.dirname(__file__)), "functional", "fake_dir", "fsdfgh") - instance = ipfsapi.multipart.BufferedGenerator(name, chunk_size) + instance = ipfshttpclient.multipart.BufferedGenerator(name, chunk_size) expected = 'dsadsad\n' output = "" open_file = open(path) @@ -252,13 +252,13 @@ def test_gen_chunks(self): """Test the gen_chunks function against example output.""" name = "fsdfgh" chunk_size = 2 - instance = ipfsapi.multipart.BufferedGenerator(name, chunk_size) + instance = ipfshttpclient.multipart.BufferedGenerator(name, chunk_size) for i in instance.gen_chunks(_generate_test_chunks(5, 5)): assert len(i) <= chunk_size def test_body(self): """Ensure that body throws a NotImplemented exception.""" - instance = ipfsapi.multipart.BufferedGenerator("name") + instance = ipfshttpclient.multipart.BufferedGenerator("name") with pytest.raises(NotImplementedError): instance.body() @@ -266,7 +266,7 @@ def test_close(self): """Test the close function against example output.""" name = "fsdfgh" chunk_size = 2 - instance = ipfsapi.multipart.BufferedGenerator(name, chunk_size) + instance = ipfshttpclient.multipart.BufferedGenerator(name, chunk_size) expected = '--\S+--\r\n' actual = '' for i in instance.close(): @@ -306,7 +306,7 @@ def test_body(self): relative_paths_list = [os.path.relpath(cur_path, os.getcwd()) for cur_path in filenames_list] - instance = ipfsapi.multipart.FileStream(relative_paths_list) + instance = ipfshttpclient.multipart.FileStream(relative_paths_list) expected = "(--\S+\r\nContent-Disposition: file; filename=\"\S+\""\ + "\r\nContent-Type: application/\S+\r\n"\ @@ -335,7 +335,7 @@ def test_body(self): # Get OS-agnostic path to test files path = os.path.join(os.path.dirname(os.path.dirname(__file__)), "functional", "fake_dir") - instance = ipfsapi.multipart.DirectoryStream(path) + instance = ipfshttpclient.multipart.DirectoryStream(path) expected = b"^(--\S+\r\nContent-Disposition: form-data; name=\"\S+\"; filename=\"\S+\""\ + b"\r\nContent-Type: application/\S+\r\n\r\n(.|\n)*"\ + b"\r\n)+--\S+--\r\n$" @@ -362,7 +362,7 @@ def test_body(self): """Check the multipart HTTP body for the streamed directory.""" # Get OS-agnostic path to test files text = "Here is some text for this test." - instance = ipfsapi.multipart.BytesStream(text) + instance = ipfshttpclient.multipart.BytesStream(text) expected = "(--\S+\r\nContent-Disposition: file; filename=\"\S+\""\ + "\r\nContent-Type: application/\S+\r\n"\ + "\r\n(.|\n)*\r\n)+--\S+--\r\n" diff --git a/test/unit/test_utils.py b/test/unit/test_utils.py index bfbc5e80..44ae6591 100644 --- a/test/unit/test_utils.py +++ b/test/unit/test_utils.py @@ -10,7 +10,7 @@ import pickle import unittest -import ipfsapi.utils as utils +import ipfshttpclient.utils as utils class TestUtils(unittest.TestCase): """Contains unit tests for utils.py. diff --git a/tox.ini b/tox.ini index 46219f41..12ddc692 100644 --- a/tox.ini +++ b/tox.ini @@ -15,7 +15,7 @@ deps = -r{toxinidir}/requirements.txt -r{toxinidir}/requirements-testing.txt whitelist_externals = ipfs -passenv = IPFS_* PY_IPFSAPI_* CI +passenv = IPFS_* PY_IPFS_HTTP_CLIENT_* CI commands = python "{toxinidir}/test/run-tests.py" {posargs} @@ -37,7 +37,7 @@ python_files = tests.py addopts = --doctest-modules - --ignore ipfsapi/client.py - ipfsapi + --ignore ipfshttpclient/client.py + ipfshttpclient test/unit test/functional
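
For contributors running the functional tests by hand rather than through `test/run-tests.py`, the snippet below is a minimal sketch of the same wait-for-daemon pattern the runner uses after spawning `ipfs daemon`: keep retrying `ipfshttpclient.connect()` until the API port answers, catching `ipfshttpclient.exceptions.ConnectionError` in between. The host/port fallbacks, the timeout value, and the `wait_for_daemon` helper name are illustrative assumptions and are not part of this patch.

```py
# Sketch only (not part of this patch): wait for a freshly spawned IPFS daemon
# to accept API connections, mirroring the retry loop in test/run-tests.py.
# Host, port and timeout defaults here are assumed values.
import os
import time

import ipfshttpclient

HOST = os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_HOST", "127.0.0.1")
PORT = int(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_PORT", 5001))


def wait_for_daemon(timeout=30.0):
    """Return a connected client, retrying until the daemon is reachable."""
    deadline = time.time() + timeout
    while True:
        try:
            return ipfshttpclient.connect(HOST, PORT)
        except ipfshttpclient.exceptions.ConnectionError:
            # Daemon not up yet; give up once the deadline passes.
            if time.time() > deadline:
                raise
            time.sleep(0.05)
```

The environment variable names match those exported by `test/run-tests.py` and whitelisted via `passenv` in `tox.ini` above, so the same configuration applies whether the daemon is started by the runner or manually.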