From 973e75ef3c47b739c882c05b63d2eb85c8022b2c Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Thu, 5 Oct 2023 23:07:06 +0200 Subject: [PATCH 01/14] Using new FileVar class in transports --- gql/transport/aiohttp.py | 19 +++++++------ gql/transport/file_upload.py | 55 ++++++++++++++++++++++++++++++++++++ gql/transport/httpx.py | 17 ++++------- gql/transport/requests.py | 18 ++++++------ gql/utils.py | 39 +------------------------ 5 files changed, 82 insertions(+), 66 deletions(-) create mode 100644 gql/transport/file_upload.py diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index 60f42c94..72cd2391 100644 --- a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -14,7 +14,6 @@ from graphql import DocumentNode, ExecutionResult, print_ast from multidict import CIMultiDictProxy -from ..utils import extract_files from .appsync_auth import AppSyncAuthentication from .async_transport import AsyncTransport from .exceptions import ( @@ -23,6 +22,7 @@ TransportProtocolError, TransportServerError, ) +from .file_upload import FileVar, extract_files log = logging.getLogger(__name__) @@ -258,8 +258,8 @@ async def execute( file_map = {str(i): [path] for i, path in enumerate(files)} # Enumerate the file streams - # Will generate something like {'0': <_io.BufferedReader ...>} - file_streams = {str(i): files[path] for i, path in enumerate(files)} + # Will generate something like {'0': FileVar object} + file_vars = {str(i): files[path] for i, path in enumerate(files)} # Add the payload to the operations field operations_str = self.json_serialize(payload) @@ -273,12 +273,15 @@ async def execute( log.debug("file_map %s", file_map_str) data.add_field("map", file_map_str, content_type="application/json") - # Add the extracted files as remaining fields - for k, f in file_streams.items(): - name = getattr(f, "name", k) - content_type = getattr(f, "content_type", None) + for k, file_var in file_vars.items(): + assert isinstance(file_var, FileVar) - data.add_field(k, f, filename=name, content_type=content_type) + data.add_field( + k, + file_var.f, + filename=file_var.filename, + content_type=file_var.content_type, + ) post_args: Dict[str, Any] = {"data": data} diff --git a/gql/transport/file_upload.py b/gql/transport/file_upload.py new file mode 100644 index 00000000..483d76ee --- /dev/null +++ b/gql/transport/file_upload.py @@ -0,0 +1,55 @@ +from dataclasses import dataclass +from typing import Any, Dict, Optional, Tuple, Type + + +@dataclass +class FileVar: + f: Any # str | io.IOBase | aiohttp.StreamReader | AsyncGenerator + # Add KW_ONLY here once Python 3.9 is deprecated + filename: Optional[str] = None + content_type: Optional[str] = None + streaming: bool = False + streaming_block_size: int = 64 * 1024 + + +def extract_files( + variables: Dict, file_classes: Tuple[Type[Any], ...] +) -> Tuple[Dict, Dict]: + files = {} + + def recurse_extract(path, obj): + """ + recursively traverse obj, doing a deepcopy, but + replacing any file-like objects with nulls and + shunting the originals off to the side. + """ + nonlocal files + if isinstance(obj, list): + nulled_obj = [] + for key, value in enumerate(obj): + value = recurse_extract(f"{path}.{key}", value) + nulled_obj.append(value) + return nulled_obj + elif isinstance(obj, dict): + nulled_obj = {} + for key, value in obj.items(): + value = recurse_extract(f"{path}.{key}", value) + nulled_obj[key] = value + return nulled_obj + elif isinstance(obj, file_classes): + # extract obj from its parent and put it into files instead. 
+ name = getattr(obj, "name", None) + content_type = getattr(obj, "content_type", None) + files[path] = FileVar(obj, filename=name, content_type=content_type) + return None + elif isinstance(obj, FileVar): + # extract obj from its parent and put it into files instead. + files[path] = obj + return None + else: + # base case: pass through unchanged + return obj + + nulled_variables = recurse_extract("variables", variables) + + return nulled_variables, files diff --git a/gql/transport/httpx.py b/gql/transport/httpx.py index cfc25dc9..3fa57182 100644 --- a/gql/transport/httpx.py +++ b/gql/transport/httpx.py @@ -11,13 +11,11 @@ Tuple, Type, Union, - cast, ) import httpx from graphql import DocumentNode, ExecutionResult, print_ast -from ..utils import extract_files from . import AsyncTransport, Transport from .exceptions import ( TransportAlreadyConnected, @@ -25,6 +23,7 @@ TransportProtocolError, TransportServerError, ) +from .file_upload import extract_files log = logging.getLogger(__name__) @@ -105,7 +104,7 @@ def _prepare_file_uploads(self, variable_values, payload) -> Dict[str, Any]: file_map: Dict[str, List[str]] = {} file_streams: Dict[str, Tuple[str, ...]] = {} - for i, (path, f) in enumerate(files.items()): + for i, (path, file_var) in enumerate(files.items()): key = str(i) # Generate the file map @@ -114,16 +113,12 @@ def _prepare_file_uploads(self, variable_values, payload) -> Dict[str, Any]: # Will generate something like {"0": ["variables.file"]} file_map[key] = [path] - # Generate the file streams - # Will generate something like - # {"0": ("variables.file", <_io.BufferedReader ...>)} - name = cast(str, getattr(f, "name", key)) - content_type = getattr(f, "content_type", None) + name = key if file_var.filename is None else file_var.filename - if content_type is None: - file_streams[key] = (name, f) + if file_var.content_type is None: + file_streams[key] = (name, file_var.f) else: - file_streams[key] = (name, f, content_type) + file_streams[key] = (name, file_var.f, file_var.content_type) # Add the payload to the operations field operations_str = self.json_serialize(payload) diff --git a/gql/transport/requests.py b/gql/transport/requests.py index 1e464104..d04f67dc 100644 --- a/gql/transport/requests.py +++ b/gql/transport/requests.py @@ -13,13 +13,13 @@ from gql.transport import Transport from ..graphql_request import GraphQLRequest -from ..utils import extract_files from .exceptions import ( TransportAlreadyConnected, TransportClosed, TransportProtocolError, TransportServerError, ) +from .file_upload import FileVar, extract_files log = logging.getLogger(__name__) @@ -184,8 +184,8 @@ def execute( # type: ignore file_map = {str(i): [path] for i, path in enumerate(files)} # Enumerate the file streams - # Will generate something like {'0': <_io.BufferedReader ...>} - file_streams = {str(i): files[path] for i, path in enumerate(files)} + # Will generate something like {'0': FileVar object} + file_vars = {str(i): files[path] for i, path in enumerate(files)} # Add the file map field file_map_str = json.dumps(file_map) @@ -194,14 +194,14 @@ def execute( # type: ignore fields = {"operations": operations_str, "map": file_map_str} # Add the extracted files as remaining fields - for k, f in file_streams.items(): - name = getattr(f, "name", k) - content_type = getattr(f, "content_type", None) + for k, file_var in file_vars.items(): + assert isinstance(file_var, FileVar) + name = k if file_var.filename is None else file_var.filename - if content_type is None: - fields[k] = (name, f) + if 
file_var.content_type is None: + fields[k] = (name, file_var.f) else: - fields[k] = (name, f, content_type) + fields[k] = (name, file_var.f, file_var.content_type) # Prepare requests http to send multipart-encoded data data = MultipartEncoder(fields=fields) diff --git a/gql/utils.py b/gql/utils.py index b4265ce1..f7f0f5a7 100644 --- a/gql/utils.py +++ b/gql/utils.py @@ -1,6 +1,6 @@ """Utilities to manipulate several python objects.""" -from typing import Any, Dict, List, Tuple, Type +from typing import List # From this response in Stackoverflow @@ -12,43 +12,6 @@ def to_camel_case(snake_str): return components[0] + "".join(x.title() if x else "_" for x in components[1:]) -def extract_files( - variables: Dict, file_classes: Tuple[Type[Any], ...] -) -> Tuple[Dict, Dict]: - files = {} - - def recurse_extract(path, obj): - """ - recursively traverse obj, doing a deepcopy, but - replacing any file-like objects with nulls and - shunting the originals off to the side. - """ - nonlocal files - if isinstance(obj, list): - nulled_obj = [] - for key, value in enumerate(obj): - value = recurse_extract(f"{path}.{key}", value) - nulled_obj.append(value) - return nulled_obj - elif isinstance(obj, dict): - nulled_obj = {} - for key, value in obj.items(): - value = recurse_extract(f"{path}.{key}", value) - nulled_obj[key] = value - return nulled_obj - elif isinstance(obj, file_classes): - # extract obj from its parent and put it into files instead. - files[path] = obj - return None - else: - # base case: pass through unchanged - return obj - - nulled_variables = recurse_extract("variables", variables) - - return nulled_variables, files - - def str_first_element(errors: List) -> str: try: first_error = errors[0] From 090052d1097e735172277d063483afdc8979b6b4 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Fri, 6 Oct 2023 00:41:27 +0200 Subject: [PATCH 02/14] Allow to import FileVar from gql --- gql/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gql/__init__.py b/gql/__init__.py index 8eaa0b7c..4c9a6aa0 100644 --- a/gql/__init__.py +++ b/gql/__init__.py @@ -11,10 +11,12 @@ from .client import Client from .gql import gql from .graphql_request import GraphQLRequest +from .transport.file_upload import FileVar __all__ = [ "__version__", "gql", "Client", "GraphQLRequest", + "FileVar", ] From e2249b06da42e41f1275368cb17b57499c39f431 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Fri, 6 Oct 2023 00:41:46 +0200 Subject: [PATCH 03/14] Add tests using FileVar --- tests/test_requests.py | 379 ++++++++++++++++++++++------------------- 1 file changed, 204 insertions(+), 175 deletions(-) diff --git a/tests/test_requests.py b/tests/test_requests.py index a5ff0d8b..397ff89b 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -419,41 +419,70 @@ def test_code(): """ -@pytest.mark.aiohttp -@pytest.mark.asyncio -async def test_requests_file_upload(event_loop, aiohttp_server, run_sync_test): - from aiohttp import web - from gql.transport.requests import RequestsHTTPTransport - +def make_upload_handler( + nb_files=1, + filenames=None, + request_headers=None, + file_headers=None, + binary=False, + expected_contents=[file_1_content], + expected_operations=file_upload_mutation_1_operations, + expected_map=file_upload_mutation_1_map, +): async def single_upload_handler(request): from aiohttp import web reader = await request.multipart() + if request_headers is not None: + for k, v in request_headers.items(): + assert request.headers[k] == v + field_0 = await reader.next() assert field_0.name == 
"operations" field_0_text = await field_0.text() - assert field_0_text == file_upload_mutation_1_operations + assert field_0_text == expected_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map + assert field_1_text == expected_map + + for i in range(nb_files): + field = await reader.next() + assert field.name == str(i) + if filenames is not None: + assert field.filename == filenames[i] - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content + if binary: + field_content = await field.read() + assert field_content == expected_contents[i] + else: + field_text = await field.text() + assert field_text == expected_contents[i] - field_3 = await reader.next() - assert field_3 is None + if file_headers is not None: + for k, v in file_headers[i].items(): + assert field.headers[k] == v + + final_field = await reader.next() + assert final_field is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) + return single_upload_handler + + +@pytest.mark.aiohttp +@pytest.mark.asyncio +async def test_requests_file_upload(event_loop, aiohttp_server, run_sync_test): + from aiohttp import web + from gql.transport.requests import RequestsHTTPTransport + app = web.Application() - app.router.add_route("POST", "/", single_upload_handler) + app.router.add_route("POST", "/", make_upload_handler()) server = await aiohttp_server(app) url = server.make_url("/") @@ -467,6 +496,7 @@ def test_code(): file_path = test_file.filename + # Using an opened file with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} @@ -476,6 +506,18 @@ def test_code(): assert execution_result.data["success"] + # Using an opened file inside a FileVar object + from gql import FileVar + + with open(file_path, "rb") as f: + + params = {"file": FileVar(f), "other_var": 42} + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + await run_sync_test(event_loop, server, test_code) @@ -487,38 +529,12 @@ async def test_requests_file_upload_with_content_type( from aiohttp import web from gql.transport.requests import RequestsHTTPTransport - async def single_upload_handler(request): - from aiohttp import web - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert field_0_text == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content - - # Verifying the content_type - assert field_2.headers["Content-Type"] == "application/pdf" - - field_3 = await reader.next() - assert field_3 is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - app = web.Application() - app.router.add_route("POST", "/", single_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler(file_headers=[{"Content-Type": "application/pdf"}]), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -532,6 +548,7 @@ def test_code(): file_path = test_file.filename + # Using an opened file with open(file_path, "rb") as f: # 
Setting the content_type @@ -544,48 +561,85 @@ def test_code(): assert execution_result.data["success"] + # Using an opened file inside a FileVar object + from gql import FileVar + + with open(file_path, "rb") as f: + + params = { + "file": FileVar(f, content_type="application/pdf"), + "other_var": 42, + } + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio -async def test_requests_file_upload_additional_headers( +async def test_requests_file_upload_with_filename( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport - async def single_upload_handler(request): - from aiohttp import web + app = web.Application() + app.router.add_route( + "POST", + "/", + make_upload_handler(filenames=["filename1.txt"]), + ) + server = await aiohttp_server(app) - assert request.headers["X-Auth"] == "foobar" + url = server.make_url("/") - reader = await request.multipart() + def test_code(): + from gql import FileVar - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert field_0_text == file_upload_mutation_1_operations + transport = RequestsHTTPTransport(url=url) - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map + with TemporaryFile(file_1_content) as test_file: + with Client(transport=transport) as session: + query = gql(file_upload_mutation_1) - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content + file_path = test_file.filename - field_3 = await reader.next() - assert field_3 is None + with open(file_path, "rb") as f: - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) + # Setting the content_type + f.content_type = "application/pdf" + + params = { + "file": FileVar(f, filename="filename1.txt"), + "other_var": 42, + } + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + + await run_sync_test(event_loop, server, test_code) + + +@pytest.mark.aiohttp +@pytest.mark.asyncio +async def test_requests_file_upload_additional_headers( + event_loop, aiohttp_server, run_sync_test +): + from aiohttp import web + from gql.transport.requests import RequestsHTTPTransport app = web.Application() - app.router.add_route("POST", "/", single_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler(request_headers={"X-Auth": "foobar"}), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -620,36 +674,12 @@ async def test_requests_binary_file_upload(event_loop, aiohttp_server, run_sync_ # This is a sample binary file content containing all possible byte values binary_file_content = bytes(range(0, 256)) - async def binary_upload_handler(request): - - from aiohttp import web - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert field_0_text == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map - - field_2 = await reader.next() - assert 
field_2.name == "0" - field_2_binary = await field_2.read() - assert field_2_binary == binary_file_content - - field_3 = await reader.next() - assert field_3 is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - app = web.Application() - app.router.add_route("POST", "/", binary_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler(binary=True, expected_contents=[binary_file_content]), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -677,13 +707,6 @@ def test_code(): await run_sync_test(event_loop, server, test_code) -file_upload_mutation_2_operations = ( - '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n ' - 'uploadFile(input: { file1: $file, file2: $file }) {\\n success\\n }\\n}", ' - '"variables": {"file1": null, "file2": null}}' -) - - @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_file_upload_two_files( @@ -700,6 +723,12 @@ async def test_requests_file_upload_two_files( } """ + file_upload_mutation_2_operations = ( + '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n ' + 'uploadFile(input: { file1: $file, file2: $file }) {\\n success\\n }\\n}", ' + '"variables": {"file1": null, "file2": null}}' + ) + file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' file_2_content = """ @@ -707,39 +736,17 @@ async def test_requests_file_upload_two_files( This file will also be sent in the GraphQL mutation """ - async def handler(request): - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert field_0_text == file_upload_mutation_2_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_2_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content - - field_3 = await reader.next() - assert field_3.name == "1" - field_3_text = await field_3.text() - assert field_3_text == file_2_content - - field_4 = await reader.next() - assert field_4 is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - app = web.Application() - app.router.add_route("POST", "/", handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + nb_files=2, + expected_map=file_upload_mutation_2_map, + expected_operations=file_upload_mutation_2_operations, + expected_contents=[file_1_content, file_2_content], + ), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -754,6 +761,7 @@ def test_code(): query = gql(file_upload_mutation_2) + # Old method file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename @@ -774,14 +782,30 @@ def test_code(): f1.close() f2.close() - await run_sync_test(event_loop, server, test_code) + # Using FileVar + from gql import FileVar + file_path_1 = test_file_1.filename + file_path_2 = test_file_2.filename -file_upload_mutation_3_operations = ( - '{"query": "mutation ($files: [Upload!]!) 
{\\n uploadFiles' - "(input: { files: $files })" - ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' -) + f1 = open(file_path_1, "rb") + f2 = open(file_path_2, "rb") + + params = { + "file1": FileVar(f1), + "file2": FileVar(f2), + } + + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + + f1.close() + f2.close() + + await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @@ -800,6 +824,12 @@ async def test_requests_file_upload_list_of_two_files( } """ + file_upload_mutation_3_operations = ( + '{"query": "mutation ($files: [Upload!]!) {\\n uploadFiles' + "(input: { files: $files })" + ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' + ) + file_upload_mutation_3_map = ( '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' ) @@ -809,39 +839,17 @@ async def test_requests_file_upload_list_of_two_files( This file will also be sent in the GraphQL mutation """ - async def handler(request): - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert field_0_text == file_upload_mutation_3_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_3_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content - - field_3 = await reader.next() - assert field_3.name == "1" - field_3_text = await field_3.text() - assert field_3_text == file_2_content - - field_4 = await reader.next() - assert field_4 is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - app = web.Application() - app.router.add_route("POST", "/", handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + nb_files=2, + expected_map=file_upload_mutation_3_map, + expected_operations=file_upload_mutation_3_operations, + expected_contents=[file_1_content, file_2_content], + ), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -855,6 +863,7 @@ def test_code(): query = gql(file_upload_mutation_3) + # Old method file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename @@ -872,6 +881,26 @@ def test_code(): f1.close() f2.close() + # Using FileVar + from gql import FileVar + + file_path_1 = test_file_1.filename + file_path_2 = test_file_2.filename + + f1 = open(file_path_1, "rb") + f2 = open(file_path_2, "rb") + + params = {"files": [FileVar(f1), FileVar(f2)]} + + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + + f1.close() + f2.close() + await run_sync_test(event_loop, server, test_code) From 2175da605777b300ef32d7c597d304b318f93a29 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Tue, 14 Nov 2023 16:34:12 +0100 Subject: [PATCH 04/14] wip --- gql/transport/file_upload.py | 23 ++++++++++++++++++++++- gql/transport/requests.py | 17 +++++++++++++---- tests/test_requests.py | 10 ++++++++++ 3 files changed, 45 insertions(+), 5 deletions(-) diff --git a/gql/transport/file_upload.py b/gql/transport/file_upload.py index 483d76ee..fc1792a6 100644 --- a/gql/transport/file_upload.py +++ b/gql/transport/file_upload.py @@ -1,5 +1,7 @@ +import io + from dataclasses import dataclass -from typing import Any, Dict, Optional, Tuple, Type +from typing import Any, 
Dict, Optional, Tuple, Type, List @dataclass @@ -53,3 +55,22 @@ def recurse_extract(path, obj): nulled_variables = recurse_extract("variables", variables) return nulled_variables, files + +def open_files( + filevars: List[FileVar] +): + + for filevar in filevars: + assert isinstance(filevar, FileVar) + + if isinstance(filevar.f, str): + filevar.f = open(filevar.f, "rb") + +def close_files( + filevars: List[FileVar] +): + for filevar in filevars: + assert isinstance(filevar, FileVar) + + if isinstance(filevar.f, io.IOBase): + filevar.f.close() diff --git a/gql/transport/requests.py b/gql/transport/requests.py index d04f67dc..70fc87a4 100644 --- a/gql/transport/requests.py +++ b/gql/transport/requests.py @@ -19,7 +19,7 @@ TransportProtocolError, TransportServerError, ) -from .file_upload import FileVar, extract_files +from .file_upload import FileVar, extract_files, open_files, close_files log = logging.getLogger(__name__) @@ -170,6 +170,10 @@ def execute( # type: ignore file_classes=self.file_classes, ) + # Opening the files using the FileVar parameters + open_files(files.values()) + self.files = files + # Save the nulled variable values in the payload payload["variables"] = nulled_variable_values @@ -234,9 +238,14 @@ def execute( # type: ignore post_args.update(extra_args) # Using the created session to perform requests - response = self.session.request( - self.method, self.url, **post_args # type: ignore - ) + try: + response = self.session.request( + self.method, self.url, **post_args # type: ignore + ) + finally: + if upload_files: + close_files(self.files.values()) + self.response_headers = response.headers def raise_response_error(resp: requests.Response, reason: str): diff --git a/tests/test_requests.py b/tests/test_requests.py index 397ff89b..65b000de 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -518,6 +518,16 @@ def test_code(): assert execution_result.data["success"] + # Using an filename string inside a FileVar object + from gql import FileVar + + params = {"file": FileVar(file_path), "other_var": 42} + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + await run_sync_test(event_loop, server, test_code) From a68b974d8de993836cc9292254fbdc1017ba6ff3 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Thu, 22 May 2025 15:39:38 +0200 Subject: [PATCH 05/14] Fix linting and mypy --- gql/transport/file_upload.py | 29 +++++++++++++---------------- gql/transport/requests.py | 6 +++--- tests/test_requests.py | 19 +++++++++---------- 3 files changed, 25 insertions(+), 29 deletions(-) diff --git a/gql/transport/file_upload.py b/gql/transport/file_upload.py index fc1792a6..e529d246 100644 --- a/gql/transport/file_upload.py +++ b/gql/transport/file_upload.py @@ -1,7 +1,6 @@ import io - from dataclasses import dataclass -from typing import Any, Dict, Optional, Tuple, Type, List +from typing import Any, Dict, List, Optional, Tuple, Type @dataclass @@ -16,8 +15,8 @@ class FileVar: def extract_files( variables: Dict, file_classes: Tuple[Type[Any], ...] 
-) -> Tuple[Dict, Dict]: - files = {} +) -> Tuple[Dict, Dict[str, FileVar]]: + files: Dict[str, FileVar] = {} def recurse_extract(path, obj): """ @@ -27,17 +26,17 @@ def recurse_extract(path, obj): """ nonlocal files if isinstance(obj, list): - nulled_obj = [] + nulled_list = [] for key, value in enumerate(obj): value = recurse_extract(f"{path}.{key}", value) - nulled_obj.append(value) - return nulled_obj + nulled_list.append(value) + return nulled_list elif isinstance(obj, dict): - nulled_obj = {} + nulled_dict = {} for key, value in obj.items(): value = recurse_extract(f"{path}.{key}", value) - nulled_obj[key] = value - return nulled_obj + nulled_dict[key] = value + return nulled_dict elif isinstance(obj, file_classes): # extract obj from its parent and put it into files instead. name = getattr(obj, "name", None) @@ -56,9 +55,8 @@ def recurse_extract(path, obj): return nulled_variables, files -def open_files( - filevars: List[FileVar] -): + +def open_files(filevars: List[FileVar]) -> None: for filevar in filevars: assert isinstance(filevar, FileVar) @@ -66,9 +64,8 @@ def open_files( if isinstance(filevar.f, str): filevar.f = open(filevar.f, "rb") -def close_files( - filevars: List[FileVar] -): + +def close_files(filevars: List[FileVar]) -> None: for filevar in filevars: assert isinstance(filevar, FileVar) diff --git a/gql/transport/requests.py b/gql/transport/requests.py index 08664845..5fb7e827 100644 --- a/gql/transport/requests.py +++ b/gql/transport/requests.py @@ -31,7 +31,7 @@ TransportProtocolError, TransportServerError, ) -from .file_upload import FileVar, extract_files, open_files, close_files +from .file_upload import FileVar, close_files, extract_files, open_files log = logging.getLogger(__name__) @@ -191,7 +191,7 @@ def execute( # type: ignore ) # Opening the files using the FileVar parameters - open_files(files.values()) + open_files(list(files.values())) self.files = files # Save the nulled variable values in the payload @@ -264,7 +264,7 @@ def execute( # type: ignore ) finally: if upload_files: - close_files(self.files.values()) + close_files(list(self.files.values())) self.response_headers = response.headers diff --git a/tests/test_requests.py b/tests/test_requests.py index 052fb49d..a4caf10b 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -686,7 +686,7 @@ def test_code(): with open(file_path, "rb") as f: # Setting the content_type - f.content_type = "application/pdf" + f.content_type = "application/pdf" # type: ignore params = {"file": f, "other_var": 42} execution_result = session._execute( @@ -715,10 +715,9 @@ def test_code(): @pytest.mark.aiohttp @pytest.mark.asyncio -async def test_requests_file_upload_with_filename( - aiohttp_server, run_sync_test -): +async def test_requests_file_upload_with_filename(aiohttp_server, run_sync_test): from aiohttp import web + from gql.transport.requests import RequestsHTTPTransport app = web.Application() @@ -901,13 +900,13 @@ def test_code(): f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") - params = { + params_1 = { "file1": f1, "file2": f2, } execution_result = session._execute( - query, variable_values=params, upload_files=True + query, variable_values=params_1, upload_files=True ) assert execution_result.data["success"] @@ -924,13 +923,13 @@ def test_code(): f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") - params = { + params_2 = { "file1": FileVar(f1), "file2": FileVar(f2), } execution_result = session._execute( - query, variable_values=params, upload_files=True + query, 
variable_values=params_2, upload_files=True ) assert execution_result.data["success"] @@ -1022,10 +1021,10 @@ def test_code(): f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") - params = {"files": [FileVar(f1), FileVar(f2)]} + params_2 = {"files": [FileVar(f1), FileVar(f2)]} execution_result = session._execute( - query, variable_values=params, upload_files=True + query, variable_values=params_2, upload_files=True ) assert execution_result.data["success"] From 03c4601eefdc945027458164d1c67979bae78c68 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Thu, 22 May 2025 16:16:44 +0200 Subject: [PATCH 06/14] Add deprecation warning --- gql/transport/file_upload.py | 9 +++++ tests/test_requests.py | 72 ++++++++++++++++++++++++------------ 2 files changed, 58 insertions(+), 23 deletions(-) diff --git a/gql/transport/file_upload.py b/gql/transport/file_upload.py index e529d246..bbc46240 100644 --- a/gql/transport/file_upload.py +++ b/gql/transport/file_upload.py @@ -1,4 +1,5 @@ import io +import warnings from dataclasses import dataclass from typing import Any, Dict, List, Optional, Tuple, Type @@ -13,6 +14,9 @@ class FileVar: streaming_block_size: int = 64 * 1024 +FILE_UPLOAD_DOCS = "https://gql.readthedocs.io/en/latest/usage/file_upload.html" + + def extract_files( variables: Dict, file_classes: Tuple[Type[Any], ...] ) -> Tuple[Dict, Dict[str, FileVar]]: @@ -39,6 +43,11 @@ def recurse_extract(path, obj): return nulled_dict elif isinstance(obj, file_classes): # extract obj from its parent and put it into files instead. + warnings.warn( + "Not using FileVar for file upload is deprecated. " + f"See {FILE_UPLOAD_DOCS} for details.", + DeprecationWarning, + ) name = getattr(obj, "name", None) content_type = getattr(obj, "content_type", None) files[path] = FileVar(obj, filename=name, content_type=content_type) diff --git a/tests/test_requests.py b/tests/test_requests.py index a4caf10b..83f6d5c0 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -1,3 +1,4 @@ +import warnings from typing import Any, Dict, Mapping import pytest @@ -86,8 +87,6 @@ def test_code(): @pytest.mark.asyncio @pytest.mark.parametrize("verify_https", ["disabled", "cert_provided"]) async def test_requests_query_https(ssl_aiohttp_server, run_sync_test, verify_https): - import warnings - from aiohttp import web from gql.transport.requests import RequestsHTTPTransport @@ -625,9 +624,14 @@ def test_code(): with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} - execution_result = session._execute( - query, variable_values=params, upload_files=True - ) + + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) assert execution_result.data["success"] @@ -637,9 +641,11 @@ def test_code(): with open(file_path, "rb") as f: params = {"file": FileVar(f), "other_var": 42} - execution_result = session._execute( - query, variable_values=params, upload_files=True - ) + with warnings.catch_warnings(): + warnings.simplefilter("error") # Turn warnings into errors + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) assert execution_result.data["success"] @@ -689,9 +695,13 @@ def test_code(): f.content_type = "application/pdf" # type: ignore params = {"file": f, "other_var": 42} - execution_result = session._execute( - query, variable_values=params, upload_files=True - ) + with pytest.warns( + DeprecationWarning, + match="Not 
using FileVar for file upload is deprecated", + ): + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) assert execution_result.data["success"] @@ -788,9 +798,13 @@ def test_code(): with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} - execution_result = session._execute( - query, variable_values=params, upload_files=True - ) + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) assert execution_result.data["success"] @@ -831,9 +845,13 @@ def test_code(): params = {"file": f, "other_var": 42} - execution_result = session._execute( - query, variable_values=params, upload_files=True - ) + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) assert execution_result.data["success"] @@ -905,9 +923,13 @@ def test_code(): "file2": f2, } - execution_result = session._execute( - query, variable_values=params_1, upload_files=True - ) + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + execution_result = session._execute( + query, variable_values=params_1, upload_files=True + ) assert execution_result.data["success"] @@ -1003,9 +1025,13 @@ def test_code(): params = {"files": [f1, f2]} - execution_result = session._execute( - query, variable_values=params, upload_files=True - ) + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) assert execution_result.data["success"] From 5e6faa53d8fa81254650cf23061642bdb8f3ba86 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Thu, 22 May 2025 16:45:02 +0200 Subject: [PATCH 07/14] Modify docs --- docs/usage/file_upload.rst | 53 ++++++++++++++++++++++---------------- 1 file changed, 31 insertions(+), 22 deletions(-) diff --git a/docs/usage/file_upload.rst b/docs/usage/file_upload.rst index 10903585..07d04bd5 100644 --- a/docs/usage/file_upload.rst +++ b/docs/usage/file_upload.rst @@ -14,11 +14,14 @@ Single File In order to upload a single file, you need to: * set the file as a variable value in the mutation -* provide the opened file to the `variable_values` argument of `execute` +* create a :class:`FileVar ` object with your file path +* provide the `FileVar` instance to the `variable_values` argument of `execute` * set the `upload_files` argument to True .. code-block:: python + from gql import client, gql, FileVar + transport = AIOHTTPTransport(url='YOUR_URL') # Or transport = RequestsHTTPTransport(url='YOUR_URL') # Or transport = HTTPXTransport(url='YOUR_URL') @@ -34,32 +37,38 @@ In order to upload a single file, you need to: } ''') - with open("YOUR_FILE_PATH", "rb") as f: - - params = {"file": f} + params = {"file": FileVar("YOUR_FILE_PATH")} - result = client.execute( - query, variable_values=params, upload_files=True - ) + result = client.execute( + query, variable_values=params, upload_files=True + ) Setting the content-type ^^^^^^^^^^^^^^^^^^^^^^^^ If you need to set a specific Content-Type attribute to a file, -you can set the :code:`content_type` attribute of the file like this: +you can set the :code:`content_type` attribute of :class:`FileVar `: .. 
code-block:: python - with open("YOUR_FILE_PATH", "rb") as f: + # Setting the content-type to a pdf file for example + filevar = FileVar( + "YOUR_FILE_PATH", + content_type="application/pdf", + ) - # Setting the content-type to a pdf file for example - f.content_type = "application/pdf" +Setting the uploaded file name +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - params = {"file": f} +To modify the uploaded filename, use the :code:`filename` attribute of :class:`FileVar `: - result = client.execute( - query, variable_values=params, upload_files=True - ) +.. code-block:: python + + # Setting the content-type to a pdf file for example + filevar = FileVar( + "YOUR_FILE_PATH", + filename="filename1.txt", + ) File list --------- @@ -68,6 +77,8 @@ It is also possible to upload multiple files using a list. .. code-block:: python + from gql import client, gql, FileVar + transport = AIOHTTPTransport(url='YOUR_URL') # Or transport = RequestsHTTPTransport(url='YOUR_URL') # Or transport = HTTPXTransport(url='YOUR_URL') @@ -83,8 +94,8 @@ It is also possible to upload multiple files using a list. } ''') - f1 = open("YOUR_FILE_PATH_1", "rb") - f2 = open("YOUR_FILE_PATH_2", "rb") + f1 = FileVar("YOUR_FILE_PATH_1") + f2 = FileVar("YOUR_FILE_PATH_2") params = {"files": [f1, f2]} @@ -92,9 +103,6 @@ It is also possible to upload multiple files using a list. query, variable_values=params, upload_files=True ) - f1.close() - f2.close() - Streaming --------- @@ -154,7 +162,8 @@ Example: yield chunk chunk = await f.read(64*1024) - params = {"file": file_sender(file_name='YOUR_FILE_PATH')} + f1 = FileVar(file_sender(file_name='YOUR_FILE_PATH')) + params = {"file": f1} result = client.execute( query, variable_values=params, upload_files=True @@ -200,7 +209,7 @@ Example: } ''') - params = {"file": resp.content} + params = {"file": FileVar(resp.content)} result = client.execute( query, variable_values=params, upload_files=True From 97c860c970d898242e3f41068333d0a4a79c2f49 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Thu, 22 May 2025 18:19:17 +0200 Subject: [PATCH 08/14] Only close files opened by us --- gql/transport/file_upload.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/gql/transport/file_upload.py b/gql/transport/file_upload.py index bbc46240..be8b150c 100644 --- a/gql/transport/file_upload.py +++ b/gql/transport/file_upload.py @@ -12,6 +12,7 @@ class FileVar: content_type: Optional[str] = None streaming: bool = False streaming_block_size: int = 64 * 1024 + _file_opened: bool = False FILE_UPLOAD_DOCS = "https://gql.readthedocs.io/en/latest/usage/file_upload.html" @@ -72,6 +73,7 @@ def open_files(filevars: List[FileVar]) -> None: if isinstance(filevar.f, str): filevar.f = open(filevar.f, "rb") + filevar._file_opened = True def close_files(filevars: List[FileVar]) -> None: @@ -79,4 +81,6 @@ def close_files(filevars: List[FileVar]) -> None: assert isinstance(filevar, FileVar) if isinstance(filevar.f, io.IOBase): - filevar.f.close() + if filevar._file_opened: + filevar.f.close() + filevar._file_opened = False From cd182b19ef0c81f9d067a4f998f0f8e0801c26b7 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Thu, 22 May 2025 21:49:14 +0200 Subject: [PATCH 09/14] Using normal class instead of dataclass for FileVar --- gql/transport/file_upload.py | 67 ++++++++++++++++++++---------------- 1 file changed, 38 insertions(+), 29 deletions(-) diff --git a/gql/transport/file_upload.py b/gql/transport/file_upload.py index be8b150c..d71b1d6c 100644 --- a/gql/transport/file_upload.py +++ 
b/gql/transport/file_upload.py @@ -1,18 +1,47 @@ import io import warnings -from dataclasses import dataclass from typing import Any, Dict, List, Optional, Tuple, Type -@dataclass class FileVar: - f: Any # str | io.IOBase | aiohttp.StreamReader | AsyncGenerator - # Add KW_ONLY here once Python 3.9 is deprecated - filename: Optional[str] = None - content_type: Optional[str] = None - streaming: bool = False - streaming_block_size: int = 64 * 1024 - _file_opened: bool = False + def __init__( + self, + f: Any, # str | io.IOBase | aiohttp.StreamReader | AsyncGenerator + *, + filename: Optional[str] = None, + content_type: Optional[str] = None, + streaming: bool = False, + streaming_block_size: int = 64 * 1024, + ): + self.f = f + self.filename = filename + self.content_type = content_type + self.streaming = streaming + self.streaming_block_size = streaming_block_size + + self._file_opened: bool = False + + def open_file(self): + assert self._file_opened is False + if isinstance(self.f, str): + self.f = open(self.f, "rb") + self._file_opened = True + + def close_file(self): + if self._file_opened: + assert isinstance(self.f, io.IOBase) + self.f.close() + self._file_opened = False + + +def open_files(filevars: List[FileVar]) -> None: + for filevar in filevars: + filevar.open_file() + + +def close_files(filevars: List[FileVar]) -> None: + for filevar in filevars: + filevar.close_file() FILE_UPLOAD_DOCS = "https://gql.readthedocs.io/en/latest/usage/file_upload.html" @@ -64,23 +93,3 @@ def recurse_extract(path, obj): nulled_variables = recurse_extract("variables", variables) return nulled_variables, files - - -def open_files(filevars: List[FileVar]) -> None: - - for filevar in filevars: - assert isinstance(filevar, FileVar) - - if isinstance(filevar.f, str): - filevar.f = open(filevar.f, "rb") - filevar._file_opened = True - - -def close_files(filevars: List[FileVar]) -> None: - for filevar in filevars: - assert isinstance(filevar, FileVar) - - if isinstance(filevar.f, io.IOBase): - if filevar._file_opened: - filevar.f.close() - filevar._file_opened = False From 92145532eb84ad00f438ed624543bfc774504ff9 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Thu, 22 May 2025 23:11:03 +0200 Subject: [PATCH 10/14] Open uploaded files with aiohttp + fix tests --- gql/transport/aiohttp.py | 86 ++++--- tests/conftest.py | 59 +++++ tests/test_aiohttp.py | 486 ++++++++++++++++++++------------------- tests/test_requests.py | 111 +++------ 4 files changed, 394 insertions(+), 348 deletions(-) diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index fb535031..99295e60 100644 --- a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -32,7 +32,7 @@ TransportProtocolError, TransportServerError, ) -from .file_upload import FileVar, extract_files +from .file_upload import FileVar, close_files, extract_files, open_files log = logging.getLogger(__name__) @@ -207,6 +207,10 @@ async def execute( file_classes=self.file_classes, ) + # Opening the files using the FileVar parameters + open_files(list(files.values())) + self.files = files + # Save the nulled variable values in the payload payload["variables"] = nulled_variable_values @@ -270,51 +274,59 @@ async def execute( if self.session is None: raise TransportClosed("Transport is not connected") - async with self.session.post(self.url, ssl=self.ssl, **post_args) as resp: + try: + async with self.session.post(self.url, ssl=self.ssl, **post_args) as resp: - # Saving latest response headers in the transport - self.response_headers = resp.headers + # Saving 
latest response headers in the transport + self.response_headers = resp.headers - async def raise_response_error( - resp: aiohttp.ClientResponse, reason: str - ) -> NoReturn: - # We raise a TransportServerError if the status code is 400 or higher - # We raise a TransportProtocolError in the other cases + async def raise_response_error( + resp: aiohttp.ClientResponse, reason: str + ) -> NoReturn: + # We raise a TransportServerError if status code is 400 or higher + # We raise a TransportProtocolError in the other cases - try: - # Raise a ClientResponseError if response status is 400 or higher - resp.raise_for_status() - except ClientResponseError as e: - raise TransportServerError(str(e), e.status) from e - - result_text = await resp.text() - raise TransportProtocolError( - f"Server did not return a GraphQL result: " - f"{reason}: " - f"{result_text}" - ) + try: + # Raise ClientResponseError if response status is 400 or higher + resp.raise_for_status() + except ClientResponseError as e: + raise TransportServerError(str(e), e.status) from e - try: - result = await resp.json(loads=self.json_deserialize, content_type=None) - - if log.isEnabledFor(logging.INFO): result_text = await resp.text() - log.info("<<< %s", result_text) + raise TransportProtocolError( + f"Server did not return a GraphQL result: " + f"{reason}: " + f"{result_text}" + ) - except Exception: - await raise_response_error(resp, "Not a JSON answer") + try: + result = await resp.json( + loads=self.json_deserialize, content_type=None + ) - if result is None: - await raise_response_error(resp, "Not a JSON answer") + if log.isEnabledFor(logging.INFO): + result_text = await resp.text() + log.info("<<< %s", result_text) - if "errors" not in result and "data" not in result: - await raise_response_error(resp, 'No "data" or "errors" keys in answer') + except Exception: + await raise_response_error(resp, "Not a JSON answer") - return ExecutionResult( - errors=result.get("errors"), - data=result.get("data"), - extensions=result.get("extensions"), - ) + if result is None: + await raise_response_error(resp, "Not a JSON answer") + + if "errors" not in result and "data" not in result: + await raise_response_error( + resp, 'No "data" or "errors" keys in answer' + ) + + return ExecutionResult( + errors=result.get("errors"), + data=result.get("data"), + extensions=result.get("extensions"), + ) + finally: + if upload_files: + close_files(list(self.files.values())) def subscribe( self, diff --git a/tests/conftest.py b/tests/conftest.py index c69551b0..cef561f7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -763,3 +763,62 @@ def strip_braces_spaces(s): strip_back = re.sub(r"([^\s]) }", r"\1}", strip_front) return strip_back + + +def make_upload_handler( + nb_files=1, + filenames=None, + request_headers=None, + file_headers=None, + binary=False, + expected_contents=None, + expected_operations=None, + expected_map=None, + server_answer='{"data":{"success":true}}', +): + assert expected_contents is not None + assert expected_operations is not None + assert expected_map is not None + + async def single_upload_handler(request): + from aiohttp import web + + reader = await request.multipart() + + if request_headers is not None: + for k, v in request_headers.items(): + assert request.headers[k] == v + + field_0 = await reader.next() + assert field_0.name == "operations" + field_0_text = await field_0.text() + assert strip_braces_spaces(field_0_text) == expected_operations + + field_1 = await reader.next() + assert field_1.name == "map" + 
field_1_text = await field_1.text() + assert field_1_text == expected_map + + for i in range(nb_files): + field = await reader.next() + assert field.name == str(i) + if filenames is not None: + assert field.filename == filenames[i] + + if binary: + field_content = await field.read() + assert field_content == expected_contents[i] + else: + field_text = await field.text() + assert field_text == expected_contents[i] + + if file_headers is not None: + for k, v in file_headers[i].items(): + assert field.headers[k] == v + + final_field = await reader.next() + assert final_field is None + + return web.Response(text=server_answer, content_type="application/json") + + return single_upload_handler diff --git a/tests/test_aiohttp.py b/tests/test_aiohttp.py index 04417c4e..9c40e9c0 100644 --- a/tests/test_aiohttp.py +++ b/tests/test_aiohttp.py @@ -1,10 +1,11 @@ import io import json +import warnings from typing import Mapping import pytest -from gql import Client, gql +from gql import Client, FileVar, gql from gql.cli import get_parser, main from gql.transport.exceptions import ( TransportAlreadyConnected, @@ -17,7 +18,7 @@ from .conftest import ( TemporaryFile, get_localhost_ssl_context_client, - strip_braces_spaces, + make_upload_handler, ) query1_str = """ @@ -600,8 +601,6 @@ def test_code(): await run_sync_test(server, test_code) -file_upload_server_answer = '{"data":{"success":true}}' - file_upload_mutation_1 = """ mutation($file: Upload!) { uploadFile(input:{other_var:$other_var, file:$file}) { @@ -624,33 +623,6 @@ def test_code(): """ -async def single_upload_handler(request): - - from aiohttp import web - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content - - field_3 = await reader.next() - assert field_3 is None - - return web.Response(text=file_upload_server_answer, content_type="application/json") - - @pytest.mark.asyncio async def test_aiohttp_file_upload(aiohttp_server): from aiohttp import web @@ -658,7 +630,15 @@ async def test_aiohttp_file_upload(aiohttp_server): from gql.transport.aiohttp import AIOHTTPTransport app = web.Application() - app.router.add_route("POST", "/", single_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -673,48 +653,45 @@ async def test_aiohttp_file_upload(aiohttp_server): file_path = test_file.filename + # Using an opened file with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} # Execute query asynchronously - result = await session.execute( - query, variable_values=params, upload_files=True - ) + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + result = await session.execute( + query, variable_values=params, upload_files=True + ) success = result["success"] - assert success + # Using an opened file inside a FileVar object + with open(file_path, "rb") as f: -async def 
single_upload_handler_with_content_type(request): - - from aiohttp import web - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map + params = {"file": FileVar(f), "other_var": 42} - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content + with warnings.catch_warnings(): + warnings.simplefilter("error") # Turn warnings into errors + result = await session.execute( + query, variable_values=params, upload_files=True + ) - # Verifying the content_type - assert field_2.headers["Content-Type"] == "application/pdf" + success = result["success"] + assert success - field_3 = await reader.next() - assert field_3 is None + # Using an filename string inside a FileVar object + params = {"file": FileVar(file_path), "other_var": 42} + result = await session.execute( + query, variable_values=params, upload_files=True + ) - return web.Response(text=file_upload_server_answer, content_type="application/json") + success = result["success"] + assert success @pytest.mark.asyncio @@ -724,7 +701,16 @@ async def test_aiohttp_file_upload_with_content_type(aiohttp_server): from gql.transport.aiohttp import AIOHTTPTransport app = web.Application() - app.router.add_route("POST", "/", single_upload_handler_with_content_type) + app.router.add_route( + "POST", + "/", + make_upload_handler( + file_headers=[{"Content-Type": "application/pdf"}], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -739,6 +725,7 @@ async def test_aiohttp_file_upload_with_content_type(aiohttp_server): file_path = test_file.filename + # Using an opened file with open(file_path, "rb") as f: # Setting the content_type @@ -746,13 +733,49 @@ async def test_aiohttp_file_upload_with_content_type(aiohttp_server): params = {"file": f, "other_var": 42} - # Execute query asynchronously + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + result = await session.execute( + query, variable_values=params, upload_files=True + ) + + success = result["success"] + assert success + + # Using an opened file inside a FileVar object + with open(file_path, "rb") as f: + + params = { + "file": FileVar( + f, + content_type="application/pdf", + ), + "other_var": 42, + } + result = await session.execute( query, variable_values=params, upload_files=True ) success = result["success"] + assert success + + # Using an filename string inside a FileVar object + params = { + "file": FileVar( + file_path, + content_type="application/pdf", + ), + "other_var": 42, + } + + result = await session.execute( + query, variable_values=params, upload_files=True + ) + success = result["success"] assert success @@ -763,7 +786,15 @@ async def test_aiohttp_file_upload_without_session(aiohttp_server, run_sync_test from gql.transport.aiohttp import AIOHTTPTransport app = web.Application() - app.router.add_route("POST", "/", single_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + expected_map=file_upload_mutation_1_map, + 
expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -779,60 +810,36 @@ def test_code(): file_path = test_file.filename - with open(file_path, "rb") as f: - - params = {"file": f, "other_var": 42} - - result = client.execute( - query, variable_values=params, upload_files=True - ) + params = {"file": FileVar(file_path), "other_var": 42} - success = result["success"] + result = client.execute(query, variable_values=params, upload_files=True) - assert success + success = result["success"] + assert success await run_sync_test(server, test_code) -# This is a sample binary file content containing all possible byte values -binary_file_content = bytes(range(0, 256)) - - -async def binary_upload_handler(request): - - from aiohttp import web - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_binary = await field_2.read() - assert field_2_binary == binary_file_content - - field_3 = await reader.next() - assert field_3 is None - - return web.Response(text=file_upload_server_answer, content_type="application/json") - - @pytest.mark.asyncio async def test_aiohttp_binary_file_upload(aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport + # This is a sample binary file content containing all possible byte values + binary_file_content = bytes(range(0, 256)) + app = web.Application() - app.router.add_route("POST", "/", binary_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + binary=True, + expected_contents=[binary_file_content], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + ), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -847,14 +854,12 @@ async def test_aiohttp_binary_file_upload(aiohttp_server): file_path = test_file.filename - with open(file_path, "rb") as f: - - params = {"file": f, "other_var": 42} + params = {"file": FileVar(file_path), "other_var": 42} - # Execute query asynchronously - result = await session.execute( - query, variable_values=params, upload_files=True - ) + # Execute query asynchronously + result = await session.execute( + query, variable_values=params, upload_files=True + ) success = result["success"] @@ -867,13 +872,25 @@ async def test_aiohttp_stream_reader_upload(aiohttp_server): from gql.transport.aiohttp import AIOHTTPTransport + # This is a sample binary file content containing all possible byte values + binary_file_content = bytes(range(0, 256)) + async def binary_data_handler(request): return web.Response( body=binary_file_content, content_type="binary/octet-stream" ) app = web.Application() - app.router.add_route("POST", "/", binary_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + binary=True, + expected_contents=[binary_file_content], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + ), + ) app.router.add_route("GET", "/binary_data", binary_data_handler) server = await aiohttp_server(app) @@ -883,19 +900,36 @@ async def 
binary_data_handler(request): transport = AIOHTTPTransport(url=url, timeout=10) + # Not using FileVar async with Client(transport=transport) as session: query = gql(file_upload_mutation_1) async with ClientSession() as client: async with client.get(binary_data_url) as resp: params = {"file": resp.content, "other_var": 42} - # Execute query asynchronously + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + result = await session.execute( + query, variable_values=params, upload_files=True + ) + + success = result["success"] + assert success + + # Using FileVar + async with Client(transport=transport) as session: + query = gql(file_upload_mutation_1) + async with ClientSession() as client: + async with client.get(binary_data_url) as resp: + params = {"file": FileVar(resp.content), "other_var": 42} + result = await session.execute( query, variable_values=params, upload_files=True ) success = result["success"] - assert success @@ -906,30 +940,59 @@ async def test_aiohttp_async_generator_upload(aiohttp_server): from gql.transport.aiohttp import AIOHTTPTransport + # This is a sample binary file content containing all possible byte values + binary_file_content = bytes(range(0, 256)) + app = web.Application() - app.router.add_route("POST", "/", binary_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + binary=True, + expected_contents=[binary_file_content], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + ), + ) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) + query = gql(file_upload_mutation_1) + with TemporaryFile(binary_file_content) as test_file: + file_path = test_file.filename + + async def file_sender(file_name): + async with aiofiles.open(file_name, "rb") as f: + chunk = await f.read(64 * 1024) + while chunk: + yield chunk + chunk = await f.read(64 * 1024) + + # Not using FileVar async with Client(transport=transport) as session: - query = gql(file_upload_mutation_1) + params = {"file": file_sender(file_path), "other_var": 42} - file_path = test_file.filename + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + result = await session.execute( + query, variable_values=params, upload_files=True + ) - async def file_sender(file_name): - async with aiofiles.open(file_name, "rb") as f: - chunk = await f.read(64 * 1024) - while chunk: - yield chunk - chunk = await f.read(64 * 1024) + success = result["success"] + assert success - params = {"file": file_sender(file_path), "other_var": 42} + # Using FileVar + async with Client(transport=transport) as session: + + params = {"file": FileVar(file_sender(file_path)), "other_var": 42} # Execute query asynchronously result = await session.execute( @@ -937,71 +1000,47 @@ async def file_sender(file_name): ) success = result["success"] - assert success -file_upload_mutation_2 = """ - mutation($file1: Upload!, $file2: Upload!) { - uploadFile(input:{file1:$file, file2:$file}) { - success - } - } -""" - -file_upload_mutation_2_operations = ( - '{"query": "mutation ($file1: Upload!, $file2: Upload!) 
{\\n ' - 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}", ' - '"variables": {"file1": null, "file2": null}}' -) - -file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' - -file_2_content = """ -This is a second test file -This file will also be sent in the GraphQL mutation -""" - - @pytest.mark.asyncio async def test_aiohttp_file_upload_two_files(aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport - async def handler(request): - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_2_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_2_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content + file_upload_mutation_2 = """ + mutation($file1: Upload!, $file2: Upload!) { + uploadFile(input:{file1:$file, file2:$file}) { + success + } + } + """ - field_3 = await reader.next() - assert field_3.name == "1" - field_3_text = await field_3.text() - assert field_3_text == file_2_content + file_upload_mutation_2_operations = ( + '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n ' + 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}", ' + '"variables": {"file1": null, "file2": null}}' + ) - field_4 = await reader.next() - assert field_4 is None + file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) + file_2_content = """ + This is a second test file + This file will also be sent in the GraphQL mutation + """ app = web.Application() - app.router.add_route("POST", "/", handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + nb_files=2, + expected_map=file_upload_mutation_2_map, + expected_operations=file_upload_mutation_2_operations, + expected_contents=[file_1_content, file_2_content], + ), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -1018,82 +1057,60 @@ async def handler(request): file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename - f1 = open(file_path_1, "rb") - f2 = open(file_path_2, "rb") - params = { - "file1": f1, - "file2": f2, + "file1": FileVar(file_path_1), + "file2": FileVar(file_path_2), } result = await session.execute( query, variable_values=params, upload_files=True ) - f1.close() - f2.close() - success = result["success"] assert success -file_upload_mutation_3 = """ - mutation($files: [Upload!]!) { - uploadFiles(input:{files:$files}) { - success - } - } -""" - -file_upload_mutation_3_operations = ( - '{"query": "mutation ($files: [Upload!]!) 
{\\n uploadFiles(' - "input: {files: $files})" - ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' -) - -file_upload_mutation_3_map = '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' - - @pytest.mark.asyncio async def test_aiohttp_file_upload_list_of_two_files(aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport - async def handler(request): - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_3_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_3_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content + file_upload_mutation_3 = """ + mutation($files: [Upload!]!) { + uploadFiles(input:{files:$files}) { + success + } + } + """ - field_3 = await reader.next() - assert field_3.name == "1" - field_3_text = await field_3.text() - assert field_3_text == file_2_content + file_upload_mutation_3_operations = ( + '{"query": "mutation ($files: [Upload!]!) {\\n uploadFiles' + "(input: {files: $files})" + ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' + ) - field_4 = await reader.next() - assert field_4 is None + file_upload_mutation_3_map = ( + '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' + ) - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) + file_2_content = """ + This is a second test file + This file will also be sent in the GraphQL mutation + """ app = web.Application() - app.router.add_route("POST", "/", handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + nb_files=2, + expected_map=file_upload_mutation_3_map, + expected_operations=file_upload_mutation_3_operations, + expected_contents=[file_1_content, file_2_content], + ), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -1110,19 +1127,18 @@ async def handler(request): file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename - f1 = open(file_path_1, "rb") - f2 = open(file_path_2, "rb") - - params = {"files": [f1, f2]} + params = { + "files": [ + FileVar(file_path_1), + FileVar(file_path_2), + ], + } # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) - f1.close() - f2.close() - success = result["success"] assert success diff --git a/tests/test_requests.py b/tests/test_requests.py index 83f6d5c0..1cec1360 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -3,7 +3,7 @@ import pytest -from gql import Client, gql +from gql import Client, FileVar, gql from gql.transport.exceptions import ( TransportAlreadyConnected, TransportClosed, @@ -15,7 +15,7 @@ from .conftest import ( TemporaryFile, get_localhost_ssl_context_client, - strip_braces_spaces, + make_upload_handler, ) # Marking all tests in this file with the requests marker @@ -518,8 +518,6 @@ def test_code(): await run_sync_test(server, test_code) -file_upload_server_answer = '{"data":{"success":true}}' - file_upload_mutation_1 = """ mutation($file: Upload!) 
{ uploadFile(input:{other_var:$other_var, file:$file}) { @@ -542,62 +540,6 @@ def test_code(): """ -def make_upload_handler( - nb_files=1, - filenames=None, - request_headers=None, - file_headers=None, - binary=False, - expected_contents=[file_1_content], - expected_operations=file_upload_mutation_1_operations, - expected_map=file_upload_mutation_1_map, -): - async def single_upload_handler(request): - from aiohttp import web - - reader = await request.multipart() - - if request_headers is not None: - for k, v in request_headers.items(): - assert request.headers[k] == v - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == expected_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == expected_map - - for i in range(nb_files): - field = await reader.next() - assert field.name == str(i) - if filenames is not None: - assert field.filename == filenames[i] - - if binary: - field_content = await field.read() - assert field_content == expected_contents[i] - else: - field_text = await field.text() - assert field_text == expected_contents[i] - - if file_headers is not None: - for k, v in file_headers[i].items(): - assert field.headers[k] == v - - final_field = await reader.next() - assert final_field is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - - return single_upload_handler - - @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_file_upload(aiohttp_server, run_sync_test): @@ -606,7 +548,15 @@ async def test_requests_file_upload(aiohttp_server, run_sync_test): from gql.transport.requests import RequestsHTTPTransport app = web.Application() - app.router.add_route("POST", "/", make_upload_handler()) + app.router.add_route( + "POST", + "/", + make_upload_handler( + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) server = await aiohttp_server(app) url = server.make_url("/") @@ -636,8 +586,6 @@ def test_code(): assert execution_result.data["success"] # Using an opened file inside a FileVar object - from gql import FileVar - with open(file_path, "rb") as f: params = {"file": FileVar(f), "other_var": 42} @@ -650,8 +598,6 @@ def test_code(): assert execution_result.data["success"] # Using an filename string inside a FileVar object - from gql import FileVar - params = {"file": FileVar(file_path), "other_var": 42} execution_result = session._execute( query, variable_values=params, upload_files=True @@ -673,7 +619,12 @@ async def test_requests_file_upload_with_content_type(aiohttp_server, run_sync_t app.router.add_route( "POST", "/", - make_upload_handler(file_headers=[{"Content-Type": "application/pdf"}]), + make_upload_handler( + file_headers=[{"Content-Type": "application/pdf"}], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), ) server = await aiohttp_server(app) @@ -706,8 +657,6 @@ def test_code(): assert execution_result.data["success"] # Using an opened file inside a FileVar object - from gql import FileVar - with open(file_path, "rb") as f: params = { @@ -734,14 +683,18 @@ async def test_requests_file_upload_with_filename(aiohttp_server, run_sync_test) app.router.add_route( "POST", "/", - make_upload_handler(filenames=["filename1.txt"]), + 
make_upload_handler(
+            filenames=["filename1.txt"],
+            expected_map=file_upload_mutation_1_map,
+            expected_operations=file_upload_mutation_1_operations,
+            expected_contents=[file_1_content],
+        ),
     )
     server = await aiohttp_server(app)
 
     url = server.make_url("/")
 
     def test_code():
-        from gql import FileVar
 
         transport = RequestsHTTPTransport(url=url)
 
@@ -780,7 +733,12 @@ async def test_requests_file_upload_additional_headers(aiohttp_server, run_sync_
     app.router.add_route(
         "POST",
         "/",
-        make_upload_handler(request_headers={"X-Auth": "foobar"}),
+        make_upload_handler(
+            request_headers={"X-Auth": "foobar"},
+            expected_map=file_upload_mutation_1_map,
+            expected_operations=file_upload_mutation_1_operations,
+            expected_contents=[file_1_content],
+        ),
     )
 
     server = await aiohttp_server(app)
@@ -825,7 +783,12 @@ async def test_requests_binary_file_upload(aiohttp_server, run_sync_test):
     app.router.add_route(
         "POST",
         "/",
-        make_upload_handler(binary=True, expected_contents=[binary_file_content]),
+        make_upload_handler(
+            binary=True,
+            expected_contents=[binary_file_content],
+            expected_map=file_upload_mutation_1_map,
+            expected_operations=file_upload_mutation_1_operations,
+        ),
     )
 
     server = await aiohttp_server(app)
@@ -937,8 +900,6 @@ def test_code():
             f2.close()
 
             # Using FileVar
-            from gql import FileVar
-
             file_path_1 = test_file_1.filename
             file_path_2 = test_file_2.filename
 
@@ -1039,8 +1000,6 @@ def test_code():
             f2.close()
 
             # Using FileVar
-            from gql import FileVar
-
             file_path_1 = test_file_1.filename
             file_path_2 = test_file_2.filename
 
From 819440b68ffd9b4e492e6ec51dea70eaa292f680 Mon Sep 17 00:00:00 2001
From: Leszek Hanusz
Date: Thu, 22 May 2025 23:57:07 +0200
Subject: [PATCH 11/14] Implement file streaming with aiohttp

---
 docs/usage/file_upload.rst   | 45 +++++++++++++++++++++---------------
 gql/transport/aiohttp.py     |  2 +-
 gql/transport/file_upload.py | 40 ++++++++++++++++++++++++++------
 tests/test_aiohttp.py        | 16 +++++++++++++
 4 files changed, 77 insertions(+), 26 deletions(-)

diff --git a/docs/usage/file_upload.rst b/docs/usage/file_upload.rst
index 07d04bd5..7793354b 100644
--- a/docs/usage/file_upload.rst
+++ b/docs/usage/file_upload.rst
@@ -128,18 +128,8 @@ Streaming local files
 aiohttp allows to upload files using an asynchronous generator.
 See `Streaming uploads on aiohttp docs`_.
 
-
-In order to stream local files, instead of providing opened files to the
-`variable_values` argument of `execute`, you need to provide an async generator
-which will provide parts of the files.
-
-You can use `aiofiles`_
-to read the files in chunks and create this asynchronous generator.
-
-.. _Streaming uploads on aiohttp docs: https://docs.aiohttp.org/en/stable/client_quickstart.html#streaming-uploads
-.. _aiofiles: https://github.com/Tinche/aiofiles
-
-Example:
+From gql version 4.0, it is possible to activate file streaming simply by
+setting the `streaming` argument of :class:`FileVar` to `True`.
 
 .. code-block:: python
 
@@ -155,19 +145,38 @@ Example:
     }
   ''')
 
+    f1 = FileVar(
+        'YOUR_FILE_PATH',
+        streaming=True,
+    )
+
+    params = {"file": f1}
+
+    result = client.execute(
+        query, variable_values=params, upload_files=True
+    )
+
+Another option is to use an async generator to provide parts of the file.
+
+You can use `aiofiles`_
+to read the files in chunks and create this asynchronous generator.
+
+.. _Streaming uploads on aiohttp docs: https://docs.aiohttp.org/en/stable/client_quickstart.html#streaming-uploads
+.. _aiofiles: https://github.com/Tinche/aiofiles
+
+..
code-block:: python + async def file_sender(file_name): async with aiofiles.open(file_name, 'rb') as f: - chunk = await f.read(64*1024) - while chunk: - yield chunk - chunk = await f.read(64*1024) + while chunk := await f.read(64*1024): + yield chunk f1 = FileVar(file_sender(file_name='YOUR_FILE_PATH')) params = {"file": f1} result = client.execute( - query, variable_values=params, upload_files=True - ) + query, variable_values=params, upload_files=True + ) Streaming downloaded files ^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index 99295e60..b2633abb 100644 --- a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -208,7 +208,7 @@ async def execute( ) # Opening the files using the FileVar parameters - open_files(list(files.values())) + open_files(list(files.values()), transport_supports_streaming=True) self.files = files # Save the nulled variable values in the payload diff --git a/gql/transport/file_upload.py b/gql/transport/file_upload.py index d71b1d6c..a2fd4e56 100644 --- a/gql/transport/file_upload.py +++ b/gql/transport/file_upload.py @@ -21,22 +21,48 @@ def __init__( self._file_opened: bool = False - def open_file(self): + def open_file( + self, + transport_supports_streaming: bool = False, + ) -> None: assert self._file_opened is False - if isinstance(self.f, str): - self.f = open(self.f, "rb") - self._file_opened = True - def close_file(self): + if self.streaming: + assert ( + transport_supports_streaming + ), "streaming not supported on this transport" + self._make_file_streamer() + else: + if isinstance(self.f, str): + self.f = open(self.f, "rb") + self._file_opened = True + + def close_file(self) -> None: if self._file_opened: assert isinstance(self.f, io.IOBase) self.f.close() self._file_opened = False + def _make_file_streamer(self) -> None: + assert isinstance(self.f, str), "streaming option needs a filepath str" + + import aiofiles + + async def file_sender(file_name): + async with aiofiles.open(file_name, "rb") as f: + while chunk := await f.read(self.streaming_block_size): + yield chunk + + self.f = file_sender(self.f) + + +def open_files( + filevars: List[FileVar], + transport_supports_streaming: bool = False, +) -> None: -def open_files(filevars: List[FileVar]) -> None: for filevar in filevars: - filevar.open_file() + filevar.open_file(transport_supports_streaming=transport_supports_streaming) def close_files(filevars: List[FileVar]) -> None: diff --git a/tests/test_aiohttp.py b/tests/test_aiohttp.py index 9c40e9c0..c7ca0dcd 100644 --- a/tests/test_aiohttp.py +++ b/tests/test_aiohttp.py @@ -1002,6 +1002,22 @@ async def file_sender(file_name): success = result["success"] assert success + # Using FileVar with new streaming support + async with Client(transport=transport) as session: + + params = { + "file": FileVar(file_path, streaming=True), + "other_var": 42, + } + + # Execute query asynchronously + result = await session.execute( + query, variable_values=params, upload_files=True + ) + + success = result["success"] + assert success + @pytest.mark.asyncio async def test_aiohttp_file_upload_two_files(aiohttp_server): From a9dc7b52739ce2bcb179918c231f827ff1b59b94 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Fri, 23 May 2025 00:10:07 +0200 Subject: [PATCH 12/14] Add aiofiles dependency --- setup.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index aed15440..706a80c3 100644 --- a/setup.py +++ b/setup.py @@ -58,8 +58,12 @@ "botocore>=1.21,<2", ] 
+install_aiofiles_requires = [ + "aiofiles", +] + install_all_requires = ( - install_aiohttp_requires + install_requests_requires + install_httpx_requires + install_websockets_requires + install_botocore_requires + install_aiohttp_requires + install_requests_requires + install_httpx_requires + install_websockets_requires + install_botocore_requires + install_aiofiles_requires ) # Get version from __version__.py file @@ -107,6 +111,7 @@ "httpx": install_httpx_requires, "websockets": install_websockets_requires, "botocore": install_botocore_requires, + "aiofiles": install_aiofiles_requires, }, include_package_data=True, zip_safe=False, From fde571b061ab83e6eebc1cb5188fe3da4863d9d0 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Fri, 23 May 2025 00:46:17 +0200 Subject: [PATCH 13/14] Open uploaded files with httpx + fix tests --- gql/transport/httpx.py | 18 +- tests/test_httpx.py | 368 ++++++++++++++------------------------ tests/test_httpx_async.py | 326 ++++++++++++++------------------- 3 files changed, 285 insertions(+), 427 deletions(-) diff --git a/gql/transport/httpx.py b/gql/transport/httpx.py index 96a71fd5..eb15ac57 100644 --- a/gql/transport/httpx.py +++ b/gql/transport/httpx.py @@ -24,7 +24,7 @@ TransportProtocolError, TransportServerError, ) -from .file_upload import extract_files +from .file_upload import close_files, extract_files, open_files log = logging.getLogger(__name__) @@ -103,6 +103,10 @@ def _prepare_file_uploads( file_classes=self.file_classes, ) + # Opening the files using the FileVar parameters + open_files(list(files.values())) + self.files = files + # Save the nulled variable values in the payload payload["variables"] = nulled_variable_values @@ -227,7 +231,11 @@ def execute( # type: ignore upload_files, ) - response = self.client.post(self.url, **post_args) + try: + response = self.client.post(self.url, **post_args) + finally: + if upload_files: + close_files(list(self.files.values())) return self._prepare_result(response) @@ -290,7 +298,11 @@ async def execute( upload_files, ) - response = await self.client.post(self.url, **post_args) + try: + response = await self.client.post(self.url, **post_args) + finally: + if upload_files: + close_files(list(self.files.values())) return self._prepare_result(response) diff --git a/tests/test_httpx.py b/tests/test_httpx.py index d129f022..7c34e73b 100644 --- a/tests/test_httpx.py +++ b/tests/test_httpx.py @@ -2,7 +2,7 @@ import pytest -from gql import Client, gql +from gql import Client, FileVar, gql from gql.transport.exceptions import ( TransportAlreadyConnected, TransportClosed, @@ -14,7 +14,7 @@ from .conftest import ( TemporaryFile, get_localhost_ssl_context_client, - strip_braces_spaces, + make_upload_handler, ) # Marking all tests in this file with the httpx marker @@ -516,8 +516,6 @@ def test_code(): await run_sync_test(server, test_code) -file_upload_server_answer = '{"data":{"success":true}}' - file_upload_mutation_1 = """ mutation($file: Upload!) 
{ uploadFile(input:{other_var:$other_var, file:$file}) { @@ -547,35 +545,16 @@ async def test_httpx_file_upload(aiohttp_server, run_sync_test): from gql.transport.httpx import HTTPXTransport - async def single_upload_handler(request): - from aiohttp import web - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content - - field_3 = await reader.next() - assert field_3 is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - app = web.Application() - app.router.add_route("POST", "/", single_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -589,15 +568,41 @@ def test_code(): file_path = test_file.filename + # Using an opened file with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + + # Using an opened file inside a FileVar object + with open(file_path, "rb") as f: + + params = {"file": FileVar(f), "other_var": 42} execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] + # Using an filename string inside a FileVar object + params = { + "file": FileVar(file_path), + "other_var": 42, + } + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + await run_sync_test(server, test_code) @@ -608,38 +613,17 @@ async def test_httpx_file_upload_with_content_type(aiohttp_server, run_sync_test from gql.transport.httpx import HTTPXTransport - async def single_upload_handler(request): - from aiohttp import web - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content - - # Verifying the content_type - assert field_2.headers["Content-Type"] == "application/pdf" - - field_3 = await reader.next() - assert field_3 is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - app = web.Application() - app.router.add_route("POST", "/", single_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + file_headers=[{"Content-Type": "application/pdf"}], + expected_map=file_upload_mutation_1_map, + 
expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -653,18 +637,34 @@ def test_code(): file_path = test_file.filename + # Using an opened file with open(file_path, "rb") as f: # Setting the content_type f.content_type = "application/pdf" # type: ignore params = {"file": f, "other_var": 42} - execution_result = session._execute( - query, variable_values=params, upload_files=True - ) + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) assert execution_result.data["success"] + # Using FileVar + params = { + "file": FileVar(file_path, content_type="application/pdf"), + "other_var": 42, + } + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + await run_sync_test(server, test_code) @@ -675,37 +675,17 @@ async def test_httpx_file_upload_additional_headers(aiohttp_server, run_sync_tes from gql.transport.httpx import HTTPXTransport - async def single_upload_handler(request): - from aiohttp import web - - assert request.headers["X-Auth"] == "foobar" - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content - - field_3 = await reader.next() - assert field_3 is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - app = web.Application() - app.router.add_route("POST", "/", single_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + request_headers={"X-Auth": "foobar"}, + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -719,14 +699,12 @@ def test_code(): file_path = test_file.filename - with open(file_path, "rb") as f: - - params = {"file": f, "other_var": 42} - execution_result = session._execute( - query, variable_values=params, upload_files=True - ) + params = {"file": FileVar(file_path), "other_var": 42} + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) - assert execution_result.data["success"] + assert execution_result.data["success"] await run_sync_test(server, test_code) @@ -741,36 +719,17 @@ async def test_httpx_binary_file_upload(aiohttp_server, run_sync_test): # This is a sample binary file content containing all possible byte values binary_file_content = bytes(range(0, 256)) - async def binary_upload_handler(request): - - from aiohttp import web - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == 
file_upload_mutation_1_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_binary = await field_2.read() - assert field_2_binary == binary_file_content - - field_3 = await reader.next() - assert field_3 is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - app = web.Application() - app.router.add_route("POST", "/", binary_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + binary=True, + expected_contents=[binary_file_content], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -785,26 +744,17 @@ def test_code(): file_path = test_file.filename - with open(file_path, "rb") as f: + params = {"file": FileVar(file_path), "other_var": 42} - params = {"file": f, "other_var": 42} + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) - execution_result = session._execute( - query, variable_values=params, upload_files=True - ) - - assert execution_result.data["success"] + assert execution_result.data["success"] await run_sync_test(server, test_code) -file_upload_mutation_2_operations = ( - '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n ' - 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}", ' - '"variables": {"file1": null, "file2": null}}' -) - - @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_two_files(aiohttp_server, run_sync_test): @@ -820,6 +770,12 @@ async def test_httpx_file_upload_two_files(aiohttp_server, run_sync_test): } """ + file_upload_mutation_2_operations = ( + '{"query": "mutation ($file1: Upload!, $file2: Upload!) 
{\\n ' + 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}", ' + '"variables": {"file1": null, "file2": null}}' + ) + file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' file_2_content = """ @@ -827,39 +783,17 @@ async def test_httpx_file_upload_two_files(aiohttp_server, run_sync_test): This file will also be sent in the GraphQL mutation """ - async def handler(request): - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_2_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_2_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content - - field_3 = await reader.next() - assert field_3.name == "1" - field_3_text = await field_3.text() - assert field_3_text == file_2_content - - field_4 = await reader.next() - assert field_4 is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - app = web.Application() - app.router.add_route("POST", "/", handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + nb_files=2, + expected_map=file_upload_mutation_2_map, + expected_operations=file_upload_mutation_2_operations, + expected_contents=[file_1_content, file_2_content], + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -877,12 +811,9 @@ def test_code(): file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename - f1 = open(file_path_1, "rb") - f2 = open(file_path_2, "rb") - params = { - "file1": f1, - "file2": f2, + "file1": FileVar(file_path_1), + "file2": FileVar(file_path_2), } execution_result = session._execute( @@ -891,19 +822,9 @@ def test_code(): assert execution_result.data["success"] - f1.close() - f2.close() - await run_sync_test(server, test_code) -file_upload_mutation_3_operations = ( - '{"query": "mutation ($files: [Upload!]!) {\\n uploadFiles' - "(input: {files: $files})" - ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' -) - - @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_list_of_two_files(aiohttp_server, run_sync_test): @@ -919,6 +840,12 @@ async def test_httpx_file_upload_list_of_two_files(aiohttp_server, run_sync_test } """ + file_upload_mutation_3_operations = ( + '{"query": "mutation ($files: [Upload!]!) 
{\\n uploadFiles' + "(input: {files: $files})" + ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' + ) + file_upload_mutation_3_map = ( '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' ) @@ -928,39 +855,17 @@ async def test_httpx_file_upload_list_of_two_files(aiohttp_server, run_sync_test This file will also be sent in the GraphQL mutation """ - async def handler(request): - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_3_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_3_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content - - field_3 = await reader.next() - assert field_3.name == "1" - field_3_text = await field_3.text() - assert field_3_text == file_2_content - - field_4 = await reader.next() - assert field_4 is None - - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) - app = web.Application() - app.router.add_route("POST", "/", handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + nb_files=2, + expected_map=file_upload_mutation_3_map, + expected_operations=file_upload_mutation_3_operations, + expected_contents=[file_1_content, file_2_content], + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -977,10 +882,12 @@ def test_code(): file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename - f1 = open(file_path_1, "rb") - f2 = open(file_path_2, "rb") - - params = {"files": [f1, f2]} + params = { + "files": [ + FileVar(file_path_1), + FileVar(file_path_2), + ], + } execution_result = session._execute( query, variable_values=params, upload_files=True @@ -988,9 +895,6 @@ def test_code(): assert execution_result.data["success"] - f1.close() - f2.close() - await run_sync_test(server, test_code) diff --git a/tests/test_httpx_async.py b/tests/test_httpx_async.py index 49ea6a24..ddacbc14 100644 --- a/tests/test_httpx_async.py +++ b/tests/test_httpx_async.py @@ -4,7 +4,7 @@ import pytest -from gql import Client, gql +from gql import Client, FileVar, gql from gql.cli import get_parser, main from gql.transport.exceptions import ( TransportAlreadyConnected, @@ -17,7 +17,7 @@ from .conftest import ( TemporaryFile, get_localhost_ssl_context_client, - strip_braces_spaces, + make_upload_handler, ) query1_str = """ @@ -613,8 +613,6 @@ def test_code(): await run_sync_test(server, test_code) -file_upload_server_answer = '{"data":{"success":true}}' - file_upload_mutation_1 = """ mutation($file: Upload!) 
{ uploadFile(input:{other_var:$other_var, file:$file}) { @@ -637,33 +635,6 @@ def test_code(): """ -async def single_upload_handler(request): - - from aiohttp import web - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content - - field_3 = await reader.next() - assert field_3 is None - - return web.Response(text=file_upload_server_answer, content_type="application/json") - - @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload(aiohttp_server): @@ -672,7 +643,15 @@ async def test_httpx_file_upload(aiohttp_server): from gql.transport.httpx import HTTPXAsyncTransport app = web.Application() - app.router.add_route("POST", "/", single_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -687,17 +666,45 @@ async def test_httpx_file_upload(aiohttp_server): file_path = test_file.filename + # Using an opened file with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} + # Execute query asynchronously + with pytest.warns( + DeprecationWarning, + match="Not using FileVar for file upload is deprecated", + ): + result = await session.execute( + query, variable_values=params, upload_files=True + ) + + success = result["success"] + assert success + + # Using an opened file inside a FileVar object + with open(file_path, "rb") as f: + + params = {"file": FileVar(f), "other_var": 42} + # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) success = result["success"] + assert success + + # Using an filename string inside a FileVar object + params = {"file": FileVar(file_path), "other_var": 42} + + # Execute query asynchronously + result = await session.execute( + query, variable_values=params, upload_files=True + ) + success = result["success"] assert success @@ -709,7 +716,15 @@ async def test_httpx_file_upload_without_session(aiohttp_server, run_sync_test): from gql.transport.httpx import HTTPXAsyncTransport app = web.Application() - app.router.add_route("POST", "/", single_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -725,52 +740,17 @@ def test_code(): file_path = test_file.filename - with open(file_path, "rb") as f: + params = {"file": FileVar(file_path), "other_var": 42} - params = {"file": f, "other_var": 42} + result = client.execute(query, variable_values=params, upload_files=True) - result = client.execute( - query, variable_values=params, upload_files=True - ) - - success = result["success"] + success = result["success"] - assert success + assert success await run_sync_test(server, test_code) -# This is a sample binary file content containing all possible 
byte values -binary_file_content = bytes(range(0, 256)) - - -async def binary_upload_handler(request): - - from aiohttp import web - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_1_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_binary = await field_2.read() - assert field_2_binary == binary_file_content - - field_3 = await reader.next() - assert field_3 is None - - return web.Response(text=file_upload_server_answer, content_type="application/json") - - @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_binary_file_upload(aiohttp_server): @@ -778,8 +758,20 @@ async def test_httpx_binary_file_upload(aiohttp_server): from gql.transport.httpx import HTTPXAsyncTransport + # This is a sample binary file content containing all possible byte values + binary_file_content = bytes(range(0, 256)) + app = web.Application() - app.router.add_route("POST", "/", binary_upload_handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + binary=True, + expected_contents=[binary_file_content], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -794,42 +786,18 @@ async def test_httpx_binary_file_upload(aiohttp_server): file_path = test_file.filename - with open(file_path, "rb") as f: - - params = {"file": f, "other_var": 42} + params = {"file": FileVar(file_path), "other_var": 42} - # Execute query asynchronously - result = await session.execute( - query, variable_values=params, upload_files=True - ) + # Execute query asynchronously + result = await session.execute( + query, variable_values=params, upload_files=True + ) success = result["success"] assert success -file_upload_mutation_2 = """ - mutation($file1: Upload!, $file2: Upload!) { - uploadFile(input:{file1:$file, file2:$file}) { - success - } - } -""" - -file_upload_mutation_2_operations = ( - '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n ' - 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}", ' - '"variables": {"file1": null, "file2": null}}' -) - -file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' - -file_2_content = """ -This is a second test file -This file will also be sent in the GraphQL mutation -""" - - @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_two_files(aiohttp_server): @@ -837,39 +805,38 @@ async def test_httpx_file_upload_two_files(aiohttp_server): from gql.transport.httpx import HTTPXAsyncTransport - async def handler(request): - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_2_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_2_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content + file_upload_mutation_2 = """ + mutation($file1: Upload!, $file2: Upload!) 
{ + uploadFile(input:{file1:$file, file2:$file}) { + success + } + } + """ - field_3 = await reader.next() - assert field_3.name == "1" - field_3_text = await field_3.text() - assert field_3_text == file_2_content + file_upload_mutation_2_operations = ( + '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n ' + 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}", ' + '"variables": {"file1": null, "file2": null}}' + ) - field_4 = await reader.next() - assert field_4 is None + file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) + file_2_content = """ + This is a second test file + This file will also be sent in the GraphQL mutation + """ app = web.Application() - app.router.add_route("POST", "/", handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + nb_files=2, + expected_map=file_upload_mutation_2_map, + expected_operations=file_upload_mutation_2_operations, + expected_contents=[file_1_content, file_2_content], + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -886,43 +853,19 @@ async def handler(request): file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename - f1 = open(file_path_1, "rb") - f2 = open(file_path_2, "rb") - params = { - "file1": f1, - "file2": f2, + "file1": FileVar(file_path_1), + "file2": FileVar(file_path_2), } result = await session.execute( query, variable_values=params, upload_files=True ) - f1.close() - f2.close() - success = result["success"] - assert success -file_upload_mutation_3 = """ - mutation($files: [Upload!]!) { - uploadFiles(input:{files:$files}) { - success - } - } -""" - -file_upload_mutation_3_operations = ( - '{"query": "mutation ($files: [Upload!]!) {\\n uploadFiles(' - "input: {files: $files})" - ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' -) - -file_upload_mutation_3_map = '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' - - @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_list_of_two_files(aiohttp_server): @@ -930,39 +873,40 @@ async def test_httpx_file_upload_list_of_two_files(aiohttp_server): from gql.transport.httpx import HTTPXAsyncTransport - async def handler(request): - - reader = await request.multipart() - - field_0 = await reader.next() - assert field_0.name == "operations" - field_0_text = await field_0.text() - assert strip_braces_spaces(field_0_text) == file_upload_mutation_3_operations - - field_1 = await reader.next() - assert field_1.name == "map" - field_1_text = await field_1.text() - assert field_1_text == file_upload_mutation_3_map - - field_2 = await reader.next() - assert field_2.name == "0" - field_2_text = await field_2.text() - assert field_2_text == file_1_content + file_upload_mutation_3 = """ + mutation($files: [Upload!]!) { + uploadFiles(input:{files:$files}) { + success + } + } + """ - field_3 = await reader.next() - assert field_3.name == "1" - field_3_text = await field_3.text() - assert field_3_text == file_2_content + file_upload_mutation_3_operations = ( + '{"query": "mutation ($files: [Upload!]!) 
{\\n uploadFiles' + "(input: {files: $files})" + ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' + ) - field_4 = await reader.next() - assert field_4 is None + file_upload_mutation_3_map = ( + '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' + ) - return web.Response( - text=file_upload_server_answer, content_type="application/json" - ) + file_2_content = """ + This is a second test file + This file will also be sent in the GraphQL mutation + """ app = web.Application() - app.router.add_route("POST", "/", handler) + app.router.add_route( + "POST", + "/", + make_upload_handler( + nb_files=2, + expected_map=file_upload_mutation_3_map, + expected_operations=file_upload_mutation_3_operations, + expected_contents=[file_1_content, file_2_content], + ), + ) server = await aiohttp_server(app) url = str(server.make_url("/")) @@ -979,21 +923,19 @@ async def handler(request): file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename - f1 = open(file_path_1, "rb") - f2 = open(file_path_2, "rb") - - params = {"files": [f1, f2]} + params = { + "files": [ + FileVar(file_path_1), + FileVar(file_path_2), + ], + } # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) - f1.close() - f2.close() - success = result["success"] - assert success From 7bf5f8743e9ca21cc1b8f4c4f137de61e8552240 Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Fri, 23 May 2025 12:25:21 +0200 Subject: [PATCH 14/14] Ensure that by default the filename is the basename of the file --- gql/transport/file_upload.py | 5 ++ tests/test_aiohttp.py | 95 ++++++++++++++++++++++++++++++++++++ tests/test_httpx.py | 50 +++++++++++++++++++ tests/test_requests.py | 53 ++++++++++++++++++-- 4 files changed, 200 insertions(+), 3 deletions(-) diff --git a/gql/transport/file_upload.py b/gql/transport/file_upload.py index a2fd4e56..8673ab60 100644 --- a/gql/transport/file_upload.py +++ b/gql/transport/file_upload.py @@ -1,4 +1,5 @@ import io +import os import warnings from typing import Any, Dict, List, Optional, Tuple, Type @@ -34,6 +35,10 @@ def open_file( self._make_file_streamer() else: if isinstance(self.f, str): + if self.filename is None: + # By default we set the filename to the basename + # of the opened file + self.filename = os.path.basename(self.f) self.f = open(self.f, "rb") self._file_opened = True diff --git a/tests/test_aiohttp.py b/tests/test_aiohttp.py index c7ca0dcd..fe36585e 100644 --- a/tests/test_aiohttp.py +++ b/tests/test_aiohttp.py @@ -1,5 +1,6 @@ import io import json +import os import warnings from typing import Mapping @@ -779,6 +780,100 @@ async def test_aiohttp_file_upload_with_content_type(aiohttp_server): assert success +@pytest.mark.asyncio +async def test_aiohttp_file_upload_default_filename_is_basename(aiohttp_server): + from aiohttp import web + + from gql.transport.aiohttp import AIOHTTPTransport + + app = web.Application() + + with TemporaryFile(file_1_content) as test_file: + file_path = test_file.filename + file_basename = os.path.basename(file_path) + + app.router.add_route( + "POST", + "/", + make_upload_handler( + filenames=[file_basename], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) + server = await aiohttp_server(app) + + url = server.make_url("/") + + transport = AIOHTTPTransport(url=url, timeout=10) + + async with Client(transport=transport) as session: + + query = gql(file_upload_mutation_1) + + params = { + 
"file": FileVar( + file_path, + ), + "other_var": 42, + } + + result = await session.execute( + query, variable_values=params, upload_files=True + ) + + success = result["success"] + assert success + + +@pytest.mark.asyncio +async def test_aiohttp_file_upload_with_filename(aiohttp_server): + from aiohttp import web + + from gql.transport.aiohttp import AIOHTTPTransport + + app = web.Application() + + with TemporaryFile(file_1_content) as test_file: + file_path = test_file.filename + + app.router.add_route( + "POST", + "/", + make_upload_handler( + filenames=["filename1.txt"], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) + server = await aiohttp_server(app) + + url = server.make_url("/") + + transport = AIOHTTPTransport(url=url, timeout=10) + + async with Client(transport=transport) as session: + + query = gql(file_upload_mutation_1) + + params = { + "file": FileVar( + file_path, + filename="filename1.txt", + ), + "other_var": 42, + } + + result = await session.execute( + query, variable_values=params, upload_files=True + ) + + success = result["success"] + assert success + + @pytest.mark.asyncio async def test_aiohttp_file_upload_without_session(aiohttp_server, run_sync_test): from aiohttp import web diff --git a/tests/test_httpx.py b/tests/test_httpx.py index 7c34e73b..9558e137 100644 --- a/tests/test_httpx.py +++ b/tests/test_httpx.py @@ -1,3 +1,4 @@ +import os from typing import Any, Dict, Mapping import pytest @@ -668,6 +669,55 @@ def test_code(): await run_sync_test(server, test_code) +@pytest.mark.aiohttp +@pytest.mark.asyncio +async def test_httpx_file_upload_default_filename_is_basename( + aiohttp_server, run_sync_test +): + from aiohttp import web + + from gql.transport.httpx import HTTPXTransport + + app = web.Application() + + with TemporaryFile(file_1_content) as test_file: + file_path = test_file.filename + file_basename = os.path.basename(file_path) + + app.router.add_route( + "POST", + "/", + make_upload_handler( + filenames=[file_basename], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) + server = await aiohttp_server(app) + + url = str(server.make_url("/")) + + def test_code(): + transport = HTTPXTransport(url=url) + + with Client(transport=transport) as session: + query = gql(file_upload_mutation_1) + + # Using FileVar + params = { + "file": FileVar(file_path), + "other_var": 42, + } + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + + await run_sync_test(server, test_code) + + @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_additional_headers(aiohttp_server, run_sync_test): diff --git a/tests/test_requests.py b/tests/test_requests.py index 1cec1360..c184e230 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -1,3 +1,4 @@ +import os import warnings from typing import Any, Dict, Mapping @@ -672,6 +673,55 @@ def test_code(): await run_sync_test(server, test_code) +@pytest.mark.aiohttp +@pytest.mark.asyncio +async def test_requests_file_upload_default_filename_is_basename( + aiohttp_server, run_sync_test +): + from aiohttp import web + + from gql.transport.requests import RequestsHTTPTransport + + app = web.Application() + + with TemporaryFile(file_1_content) as test_file: + file_path = test_file.filename + file_basename = 
os.path.basename(file_path) + + app.router.add_route( + "POST", + "/", + make_upload_handler( + filenames=[file_basename], + expected_map=file_upload_mutation_1_map, + expected_operations=file_upload_mutation_1_operations, + expected_contents=[file_1_content], + ), + ) + server = await aiohttp_server(app) + + url = server.make_url("/") + + def test_code(): + + transport = RequestsHTTPTransport(url=url) + + with Client(transport=transport) as session: + query = gql(file_upload_mutation_1) + + params = { + "file": FileVar(file_path), + "other_var": 42, + } + execution_result = session._execute( + query, variable_values=params, upload_files=True + ) + + assert execution_result.data["success"] + + await run_sync_test(server, test_code) + + @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_file_upload_with_filename(aiohttp_server, run_sync_test): @@ -706,9 +756,6 @@ def test_code(): with open(file_path, "rb") as f: - # Setting the content_type - f.content_type = "application/pdf" # type: ignore - params = { "file": FileVar(f, filename="filename1.txt"), "other_var": 42,