diff --git a/.github/workflows/build-and-test-pr.yml b/.github/workflows/build-and-test-pr.yml
index 68ad165e..ceb98dd8 100644
--- a/.github/workflows/build-and-test-pr.yml
+++ b/.github/workflows/build-and-test-pr.yml
@@ -44,7 +44,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [ "3.8", "3.9","3.10" ]
+        python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13" ]
     steps:
       - uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
diff --git a/.gitignore b/.gitignore
index debb3865..43faab26 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,4 +10,6 @@ _build
 venv
 .vscode
 .coverage
-cov.xml
\ No newline at end of file
+api-saagie.iml
+out/
+.env
diff --git a/README.md b/README.md
index 7df1d242..b1296b92 100644
--- a/README.md
+++ b/README.md
@@ -44,6 +44,8 @@ pip install saagieapi==
 | >= 2023.05 | >= 2.10.0 |
 | >= 2024.01 | >= 2.11.0 |
 | >= 2024.02 | >= 2.12.0 |
+| >= 2024.03 | >= 2.13.0 |
+| >= 2024.05 | >= 2.14.0 |
 
 ## Contributing
diff --git a/pyproject.toml b/pyproject.toml
index bcea57eb..2a829459 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "saagieapi"
-version = "2.13.0"
+version = "2.14.0"
 description = "Python API to interact with Saagie"
 authors = ["Saagie"]
 license = "GLWTPL"
diff --git a/saagieapi/apps/apps.py b/saagieapi/apps/apps.py
index 882cd6ac..b11dc381 100644
--- a/saagieapi/apps/apps.py
+++ b/saagieapi/apps/apps.py
@@ -1467,17 +1467,19 @@ def count_history_statuses(self, history_id, version_number, start_time):
 
     def get_logs(
         self,
+        project_id: str,
         app_id: str,
         app_execution_id: str,
         limit: int = None,
         skip: int = None,
         log_stream: str = None,
-        start_at: str = None,
     ):
         """Get logs of the app
 
         Parameters
         ----------
+        project_id : str
+            UUID of your project
         app_id : str
            UUID of your app
         app_execution_id : str
@@ -1490,9 +1492,6 @@
            Stream of logs to follow.
            Values accepted : [ENVVARS_STDOUT, ENVVARS_STDERR, ORCHESTRATION_STDOUT, ORCHESTRATION_STDERR, STDERR, STDOUT]
            By default, all the streams are retrieved
-        start_at: str, optional
-            Get logs since a specific datetime.
-            Following formats accepted : "2024-04-09 10:00:00" and "2024-04-09T10:00:00"
 
         Returns
         -------
@@ -1508,43 +1507,35 @@
         ...     skip=5,
-        ...     start_at="2024-04-09 10:00:00"
+        ...     project_id="your_project_id"
         ... )
+        {
-            "appLogs": {
-                "count": 25,
-                "content": [
-                    {
-                        "index": 5,
-                        "value": "[I 2024-04-09 13:38:36.982 ServerApp] jupyterlab_git | extension was successfully linked.",
-                        "containerId": "d7104fa7371c5ed6ef540fa8b0620a654a0e02c57136e29f0fcc03d16e36d74f",
-                        "stream": "STDERR",
-                        "recordAt": "2024-04-09T13:38:36.982473892Z"
-                    },
-                    {
-                        "index": 6,
-                        "value": "[W 2024-04-09 13:38:36.987 NotebookApp] 'ip' has moved from NotebookApp to ServerApp. This config will be passed to ServerApp. Be sure to update your config before our next release.",
-                        "containerId": "d7104fa7371c5ed6ef540fa8b0620a654a0e02c57136e29f0fcc03d16e36d74f",
-                        "stream": "STDERR",
-                        "recordAt": "2024-04-09T13:38:36.987400105Z"
-                    }
-                ]
+            "logs": [
+                {
+                    "index": 0,
+                    "stream": "STDERR",
+                    "time": "2024-12-11T14:27:42.858298425Z",
+                    "value": "[I 2024-04-09 13:38:36.982 ServerApp] jupyterlab_git | extension was successfully linked.",
+                },
+                {
+                    "index": 1,
+                    "stream": "STDERR",
+                    "time": "2024-12-11T14:27:42.859697094Z",
+                    "value": "AH00558: httpd: Could not reliably determine the server's fully qualified domain name, using 10.4.3.20. Set the 'ServerName' directive globally to suppress this message"
                 }
+            ],
+            "limit": 10000,
+            "total": 5,
+            "order": "asc",
+            "source": "elastic"
         }
         """
-        params = {
-            "appId": app_id,
-            "appExecutionId": app_execution_id,
-        }
-
-        if limit:
-            params["limit"] = limit
-
-        if skip:
-            params["skip"] = skip
-
-        if log_stream:
-            params["stream"] = log_stream
-
-        if start_at:
-            params["recordAt"] = start_at
-
-        return self.saagie_api.client.execute(query=gql(GQL_GET_APP_LOG), variable_values=params)
+        if limit is None:
+            limit = 10000
+        if skip is None:
+            skip = 0
+        if log_stream is None:
+            log_stream = "ENVVARS_STDOUT,ENVVARS_STDERR,ORCHESTRATION_STDOUT,ORCHESTRATION_STDERR,STDERR,STDOUT"
+        url = f"{self.saagie_api.url_saagie}log-proxy/api/logs/{self.saagie_api.realm}/platform/{self.saagie_api.platform}/project/{project_id}/app_execution/{app_execution_id}?limit={limit}&skip={skip}&streams={log_stream}"
+        response = self.saagie_api.request_client.send(method="GET", url=url, raise_for_status=True)
+
+        return response.json()
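The rewritten get_logs pages the log-proxy REST endpoint with limit/skip instead of issuing a GraphQL query. A minimal sketch of how a caller might walk every log line under the new contract, assuming an already-authenticated SaagieApi instance named saagie (the helper and its page size are illustrative, not part of the library):

# Hypothetical helper: iterate over every log line of an app execution by
# paging with the new limit/skip parameters. All UUIDs are placeholders.
def iter_app_logs(saagie, project_id, app_id, app_execution_id, page_size=1000):
    skip = 0
    while True:
        page = saagie.apps.get_logs(
            project_id=project_id,
            app_id=app_id,
            app_execution_id=app_execution_id,
            limit=page_size,
            skip=skip,
        )
        # "logs" and "total" are the keys of the log-proxy payload shown above.
        yield from page["logs"]
        skip += page_size
        if skip >= page["total"]:
            break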
diff --git a/saagieapi/apps/gql_queries.py b/saagieapi/apps/gql_queries.py
index f8a67703..3435f9c7 100644
--- a/saagieapi/apps/gql_queries.py
+++ b/saagieapi/apps/gql_queries.py
@@ -452,19 +452,3 @@
     countAppHistoryStatuses(appHistoryId: $appHistoryId, versionNumber: $versionNumber, startTime: $startTime)
 }
 """
-
-GQL_GET_APP_LOG = """
-query appLogs($appId: UUID!, $appExecutionId: UUID!, $limit: Int, $skip: Int, $stream: LogStream, $recordAt: String) {
-    appLogs(appId: $appId, appExecutionId: $appExecutionId, limit: $limit, skip: $skip, stream: $stream, recordAt: $recordAt)
-    {
-        count
-        content {
-            index
-            value
-            containerId
-            stream
-            recordAt
-        }
-    }
-}
-"""
diff --git a/saagieapi/gql_queries.py b/saagieapi/gql_queries.py
index 433e6e0b..1cb087b8 100644
--- a/saagieapi/gql_queries.py
+++ b/saagieapi/gql_queries.py
@@ -132,59 +132,3 @@
     )
 }
 """
-
-GQL_COUNT_CONDITION_LOGS = """
-query conditionPipelineCountFilteredLogs($conditionInstanceId: UUID!,
-                                         $projectID: UUID!,
-                                         $streams: [LogStream]!) {
-    conditionPipelineCountFilteredLogs (
-        conditionInstanceID: $conditionInstanceId,
-        projectID: $projectID,
-        streams: $streams
-    )
-}
-"""
-
-GQL_GET_CONDITION_LOGS_BY_CONDITION = """
-query conditionPipelineByNodeIdFilteredLogs($pipelineInstanceID: UUID!,
-                                            $conditionNodeID: UUID!,
-                                            $projectID: UUID!,
-                                            $streams: [LogStream]!) {
-    conditionPipelineByNodeIdFilteredLogs(
-        pipelineInstanceID: $pipelineInstanceID,
-        conditionNodeID: $conditionNodeID,
-        projectID: $projectID,
-        streams: $streams
-    ) {
-        count
-        content {
-            index
-            value
-            stream
-        }
-    }
-}
-"""
-
-GQL_GET_CONDITION_LOGS_BY_INSTANCE = """
-query conditionPipelineFilteredLogs($conditionInstanceId: UUID!,
-                                    $projectId: UUID!,
-                                    $limit: Int,
-                                    $skip: Int,
-                                    $streams: [LogStream]!) {
-    conditionPipelineFilteredLogs(
-        conditionInstanceID: $conditionInstanceId,
-        projectID: $projectId,
-        limit: $limit,
-        skip: $skip,
-        streams: $streams
-    ) {
-        count
-        content {
-            index
-            value
-            stream
-        }
-    }
-}
-"""
diff --git a/saagieapi/jobs/jobs.py b/saagieapi/jobs/jobs.py
index d291fa68..b7a004bc 100644
--- a/saagieapi/jobs/jobs.py
+++ b/saagieapi/jobs/jobs.py
@@ -1457,10 +1457,8 @@ def __launch_request(self, file: str, payload_str: str, params: Dict) -> Dict:
             Dict of the request response
         """
         if file:
-            file_info = Path(file)
-            os.chdir(file_info.parent)
-            file = Path(file_info.name)
-            with file.open(mode="rb") as file_content:
+            file_info = Path(file).absolute()
+            with file_info.open(mode="rb") as file_content:
                 params["file"] = file_content
             try:
                 req = self.saagie_api.client.execute(
@@ -1612,8 +1610,7 @@ def import_from_json(
 
         if version_path.exists():
             if path_to_package := next(version_path.iterdir(), None):
-                os.chdir(path_to_package.parent)
-                file_name = path_to_package.name
+                file_name = path_to_package.absolute()
             else:
                 file_name = ""
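The os.chdir() calls that this change removes mutated process-global state: every relative path resolved afterwards, in any thread, would silently re-base. A small sketch of the side-effect-free pattern the PR adopts (the path is a placeholder):

from pathlib import Path

# Resolve the upload path once; Path.absolute() has no process-wide side
# effects, unlike os.chdir(), which re-bases every later relative path.
def open_package(file: str):
    file_info = Path(file).absolute()
    return file_info.open(mode="rb")

with open_package("exports/job/package/artifact.zip") as file_content:  # placeholder path
    payload = file_content.read()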
{
-    conditionPipelineFilteredLogs(
-        conditionInstanceID: $conditionInstanceId,
-        projectID: $projectId,
-        limit: $limit,
-        skip: $skip,
-        streams: $streams
-    ) {
-        count
-        content {
-            index
-            value
-            stream
-        }
-    }
-}
-"""
diff --git a/saagieapi/repositories/gql_queries.py b/saagieapi/repositories/gql_queries.py
index 386490d1..dffc818b 100644
--- a/saagieapi/repositories/gql_queries.py
+++ b/saagieapi/repositories/gql_queries.py
@@ -206,15 +206,6 @@
             }
             lastReversibleId
         }
-        connectionTypes {
-            id
-            label
-            actions {
-                checkConnection {
-                    scriptId
-                }
-            }
-        }
         technologies {
             id
             technologyId
@@ -258,35 +249,6 @@
                     lastUpdate
                 }
             }
-            ... on ExtJobTechnology {
-                iconUrl
-                contexts {
-                    id
-                    label
-                    available
-                    missingFacets
-                    description
-                    recommended
-                    trustLevel
-                    deprecationDate
-                    lastUpdate
-                    connectionTypeUUID
-                    actions {
-                        getStatus {
-                            scriptId
-                        }
-                        start {
-                            scriptId
-                        }
-                        stop {
-                            scriptId
-                        }
-                        getLogs {
-                            scriptId
-                        }
-                    }
-                }
-            }
             ... on SparkTechnology {
                 contexts {
                     id
diff --git a/saagieapi/repositories/repositories.py b/saagieapi/repositories/repositories.py
index 8b70bd2a..e0aa540e 100644
--- a/saagieapi/repositories/repositories.py
+++ b/saagieapi/repositories/repositories.py
@@ -168,17 +168,6 @@ def get_info(
                 ],
                 'lastReversibleId': 'a17c73ed-fca1-4f25-a343-914c7ac23bae'
             },
-            'connectionTypes': [
-                {
-                    'id': '5b4b8ffb-9228-4f7a-9d39-67fd3c2862d3',
-                    'label': 'AWS Connection',
-                    'actions': {
-                        'checkConnection': {
-                            'scriptId': '9359e392-58a0-42db-9ce9-b68679aa9131'
-                        }
-                    }
-                }
-            ],
             'technologies': [
                 {
                     'id': '1bf79f1d-7e2d-4daf-976d-8702114ab507',
@@ -204,43 +193,6 @@
                         }
                     ]
                 },
-                {
-                    'id': 'db34c9b9-47c7-4dc6-8c3c-2d8ccf5afa11',
-                    'technologyId': 'aws-lambda',
-                    'label': 'AWS Lambda',
-                    'icon': 'aws-lambda',
-                    'repositoryId': '9fcbddfe-a7b7-4d25-807c-ad030782c923',
-                    'available': True,
-                    'missingFacets': [],
-                    'description': 'Run code without thinking about servers. Pay only for the compute time you consume',
-                    'iconUrl': None,
-                    'contexts': [
-                        {
-                            'id': 'functions',
-                            'label': 'Functions',
-                            'available': True,
-                            'missingFacets': [],
-                            'description': 'AWS Lambda Functions',
-                            'recommended': False,
-                            'trustLevel': 'Experimental',
-                            'deprecationDate': None,
-                            'lastUpdate': '2022-08-31T13:05:32.031Z',
-                            'connectionTypeUUID': '5b4b8ffb-9228-4f7a-9d39-67fd3c2862d3',
-                            'actions': {
-                                'getStatus': {
-                                    'scriptId': '50794533-091b-4d66-9463-96f0ce255785'
-                                },
-                                'start': {
-                                    'scriptId': '50794533-091b-4d66-9463-96f0ce255785'
-                                },
-                                'stop': None,
-                                'getLogs': {
-                                    'scriptId': '50794533-091b-4d66-9463-96f0ce255785'
-                                }
-                            }
-                        }
-                    ]
-                }
             ]
         }
     }
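Since 'connectionTypes' disappears from the get_info payload, callers upgrading across 2.14.0 should not index that key unconditionally. A defensive sketch for code that must tolerate both payload shapes (saagie and the repository UUID are placeholders):

repo_info = saagie.repositories.get_info(repository_id="9fcbddfe-a7b7-4d25-807c-ad030782c923")

# dict.get() tolerates both the pre-2.14 payload (key present) and the new one (key gone).
connection_types = repo_info["repository"].get("connectionTypes", [])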
diff --git a/saagieapi/saagie_api.py b/saagieapi/saagie_api.py
index 4139f094..ca0770e3 100644
--- a/saagieapi/saagie_api.py
+++ b/saagieapi/saagie_api.py
@@ -11,10 +11,7 @@
 from .env_vars import EnvVars
 from .gql_queries import (
     GQL_CHECK_CUSTOM_EXPRESSION,
-    GQL_COUNT_CONDITION_LOGS,
     GQL_GET_CLUSTER_INFO,
-    GQL_GET_CONDITION_LOGS_BY_CONDITION,
-    GQL_GET_CONDITION_LOGS_BY_INSTANCE,
     GQL_GET_PLATFORM_INFO,
     GQL_GET_REPOSITORIES_INFO,
     GQL_GET_RUNTIMES,
@@ -72,6 +69,7 @@ def __init__(
 
         self.url_saagie = url_saagie
         self.realm = realm
+        self.platform = id_platform
         self.auth = BearerAuth(
             realm=self.realm, url=self.url_saagie, platform=id_platform, login=user, password=password
         )
@@ -702,7 +700,12 @@ def check_condition_expression(self, expression: str, project_id: str, variables
         }
         return self.client.execute(query=gql(GQL_CHECK_CUSTOM_EXPRESSION), variable_values=params)
 
-    def count_condition_logs(self, condition_instance_id: str, project_id: str, streams: List[str]) -> Dict:
+    def count_condition_logs(
+        self,
+        condition_instance_id: str,
+        project_id: str,
+        streams: List[str],
+    ) -> Dict:
         """Get number of logs line for an instance of a condition on Environment Variable
 
         Parameters
         ----------
@@ -712,7 +715,7 @@ def count_condition_logs(self, condition_instance_id: str, project_id: str, streams: List[str]) -> Dict:
         project_id : str
             UUID of the project
         streams : List[str]
-            List of logs files name to see (example : STDERR, STDOUT)
+            List of log streams to retrieve (accepted values : ENVVARS_STDOUT, ENVVARS_STDERR, ORCHESTRATION_STDOUT, ORCHESTRATION_STDERR, STDOUT, STDERR)
 
         Returns
         -------
@@ -726,99 +729,21 @@
         ...     project_id="your_project_id",
         ...     streams=["STDOUT"]
         ... )
-        {
-            "data": {
-                "conditionPipelineCountFilteredLogs": 4
-            }
-        }
-        """
-
-        params = {"conditionInstanceId": condition_instance_id, "projectID": project_id, "streams": streams}
-
-        return self.client.execute(query=gql(GQL_COUNT_CONDITION_LOGS), variable_values=params)
-
-    def get_condition_instance_logs_by_condition(
-        self,
-        condition_id: str,
-        project_id: str,
-        pipeline_instance_id: str,
-        streams: List[str],
-        limit: int = None,
-        skip: int = None,
-    ) -> Dict:
-        """Get logs for a condition on Environment Variable of a pipeline instance
-
-        Parameters
-        ----------
-        condition_id : str
-            UUID of the condition
-        project_id : str
-            UUID of the project
-        pipeline_instance_id ! str
-            UUID of the pipeline instance
-        streams : List[str]
-            List of logs files name to see (example : STDERR, STDOUT)
-        limit : int
-            Number of logs lines to return from the beginning
-        skip : int
-            Number of logs lines to doesn't display from the beginning
-
-        Returns
-        -------
-        dict
-            Dict of logs lines
-
-        Examples
-        --------
-        >>> saagieapi.get_condition_instance_logs_by_condition(
-        ...     condition_id="condition_node_id",
-        ...     project_id="project_id",
-        ...     pipeline_instance_id="pipeline_instance_id",
-        ...     streams=["STDOUT"]
-        ... )
         {
-            "data": {
-                "conditionPipelineByNodeIdFilteredLogs": {
-                    "count": 4,
-                    "content": [
-                        {
-                            "index": 0,
-                            "value": "2023/05/15 12:55:19 INFO [evaluate_condition] Condition: 'tube_name.contains(\"Tube\") ||",
-                            "stream": "STDOUT"
-                        },
-                        {
-                            "index": 1,
-                            "value": "double(diameter) > 1.0'",
-                            "stream": "STDOUT"
-                        },
-                        {
-                            "index": 2,
-                            "value": "2023/05/15 12:55:19 INFO [evaluate_condition] Condition evaluation took: 4.736725ms",
-                            "stream": "STDOUT"
-                        },
-                        {
-                            "index": 3,
-                            "value": "2023/05/15 12:55:19 INFO [evaluate_condition] Result: true",
-                            "stream": "STDOUT"
-                        }
-                    ]
-                }
-            }
+            "logs": [
+            ],
+            "limit": 1,
+            "total": 5,
+            "order": "asc",
+            "source": "elastic"
         }
-        """  # pylint: disable=line-too-long
-        params = {
-            "conditionNodeID": condition_id,
-            "projectID": project_id,
-            "pipelineInstanceID": pipeline_instance_id,
-            "streams": streams,
-        }
-        if limit:
-            params["limit"] = limit
-
-        if skip:
-            params["skip"] = skip
+        """
+        log_stream = ",".join(streams)
+        url = f"{self.url_saagie}log-proxy/api/logs/{self.realm}/platform/{self.platform}/project/{project_id}/condition_instance/{condition_instance_id}?limit=1&skip=0&streams={log_stream}"
+        response = self.request_client.send(method="GET", url=url, raise_for_status=True)
 
-        return self.client.execute(query=gql(GQL_GET_CONDITION_LOGS_BY_CONDITION), variable_values=params)
+        return response.json()
 
     def get_condition_instance_logs_by_instance(
         self,
@@ -837,7 +762,7 @@
         project_id : str
             UUID of the project
         streams : List[str]
-            List of logs files name to see (example : STDERR, STDOUT)
+            List of log streams to retrieve (accepted values : ENVVARS_STDOUT, ENVVARS_STDERR, ORCHESTRATION_STDOUT, ORCHESTRATION_STDERR, STDOUT, STDERR)
         limit : int
             Number of logs lines to return from the beginning
         skip : int
@@ -855,44 +780,27 @@
         >>> saagieapi.get_condition_instance_logs_by_instance(
         ...     condition_instance_id="your_condition_instance_id",
         ...     project_id="project_id"
         ... )
         {
-            "data": {
-                "conditionPipelineByNodeIdFilteredLogs": {
-                    "count": 4,
-                    "content": [
-                        {
-                            "index": 0,
-                            "value": "2023/05/15 12:55:19 INFO [evaluate_condition] Condition: 'tube_name.contains(\"Tube\") ||",
-                            "stream": "STDOUT"
-                        },
-                        {
-                            "index": 1,
-                            "value": "double(diameter) > 1.0'",
-                            "stream": "STDOUT"
-                        },
-                        {
-                            "index": 2,
-                            "value": "2023/05/15 12:55:19 INFO [evaluate_condition] Condition evaluation took: 4.736725ms",
-                            "stream": "STDOUT"
-                        },
-                        {
-                            "index": 3,
-                            "value": "2023/05/15 12:55:19 INFO [evaluate_condition] Result: true",
-                            "stream": "STDOUT"
-                        }
-                    ]
+            "logs": [
+                {
+                    "index": 0,
+                    "value": "2023/05/15 12:55:19 INFO [evaluate_condition] Condition: 'tube_name.contains(\"Tube\") ||",
+                    "stream": "STDOUT"
                 }
-            }
+            ],
+            "limit": 1,
+            "total": 1,
+            "order": "asc",
+            "source": "elastic"
         }
         """  # pylint: disable=line-too-long
-        params = {
-            "conditionInstanceId": condition_instance_id,
-            "projectId": project_id,
-            "streams": streams,
-        }
-        if limit:
-            params["limit"] = limit
-        if skip:
-            params["skip"] = skip
+        if limit is None:
+            limit = 10000
+        if skip is None:
+            skip = 0
+        log_stream = ",".join(streams)
+
+        url = f"{self.url_saagie}log-proxy/api/logs/{self.realm}/platform/{self.platform}/project/{project_id}/condition_instance/{condition_instance_id}?limit={limit}&skip={skip}&streams={log_stream}"
+        response = self.request_client.send(method="GET", url=url, raise_for_status=True)
 
-        return self.client.execute(query=gql(GQL_GET_CONDITION_LOGS_BY_INSTANCE), variable_values=params)
+        return response.json()
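Both condition-log helpers now return the raw log-proxy JSON, so the old GraphQL wrappers are gone and the line count lives under "total". A hedged migration sketch, assuming an authenticated SaagieApi instance named saagie and placeholder UUIDs:

# Before 2.14 the count was nested under a GraphQL field
# (per the old docstring example):
# count = result["conditionPipelineCountFilteredLogs"]

payload = saagie.count_condition_logs(
    condition_instance_id="your_condition_instance_id",
    project_id="your_project_id",
    streams=["STDOUT", "STDERR"],
)
count = payload["total"]  # REST payload keys: "logs", "limit", "total", "order", "source"

# Full lines, if needed, via the by-instance variant:
logs = saagie.get_condition_instance_logs_by_instance(
    condition_instance_id="your_condition_instance_id",
    project_id="your_project_id",
    streams=["STDOUT", "STDERR"],
)
for line in logs["logs"]:
    print(line["index"], line["stream"], line["value"])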
diff --git a/tests/integration/apps_integration_test.py b/tests/integration/apps_integration_test.py
index 2c9f9357..79a81b37 100644
--- a/tests/integration/apps_integration_test.py
+++ b/tests/integration/apps_integration_test.py
@@ -351,14 +351,15 @@ def test_count_history_statuses(create_global_project, create_then_delete_app_from_scratch):
     @staticmethod
     def test_get_app_logs(create_global_project, create_then_delete_app_from_scratch):
         conf = create_global_project
+        project_id = conf.project_id
+
         app_id = create_then_delete_app_from_scratch
 
         app_info = conf.saagie_api.apps.run(app_id=app_id)
         app_info = conf.saagie_api.apps.get_info(app_id=app_id)
-
         app_execution_id = app_info["app"]["history"]["currentExecutionId"]
 
-        result = conf.saagie_api.apps.get_logs(app_id=app_id, app_execution_id=app_execution_id)
+        result = conf.saagie_api.apps.get_logs(project_id=project_id, app_id=app_id, app_execution_id=app_execution_id)
 
-        assert "appLogs" in result
+        assert "logs" in result
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 68908ed4..200b3ded 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -109,7 +109,7 @@ def delete_test_global_env_var(conf):
     Conf.saagie_api.projects.delete(Conf.project_id)
 
     # Delete output directory if it wasn't present before
-    if not Conf.output_dir_present:
+    if Conf.output_dir_present:
         shutil.rmtree(Conf.output_dir)
 
     # Delete global environment variable
diff --git a/tests/integration/jobs_integration_test.py b/tests/integration/jobs_integration_test.py
index 27955fc5..703e76a5 100644
--- a/tests/integration/jobs_integration_test.py
+++ b/tests/integration/jobs_integration_test.py
@@ -110,6 +110,8 @@ def test_stop_job(create_then_delete_job, create_global_project):
 
         job_instance_status = conf.saagie_api.jobs.get_instance(job_instance_id)["jobInstance"]["status"]
 
+        time.sleep(30)
+
         assert job_instance_status in ("KILLED", "KILLING")
 
     @staticmethod
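Note that the added time.sleep(30) runs after job_instance_status has already been captured, so it cannot influence the assertion; if the goal is to give the kill time to settle, polling until the terminal state is reached is sturdier. A possible alternative, sketched with the names used in the test above (the 60 s timeout is arbitrary):

import time

# Poll until the instance leaves the KILLING state, instead of sleeping blindly.
deadline = time.time() + 60
while time.time() < deadline:
    job_instance_status = conf.saagie_api.jobs.get_instance(job_instance_id)["jobInstance"]["status"]
    if job_instance_status == "KILLED":
        break
    time.sleep(5)

assert job_instance_status in ("KILLED", "KILLING")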
diff --git a/tests/integration/repositories_integration_test.py b/tests/integration/repositories_integration_test.py
index 6e489029..6b4d9929 100644
--- a/tests/integration/repositories_integration_test.py
+++ b/tests/integration/repositories_integration_test.py
@@ -61,8 +61,8 @@ def test_synchronize_repository_from_zip(create_then_delete_repository_from_zip, create_global_project):
         conf = create_global_project
         repository_id = create_then_delete_repository_from_zip
 
-        repository_input = {"file_name": "new_technologies.zip"}
-        file_path = os.path.join(conf.dir_path, "resources", "repositories", repository_input["file_name"])
+        file_path = os.path.join(conf.dir_path, "resources", "repositories", "new_technologies.zip")
+        repository_input = {"file_name": file_path}
 
         conf.saagie_api.repositories.synchronize(repository_id, file=file_path)
         repository_info = conf.saagie_api.repositories.get_info(repository_id)
@@ -95,9 +95,8 @@ def create_then_delete_repository_from_zip(create_repository_from_zip, create_global_project):
     def test_revert_last_synchronization_from_zip(create_then_delete_repository_from_zip, create_global_project):
         conf = create_global_project
         repository_id = create_then_delete_repository_from_zip
-        previous_zip_name = "technologies.zip"
-        new_zip_name = "new_technologies.zip"
-        new_file_path = os.path.join(conf.dir_path, "resources", "repositories", new_zip_name)
+        previous_zip_path = os.path.join(conf.dir_path, "resources", "repositories", "technologies.zip")
+        new_file_path = os.path.join(conf.dir_path, "resources", "repositories", "new_technologies.zip")
 
         conf.saagie_api.repositories.synchronize(repository_id, file=new_file_path)
         new_repository_info = conf.saagie_api.repositories.get_info(repository_id)
@@ -105,8 +104,8 @@ def test_revert_last_synchronization_from_zip(create_then_delete_repository_from_zip, create_global_project):
         conf.saagie_api.repositories.revert_last_synchronization(repository_id)
         repository_info = conf.saagie_api.repositories.get_info(repository_id)
 
-        assert new_repository_info["repository"]["source"]["name"] == new_zip_name
-        assert repository_info["repository"]["source"]["name"] == previous_zip_name
+        assert new_repository_info["repository"]["source"]["name"] == new_file_path
+        assert repository_info["repository"]["source"]["name"] == previous_zip_path
 
     @staticmethod
     def test_create_repository_from_zip(create_then_delete_repository_from_zip, create_global_project):
diff --git a/tests/integration/saagie_api_integration_test.py b/tests/integration/saagie_api_integration_test.py
index 846834ca..4bafdc80 100644
--- a/tests/integration/saagie_api_integration_test.py
+++ b/tests/integration/saagie_api_integration_test.py
@@ -18,46 +18,20 @@ def test_count_condition_logs(create_global_project, create_graph_pipeline):
         if pipeline_info["graphPipeline"]["instances"]:
             for cond_inst in pipeline_info["graphPipeline"]["instances"][0]["conditionsInstance"]:
                 if cond_inst["id"]:
-                    cond_inst_id = cond_inst["id"]
+                    condition_node_id = cond_inst["conditionNodeId"]
+                    condition_instance_id = cond_inst["id"]
                     break
 
-        nb_logs = conf.saagie_api.count_condition_logs(
-            condition_instance_id=cond_inst_id, project_id=conf.project_id, streams=["STDOUT", "STDERR"]
-        )
-
-        conf.saagie_api.pipelines.delete(pipeline_id)
-        conf.saagie_api.jobs.delete(job_id)
-
-        # nb logs lines can be different for 2 instances, test only that result is int
-        assert isinstance(nb_logs["conditionPipelineCountFilteredLogs"], int)
-
-    @staticmethod
-    def test_get_condition_instance_logs_by_condition(create_global_project, create_graph_pipeline):
-        conf = create_global_project
-
-        pipeline_id, job_id = create_graph_pipeline
-        conf.saagie_api.pipelines.run_with_callback(pipeline_id=pipeline_id)
-
-        pipeline_info = conf.saagie_api.pipelines.get_info(pipeline_id=pipeline_id)
-
-        if pipeline_info["graphPipeline"]["instances"]:
-            pipeline_instance_id = pipeline_info["graphPipeline"]["instances"][0]["id"]
-            for cond_inst in pipeline_info["graphPipeline"]["instances"][0]["conditionsInstance"]:
-                if cond_inst["id"]:
-                    cond_id = cond_inst["conditionNodeId"]
-                    break
-
-        logs = conf.saagie_api.get_condition_instance_logs_by_condition(
-            condition_id=cond_id,
+        logs = conf.saagie_api.count_condition_logs(
+            condition_instance_id=condition_instance_id,
             project_id=conf.project_id,
-            pipeline_instance_id=pipeline_instance_id,
             streams=["STDOUT", "STDERR"],
         )
 
         conf.saagie_api.pipelines.delete(pipeline_id)
         conf.saagie_api.jobs.delete(job_id)
 
-        assert "content" in logs["conditionPipelineByNodeIdFilteredLogs"]
+        assert 5 == logs["total"]
 
     @staticmethod
     def test_get_condition_instance_logs_by_instance(create_global_project, create_graph_pipeline):
@@ -71,11 +45,12 @@ def test_get_condition_instance_logs_by_instance(create_global_project, create_graph_pipeline):
         if pipeline_info["graphPipeline"]["instances"]:
             for cond_inst in pipeline_info["graphPipeline"]["instances"][0]["conditionsInstance"]:
                 if cond_inst["id"]:
-                    cond_inst_id = cond_inst["id"]
+                    condition_node_id = cond_inst["conditionNodeId"]
+                    condition_instance_id = cond_inst["id"]
                     break
 
         logs = conf.saagie_api.get_condition_instance_logs_by_instance(
-            condition_instance_id=cond_inst_id,
+            condition_instance_id=condition_instance_id,
             project_id=conf.project_id,
             streams=["STDOUT", "STDERR"],
         )
@@ -83,4 +58,4 @@
         conf.saagie_api.pipelines.delete(pipeline_id)
         conf.saagie_api.jobs.delete(job_id)
 
-        assert "content" in logs["conditionPipelineFilteredLogs"]
+        assert 5 == logs["total"]
diff --git a/tests/unit/apps_unit_test.py b/tests/unit/apps_unit_test.py
index 238ff3fb..2d310611 100644
--- a/tests/unit/apps_unit_test.py
+++ b/tests/unit/apps_unit_test.py
@@ -785,39 +785,3 @@ def test_count_history_app_statuses(self, saagie_api_mock):
         app.count_history_statuses(history_id=history_id, version_number=1, start_time="2024-04-10T14:26:27.073Z")
 
         saagie_api_mock.client.execute.assert_called_with(query=expected_query, variable_values=params)
-
-    def test_get_app_logs_gql(self):
-        query = gql(GQL_GET_APP_LOG)
-        self.client.validate(query)
-
-    def test_get_app_logs(self, saagie_api_mock):
-        app = Apps(saagie_api_mock)
-
-        app_id = "70e85ade-d6cc-4a90-8d7d-639adbd25e5d"
-        app_execution_id = "e3e31074-4a12-450e-96e4-0eae7801dfca"
-        limit = 2
-        skip = 5
-        stream = "STDERR"
-        start_at = "2024-04-09 10:00:00"
-
-        params = {
-            "appId": app_id,
-            "appExecutionId": app_execution_id,
-            "limit": limit,
-            "skip": skip,
-            "stream": stream,
-            "recordAt": start_at,
-        }
-
-        expected_query = gql(GQL_GET_APP_LOG)
-
-        app.get_logs(
-            app_id=app_id,
-            app_execution_id=app_execution_id,
-            limit=limit,
-            skip=skip,
-            log_stream=stream,
-            start_at=start_at,
-        )
-
-        saagie_api_mock.client.execute.assert_called_with(query=expected_query, variable_values=params)
diff --git a/tests/unit/resources/schema.graphqls b/tests/unit/resources/schema.graphqls
index 22292ad6..06280fe8 100644
--- a/tests/unit/resources/schema.graphqls
+++ b/tests/unit/resources/schema.graphqls
@@ -330,20 +330,6 @@ type Query {
     # List job and pipeline instances across all accessible projects in a timeframe.
     projectReports(timeSlot: Int!, limit: Int!, skip: Int!): ProjectReports!
-
-    # Get connections related to a project.
-    getConnectionsByProject(projectId: UUID!): [Connection]!
-
-    # Get a connection's details from its UUID.
-    #
-    # You can only get a connection's details if you have at least the viewer role in the project associated to the connection or in all projects.
-    connection(id: UUID!): Connection
-
-    # Get Ext-Job values for a given param.
-    getExtJobParamValues(connectionId: UUID!, technologyId: UUID!, contextId: String!, paramId: String!, additionalContext: [EntryInput]!): ExtJobParamValueCallResult!
-
-    # Check if the connection is properly configured.
-    checkConnection(projectId: UUID!, connectionId: UUID, repositoryId: UUID!, connectionTypeId: String!, parameters: [EntryInput]!): CheckConnectionCallResult!
 }
 
 type Mutation {
@@ -769,25 +755,6 @@
     # You can only duplicate a volume if you have at least role editor on the project associated to the volume or on all projects.
     duplicateVolume(originalVolumeId: UUID!): Volume!
-
-    # Create a Connection related to a project.
-    # **Fields validation:**
-    #   - name: must be unique (case insensitive) in the project, shorter than 255 characters and cannot be empty.
-    #   - project: must exists. Use 'projects' query to list all projects.
-    #   - connectionType: must exists in Technology-Catalog.
-    #   - Values: must comply with Connection Type
-    createProjectConnection(connectionInput: ConnectionInput): Connection!
-
-    # Edit a Connection related to a project.
-    # **Fields validation:**
-    #   - name: must be unique (case insensitive) in the project, shorter than 255 characters and cannot be empty.
-    #   - project: must exists. Use 'projects' query to list all projects.
-    #   - connectionType: must exists in Technology-Catalog.
-    #   - Values: must comply with Connection Type
-    editProjectConnection(connectionInput: ConnectionEditionInput!): Connection!
-
-    # Delete a batch of Connections related to one project.
-    deleteProjectConnections(connectionIds: [UUID!]!): Boolean!
 }
 
 input ProjectInput {
@@ -868,18 +835,11 @@ input JobVersionInput {
     extraTechnology: ExtraTechnologyInput
     # DEPRECATED. Use JobInput.doesUseGPU instead.
     doesUseGPU: Boolean
-    connectionId: UUID
-    externalJobParams: [ExternalJobParamInput!]
     # Version reference url (http or https) pushed only by API
     sourceUrl: String
 }
 
-input ExternalJobParamInput {
-    id: String!
-    value: String
-}
-
 input JobPackageInput {
     name: String!
     # DEPRECATED. Use the same field at root of jobVersion instead.
@@ -1581,8 +1541,6 @@ type JobVersion {
     deletableState: JobVersionDeletableState
     creator: String!
     doesUseGPU: Boolean @deprecated(reason: "Use Job.doesUseGPU instead")
-    connectionId: UUID
-    externalJobParams: [ExternalJobParam!]
     # Version reference url (http or https) pushed only by API.
     sourceUrl: String
 
@@ -1593,11 +1551,6 @@ type JobVersionDeletableState {
     reasons: [JobVersionDeletableStateReason!]
 }
 
-type ExternalJobParam {
-    id: String!
-    value: String!
-}
-
 type JobPackage {
     name: String!
     downloadUrl: String
@@ -1968,65 +1921,6 @@ type PipelineInstanceReport {
     status: InstanceStatus!
 }
 
-input ConnectionInput {
-    projectId: UUID!,
-    repositoryId: UUID!,
-    connectionTypeId: String!,
-    name: String!,
-    parameters: [ConnectionParamValueInput!]!
-}
-
-input ConnectionEditionInput {
-    id: UUID!,
-    name: String!,
-    parameters: [ConnectionParamValueInput!]!
-}
-
-input ConnectionParamValueInput {
-    id: String!
-    value: String
-    isPassword: Boolean!
-}
-
-type Connection {
-    id: UUID!
-    projectId: UUID!,
-    name: String,
-    repositoryId: UUID!,
-    connectionTypeId: String!,
-    parameters: [ConnectionParamValue!]!,
-    creator: String!,
-    creationDate: DateTime,
-    lastUpdate: DateTime,
-    jobs: [Job!]
-}
-
-type ConnectionParamValue {
-    id: String!
-    value: String
-    isPassword: Boolean!
-}
-
-type ExtJobParamValueCallResult {
-    result: [ExtJobParamValue!]
-    error: String
-}
-
-type ExtJobParamValue {
-    id: String!
-    label: String!
-}
-
-type CheckConnectionCallResult {
-    result: CheckConnectionResult
-    error: String
-}
-
-type CheckConnectionResult {
-    ok: Boolean!
-    message: String
-}
-
 type Entry {
     key: String!
     value: String
diff --git a/tests/unit/resources/technology_schema.graphqls b/tests/unit/resources/technology_schema.graphqls
index 918a7112..4aae4722 100644
--- a/tests/unit/resources/technology_schema.graphqls
+++ b/tests/unit/resources/technology_schema.graphqls
@@ -4,12 +4,6 @@ schema {
 }
 
 type Query {
-    "Get a connection type details by repository UUID and connectionTypeId"
-    connectionType(connectionTypeId: String!, repositoryId: UUID!): ConnectionType!
-    "Get the list of connection Types"
-    connectionTypes: [ConnectionType!]!
-    "Get multiple connection Types details by providing their UUID"
-    connectionTypesByIds(ids: [UUID!]!): [ConnectionType!]!
     "Get all enabled facets"
     facets: [Facet!]!
     "List all repositories"
@@ -39,8 +33,6 @@ type Mutation {
 
 "Represents a repository with at least one technology to synchronize"
 type Repository {
-    "List of connection types of the repository"
-    connectionTypes: [ConnectionType!]!
     "Date of creation of the repository"
     creationDate: Instant!
     "Creator of the repository"
@@ -130,22 +122,6 @@ type SparkTechnology implements Technology {
     defaultResources: ResourceSettings!
 }
 
-type ExtJobTechnology implements Technology {
-    available: Boolean!
-    contexts: [ExtJobContext!]!
-    customFlags: [Flag!]!
-    description: String
-    facets: [String!]!
-    icon: String
-    iconUrl: String
-    id: UUID!
-    label: String!
-    lastUpdate: Instant!
-    missingFacets: [String!]!
-    repositoryId: UUID!
-    technologyId: String!
-}
-
 interface Context {
     available: Boolean!
     deprecationDate: Instant
@@ -222,24 +198,6 @@ type SparkJobContext implements Context {
     defaultResources: ResourceSettings!
 }
 
-type ExtJobContext implements Context {
-    actions: ExtJobActions!
-    available: Boolean!
-    connectionTypeId: String!
-    connectionTypeUUID: UUID!
-    deprecationDate: Instant
-    description: String
-    facets: [String!]!
-    id: String!
-    label: String!
-    lastUpdate: Instant
-    missingFacets: [String!]!
-    parameters: [ExtJobParameter!]!
-    recommended: Boolean!
-    trustLevel: TrustLevel!
-}
-
-
 type ResourceSettings {
     cpu: CpuResourceSettings!
     memory: MemoryResourceSettings!
@@ -318,43 +276,11 @@ type VolumeInfo {
     size: DataSize!
 }
 
-type ExtJobActions {
-    getLogs: ScriptCall
-    getStatus: ScriptCall!
-    start: ScriptCall!
-    stop: ScriptCall
-}
-
-type ExtJobParameter {
-    comment: String
-    defaultValue: String
-    dependsOn: [String!]
-    dynamicValues: ScriptCall
-    id: String!
-    label: String!
-    mandatory: Boolean!
-    staticValues: [ExtJobParameterStaticValue!]
-    type: ExtFeatureType!
-}
-
 type ScriptCall {
     function: String!
     scriptId: UUID!
 }
 
-type ExtJobParameterStaticValue {
-    id: String!
-    label: String!
-}
-
-enum ExtFeatureType {
-    DYNAMIC_SELECT
-    PASSWORD
-    STATIC_SELECT
-    TEXT
-    TOGGLE
-}
-
 type Script {
     id: UUID!
     lastUpdate: Instant!
@@ -413,8 +339,6 @@ enum SynchronizationTriggerType {
 }
 
 type SuccessfulSynchronization implements SynchronizationReport {
-    "List of connection types synchronization report"
-    connectionTypeReports: [ConnectionTypeSynchronizationReport!]!
     "Date of synchronization end"
     endedAt: Instant!
     "Unique identifier (UUID format: see [RFC 4122](https://tools.ietf.org/html/rfc4122))"
@@ -490,17 +414,6 @@ type ContextSynchronizationReport {
     status: SynchronizationStatus!
 }
 
-type ConnectionTypeSynchronizationReport {
-    "Connection type ID"
-    connectionTypeId: String!
-    "Connection type label"
-    connectionTypeLabel: String!
-    "Details on the Connection type synchronization status, including the details of the failure if applicable"
-    message: String
-    "Connection type synchronization status: Created, Updated, Deleted or Failed"
-    status: SynchronizationStatus!
-}
-
 enum SynchronizationStatus {
     "Created"
     CREATED
@@ -522,69 +435,8 @@ type Manual {
     uselessField: Boolean
 }
 
-"Represent a connection Type that could be used with an external technology"
-type ConnectionType {
-    "The actions associated with the connection"
-    actions: ConnectionTypeActions
-    "The connection Type id"
-    connectionTypeId: String!
-    "The connection Type database unique id"
-    id: UUID!
-    "The connection Type label"
-    label: String!
-    "The last time the connection was updated"
-    lastUpdate: Instant
-    "The connection Type list of parameters"
-    parameters: [ConnectionTypeParameter!]!
-    "The connection Type repository id"
-    repositoryId: UUID!
-}
-
-"The actions of a connection type"
-type ConnectionTypeActions {
-    checkConnection: ScriptCall
-}
-
-"Represent an item describing available parameter for a connection"
-type ConnectionTypeParameter {
-    "Comment of the parameter"
-    comment: String
-    "Default value of the parameter"
-    defaultValue: String
-    "Link to another depending parameters, via names"
-    dependsOn: [String!]
-    "Id of the parameter"
-    id: String!
-    "Label of the parameter"
-    label: String!
-    "Mandatory true/false of this parameter"
-    mandatory: Boolean!
-    "Possible values of the parameter"
-    staticValues: [ConnectionTypeParameterStaticValue!]
-    "Type of the parameter: TEXT, PASSWORD, STATIC_SELECT or TOGGLE"
-    type: ConnectionTypeParameterEnum!
-}
-
-"Type of the parameter : TEXT, PASSWORD, STATIC_SELECT or TOGGLE"
-enum ConnectionTypeParameterEnum {
-    PASSWORD
-    STATIC_SELECT
-    TEXT
-    TOGGLE
-}
-
-"A value for a static select"
-type ConnectionTypeParameterStaticValue {
-    "The id used in the scripts"
-    id: String!
-    "The human readable description of the value"
-    label: String!
-}
-
 "Represents a repository that have just been deleted"
 type DeletedRepository {
-    "List of connection type of the repository"
-    connectionTypes: [ConnectionType!]!
     "Date of creation of the repository"
     creationDate: Instant!
     "Creator of the repository"
diff --git a/tests/unit/saagie_api_unit_test.py b/tests/unit/saagie_api_unit_test.py
index 241d877b..884bc295 100644
--- a/tests/unit/saagie_api_unit_test.py
+++ b/tests/unit/saagie_api_unit_test.py
@@ -9,10 +9,7 @@
 from saagieapi import SaagieApi
 from saagieapi.gql_queries import (
     GQL_CHECK_CUSTOM_EXPRESSION,
-    GQL_COUNT_CONDITION_LOGS,
     GQL_GET_CLUSTER_INFO,
-    GQL_GET_CONDITION_LOGS_BY_CONDITION,
-    GQL_GET_CONDITION_LOGS_BY_INSTANCE,
     GQL_GET_PLATFORM_INFO,
     GQL_GET_REPOSITORIES_INFO,
 )
@@ -142,15 +139,3 @@ def test_check_technology_configured_technology_not_configured():
     def test_check_custom_expression(self):
         query = gql(GQL_CHECK_CUSTOM_EXPRESSION)
         self.client.validate(query)
-
-    def test_count_condition_logs(self):
-        query = gql(GQL_COUNT_CONDITION_LOGS)
-        self.client.validate(query)
-
-    def test_get_condition_logs_by_condition(self):
-        query = gql(GQL_GET_CONDITION_LOGS_BY_CONDITION)
-        self.client.validate(query)
-
-    def test_get_condition_logs_by_instance(self):
-        query = gql(GQL_GET_CONDITION_LOGS_BY_INSTANCE)
-        self.client.validate(query)