From 01139fd95ff42fc866e11dc4546bd0a7b35d3160 Mon Sep 17 00:00:00 2001
From: Olivier Le Thanh Duong
Date: Thu, 20 Feb 2025 10:31:24 +0100
Subject: [PATCH 1/2] Fix start error in load_persistent_executions

Fix a parse error that prevented the Supervisor from starting when loading
persistent executions, as it could not parse the gpus field:

Traceback (most recent call last):
  File "/usr/lib/python3.10/runpy.py", line 196, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "/usr/lib/python3.10/runpy.py", line 86, in _run_code
    exec(code, run_globals)
  File "/opt/aleph-vm/aleph/vm/orchestrator/__main__.py", line 4, in <module>
    main()
  File "/opt/aleph-vm/aleph/vm/orchestrator/cli.py", line 379, in main
    supervisor.run()
  File "/opt/aleph-vm/aleph/vm/orchestrator/supervisor.py", line 184, in run
    asyncio.run(pool.load_persistent_executions())
  File "/usr/lib/python3.10/asyncio/runners.py", line 44, in run
    return loop.run_until_complete(main)
  File "/usr/lib/python3.10/asyncio/base_events.py", line 649, in run_until_complete
    return future.result()
  File "/opt/aleph-vm/aleph/vm/pool.py", line 252, in load_persistent_executions
    execution.gpus = parse_raw_as(List[HostGPU], saved_execution.gpus)
  File "pydantic/tools.py", line 74, in pydantic.tools.parse_raw_as
    obj = load_str_bytes(
  File "pydantic/parse.py", line 37, in pydantic.parse.load_str_bytes
    return json_loads(b)
  File "/usr/lib/python3.10/json/__init__.py", line 339, in loads
    raise TypeError(f'the JSON object must be str, bytes or bytearray, '
TypeError: the JSON object must be str, bytes or bytearray, not NoneType
---
 src/aleph/vm/pool.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/aleph/vm/pool.py b/src/aleph/vm/pool.py
index 90cd697a1..edcccd43a 100644
--- a/src/aleph/vm/pool.py
+++ b/src/aleph/vm/pool.py
@@ -249,7 +249,7 @@ async def load_persistent_executions(self):
             if execution.is_running:
                 # TODO: Improve the way that we re-create running execution
                 # Load existing GPUs assigned to VMs
-                execution.gpus = parse_raw_as(List[HostGPU], saved_execution.gpus)
+                execution.gpus = parse_raw_as(List[HostGPU], saved_execution.gpus) if saved_execution.gpus else []
                 # Load and instantiate the rest of resources and already assigned GPUs
                 await execution.prepare()
                 if self.network:


From 367cddc5a12918dc9efa415635c10ed612694ae2 Mon Sep 17 00:00:00 2001
From: Olivier Le Thanh Duong
Date: Thu, 20 Feb 2025 10:52:08 +0100
Subject: [PATCH 2/2] Fix packaging issue caused by sevctl

---
 packaging/Makefile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packaging/Makefile b/packaging/Makefile
index a601f1c9a..eaea196bb 100644
--- a/packaging/Makefile
+++ b/packaging/Makefile
@@ -48,7 +48,7 @@ download-ipfs-kubo: target-dir build-dir
 	curl -fsSL https://github.com/ipfs/kubo/releases/download/v0.23.0/kubo_v0.23.0_linux-amd64.tar.gz | tar -xz --directory ./target/kubo
 
 target/bin/sevctl:
-	cargo install --git https://github.com/virtee/sevctl.git --rev v0.6.0 --target x86_64-unknown-linux-gnu --root ./target
+	cargo install --locked --git https://github.com/virtee/sevctl.git --rev v0.6.0 --target x86_64-unknown-linux-gnu --root ./target
 	./target/bin/sevctl -V
 
 version:
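
Note on patch 1/2: pydantic v1's parse_raw_as() hands its second argument straight to json.loads(), which only accepts str, bytes or bytearray, so a saved execution whose gpus value is None crashes the Supervisor at startup, as the traceback shows. Below is a minimal, self-contained sketch of the guard; the HostGPU model here is a simplified stand-in assumed for illustration, and only the guard expression mirrors the patch.

# Standalone sketch (pydantic v1) of the failure mode and the guard from patch 1/2.
from typing import List, Optional

from pydantic import BaseModel, parse_raw_as


class HostGPU(BaseModel):
    # Simplified stand-in for aleph-vm's HostGPU model; the real field set may differ.
    pci_host: str


def load_saved_gpus(raw: Optional[str]) -> List[HostGPU]:
    # Mirrors the patched expression: parse only when the saved value is a
    # non-empty JSON string; None (a NULL column) yields an empty list instead
    # of reaching json.loads() and raising TypeError.
    return parse_raw_as(List[HostGPU], raw) if raw else []


print(load_saved_gpus(None))                              # -> []
print(load_saved_gpus('[{"pci_host": "0000:01:00.0"}]'))  # -> [HostGPU(pci_host='0000:01:00.0')]

For patch 2/2, --locked makes cargo install build sevctl v0.6.0 with the dependency versions pinned in its committed Cargo.lock instead of re-resolving them, the usual remedy when newer transitive crates break the build of an older pinned revision.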