diff --git a/src/cpp/src/whisper/pipeline.cpp b/src/cpp/src/whisper/pipeline.cpp
index a3c18fe0ce..781956f919 100644
--- a/src/cpp/src/whisper/pipeline.cpp
+++ b/src/cpp/src/whisper/pipeline.cpp
@@ -182,7 +182,7 @@ ov::genai::WhisperPipeline::WhisperPipeline(const std::filesystem::path& models_
     auto start_time = std::chrono::steady_clock::now();
     if (device == "NPU") {
         auto properties_copy = properties;
-        const bool use_static_pipeline = utils::pop_or_default(properties_copy, "STATIC_PIPELINE", true);
+        const bool use_static_pipeline = utils::pop_or_default(properties_copy, "STATIC_PIPELINE", false);
         if (!use_static_pipeline) {
             m_impl = std::make_unique(models_path, device, properties_copy);
         } else {
diff --git a/tests/python_tests/test_whisper_pipeline_static.py b/tests/python_tests/test_whisper_pipeline_static.py
index bb68e64634..44f5724df8 100644
--- a/tests/python_tests/test_whisper_pipeline_static.py
+++ b/tests/python_tests/test_whisper_pipeline_static.py
@@ -16,7 +16,8 @@
 # and robustness of the WhisperStaticPipeline on NPUW:CPU.
 config = {"NPU_USE_NPUW" : "YES",
           "NPUW_DEVICES" : "CPU",
-          "NPUW_ONLINE_PIPELINE" : "NONE"}
+          "NPUW_ONLINE_PIPELINE" : "NONE",
+          "STATIC_PIPELINE": True}
 
 def load_and_save_whisper_model(params, stateful=False, **tokenizer_kwargs):
     model_id, path = params
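
Note: with the default flipped to false, the static Whisper pipeline on NPU becomes opt-in, as the updated test config shows. Below is a minimal usage sketch, assuming the openvino_genai Python bindings accept plugin properties as keyword arguments (the same way the test passes its config dict); models_path and raw_speech are placeholders, not part of this patch.

    import openvino_genai as ov_genai

    # STATIC_PIPELINE now defaults to False on NPU, so pass it explicitly to
    # keep the previous (static pipeline) behaviour.
    models_path = "whisper-base-ov"  # hypothetical exported Whisper model directory
    pipe = ov_genai.WhisperPipeline(models_path, "NPU", STATIC_PIPELINE=True)

    # raw_speech: 16 kHz mono float samples loaded elsewhere (placeholder).
    # result = pipe.generate(raw_speech)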