From d140a17d8d4c8d94e8d0228bc2e03aaea68f2986 Mon Sep 17 00:00:00 2001
From: Alessio Gozzoli
Date: Tue, 18 Nov 2025 10:23:40 +0100
Subject: [PATCH] Add metadata to the tutorials

---
 ...o_compare_two_ai_models_with_label_studio.md |  2 ++
 ...onnect_Hugging_Face_with_Label_Studio_SDK.md |  2 ++
 ...nd_Evaluate_your_models_with_Label_Studio.md | 17 +++--------------
 ...ug_agents_with_LangSmith_and_Label_Studio.md |  2 ++
 ..._in_your_research_stack_with_Label_Studio.md |  2 ++
 ...tator_agreement_and_build_human_consensus.md |  2 ++
 ...chat_evals_with_chainlit_and_label_studio.md |  2 ++
 docs/source/tutorials/index.md                  |  2 ++
 8 files changed, 17 insertions(+), 14 deletions(-)

diff --git a/docs/source/tutorials/how_to_compare_two_ai_models_with_label_studio.md b/docs/source/tutorials/how_to_compare_two_ai_models_with_label_studio.md
index 6cfb81f35960..7f4546fe839d 100644
--- a/docs/source/tutorials/how_to_compare_two_ai_models_with_label_studio.md
+++ b/docs/source/tutorials/how_to_compare_two_ai_models_with_label_studio.md
@@ -9,6 +9,8 @@ ipynb_repo_path: tutorials/how-to-compare-two-ai-models-with-label-studio/how_to
 repo_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/tree/main/tutorials/how-to-compare-two-ai-models-with-label-studio
 report_bug_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/issues/new
 thumbnail: /images/tutorials/tutorials-compare-ai-models.png
+meta_title: How to Compare Two AI Models with Label Studio
+meta_description: Learn how to compare and evaluate two AI models with the Label Studio SDK.
 ---

 ## Why this matters
diff --git a/docs/source/tutorials/how_to_connect_Hugging_Face_with_Label_Studio_SDK.md b/docs/source/tutorials/how_to_connect_Hugging_Face_with_Label_Studio_SDK.md
index ad3d0f752b42..7d38f099deea 100644
--- a/docs/source/tutorials/how_to_connect_Hugging_Face_with_Label_Studio_SDK.md
+++ b/docs/source/tutorials/how_to_connect_Hugging_Face_with_Label_Studio_SDK.md
@@ -9,6 +9,8 @@ ipynb_repo_path: tutorials/how-to-connect-Hugging-Face-with-Label-Studio-SDK/how
 repo_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/tree/main/tutorials/how-to-connect-Hugging-Face-with-Label-Studio-SDK
 report_bug_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/issues/new
 thumbnail: /images/tutorials/tutorials-hugging-face-ls-sdk.png
+meta_title: How to Connect Hugging Face with Label Studio SDK
+meta_description: Learn how to create an NLP workflow by integrating Hugging Face datasets and models with Label Studio for annotation and active learning.
 ---

 **A Complete Guide to Connecting Hugging Face and Label Studio**
diff --git a/docs/source/tutorials/how_to_create_a_Benchmark_and_Evaluate_your_models_with_Label_Studio.md b/docs/source/tutorials/how_to_create_a_Benchmark_and_Evaluate_your_models_with_Label_Studio.md
index b2162789d2fc..1401238482fe 100644
--- a/docs/source/tutorials/how_to_create_a_Benchmark_and_Evaluate_your_models_with_Label_Studio.md
+++ b/docs/source/tutorials/how_to_create_a_Benchmark_and_Evaluate_your_models_with_Label_Studio.md
@@ -9,6 +9,8 @@ ipynb_repo_path: tutorials/how-to-create-benchmark-and-evaluate-your-models/how_
 repo_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/tree/main/tutorials/how-to-create-benchmark-and-evaluate-your-models
 report_bug_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/issues/new
 thumbnail: /images/tutorials/tutorials-ai-benchmark-and-eval.png
+meta_title: How to Create a Benchmark and Evaluate Your Models with Label Studio
+meta_description: Learn how to use the Label Studio SDK to create a high-quality benchmark dataset to evaluate multiple AI models.
 ---

 Evaluating models is only as good as the benchmark you test them against. In this tutorial, you'll learn how to use **Label Studio** to create a high-quality benchmark dataset, label it with human expertise, and then evaluate multiple AI models against it — all using the **Label Studio SDK**.
@@ -371,19 +373,6 @@ wait_for_runs_to_complete(versions)
     version 46549 completed


-
-```python
-project = ls.projects.get(193733)
-prompt = ls.prompts.get(37050)
-versions = ls.prompts.versions.list(prompt_id=prompt.id)
-```
-
-    /usr/local/lib/python3.12/dist-packages/pydantic/main.py:463: UserWarning: Pydantic serializer warnings:
-      PydanticSerializationUnexpectedValue(Expected `str` - serialized value may not be as expected [input_value=[], input_type=list])
-      PydanticSerializationUnexpectedValue(Expected `str` - serialized value may not be as expected [input_value=90367, input_type=int])
-      return self.__pydantic_serializer__.to_python(
-
 ### Collect Run Costs

 Let’s retrieve the **costs for each model run** to include them as additional data points.
@@ -548,4 +537,4 @@ Keep iterating on what you built today:
 - [Blog: Why Benchmarks Matter for Evaluating LLMs](https://labelstud.io/blog/why-benchmarks-matter-for-evaluating-llms/)
 - [Blog: How to Build AI Benchmarks That Evolve with Your Models](https://labelstud.io/blog/how-to-build-ai-benchmarks-that-evolve-with-your-models/)
 - [Blog: Evaluating the GPT-5 Series on Custom Benchmarks](https://labelstud.io/blog/evaluating-the-gpt-5-series-on-custom-benchmarks/)
-- [Blog: How LegalBenchmarks.AI Built a Domain-Specific AI Benchmark](https://labelstud.io/blog/how-legalbenchmarks-ai-built-a-domain-specific-ai-benchmark/)
+- [Blog: How LegalBenchmarks.AI Built a Domain-Specific AI Benchmark](https://labelstud.io/blog/how-legalbenchmarks-ai-built-a-domain-specific-ai-benchmark/)
diff --git a/docs/source/tutorials/how_to_debug_agents_with_LangSmith_and_Label_Studio.md b/docs/source/tutorials/how_to_debug_agents_with_LangSmith_and_Label_Studio.md
index 9d2657efdbf6..b1db89d528b6 100644
--- a/docs/source/tutorials/how_to_debug_agents_with_LangSmith_and_Label_Studio.md
+++ b/docs/source/tutorials/how_to_debug_agents_with_LangSmith_and_Label_Studio.md
@@ -9,6 +9,8 @@ ipynb_repo_path: tutorials/how-to-debug-agents-with-LangSmith-and-Label-Studio/h
 repo_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/tree/main/tutorials/how-to-debug-agents-with-LangSmith-and-Label-Studio
 report_bug_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/issues/new
 thumbnail: /images/tutorials/tutorials-debug-agents-langsmith.png
+meta_title: How to Debug Agents with LangSmith and Label Studio
+meta_description: Learn how LangSmith and Label Studio can work together to debug and evaluate AI agents.
 ---

 ## 0. Label Studio Requirements
diff --git a/docs/source/tutorials/how_to_embed_evaluation_workflows_in_your_research_stack_with_Label_Studio.md b/docs/source/tutorials/how_to_embed_evaluation_workflows_in_your_research_stack_with_Label_Studio.md
index 286987f4c2e1..8b9bf9b26d0b 100644
--- a/docs/source/tutorials/how_to_embed_evaluation_workflows_in_your_research_stack_with_Label_Studio.md
+++ b/docs/source/tutorials/how_to_embed_evaluation_workflows_in_your_research_stack_with_Label_Studio.md
@@ -9,6 +9,8 @@ ipynb_repo_path: tutorials/how-to-embed-evaluation-workflows-in-your-research-st
 repo_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/tree/main/tutorials/how-to-embed-evaluation-workflows-in-your-research-stack-with-label-studio
 report_bug_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/issues/new
 thumbnail: /images/tutorials/tutorials-eval-flows-research-stack.png
+meta_title: How to Embed Evaluation Workflows in Your Research Stack with Label Studio
+meta_description: Learn how to build an embedded evaluation workflow directly into your Jupyter notebook with Label Studio.
 ---

 ## Label Studio Requirements
diff --git a/docs/source/tutorials/how_to_measure_inter_annotator_agreement_and_build_human_consensus.md b/docs/source/tutorials/how_to_measure_inter_annotator_agreement_and_build_human_consensus.md
index e586d35857ec..170606e8537c 100644
--- a/docs/source/tutorials/how_to_measure_inter_annotator_agreement_and_build_human_consensus.md
+++ b/docs/source/tutorials/how_to_measure_inter_annotator_agreement_and_build_human_consensus.md
@@ -9,6 +9,8 @@ ipynb_repo_path: tutorials/how-to-measure-inter-annotator-agreement-and-build-hu
 repo_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/tree/main/tutorials/how-to-measure-inter-annotator-agreement-and-build-human-consensus
 report_bug_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/issues/new
 thumbnail: /images/tutorials/tutorials-inter-annotator-agreement-and-consensus.png
+meta_title: "How to Measure Inter-Annotator Agreement and Build Human Consensus with Label Studio"
+meta_description: Learn how to measure inter-annotator agreement, build human consensus, establish ground truth and compare model predictions using the Label Studio SDK.
 ---

 This tutorial walks through a practical workflow to measure inter-annotator agreement, build human consensus, establish ground truth and
diff --git a/docs/source/tutorials/how_to_multi_turn_chat_evals_with_chainlit_and_label_studio.md b/docs/source/tutorials/how_to_multi_turn_chat_evals_with_chainlit_and_label_studio.md
index 8d15efa7f307..10b2c10023fb 100644
--- a/docs/source/tutorials/how_to_multi_turn_chat_evals_with_chainlit_and_label_studio.md
+++ b/docs/source/tutorials/how_to_multi_turn_chat_evals_with_chainlit_and_label_studio.md
@@ -9,6 +9,8 @@ ipynb_repo_path: tutorials/how-to-multi-turn-chat-evals-with-chainlit-and-label-
 repo_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/tree/main/tutorials/how-to-multi-turn-chat-evals-with-chainlit-and-label-studio
 report_bug_url: https://github.com/HumanSignal/awesome-label-studio-tutorials/issues/new
 thumbnail: /images/tutorials/tutorials-eval-multi-turn-chainlit.png
+meta_title: "How to Evaluate Multi-Turn AI Conversations with Chainlit and Label Studio"
+meta_description: Learn how to create a Label Studio project for evaluating chatbot conversations using the Chatbot Evaluation template.
 ---

 This notebook demonstrates how to create a Label Studio project for evaluating chatbot conversations using the Chatbot Evaluation template.
diff --git a/docs/source/tutorials/index.md b/docs/source/tutorials/index.md
index 9c2a21e2cbe4..a1cc32d6dc62 100644
--- a/docs/source/tutorials/index.md
+++ b/docs/source/tutorials/index.md
@@ -1,5 +1,7 @@
 ---
 title: Tutorials
+meta_title: Tutorials
+meta_description: A curated list of tutorials to help you get started or learn how to integrate Label Studio into your workflow.
 layout: tutorials
 hide_sidebar: true
 ---