From 057f940743b4e5efeac03f16dc9dd5ded4086baf Mon Sep 17 00:00:00 2001 From: Natan Yellin Date: Fri, 22 Nov 2024 21:16:54 +0200 Subject: [PATCH 1/5] Improve logging (#211) --- holmes/core/llm.py | 7 ------ holmes/core/tool_calling_llm.py | 4 ++-- holmes/main.py | 41 ++++++++++++++++----------------- 3 files changed, 22 insertions(+), 30 deletions(-) diff --git a/holmes/core/llm.py b/holmes/core/llm.py index 7167c771..0c8d5be0 100644 --- a/holmes/core/llm.py +++ b/holmes/core/llm.py @@ -125,14 +125,7 @@ def completion(self, messages: List[Dict[str, Any]], tools: Optional[List[Tool]] drop_params=drop_params ) - - if isinstance(result, ModelResponse): - response = result.choices[0] - response_message = response.message - # when asked to run tools, we expect no response other than the request to run tools unless bedrock - if response_message.content and ('bedrock' not in self.model and logging.DEBUG != logging.root.level): - logging.warning(f"got unexpected response when tools were given: {response_message.content}") return result else: raise Exception(f"Unexpected type returned by the LLM {type(result)}") diff --git a/holmes/core/tool_calling_llm.py b/holmes/core/tool_calling_llm.py index 89f1e0fe..6efebb88 100644 --- a/holmes/core/tool_calling_llm.py +++ b/holmes/core/tool_calling_llm.py @@ -118,7 +118,7 @@ def call( messages, max_context_size, maximum_output_token ) - logging.debug(f"sending messages {messages}") + logging.debug(f"sending messages={messages}\n\ntools={tools}") try: full_response = self.llm.completion( messages=parse_messages_tags(messages), @@ -128,7 +128,7 @@ def call( response_format=response_format, drop_params=True, ) - logging.debug(f"got response {full_response}") + logging.debug(f"got response {full_response.to_json()}") # catch a known error that occurs with Azure and replace the error message with something more obvious to the user except BadRequestError as e: if ( diff --git a/holmes/main.py b/holmes/main.py index 10568d58..bfc0b79d 100644 --- a/holmes/main.py +++ b/holmes/main.py @@ -51,7 +51,7 @@ class Verbosity(Enum): NORMAL = 0 - LOG_QUERIES = 1 + LOG_QUERIES = 1 # TODO: currently unused VERBOSE = 2 VERY_VERBOSE = 3 @@ -65,24 +65,7 @@ def cli_flags_to_verbosity(verbose_flags: List[bool]) -> Verbosity: else: return Verbosity.VERY_VERBOSE -def init_logging(verbose_flags: List[bool] = None): - verbosity = cli_flags_to_verbosity(verbose_flags) - - if verbosity == Verbosity.VERY_VERBOSE: - logging.basicConfig(level=logging.DEBUG, format="%(message)s", handlers=[RichHandler(show_level=False, show_time=False)]) - else: - logging.basicConfig(level=logging.INFO, format="%(message)s", handlers=[RichHandler(show_level=False, show_time=False)]) - - if verbosity.value >= Verbosity.NORMAL.value: - logging.info(f"verbosity is {verbosity}") - - if verbosity.value >= Verbosity.LOG_QUERIES.value: - # TODO - pass - - if verbosity.value >= Verbosity.VERBOSE.value: - logging.getLogger().setLevel(logging.DEBUG) - +def suppress_noisy_logs(): # disable INFO logs from OpenAI logging.getLogger("httpx").setLevel(logging.WARNING) # disable INFO logs from LiteLLM @@ -94,8 +77,24 @@ def init_logging(verbose_flags: List[bool] = None): logging.getLogger("openai._base_client").setLevel(logging.INFO) logging.getLogger("httpcore").setLevel(logging.INFO) logging.getLogger("markdown_it").setLevel(logging.INFO) - # Suppress UserWarnings from the slack_sdk module + # suppress UserWarnings from the slack_sdk module warnings.filterwarnings("ignore", category=UserWarning, module="slack_sdk.*") + +def 
init_logging(verbose_flags: List[bool] = None):
+    verbosity = cli_flags_to_verbosity(verbose_flags)
+
+    if verbosity == Verbosity.VERY_VERBOSE:
+        logging.basicConfig(level=logging.DEBUG, format="%(message)s", handlers=[RichHandler(show_level=False, show_time=False)])
+    elif verbosity == Verbosity.VERBOSE:
+        logging.basicConfig(level=logging.INFO, format="%(message)s", handlers=[RichHandler(show_level=False, show_time=False)])
+        logging.getLogger().setLevel(logging.DEBUG)
+        suppress_noisy_logs()
+    else:
+        logging.basicConfig(level=logging.INFO, format="%(message)s", handlers=[RichHandler(show_level=False, show_time=False)])
+        suppress_noisy_logs()
+
+    logging.debug(f"verbosity is {verbosity}")
+
     return Console()
 
 # Common cli options
@@ -138,7 +137,7 @@ def init_logging(verbose_flags: List[bool] = None):
     [],
     "--verbose",
     "-v",
-    help="Verbose output. You can pass multiple times to increase the verbosity. e.g. -v or -vv or -vvv or -vvvv",
+    help="Verbose output. You can pass multiple times to increase the verbosity. e.g. -v or -vv or -vvv",
 )
 opt_echo_request: bool = typer.Option(
     True,

From 3383b2f451bc5f29e4061806d4b1ce4570f36c2b Mon Sep 17 00:00:00 2001
From: Tomer
Date: Sun, 24 Nov 2024 11:13:33 +0200
Subject: [PATCH 2/5] Curl in base (#212)

---
 Dockerfile | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/Dockerfile b/Dockerfile
index 1fe13f9a..2663198c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -70,6 +70,8 @@ RUN python -m playwright install firefox --with-deps
 # We're installing here libexpat1, to upgrade the package to include a fix to 3 high CVEs. CVE-2024-45491,CVE-2024-45490,CVE-2024-45492
 RUN apt-get update \
   && apt-get install -y \
+    curl \
+    jq \
     git \
     apt-transport-https \
     gnupg2 \

From 57951e1a4735b356bd5e816e131e0f182fe1e298 Mon Sep 17 00:00:00 2001
From: Nicolas Herment
Date: Mon, 25 Nov 2024 09:45:20 +0100
Subject: [PATCH 3/5] Add llm evaluations (#204)

This PR contains three main changes:
- An improvement to the prompt that stops the LLM from mentioning `kubectl logs` and `kubectl logs --previous` separately
- A test suite for investigations, including the ability to mock the DB access (a sketch of the mocking pattern follows below)
- Integration with braintrust.dev, although that code is currently commented out: I hit the freemium limits and need to talk to them first.

It's a big PR, and most of it is a refactor of the existing mock mechanism to work well with the DAL.
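For reference, the DB mocking boils down to replacing the real `SupabaseDal`
with a stub that serves fixture files from a test case directory. A minimal
sketch of that pattern follows; the class and helper names are illustrative,
not the exact code in this PR:

```python
import json
from pathlib import Path
from typing import Any, Dict, Optional


class MockSupabaseDal:
    """Serves canned fixture data instead of querying Supabase."""

    def __init__(self, fixture_dir: Path):
        self._fixture_dir = fixture_dir

    def get_issue_data(self, issue_id: Optional[str]) -> Optional[Dict]:
        # Mirrors SupabaseDal.get_issue_data(): no issue id means no data.
        if not issue_id:
            return None
        return self._load_json("issue_data.json")

    def get_resource_instructions(self, type_: str, name: Optional[str]) -> Optional[Any]:
        return self._load_json("resource_instructions.json")

    def _load_json(self, filename: str) -> Optional[Any]:
        # A missing fixture file simply means "no such data in the DB".
        path = self._fixture_dir / filename
        return json.loads(path.read_text()) if path.exists() else None


# Hypothetical usage: pass the stub wherever a real DAL is expected, e.g.
# dal = MockSupabaseDal(Path("tests/llm/fixtures/test_investigate/01_oom_kill"))
# and hand `dal` to investigate_issues(...) in place of a live SupabaseDal.
```

Each `tests/llm/fixtures/test_investigate/<case>/` directory holds the
`investigate_request.json`, `issue_data.json` and `resource_instructions.json`
that the stub returns, plus the mocked tool outputs (the `kubectl_*.txt` files).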
--- .gitignore | 2 + holmes/config.py | 7 +- holmes/core/investigation.py | 47 + holmes/core/supabase_dal.py | 12 +- holmes/core/tool_calling_llm.py | 2 +- .../prompts/_general_instructions.jinja2 | 5 +- poetry.lock | 2268 +++++------------ pyproject.toml | 3 +- server.py | 41 +- tests/llm/__init__.py | 0 .../kubectl_find_resource.txt} | 0 .../kubectl_find_resource_node.txt | 0 .../kubectl_find_resource_pod_by_keyword.txt | 23 + .../kubectl_find_resource_pod_by_name.txt | 0 .../1_how_many_pods/kubectl_get.txt | 0 .../1_how_many_pods/kubectl_get_all.txt | 0 .../1_how_many_pods/kubectl_get_node.txt | 3 + .../1_how_many_pods/kubectl_get_pod.txt | 2 + .../1_how_many_pods/kubectl_top_pods.txt | 6 + .../1_how_many_pods/test_case.yaml | 0 .../kubectl_describe.txt | 0 .../kubectl_find_resource.txt | 0 .../2_what_is_wrong_with_pod/kubectl_logs.txt | 0 .../kubectl_previous_logs.txt | 0 .../2_what_is_wrong_with_pod/test_case.yaml | 0 .../kubectl_describe.txt | 0 .../kubectl_get_all.txt | 0 .../kubectl_get_pod.txt | 0 .../test_case.yaml | 0 .../4_related_k8s_events/kubectl_describe.txt | 0 .../4_related_k8s_events/kubectl_events.txt | 0 .../kubectl_find_resource.txt | 0 .../kubectl_lineage_parents.txt | 0 .../4_related_k8s_events/test_case.yaml | 0 .../5_image_version/kubectl_find_resource.txt | 0 .../5_image_version/kubectl_get_yaml.txt | 0 .../5_image_version/test_case.yaml | 0 .../6_explain_issue/fetch_finding_by_id.txt | 0 .../6_explain_issue/test_case.yaml | 0 .../01_oom_kill/investigate_request.json | 23 + .../01_oom_kill/issue_data.json | 1 + .../01_oom_kill/kubectl_describe.txt | 6 + .../01_oom_kill/kubectl_find_resource_1.txt | 5 + .../01_oom_kill/kubectl_find_resource_2.txt | 5 + .../01_oom_kill/kubectl_find_resource_3.txt | 5 + .../01_oom_kill/kubectl_get_all.txt | 47 + .../01_oom_kill/kubectl_logs.txt | 6 + .../01_oom_kill/kubectl_previous_logs.txt | 6 + .../01_oom_kill/resource_instructions.json | 1 + .../01_oom_kill/test_case.yaml | 37 + .../02_crashloop_backoff/fetch_finding.txt | 47 + .../investigate_request.json | 20 + .../02_crashloop_backoff/issue_data.json | 1 + .../02_crashloop_backoff/kubectl_describe.txt | 72 + .../kubectl_describe_pod.txt | 6 + .../kubectl_find_resource.txt | 5 + .../02_crashloop_backoff/kubectl_get_all.txt | 65 + .../02_crashloop_backoff/kubectl_logs.txt | 6 + .../kubectl_previous_logs.txt | 6 + .../resource_instructions.json | 1 + .../02_crashloop_backoff/test_case.yaml | 32 + .../03_cpu_throttling/fetch_finding_by_id.txt | 3 + .../investigate_request.json | 20 + .../03_cpu_throttling/issue_data.json | 1 + .../03_cpu_throttling/kubectl_describe.txt | 6 + .../kubectl_find_resource.txt | 5 + .../03_cpu_throttling/kubectl_get_all.txt | 65 + .../03_cpu_throttling/kubectl_logs.txt | 6 + .../kubectl_previous_logs.txt | 6 + .../resource_instructions.json | 1 + .../03_cpu_throttling/test_case.yaml | 22 + .../fetch_finding_by_id.txt | 42 + .../investigate_request.json | 22 + .../04_image_pull_backoff/issue_data.json | 41 + .../kubectl_describe.txt | 61 + .../04_image_pull_backoff/kubectl_logs.txt | 3 + .../kubectl_previous_logs.txt | 3 + .../resource_instructions.json | 1 + .../04_image_pull_backoff/test_case.yaml | 18 + .../05_crashpod/fetch_finding_by_id.txt | 3 + .../05_crashpod/investigate_request.json | 22 + .../05_crashpod/issue_data.json | 1 + .../05_crashpod/kubectl_describe.txt | 6 + .../05_crashpod/kubectl_find_resource.txt | 5 + .../kubectl_find_resource_w_namespace.txt | 5 + .../05_crashpod/kubectl_get_all.txt | 65 + .../05_crashpod/kubectl_logs.txt | 6 + 
.../05_crashpod/kubectl_previous_logs.txt | 6 + .../05_crashpod/resource_instructions.json | 1 + .../05_crashpod/test_case.yaml | 27 + .../06_job_failure/fetch_finding_by_id.txt | 77 + .../06_job_failure/investigate_request.json | 22 + .../06_job_failure/issue_data.json | 76 + .../06_job_failure/kubectl_describe.txt | 45 + .../06_job_failure/kubectl_get.txt | 3 + .../06_job_failure/kubectl_logs.txt | 6 + .../06_job_failure/kubectl_logs_tgmn7.txt | 6 + .../06_job_failure/kubectl_previous_logs.txt | 6 + .../kubectl_previous_logs_tgmn7.txt | 6 + .../06_job_failure/resource_instructions.json | 1 + .../06_job_failure/test_case.yaml | 16 + .../fetch_finding_by_id.txt | 3 + .../investigate_request.json | 22 + .../07_job_syntax_error/issue_data.json | 1 + .../kubectl_describe_job.txt | 40 + .../kubectl_describe_pod.txt | 61 + .../kubectl_find_resource.txt | 9 + .../07_job_syntax_error/kubectl_get.txt | 6 + .../07_job_syntax_error/kubectl_get_all.txt | 65 + .../kubectl_lineage_children.txt | 44 + .../07_job_syntax_error/kubectl_logs.txt | 5 + .../kubectl_logs_5fdj6.txt | 5 + .../kubectl_logs_bg7vg.txt | 5 + .../kubectl_previous_logs.txt | 5 + .../resource_instructions.json | 1 + .../07_job_syntax_error/test_case.yaml | 26 + .../fetch_finding_by_id.txt | 3 + .../investigate_request.json | 22 + .../08_memory_pressure/issue_data.json | 1 + .../08_memory_pressure/kubectl_describe.txt | 103 + .../08_memory_pressure/kubectl_get.txt | 6 + .../08_memory_pressure/kubectl_get_all.txt | 8 + .../08_memory_pressure/kubectl_top_pods.txt | 6 + .../resource_instructions.json | 1 + .../08_memory_pressure/test_case.yaml | 19 + .../09_high_latency/fetch_finding_by_id.txt | 21 + .../09_high_latency/investigate_request.json | 22 + .../09_high_latency/issue_data.json | 20 + .../kubectl_describe_deployment.txt | 47 + .../09_high_latency/kubectl_describe_pod.txt | 68 + .../09_high_latency/kubectl_find_resource.txt | 5 + .../09_high_latency/kubectl_get.txt | 6 + .../09_high_latency/kubectl_get_all.txt | 65 + .../kubectl_lineage_children.txt | 19 + .../09_high_latency/kubectl_logs.txt | 247 ++ .../kubectl_logs_incorrect.txt | 2 + .../09_high_latency/kubectl_previous_logs.txt | 7 + .../09_high_latency/kubectl_top_pods.txt | 6 + .../resource_instructions.json | 1 + .../09_high_latency/test_case.yaml | 16 + tests/llm/test_ask_holmes.py | 108 + tests/llm/test_investigate.py | 258 ++ tests/{ => llm}/test_mocks.py | 6 +- tests/llm/utils/__init__.py | 0 tests/llm/utils/classifiers.py | 60 + tests/llm/utils/constants.py | 4 + tests/llm/utils/mock_dal.py | 71 + tests/{ => llm/utils}/mock_toolset.py | 2 +- tests/llm/utils/mock_utils.py | 206 ++ tests/llm/utils/system.py | 31 + tests/test_chat.py | 83 - tests/test_fetch_url.py | 9 +- tests/utils.py | 105 - 153 files changed, 3622 insertions(+), 1901 deletions(-) create mode 100644 holmes/core/investigation.py create mode 100644 tests/llm/__init__.py rename tests/{fixtures/test_chat/1_how_many_pods/kubectl_find_resource_pod_by_keyword.txt => llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource.txt} (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/1_how_many_pods/kubectl_find_resource_node.txt (100%) create mode 100644 tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource_pod_by_keyword.txt rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/1_how_many_pods/kubectl_find_resource_pod_by_name.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/1_how_many_pods/kubectl_get.txt (100%) rename 
tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/1_how_many_pods/kubectl_get_all.txt (100%) create mode 100644 tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get_node.txt create mode 100644 tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get_pod.txt create mode 100644 tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_top_pods.txt rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/1_how_many_pods/test_case.yaml (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/2_what_is_wrong_with_pod/kubectl_describe.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/2_what_is_wrong_with_pod/kubectl_find_resource.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/2_what_is_wrong_with_pod/kubectl_logs.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/2_what_is_wrong_with_pod/kubectl_previous_logs.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/2_what_is_wrong_with_pod/test_case.yaml (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/3_what_is_the_command_to_port_forward/kubectl_describe.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/3_what_is_the_command_to_port_forward/kubectl_get_all.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/3_what_is_the_command_to_port_forward/kubectl_get_pod.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/3_what_is_the_command_to_port_forward/test_case.yaml (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/4_related_k8s_events/kubectl_describe.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/4_related_k8s_events/kubectl_events.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/4_related_k8s_events/kubectl_find_resource.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/4_related_k8s_events/kubectl_lineage_parents.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/4_related_k8s_events/test_case.yaml (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/5_image_version/kubectl_find_resource.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/5_image_version/kubectl_get_yaml.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/5_image_version/test_case.yaml (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/6_explain_issue/fetch_finding_by_id.txt (100%) rename tests/{fixtures/test_chat => llm/fixtures/test_ask_holmes}/6_explain_issue/test_case.yaml (100%) create mode 100644 tests/llm/fixtures/test_investigate/01_oom_kill/investigate_request.json create mode 100644 tests/llm/fixtures/test_investigate/01_oom_kill/issue_data.json create mode 100644 tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_describe.txt create mode 100644 tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_1.txt create mode 100644 tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_2.txt create mode 100644 tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_3.txt create mode 100644 tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_get_all.txt create mode 100644 tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_previous_logs.txt create mode 100644 
tests/llm/fixtures/test_investigate/01_oom_kill/resource_instructions.json create mode 100644 tests/llm/fixtures/test_investigate/01_oom_kill/test_case.yaml create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/fetch_finding.txt create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/investigate_request.json create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/issue_data.json create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_describe.txt create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_describe_pod.txt create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_find_resource.txt create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_get_all.txt create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_previous_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/resource_instructions.json create mode 100644 tests/llm/fixtures/test_investigate/02_crashloop_backoff/test_case.yaml create mode 100644 tests/llm/fixtures/test_investigate/03_cpu_throttling/fetch_finding_by_id.txt create mode 100644 tests/llm/fixtures/test_investigate/03_cpu_throttling/investigate_request.json create mode 100644 tests/llm/fixtures/test_investigate/03_cpu_throttling/issue_data.json create mode 100644 tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_describe.txt create mode 100644 tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_find_resource.txt create mode 100644 tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_get_all.txt create mode 100644 tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_previous_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/03_cpu_throttling/resource_instructions.json create mode 100644 tests/llm/fixtures/test_investigate/03_cpu_throttling/test_case.yaml create mode 100644 tests/llm/fixtures/test_investigate/04_image_pull_backoff/fetch_finding_by_id.txt create mode 100644 tests/llm/fixtures/test_investigate/04_image_pull_backoff/investigate_request.json create mode 100644 tests/llm/fixtures/test_investigate/04_image_pull_backoff/issue_data.json create mode 100644 tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_describe.txt create mode 100644 tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_previous_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/04_image_pull_backoff/resource_instructions.json create mode 100644 tests/llm/fixtures/test_investigate/04_image_pull_backoff/test_case.yaml create mode 100644 tests/llm/fixtures/test_investigate/05_crashpod/fetch_finding_by_id.txt create mode 100644 tests/llm/fixtures/test_investigate/05_crashpod/investigate_request.json create mode 100644 tests/llm/fixtures/test_investigate/05_crashpod/issue_data.json create mode 100644 tests/llm/fixtures/test_investigate/05_crashpod/kubectl_describe.txt create mode 100644 tests/llm/fixtures/test_investigate/05_crashpod/kubectl_find_resource.txt create mode 100644 tests/llm/fixtures/test_investigate/05_crashpod/kubectl_find_resource_w_namespace.txt create mode 100644 
tests/llm/fixtures/test_investigate/05_crashpod/kubectl_get_all.txt create mode 100644 tests/llm/fixtures/test_investigate/05_crashpod/kubectl_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/05_crashpod/kubectl_previous_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/05_crashpod/resource_instructions.json create mode 100644 tests/llm/fixtures/test_investigate/05_crashpod/test_case.yaml create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/fetch_finding_by_id.txt create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/investigate_request.json create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/issue_data.json create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/kubectl_describe.txt create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/kubectl_get.txt create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/kubectl_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/kubectl_logs_tgmn7.txt create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/kubectl_previous_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/kubectl_previous_logs_tgmn7.txt create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/resource_instructions.json create mode 100644 tests/llm/fixtures/test_investigate/06_job_failure/test_case.yaml create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/fetch_finding_by_id.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/investigate_request.json create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/issue_data.json create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_describe_job.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_describe_pod.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_find_resource.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_get.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_get_all.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_lineage_children.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs_5fdj6.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs_bg7vg.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_previous_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/resource_instructions.json create mode 100644 tests/llm/fixtures/test_investigate/07_job_syntax_error/test_case.yaml create mode 100644 tests/llm/fixtures/test_investigate/08_memory_pressure/fetch_finding_by_id.txt create mode 100644 tests/llm/fixtures/test_investigate/08_memory_pressure/investigate_request.json create mode 100644 tests/llm/fixtures/test_investigate/08_memory_pressure/issue_data.json create mode 100644 tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_describe.txt create mode 100644 tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_get.txt create mode 100644 tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_get_all.txt create mode 100644 
tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_top_pods.txt create mode 100644 tests/llm/fixtures/test_investigate/08_memory_pressure/resource_instructions.json create mode 100644 tests/llm/fixtures/test_investigate/08_memory_pressure/test_case.yaml create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/fetch_finding_by_id.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/investigate_request.json create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/issue_data.json create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/kubectl_describe_deployment.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/kubectl_describe_pod.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/kubectl_find_resource.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/kubectl_get.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/kubectl_get_all.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/kubectl_lineage_children.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/kubectl_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/kubectl_logs_incorrect.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/kubectl_previous_logs.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/kubectl_top_pods.txt create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/resource_instructions.json create mode 100644 tests/llm/fixtures/test_investigate/09_high_latency/test_case.yaml create mode 100644 tests/llm/test_ask_holmes.py create mode 100644 tests/llm/test_investigate.py rename tests/{ => llm}/test_mocks.py (97%) create mode 100644 tests/llm/utils/__init__.py create mode 100644 tests/llm/utils/classifiers.py create mode 100644 tests/llm/utils/constants.py create mode 100644 tests/llm/utils/mock_dal.py rename tests/{ => llm/utils}/mock_toolset.py (98%) create mode 100644 tests/llm/utils/mock_utils.py create mode 100644 tests/llm/utils/system.py delete mode 100644 tests/test_chat.py delete mode 100644 tests/utils.py diff --git a/.gitignore b/.gitignore index 83b1ca3c..0f64441f 100644 --- a/.gitignore +++ b/.gitignore @@ -161,3 +161,5 @@ cython_debug/ playwright.png .deepeval* pyrightconfig.json + +*.AUTOGENERATED diff --git a/holmes/config.py b/holmes/config.py index bba14414..ed99b302 100644 --- a/holmes/config.py +++ b/holmes/config.py @@ -10,6 +10,7 @@ from pydash.arrays import concat from rich.console import Console + from holmes.core.runbooks import RunbookManager from holmes.core.supabase_dal import SupabaseDal from holmes.core.tool_calling_llm import (IssueInvestigator, ToolCallingLLM, @@ -102,7 +103,7 @@ def load_from_env(cls): kwargs[field_name] = val return cls(**kwargs) - def _create_tool_executor( + def create_tool_executor( self, console: Console, allowed_toolsets: ToolsetPattern, dal:Optional[SupabaseDal] ) -> ToolExecutor: all_toolsets = load_builtin_toolsets(dal=dal) @@ -146,7 +147,7 @@ def _create_tool_executor( def create_toolcalling_llm( self, console: Console, allowed_toolsets: ToolsetPattern, dal:Optional[SupabaseDal] = None ) -> ToolCallingLLM: - tool_executor = self._create_tool_executor(console, allowed_toolsets, dal) + tool_executor = self.create_tool_executor(console, allowed_toolsets, dal) return ToolCallingLLM( tool_executor, self.max_steps, @@ -164,7 +165,7 @@ def create_issue_investigator( 
all_runbooks.extend(load_runbooks_from_file(runbook_path)) runbook_manager = RunbookManager(all_runbooks) - tool_executor = self._create_tool_executor(console, allowed_toolsets, dal) + tool_executor = self.create_tool_executor(console, allowed_toolsets, dal) return IssueInvestigator( tool_executor, runbook_manager, diff --git a/holmes/core/investigation.py b/holmes/core/investigation.py new file mode 100644 index 00000000..042dcfd2 --- /dev/null +++ b/holmes/core/investigation.py @@ -0,0 +1,47 @@ + +from rich.console import Console +from holmes.common.env_vars import ALLOWED_TOOLSETS, HOLMES_POST_PROCESSING_PROMPT +from holmes.config import Config +from holmes.core.issue import Issue +from holmes.core.models import InvestigateRequest, InvestigationResult +from holmes.core.supabase_dal import SupabaseDal +from holmes.utils.robusta import load_robusta_api_key + + +def investigate_issues(investigate_request: InvestigateRequest, dal: SupabaseDal, config: Config, console:Console): + load_robusta_api_key(dal=dal, config=config) + context = dal.get_issue_data( + investigate_request.context.get("robusta_issue_id") + ) + + resource_instructions = dal.get_resource_instructions( + "alert", investigate_request.context.get("issue_type") + ) + raw_data = investigate_request.model_dump() + if context: + raw_data["extra_context"] = context + + ai = config.create_issue_investigator( + console, allowed_toolsets=ALLOWED_TOOLSETS, dal=dal + ) + issue = Issue( + id=context["id"] if context else "", + name=investigate_request.title, + source_type=investigate_request.source, + source_instance_id=investigate_request.source_instance_id, + raw=raw_data, + ) + + investigation = ai.investigate( + issue, + prompt=investigate_request.prompt_template, + console=console, + post_processing_prompt=HOLMES_POST_PROCESSING_PROMPT, + instructions=resource_instructions, + ) + + return InvestigationResult( + analysis=investigation.result, + tool_calls=investigation.tool_calls or [], + instructions=investigation.instructions, + ) diff --git a/holmes/core/supabase_dal.py b/holmes/core/supabase_dal.py index 46292cb0..a3c3c560 100644 --- a/holmes/core/supabase_dal.py +++ b/holmes/core/supabase_dal.py @@ -3,7 +3,7 @@ import logging import os import threading -from typing import Dict, Optional, List +from typing import Dict, Optional, List, Tuple from uuid import uuid4 import yaml @@ -31,7 +31,6 @@ class RobustaConfig(BaseModel): sinks_config: List[Dict[str, Dict]] - class RobustaToken(BaseModel): store_url: str api_key: str @@ -127,10 +126,11 @@ def sign_in(self) -> str: self.client.postgrest.auth(res.session.access_token) return res.user.id - def get_issue_data(self, issue_id: str) -> Optional[Dict]: + def get_issue_data(self, issue_id: Optional[str]) -> Optional[Dict]: # TODO this could be done in a single atomic SELECT, but there is no # foreign key relation between Issues and Evidence. - + if not issue_id: + return None if not self.enabled: # store not initialized return None issue_data = None @@ -145,7 +145,7 @@ def get_issue_data(self, issue_id: str) -> Optional[Dict]: if len(issue_response.data): issue_data = issue_response.data[0] - except: # e.g. invalid id format + except Exception: # e.g. 
invalid id format logging.exception("Supabase error while retrieving issue data") return None if not issue_data: @@ -205,7 +205,7 @@ def create_session_token(self) -> str: ).execute() return token - def get_ai_credentials(self) -> (str, str): + def get_ai_credentials(self) -> Tuple[str, str]: with self.lock: session_token = self.token_cache.get("session_token") if not session_token: diff --git a/holmes/core/tool_calling_llm.py b/holmes/core/tool_calling_llm.py index 6efebb88..48ec10c9 100644 --- a/holmes/core/tool_calling_llm.py +++ b/holmes/core/tool_calling_llm.py @@ -2,7 +2,6 @@ import json import logging import textwrap -import os from typing import List, Optional, Dict from holmes.utils.tags import format_tags_in_string, parse_messages_tags from holmes.plugins.prompts import load_and_render_prompt @@ -333,6 +332,7 @@ def investigate( post_processing_prompt: Optional[str] = None, ) -> LLMResult: runbooks = self.runbook_manager.get_instructions_for_issue(issue) + if instructions != None and instructions.instructions: runbooks.extend(instructions.instructions) diff --git a/holmes/plugins/prompts/_general_instructions.jinja2 b/holmes/plugins/prompts/_general_instructions.jinja2 index 77a6ec6e..cc0f0ddb 100644 --- a/holmes/plugins/prompts/_general_instructions.jinja2 +++ b/holmes/plugins/prompts/_general_instructions.jinja2 @@ -18,13 +18,14 @@ If investigating Kubernetes problems: * run as many kubectl commands as you need to gather more information, then respond. * if possible, do so repeatedly on different Kubernetes objects. * for example, for deployments first run kubectl on the deployment then a replicaset inside it, then a pod inside that. -* when investigating a pod that crashed or application errors, always run kubectl_describe and fetch logs with both kubectl_previous_logs and kubectl_logs so that you see current logs and any logs from before a crash. +* use both kubectl_previous_logs and kubectl_logs when reading logs. Treat the output of both as a single unified logs stream +* when investigating a pod that crashed or application errors, always run kubectl_describe and fetch the logs * do not give an answer like "The pod is pending" as that doesn't state why the pod is pending and how to fix it. * do not give an answer like "Pod's node affinity/selector doesn't match any available nodes" because that doesn't include data on WHICH label doesn't match * if investigating an issue on many pods, there is no need to check more than 3 individual pods in the same deployment. 
pick up to a representative 3 from each deployment if relevant * if the user says something isn't working, ALWAYS: ** use kubectl_describe on the owner workload + individual pods and look for any transient issues they might have been referring to -** check the application aspects with kubectl_logs + kubectl_previous_logs and other relevant tools +** check the application aspects through the logs (kubectl_logs and kubectl_previous_logs) and other relevant tools ** look for misconfigured ingresses/services etc Special cases and how to reply: diff --git a/poetry.lock b/poetry.lock index 4ecdee6c..2a46ae7d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,112 +13,98 @@ files = [ [[package]] name = "aiohttp" -version = "3.10.10" +version = "3.11.2" description = "Async http client/server framework (asyncio)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "aiohttp-3.10.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be7443669ae9c016b71f402e43208e13ddf00912f47f623ee5994e12fc7d4b3f"}, - {file = "aiohttp-3.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b06b7843929e41a94ea09eb1ce3927865387e3e23ebe108e0d0d09b08d25be9"}, - {file = "aiohttp-3.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:333cf6cf8e65f6a1e06e9eb3e643a0c515bb850d470902274239fea02033e9a8"}, - {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:274cfa632350225ce3fdeb318c23b4a10ec25c0e2c880eff951a3842cf358ac1"}, - {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9e5e4a85bdb56d224f412d9c98ae4cbd032cc4f3161818f692cd81766eee65a"}, - {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b606353da03edcc71130b52388d25f9a30a126e04caef1fd637e31683033abd"}, - {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab5a5a0c7a7991d90446a198689c0535be89bbd6b410a1f9a66688f0880ec026"}, - {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578a4b875af3e0daaf1ac6fa983d93e0bbfec3ead753b6d6f33d467100cdc67b"}, - {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8105fd8a890df77b76dd3054cddf01a879fc13e8af576805d667e0fa0224c35d"}, - {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3bcd391d083f636c06a68715e69467963d1f9600f85ef556ea82e9ef25f043f7"}, - {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fbc6264158392bad9df19537e872d476f7c57adf718944cc1e4495cbabf38e2a"}, - {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e48d5021a84d341bcaf95c8460b152cfbad770d28e5fe14a768988c461b821bc"}, - {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2609e9ab08474702cc67b7702dbb8a80e392c54613ebe80db7e8dbdb79837c68"}, - {file = "aiohttp-3.10.10-cp310-cp310-win32.whl", hash = "sha256:84afcdea18eda514c25bc68b9af2a2b1adea7c08899175a51fe7c4fb6d551257"}, - {file = "aiohttp-3.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:9c72109213eb9d3874f7ac8c0c5fa90e072d678e117d9061c06e30c85b4cf0e6"}, - {file = "aiohttp-3.10.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c30a0eafc89d28e7f959281b58198a9fa5e99405f716c0289b7892ca345fe45f"}, - {file = "aiohttp-3.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:258c5dd01afc10015866114e210fb7365f0d02d9d059c3c3415382ab633fcbcb"}, - {file = 
"aiohttp-3.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:15ecd889a709b0080f02721255b3f80bb261c2293d3c748151274dfea93ac871"}, - {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3935f82f6f4a3820270842e90456ebad3af15810cf65932bd24da4463bc0a4c"}, - {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:413251f6fcf552a33c981c4709a6bba37b12710982fec8e558ae944bfb2abd38"}, - {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1720b4f14c78a3089562b8875b53e36b51c97c51adc53325a69b79b4b48ebcb"}, - {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:679abe5d3858b33c2cf74faec299fda60ea9de62916e8b67e625d65bf069a3b7"}, - {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79019094f87c9fb44f8d769e41dbb664d6e8fcfd62f665ccce36762deaa0e911"}, - {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2fb38c2ed905a2582948e2de560675e9dfbee94c6d5ccdb1301c6d0a5bf092"}, - {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a3f00003de6eba42d6e94fabb4125600d6e484846dbf90ea8e48a800430cc142"}, - {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1bbb122c557a16fafc10354b9d99ebf2f2808a660d78202f10ba9d50786384b9"}, - {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:30ca7c3b94708a9d7ae76ff281b2f47d8eaf2579cd05971b5dc681db8caac6e1"}, - {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:df9270660711670e68803107d55c2b5949c2e0f2e4896da176e1ecfc068b974a"}, - {file = "aiohttp-3.10.10-cp311-cp311-win32.whl", hash = "sha256:aafc8ee9b742ce75044ae9a4d3e60e3d918d15a4c2e08a6c3c3e38fa59b92d94"}, - {file = "aiohttp-3.10.10-cp311-cp311-win_amd64.whl", hash = "sha256:362f641f9071e5f3ee6f8e7d37d5ed0d95aae656adf4ef578313ee585b585959"}, - {file = "aiohttp-3.10.10-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9294bbb581f92770e6ed5c19559e1e99255e4ca604a22c5c6397b2f9dd3ee42c"}, - {file = "aiohttp-3.10.10-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8fa23fe62c436ccf23ff930149c047f060c7126eae3ccea005f0483f27b2e28"}, - {file = "aiohttp-3.10.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c6a5b8c7926ba5d8545c7dd22961a107526562da31a7a32fa2456baf040939f"}, - {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:007ec22fbc573e5eb2fb7dec4198ef8f6bf2fe4ce20020798b2eb5d0abda6138"}, - {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9627cc1a10c8c409b5822a92d57a77f383b554463d1884008e051c32ab1b3742"}, - {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50edbcad60d8f0e3eccc68da67f37268b5144ecc34d59f27a02f9611c1d4eec7"}, - {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a45d85cf20b5e0d0aa5a8dca27cce8eddef3292bc29d72dcad1641f4ed50aa16"}, - {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b00807e2605f16e1e198f33a53ce3c4523114059b0c09c337209ae55e3823a8"}, - {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f2d4324a98062be0525d16f768a03e0bbb3b9fe301ceee99611dc9a7953124e6"}, - {file = 
"aiohttp-3.10.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:438cd072f75bb6612f2aca29f8bd7cdf6e35e8f160bc312e49fbecab77c99e3a"}, - {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:baa42524a82f75303f714108fea528ccacf0386af429b69fff141ffef1c534f9"}, - {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a7d8d14fe962153fc681f6366bdec33d4356f98a3e3567782aac1b6e0e40109a"}, - {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c1277cd707c465cd09572a774559a3cc7c7a28802eb3a2a9472588f062097205"}, - {file = "aiohttp-3.10.10-cp312-cp312-win32.whl", hash = "sha256:59bb3c54aa420521dc4ce3cc2c3fe2ad82adf7b09403fa1f48ae45c0cbde6628"}, - {file = "aiohttp-3.10.10-cp312-cp312-win_amd64.whl", hash = "sha256:0e1b370d8007c4ae31ee6db7f9a2fe801a42b146cec80a86766e7ad5c4a259cf"}, - {file = "aiohttp-3.10.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ad7593bb24b2ab09e65e8a1d385606f0f47c65b5a2ae6c551db67d6653e78c28"}, - {file = "aiohttp-3.10.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1eb89d3d29adaf533588f209768a9c02e44e4baf832b08118749c5fad191781d"}, - {file = "aiohttp-3.10.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3fe407bf93533a6fa82dece0e74dbcaaf5d684e5a51862887f9eaebe6372cd79"}, - {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aed5155f819873d23520919e16703fc8925e509abbb1a1491b0087d1cd969e"}, - {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f05e9727ce409358baa615dbeb9b969db94324a79b5a5cea45d39bdb01d82e6"}, - {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dffb610a30d643983aeb185ce134f97f290f8935f0abccdd32c77bed9388b42"}, - {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa6658732517ddabe22c9036479eabce6036655ba87a0224c612e1ae6af2087e"}, - {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:741a46d58677d8c733175d7e5aa618d277cd9d880301a380fd296975a9cdd7bc"}, - {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e00e3505cd80440f6c98c6d69269dcc2a119f86ad0a9fd70bccc59504bebd68a"}, - {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ffe595f10566f8276b76dc3a11ae4bb7eba1aac8ddd75811736a15b0d5311414"}, - {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdfcf6443637c148c4e1a20c48c566aa694fa5e288d34b20fcdc58507882fed3"}, - {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d183cf9c797a5291e8301790ed6d053480ed94070637bfaad914dd38b0981f67"}, - {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:77abf6665ae54000b98b3c742bc6ea1d1fb31c394bcabf8b5d2c1ac3ebfe7f3b"}, - {file = "aiohttp-3.10.10-cp313-cp313-win32.whl", hash = "sha256:4470c73c12cd9109db8277287d11f9dd98f77fc54155fc71a7738a83ffcc8ea8"}, - {file = "aiohttp-3.10.10-cp313-cp313-win_amd64.whl", hash = "sha256:486f7aabfa292719a2753c016cc3a8f8172965cabb3ea2e7f7436c7f5a22a151"}, - {file = "aiohttp-3.10.10-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1b66ccafef7336a1e1f0e389901f60c1d920102315a56df85e49552308fc0486"}, - {file = "aiohttp-3.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:acd48d5b80ee80f9432a165c0ac8cbf9253eaddb6113269a5e18699b33958dbb"}, - {file = 
"aiohttp-3.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3455522392fb15ff549d92fbf4b73b559d5e43dc522588f7eb3e54c3f38beee7"}, - {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c3b868724137f713a38376fef8120c166d1eadd50da1855c112fe97954aed8"}, - {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:da1dee8948d2137bb51fbb8a53cce6b1bcc86003c6b42565f008438b806cccd8"}, - {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5ce2ce7c997e1971b7184ee37deb6ea9922ef5163c6ee5aa3c274b05f9e12fa"}, - {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28529e08fde6f12eba8677f5a8608500ed33c086f974de68cc65ab218713a59d"}, - {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7db54c7914cc99d901d93a34704833568d86c20925b2762f9fa779f9cd2e70f"}, - {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03a42ac7895406220124c88911ebee31ba8b2d24c98507f4a8bf826b2937c7f2"}, - {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7e338c0523d024fad378b376a79faff37fafb3c001872a618cde1d322400a572"}, - {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:038f514fe39e235e9fef6717fbf944057bfa24f9b3db9ee551a7ecf584b5b480"}, - {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:64f6c17757251e2b8d885d728b6433d9d970573586a78b78ba8929b0f41d045a"}, - {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:93429602396f3383a797a2a70e5f1de5df8e35535d7806c9f91df06f297e109b"}, - {file = "aiohttp-3.10.10-cp38-cp38-win32.whl", hash = "sha256:c823bc3971c44ab93e611ab1a46b1eafeae474c0c844aff4b7474287b75fe49c"}, - {file = "aiohttp-3.10.10-cp38-cp38-win_amd64.whl", hash = "sha256:54ca74df1be3c7ca1cf7f4c971c79c2daf48d9aa65dea1a662ae18926f5bc8ce"}, - {file = "aiohttp-3.10.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01948b1d570f83ee7bbf5a60ea2375a89dfb09fd419170e7f5af029510033d24"}, - {file = "aiohttp-3.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9fc1500fd2a952c5c8e3b29aaf7e3cc6e27e9cfc0a8819b3bce48cc1b849e4cc"}, - {file = "aiohttp-3.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f614ab0c76397661b90b6851a030004dac502e48260ea10f2441abd2207fbcc7"}, - {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00819de9e45d42584bed046314c40ea7e9aea95411b38971082cad449392b08c"}, - {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05646ebe6b94cc93407b3bf34b9eb26c20722384d068eb7339de802154d61bc5"}, - {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:998f3bd3cfc95e9424a6acd7840cbdd39e45bc09ef87533c006f94ac47296090"}, - {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9010c31cd6fa59438da4e58a7f19e4753f7f264300cd152e7f90d4602449762"}, - {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ea7ffc6d6d6f8a11e6f40091a1040995cdff02cfc9ba4c2f30a516cb2633554"}, - {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ef9c33cc5cbca35808f6c74be11eb7f5f6b14d2311be84a15b594bd3e58b5527"}, - {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:ce0cdc074d540265bfeb31336e678b4e37316849d13b308607efa527e981f5c2"}, - {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:597a079284b7ee65ee102bc3a6ea226a37d2b96d0418cc9047490f231dc09fe8"}, - {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7789050d9e5d0c309c706953e5e8876e38662d57d45f936902e176d19f1c58ab"}, - {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e7f8b04d83483577fd9200461b057c9f14ced334dcb053090cea1da9c8321a91"}, - {file = "aiohttp-3.10.10-cp39-cp39-win32.whl", hash = "sha256:c02a30b904282777d872266b87b20ed8cc0d1501855e27f831320f471d54d983"}, - {file = "aiohttp-3.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:edfe3341033a6b53a5c522c802deb2079eee5cbfbb0af032a55064bd65c73a23"}, - {file = "aiohttp-3.10.10.tar.gz", hash = "sha256:0631dd7c9f0822cc61c88586ca76d5b5ada26538097d0f1df510b082bad3411a"}, + {file = "aiohttp-3.11.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:783741f534c14957fbe657d62a34b947ec06db23d45a2fd4a8aeb73d9c84d7e6"}, + {file = "aiohttp-3.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:435f7a08d8aa42371a94e7c141205a9cb092ba551084b5e0c57492e6673601a3"}, + {file = "aiohttp-3.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c681f34e2814bc6e1eef49752b338061b94a42c92734d0be9513447d3f83718c"}, + {file = "aiohttp-3.11.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73a664478ae1ea011b5a710fb100b115ca8b2146864fa0ce4143ff944df714b8"}, + {file = "aiohttp-3.11.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1d06c8fd8b453c3e553c956bd3b8395100401060430572174bb7876dd95ad49"}, + {file = "aiohttp-3.11.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b1f4844909321ef2c1cee50ddeccbd6018cd8c8d1ddddda3f553e94a5859497"}, + {file = "aiohttp-3.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdc6f8dce09281ae534eaf08a54f0d38612398375f28dad733a8885f3bf9b978"}, + {file = "aiohttp-3.11.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2d942421cf3a1d1eceae8fa192f1fbfb74eb9d3e207d35ad2696bd2ce2c987c"}, + {file = "aiohttp-3.11.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:08ebe7a1d6c1e5ca766d68407280d69658f5f98821c2ba6c41c63cabfed159af"}, + {file = "aiohttp-3.11.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2793d3297f3e49015140e6d3ea26142c967e07998e2fb00b6ee8d041138fbc4e"}, + {file = "aiohttp-3.11.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4a23475d8d5c56e447b7752a1e2ac267c1f723f765e406c81feddcd16cdc97bc"}, + {file = "aiohttp-3.11.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:556564d89e2f4a6e8fe000894c03e4e84cf0b6cfa5674e425db122633ee244d1"}, + {file = "aiohttp-3.11.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:57993f406ce3f114b2a6756d7809be3ffd0cc40f33e8f8b9a4aa1b027fd4e3eb"}, + {file = "aiohttp-3.11.2-cp310-cp310-win32.whl", hash = "sha256:177b000efaf8d2f7012c649e8aee5b0bf488677b1162be5e7511aa4f9d567607"}, + {file = "aiohttp-3.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:ff5d22eece44528023254b595c670dfcf9733ac6af74c4b6cb4f6a784dc3870c"}, + {file = "aiohttp-3.11.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:50e0aee4adc9abcd2109c618a8d1b2c93b85ac277b24a003ab147d91e068b06d"}, + {file = "aiohttp-3.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9aa4e68f1e4f303971ec42976fb170204fb5092de199034b57199a1747e78a2d"}, + {file = 
"aiohttp-3.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d84930b4145991214602372edd7305fc76b700220db79ac0dd57d3afd0f0a1ca"}, + {file = "aiohttp-3.11.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ec8afd362356b8798c8caa806e91deb3f0602d8ffae8e91d2d3ced2a90c35e"}, + {file = "aiohttp-3.11.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb0544a0e8294a5a5e20d3cacdaaa9a911d7c0a9150f5264aef36e7d8fdfa07e"}, + {file = "aiohttp-3.11.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7b0a1618060e3f5aa73d3526ca2108a16a1b6bf86612cd0bb2ddcbef9879d06"}, + {file = "aiohttp-3.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d878a0186023ac391861958035174d0486f3259cabf8fd94e591985468da3ea"}, + {file = "aiohttp-3.11.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e33a7eddcd07545ccf5c3ab230f60314a17dc33e285475e8405e26e21f02660"}, + {file = "aiohttp-3.11.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4d7fad8c456d180a6d2f44c41cfab4b80e2e81451815825097db48b8293f59d5"}, + {file = "aiohttp-3.11.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d954ba0eae7f33884d27dc00629ca4389d249eb8d26ca07c30911257cae8c96"}, + {file = "aiohttp-3.11.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:afa55e863224e664a782effa62245df73fdfc55aee539bed6efacf35f6d4e4b7"}, + {file = "aiohttp-3.11.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:10a5f91c319d9d4afba812f72984816b5fcd20742232ff7ecc1610ffbf3fc64d"}, + {file = "aiohttp-3.11.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6e8e19a80ba194db5c06915a9df23c0c06e0e9ca9a4db9386a6056cca555a027"}, + {file = "aiohttp-3.11.2-cp311-cp311-win32.whl", hash = "sha256:9c8d1db4f65bbc9d75b7b271d68fb996f1c8c81a525263862477d93611856c2d"}, + {file = "aiohttp-3.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:2adb967454e10e69478ba4a8d8afbba48a7c7a8619216b7c807f8481cc66ddfb"}, + {file = "aiohttp-3.11.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f833a80d9de9307d736b6af58c235b17ef7f90ebea7b9c49cd274dec7a66a2f1"}, + {file = "aiohttp-3.11.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:382f853516664d2ebfc75dc01da4a10fdef5edcb335fe7b45cf471ce758ecb18"}, + {file = "aiohttp-3.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d3a2bcf6c81639a165da93469e1e0aff67c956721f3fa9c0560f07dd1e505116"}, + {file = "aiohttp-3.11.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de3b4d5fb5d69749104b880a157f38baeea7765c93d9cd3837cedd5b84729e10"}, + {file = "aiohttp-3.11.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a90a0dc4b054b5af299a900bf950fe8f9e3e54322bc405005f30aa5cacc5c98"}, + {file = "aiohttp-3.11.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32334f35824811dd20a12cc90825d000e6b50faaeaa71408d42269151a66140d"}, + {file = "aiohttp-3.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cba0b8d25aa2d450762f3dd6df85498f5e7c3ad0ddeb516ef2b03510f0eea32"}, + {file = "aiohttp-3.11.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bbb2dbc2701ab7e9307ca3a8fa4999c5b28246968e0a0202a5afabf48a42e22"}, + {file = "aiohttp-3.11.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97fba98fc5d9ccd3d33909e898d00f2494d6a9eec7cbda3d030632e2c8bb4d00"}, + {file = 
"aiohttp-3.11.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0ebdf5087e2ce903d8220cc45dcece90c2199ae4395fd83ca616fcc81010db2c"}, + {file = "aiohttp-3.11.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:122768e3ae9ce74f981b46edefea9c6e5a40aea38aba3ac50168e6370459bf20"}, + {file = "aiohttp-3.11.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5587da333b7d280a312715b843d43e734652aa382cba824a84a67c81f75b338b"}, + {file = "aiohttp-3.11.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:85de9904bc360fd29a98885d2bfcbd4e02ab33c53353cb70607f2bea2cb92468"}, + {file = "aiohttp-3.11.2-cp312-cp312-win32.whl", hash = "sha256:b470de64d17156c37e91effc109d3b032b39867000e2c126732fe01d034441f9"}, + {file = "aiohttp-3.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:3f617a48b70f4843d54f52440ea1e58da6bdab07b391a3a6aed8d3b311a4cc04"}, + {file = "aiohttp-3.11.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5d90b5a3b0f32a5fecf5dd83d828713986c019585f5cddf40d288ff77f366615"}, + {file = "aiohttp-3.11.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d23854e5867650d40cba54d49956aad8081452aa80b2cf0d8c310633f4f48510"}, + {file = "aiohttp-3.11.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:486273d3b5af75a80c31c311988931bdd2a4b96a74d5c7f422bad948f99988ef"}, + {file = "aiohttp-3.11.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9075313f8e41b481e4cb10af405054564b0247dc335db5398ed05f8ec38787e2"}, + {file = "aiohttp-3.11.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44b69c69c194ffacbc50165911cf023a4b1b06422d1e1199d3aea82eac17004e"}, + {file = "aiohttp-3.11.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b339d91ac9060bd6ecdc595a82dc151045e5d74f566e0864ef3f2ba0887fec42"}, + {file = "aiohttp-3.11.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64e8f5178958a9954043bc8cd10a5ae97352c3f2fc99aa01f2aebb0026010910"}, + {file = "aiohttp-3.11.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3129151378f858cdc4a0a4df355c9a0d060ab49e2eea7e62e9f085bac100551b"}, + {file = "aiohttp-3.11.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:14eb6c628432720e41b4fab1ada879d56cfe7034159849e083eb536b4c2afa99"}, + {file = "aiohttp-3.11.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e57a10aacedcf24666f4c90d03e599f71d172d1c5e00dcf48205c445806745b0"}, + {file = "aiohttp-3.11.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:66e58a2e8c7609a3545c4b38fb8b01a6b8346c4862e529534f7674c5265a97b8"}, + {file = "aiohttp-3.11.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9b6d15adc9768ff167614ca853f7eeb6ee5f1d55d5660e3af85ce6744fed2b82"}, + {file = "aiohttp-3.11.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2914061f5ca573f990ec14191e6998752fa8fe50d518e3405410353c3f44aa5d"}, + {file = "aiohttp-3.11.2-cp313-cp313-win32.whl", hash = "sha256:1c2496182e577042e0e07a328d91c949da9e77a2047c7291071e734cd7a6e780"}, + {file = "aiohttp-3.11.2-cp313-cp313-win_amd64.whl", hash = "sha256:cccb2937bece1310c5c0163d0406aba170a2e5fb1f0444d7b0e7fdc9bd6bb713"}, + {file = "aiohttp-3.11.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:994cb893936dd2e1803655ae8667a45066bfd53360b148e22b4e3325cc5ea7a3"}, + {file = "aiohttp-3.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3666c750b73ce463a413692e3a57c60f7089e2d9116a2aa5a0f0eaf2ae325148"}, + {file = "aiohttp-3.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:6ad9a7d2a3a0f235184426425f80bd3b26c66b24fd5fddecde66be30c01ebe6e"}, + {file = "aiohttp-3.11.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c979fc92aba66730b66099cd5becb42d869a26c0011119bc1c2478408a8bf7a"}, + {file = "aiohttp-3.11.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:766d0ebf8703d28f854f945982aa09224d5a27a29594c70d921c43c3930fe7ac"}, + {file = "aiohttp-3.11.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79efd1ee3827b2f16797e14b1e45021206c3271249b4d0025014466d416d7413"}, + {file = "aiohttp-3.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d6e069b882c1fdcbe5577dc4be372eda705180197140577a4cddb648c29d22e"}, + {file = "aiohttp-3.11.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e9a766c346b2ed7e88937919d84ed64b4ef489dad1d8939f806ee52901dc142"}, + {file = "aiohttp-3.11.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2b02a68b9445c70d7f5c8b578c5f5e5866b1d67ca23eb9e8bc8658ae9e3e2c74"}, + {file = "aiohttp-3.11.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:374baefcb1b6275f350da605951f5f02487a9bc84a574a7d5b696439fabd49a3"}, + {file = "aiohttp-3.11.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d2f991c18132f3e505c108147925372ffe4549173b7c258cf227df1c5977a635"}, + {file = "aiohttp-3.11.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:34f37c59b12bc3afc52bab6fcd9cd3be82ff01c4598a84cbea934ccb3a9c54a0"}, + {file = "aiohttp-3.11.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:33af11eca7bb0f5c6ffaf5e7d9d2336c2448f9c6279b93abdd6f3c35f9ee321f"}, + {file = "aiohttp-3.11.2-cp39-cp39-win32.whl", hash = "sha256:83a70e22e0f6222effe7f29fdeba6c6023f9595e59a0479edacfbd7de4b77bb7"}, + {file = "aiohttp-3.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:c28c1677ea33ccb8b14330560094cc44d3ff4fad617a544fd18beb90403fe0f1"}, + {file = "aiohttp-3.11.2.tar.gz", hash = "sha256:68d1f46f9387db3785508f5225d3acbc5825ca13d9c29f2b5cce203d5863eb79"}, ] [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -yarl = ">=1.12.0,<2.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] @@ -170,26 +156,15 @@ doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-optional = false -python-versions = "*" -files = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] - [[package]] name = "async-timeout" -version = "4.0.3" +version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] @@ -211,6 +186,30 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +[[package]] +name = "autoevals" +version = "0.0.103" +description = "Universal library for evaluating AI models" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "autoevals-0.0.103-py3-none-any.whl", hash = "sha256:b25c028dcfbc4b1f2ce5e30692a0021f8ad9fef50ebdd22cb51a29219b1115b0"}, + {file = "autoevals-0.0.103.tar.gz", hash = "sha256:e7bcf65ba1da48aad15b9f97929c061103cbea821bbc667816b1b2de22c0fb28"}, +] + +[package.dependencies] +braintrust-core = "0.0.54" +chevron = "*" +jsonschema = "*" +levenshtein = "*" +pyyaml = "*" + +[package.extras] +all = ["IPython", "black", "build", "flake8", "flake8-isort", "isort (==5.12.0)", "numpy", "pre-commit", "pydoc-markdown", "pytest", "scipy", "twine"] +dev = ["IPython", "black", "build", "flake8", "flake8-isort", "isort (==5.12.0)", "pre-commit", "pytest", "twine"] +doc = ["pydoc-markdown"] +scipy = ["numpy", "scipy"] + [[package]] name = "beautifulsoup4" version = "4.12.3" @@ -234,17 +233,17 @@ lxml = ["lxml"] [[package]] name = "boto3" -version = "1.35.56" +version = "1.35.63" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.56-py3-none-any.whl", hash = "sha256:d04608cf40f429025eb66b52b835bdc333436022918788853ed0bbbba6dd2f09"}, - {file = "boto3-1.35.56.tar.gz", hash = "sha256:6fcc510a4e747e85f84046b0ba0e5b178e89ba0f8ac9e2b6ebb4cc925c68c23b"}, + {file = "boto3-1.35.63-py3-none-any.whl", hash = "sha256:d0f938d4f6f392b6ffc5e75fff14a42e5bbb5228675a0367c8af55398abadbec"}, + {file = "boto3-1.35.63.tar.gz", hash = "sha256:deb593d9a0fb240deb4c43e4da8e6626d7c36be7b2fd2fe28f49d44d395b7de0"}, ] [package.dependencies] -botocore = ">=1.35.56,<1.36.0" +botocore = ">=1.35.63,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -253,26 +252,65 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.56" +version = "1.35.63" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.56-py3-none-any.whl", hash = "sha256:4be97f7bc1fbf33ad71ee1b678cea0ecf9638e61d5f566a46f261cde969dd690"}, - {file = "botocore-1.35.56.tar.gz", hash = "sha256:8a9e752c8e87a423575ac528340a35d4318b8576ae4c6e0acfe5a3867f6bbccf"}, + {file = "botocore-1.35.63-py3-none-any.whl", hash = "sha256:0ca1200694a4c0a3fa846795d8e8a08404c214e21195eb9e010c4b8a4ca78a4a"}, + {file = "botocore-1.35.63.tar.gz", hash = "sha256:2b8196bab0a997d206c3d490b52e779ef47dffb68c57c685443f77293aca1589"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = [ - {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, ] [package.extras] crt = ["awscrt (==0.22.0)"] +[[package]] +name = "braintrust" +version = "0.0.168" +description = "SDK for integrating Braintrust" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "braintrust-0.0.168-py3-none-any.whl", hash = "sha256:24572964b1cb9c189c087d656dc9be80c9c28ed4e87fd3f8f1887e46766052b3"}, + {file = "braintrust-0.0.168.tar.gz", hash = "sha256:fbf93f78f32951e25fc9eac862ba20c5b58a913c4601e6fe39fcc40d42702247"}, +] + +[package.dependencies] +braintrust-core = "0.0.54" +chevron = "*" +exceptiongroup = "1.2.0" +GitPython = "*" +python-dotenv = "*" +python-slugify = "*" +requests = "*" +sseclient-py = "*" +tqdm = "*" + +[package.extras] +all = ["IPython", "black", "boto3", "build", "flake8", "flake8-isort", "isort (==5.10.1)", "pre-commit", "psycopg2-binary", "pydoc-markdown", "pytest", "twine", "uv"] +cli = ["boto3", "psycopg2-binary", "uv"] +dev = ["IPython", "black", "build", "flake8", "flake8-isort", "isort (==5.10.1)", "pre-commit", "pytest", "twine"] +doc = ["pydoc-markdown"] + +[[package]] +name = "braintrust-core" +version = "0.0.54" +description = "Shared core dependencies for Braintrust packages" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "braintrust_core-0.0.54-py3-none-any.whl", hash = "sha256:8127a6440bd4aa0824a25b7547a4377ea8b57bd8daeca6650779c6d60eac936c"}, + {file = "braintrust_core-0.0.54.tar.gz", hash = "sha256:2ba3d339dfdc14b3b10a1ff523eec18bcd3ae775c30a99093fbf224095f91603"}, +] + [[package]] name = "bs4" version = "0.0.2" @@ -423,6 +461,17 @@ files = [ {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] +[[package]] +name = "chevron" +version = "0.14.0" +description = "Mustache templating language renderer" +optional = false +python-versions = "*" +files = [ + {file = "chevron-0.14.0-py3-none-any.whl", hash = "sha256:fbf996a709f8da2e745ef763f482ce2d311aa817d287593a5b990d6d6e4f0443"}, + {file = "chevron-0.14.0.tar.gz", hash = "sha256:87613aafdf6d77b6a90ff073165a61ae5086e21ad49057aa0e53681601800ebf"}, +] + [[package]] name = "click" version = "8.1.7" @@ -465,119 +514,6 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] development = ["black", "flake8", "mypy", "pytest", "types-colorama"] -[[package]] -name = "dataclasses-json" -version = "0.6.7" -description = "Easily serialize dataclasses to and from JSON." 
-optional = false -python-versions = "<4.0,>=3.7" -files = [ - {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, - {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, -] - -[package.dependencies] -marshmallow = ">=3.18.0,<4.0.0" -typing-inspect = ">=0.4.0,<1" - -[[package]] -name = "datasets" -version = "2.14.4" -description = "HuggingFace community-driven open-source library of datasets" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "datasets-2.14.4-py3-none-any.whl", hash = "sha256:29336bd316a7d827ccd4da2236596279b20ca2ac78f64c04c9483da7cbc2459b"}, - {file = "datasets-2.14.4.tar.gz", hash = "sha256:ef29c2b5841de488cd343cfc26ab979bff77efa4d2285af51f1ad7db5c46a83b"}, -] - -[package.dependencies] -aiohttp = "*" -dill = ">=0.3.0,<0.3.8" -fsspec = {version = ">=2021.11.1", extras = ["http"]} -huggingface-hub = ">=0.14.0,<1.0.0" -multiprocess = "*" -numpy = ">=1.17" -packaging = "*" -pandas = "*" -pyarrow = ">=8.0.0" -pyyaml = ">=5.1" -requests = ">=2.19.0" -tqdm = ">=4.62.1" -xxhash = "*" - -[package.extras] -apache-beam = ["apache-beam (>=2.26.0,<2.44.0)"] -audio = ["librosa", "soundfile (>=0.12.1)"] -benchmarks = ["tensorflow (==2.12.0)", "torch (==2.0.1)", "transformers (==4.30.1)"] -dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "black (>=23.1,<24.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "ruff (>=0.0.241)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"] -docs = ["s3fs", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "transformers"] -jax = ["jax (>=0.2.8,!=0.3.2,<=0.3.25)", "jaxlib (>=0.1.65,<=0.3.25)"] -metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] -quality = ["black (>=23.1,<24.0)", "pyyaml (>=5.3.1)", "ruff (>=0.0.241)"] -s3 = ["s3fs"] -tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] -tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"] -torch = ["torch"] -vision = ["Pillow (>=6.2.1)"] - -[[package]] -name = "deepeval" -version = "1.5.0" -description = "The open-source evaluation framework for LLMs." 
-optional = false -python-versions = "*" -files = [ - {file = "deepeval-1.5.0-py3-none-any.whl", hash = "sha256:7a62fed763e22562955f9db9ab5c8450bd48ad3178d4e5e6b9364a1640cbbe49"}, - {file = "deepeval-1.5.0.tar.gz", hash = "sha256:24c06dfb15f08c4f5888c597868a7d1d64b38e75bc6c36d8092cc77ab25e0ad0"}, -] - -[package.dependencies] -docx2txt = ">=0.8,<1.0" -grpcio = ">=1.63.0,<1.64.0" -importlib-metadata = ">=6.0.2" -langchain = "*" -langchain-core = "*" -langchain-openai = "*" -opentelemetry-api = ">=1.24.0,<1.25.0" -opentelemetry-exporter-otlp-proto-grpc = ">=1.24.0,<1.25.0" -opentelemetry-sdk = ">=1.24.0,<1.25.0" -portalocker = "*" -protobuf = "*" -pydantic = "*" -pytest = "*" -pytest-repeat = "*" -pytest-xdist = "*" -ragas = "*" -requests = "*" -rich = "*" -sentry-sdk = "*" -tabulate = "*" -tenacity = ">=8.4.1,<8.5.0" -tqdm = "*" -typer = "*" - -[package.extras] -dev = ["black"] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - [[package]] name = "deprecation" version = "2.1.0" @@ -592,20 +528,6 @@ files = [ [package.dependencies] packaging = "*" -[[package]] -name = "dill" -version = "0.3.7" -description = "serialize all of Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - [[package]] name = "distro" version = "1.9.0" @@ -617,25 +539,15 @@ files = [ {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, ] -[[package]] -name = "docx2txt" -version = "0.8" -description = "A pure python-based utility to extract text and images from docx files." 
-optional = false -python-versions = "*" -files = [ - {file = "docx2txt-0.8.tar.gz", hash = "sha256:2c06d98d7cfe2d3947e5760a57d924e3ff07745b379c8737723922e7009236e5"}, -] - [[package]] name = "exceptiongroup" -version = "1.2.2" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -657,13 +569,13 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "fastapi" -version = "0.115.4" +version = "0.115.5" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"}, - {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"}, + {file = "fastapi-0.115.5-py3-none-any.whl", hash = "sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796"}, + {file = "fastapi-0.115.5.tar.gz", hash = "sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289"}, ] [package.dependencies] @@ -803,9 +715,6 @@ files = [ {file = "fsspec-2024.10.0.tar.gz", hash = "sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493"}, ] -[package.dependencies] -aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""} - [package.extras] abfs = ["adlfs"] adl = ["adlfs"] @@ -835,21 +744,36 @@ test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "d tqdm = ["tqdm"] [[package]] -name = "googleapis-common-protos" -version = "1.65.0" -description = "Common protobufs used in Google APIs" +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.43" +description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, - {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, + {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, + {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, ] [package.dependencies] -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 
|| >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +gitdb = ">=4.0.1,<5" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] [[package]] name = "gotrue" @@ -952,64 +876,6 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] -[[package]] -name = "grpcio" -version = "1.63.2" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.63.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:bfb7443a525a0ccc8ae89d29d5257a895fe33af23ba8be21609138cef42deb79"}, - {file = "grpcio-1.63.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:4410f179181961c043c58454ee9cb28474ab38080a1f12e56bac45dc1cf21491"}, - {file = "grpcio-1.63.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:2eded368fbb4d31356d4082ad9b70e617fe2a5b39c4b2817dd9f2478084443fa"}, - {file = "grpcio-1.63.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c97067b7b88dab5b546bc5c29fed202a7f4c7df0f4303518c16b85942cd3db80"}, - {file = "grpcio-1.63.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2abe769d62d76d2ca4c7114dfb16e5dc608325bc97a60ce33e6fb97add670d42"}, - {file = "grpcio-1.63.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:18882622160344e8943e31f8db69d047bd4d8e61ed4359b83b59d27dccb0580d"}, - {file = "grpcio-1.63.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4a98a7df27aabb7fb1a5e3d7156ce5a2b52bb1881d66b838b53b02b122599901"}, - {file = "grpcio-1.63.2-cp310-cp310-win32.whl", hash = "sha256:5c875f00f963e53339f89e0ed73c5e16f16a8d80ac7634959b8e8ab8f5e58e19"}, - {file = "grpcio-1.63.2-cp310-cp310-win_amd64.whl", hash = "sha256:80ea604527c7f565b4ab8af8cbea163395842eb6a6d63fc202285d351fad264b"}, - {file = "grpcio-1.63.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:b01260e2f07ccd39e2a683b99001c124a614b3d081669c1bac5a810d9a0e36dd"}, - {file = "grpcio-1.63.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:238f00823db78c20888e3510eb5872258b2c450b550b7bc51d1a2c25026a3964"}, - {file = "grpcio-1.63.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:87303d538ac51ef49cd0ad70f1b8aac852797a1ca7fcfc07920d6f9c9df58c57"}, - {file = "grpcio-1.63.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2965c0cc6f21ec26f2ccfacfc89af25338d5cc4b838ca60baa27f7330a074dbd"}, - {file = "grpcio-1.63.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bb39a0ebaca1b9caa0d7c766a3b7875d619baa8df9899b025cd067dc7ab90d2"}, - {file = "grpcio-1.63.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7fbe809d69695e4776a0800625eccdf2e9c770c454a51a17ee7b9a583725ce42"}, - {file = "grpcio-1.63.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:160245acf4e360e40618989abc8e4beef976286af287fff9fb4c3a5be6dbcb4a"}, - {file = "grpcio-1.63.2-cp311-cp311-win32.whl", hash = "sha256:80233c2370eb8fe5601cf307eacc8762d29548f216d3de6376f2b388d5c75c71"}, - {file = "grpcio-1.63.2-cp311-cp311-win_amd64.whl", hash = "sha256:38f53edf277df89175176b26d069c06b8b88ce711aeb6521966a2bdd20eacf2b"}, - {file = 
"grpcio-1.63.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:0daaba8884d4fe833e26b10490f793405165e81ae933f5e47091469bc91d94a8"}, - {file = "grpcio-1.63.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d32061a7b1fe28658add39276425795131d0a50f5eaf4beaf6beb8151d06072c"}, - {file = "grpcio-1.63.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:d02dc76d3bcc61ffb6629ecbb26f8a9a3171e4446ad5ee8a0298b173cc7eaab6"}, - {file = "grpcio-1.63.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4390eb43e9570183be2570a98dbcdd01a322d78e6c519875eec31e03d7dd209"}, - {file = "grpcio-1.63.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f071ae332df667737a2f43a8f66100971ac5b0ab277741f29237764239aa10bb"}, - {file = "grpcio-1.63.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:496efbed750f769206396a45e767eb351e7e99b02d6a8ff40989308bbc878820"}, - {file = "grpcio-1.63.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9a9cb1d4436f9e72c73b3d2b9c0ca97629e2e35653f34fd70f8e39aa44e20dbb"}, - {file = "grpcio-1.63.2-cp312-cp312-win32.whl", hash = "sha256:64f76e867a9b4a6bfa1fc4ab5200b0abffffdaf49eab25c48aef9a06e91de8b4"}, - {file = "grpcio-1.63.2-cp312-cp312-win_amd64.whl", hash = "sha256:79cb9c53bb935983905672b56fa63e7ef61bf41e54c4b65b021fc6502eb361f5"}, - {file = "grpcio-1.63.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:6d0c8805291395be646e732b5d01a64cf9f1961aa45cb088257b0ed599b3d873"}, - {file = "grpcio-1.63.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b7eed646f7733cb5171d6866b8948c21ab3c4a77e128e27b2fd5608d30a18120"}, - {file = "grpcio-1.63.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:96a36eb55619ea4c98d8801d069598fb84b4669e647e82c1db0b3e8c46be0154"}, - {file = "grpcio-1.63.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7df59e2c9ff308995e30c8c6844f101b7e3399420f7a34728a12fa909050033"}, - {file = "grpcio-1.63.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0817d25213c5f1528a77c4be97ca9fa690813396fe7cb0bf06391ec12e4928e"}, - {file = "grpcio-1.63.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0ffb8a4fa9e7d2cdec65be78409de919fbb6ac01d6ade26b8b9b013ee67ec04c"}, - {file = "grpcio-1.63.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ab99b190cb8e4a7692cc1edf76fd9059b216e7aacc6aeb40084c2fed44d323f9"}, - {file = "grpcio-1.63.2-cp38-cp38-win32.whl", hash = "sha256:57fdcd69ab28e22c33baac2631fb99ecff02c1ce7e4e83dc473b50cb14997725"}, - {file = "grpcio-1.63.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee2d4cf1672a6711a7cb19b937baa7865cc523ec5a8133a0b1e95fe02d32adff"}, - {file = "grpcio-1.63.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:b71784130b8af4265566333c3e42c00f4ac13471eb3f4054a85a9df326fa0b50"}, - {file = "grpcio-1.63.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fa9e3c0ebd8d3788d4157041112bbc6f455717d485c0f756d5aaefe5d032ae85"}, - {file = "grpcio-1.63.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:996069d2b37d13e0f96d282abb41e8fabbe8f46971d38d71f318ca8316aca189"}, - {file = "grpcio-1.63.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72f86cdd5643732d394f15a79196fee406030dbf8545161e6a6b5e1ce60376bb"}, - {file = "grpcio-1.63.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4de14801be123c7e6680f63aa33422b0d1bacff9559fc75cf5e9e78ee21ee235"}, - {file = "grpcio-1.63.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a30916b3fd2e851a481fe4a9a7ef4a957c548440bf5020363115712a4ae94ed3"}, - {file = 
"grpcio-1.63.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db66c5cc44729ef61bf680759d9984b47bbdc700f817801d3d2cd3ec7022bb06"}, - {file = "grpcio-1.63.2-cp39-cp39-win32.whl", hash = "sha256:973d0c97ccdedd7f58df59288ac41abc9ae373e25abe6ef1c1c4265f2518a71d"}, - {file = "grpcio-1.63.2-cp39-cp39-win_amd64.whl", hash = "sha256:37c7fd664d336ae7c1dd0a9d23b8f31b6efc38dcbe6ce357079ec585bb171ae7"}, - {file = "grpcio-1.63.2.tar.gz", hash = "sha256:8dccf9777b4f21cad0a8c84916ae9bbdde7ccd10efdec7d95f31805a41801064"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.63.2)"] - [[package]] name = "h11" version = "0.14.0" @@ -1049,13 +915,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.6" +version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, - {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] @@ -1094,17 +960,6 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "httpx-sse" -version = "0.4.0" -description = "Consume Server-Sent Event (SSE) messages with HTTPX." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, - {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, -] - [[package]] name = "huggingface-hub" version = "0.26.2" @@ -1180,22 +1035,26 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 [[package]] name = "importlib-metadata" -version = "7.0.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, - {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +test = ["flufl.flake8", "importlib-resources 
(>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" @@ -1227,84 +1086,84 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jiter" -version = "0.7.0" +version = "0.7.1" description = "Fast iterable JSON parser." optional = false python-versions = ">=3.8" files = [ - {file = "jiter-0.7.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e14027f61101b3f5e173095d9ecf95c1cac03ffe45a849279bde1d97e559e314"}, - {file = "jiter-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:979ec4711c2e37ac949561858bd42028884c9799516a923e1ff0b501ef341a4a"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:662d5d3cca58ad6af7a3c6226b641c8655de5beebcb686bfde0df0f21421aafa"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d89008fb47043a469f97ad90840b97ba54e7c3d62dc7cbb6cbf938bd0caf71d"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8b16c35c846a323ce9067170d5ab8c31ea3dbcab59c4f7608bbbf20c2c3b43f"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9e82daaa1b0a68704f9029b81e664a5a9de3e466c2cbaabcda5875f961702e7"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43a87a9f586636e1f0dd3651a91f79b491ea0d9fd7cbbf4f5c463eebdc48bda7"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2ec05b1615f96cc3e4901678bc863958611584072967d9962f9e571d60711d52"}, - {file = "jiter-0.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a5cb97e35370bde7aa0d232a7f910f5a0fbbc96bc0a7dbaa044fd5cd6bcd7ec3"}, - {file = "jiter-0.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb316dacaf48c8c187cea75d0d7f835f299137e6fdd13f691dff8f92914015c7"}, - {file = "jiter-0.7.0-cp310-none-win32.whl", hash = "sha256:243f38eb4072763c54de95b14ad283610e0cd3bf26393870db04e520f60eebb3"}, - {file = "jiter-0.7.0-cp310-none-win_amd64.whl", hash = "sha256:2221d5603c139f6764c54e37e7c6960c469cbcd76928fb10d15023ba5903f94b"}, - {file = "jiter-0.7.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91cec0ad755bd786c9f769ce8d843af955df6a8e56b17658771b2d5cb34a3ff8"}, - {file = "jiter-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:feba70a28a27d962e353e978dbb6afd798e711c04cb0b4c5e77e9d3779033a1a"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d866ec066c3616cacb8535dbda38bb1d470b17b25f0317c4540182bc886ce2"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e7a7a00b6f9f18289dd563596f97ecaba6c777501a8ba04bf98e03087bcbc60"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9aaf564094c7db8687f2660605e099f3d3e6ea5e7135498486674fcb78e29165"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4d27e09825c1b3c7a667adb500ce8b840e8fc9f630da8454b44cdd4fb0081bb"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca7c287da9c1d56dda88da1d08855a787dbb09a7e2bd13c66a2e288700bd7c7"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:db19a6d160f093cbc8cd5ea2abad420b686f6c0e5fb4f7b41941ebc6a4f83cda"}, - {file = "jiter-0.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", 
hash = "sha256:6e46a63c7f877cf7441ffc821c28287cfb9f533ae6ed707bde15e7d4dfafa7ae"}, - {file = "jiter-0.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ba426fa7ff21cb119fa544b75dd3fbee6a70e55a5829709c0338d07ccd30e6d"}, - {file = "jiter-0.7.0-cp311-none-win32.whl", hash = "sha256:c07f55a64912b0c7982377831210836d2ea92b7bd343fca67a32212dd72e38e0"}, - {file = "jiter-0.7.0-cp311-none-win_amd64.whl", hash = "sha256:ed27b2c43e1b5f6c7fedc5c11d4d8bfa627de42d1143d87e39e2e83ddefd861a"}, - {file = "jiter-0.7.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac7930bcaaeb1e229e35c91c04ed2e9f39025b86ee9fc3141706bbf6fff4aeeb"}, - {file = "jiter-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:571feae3e7c901a8eedde9fd2865b0dfc1432fb15cab8c675a8444f7d11b7c5d"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8af4df8a262fa2778b68c2a03b6e9d1cb4d43d02bea6976d46be77a3a331af1"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd028d4165097a611eb0c7494d8c1f2aebd46f73ca3200f02a175a9c9a6f22f5"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6b487247c7836810091e9455efe56a52ec51bfa3a222237e1587d04d3e04527"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6d28a92f28814e1a9f2824dc11f4e17e1df1f44dc4fdeb94c5450d34bcb2602"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90443994bbafe134f0b34201dad3ebe1c769f0599004084e046fb249ad912425"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f9abf464f9faac652542ce8360cea8e68fba2b78350e8a170248f9bcc228702a"}, - {file = "jiter-0.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db7a8d99fc5f842f7d2852f06ccaed066532292c41723e5dff670c339b649f88"}, - {file = "jiter-0.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:15cf691ebd8693b70c94627d6b748f01e6d697d9a6e9f2bc310934fcfb7cf25e"}, - {file = "jiter-0.7.0-cp312-none-win32.whl", hash = "sha256:9dcd54fa422fb66ca398bec296fed5f58e756aa0589496011cfea2abb5be38a5"}, - {file = "jiter-0.7.0-cp312-none-win_amd64.whl", hash = "sha256:cc989951f73f9375b8eacd571baaa057f3d7d11b7ce6f67b9d54642e7475bfad"}, - {file = "jiter-0.7.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:24cecd18df540963cd27c08ca5ce1d0179f229ff78066d9eecbe5add29361340"}, - {file = "jiter-0.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d41b46236b90b043cca73785674c23d2a67d16f226394079d0953f94e765ed76"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b160db0987171365c153e406a45dcab0ee613ae3508a77bfff42515cb4ce4d6e"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1c8d91e0f0bd78602eaa081332e8ee4f512c000716f5bc54e9a037306d693a7"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997706c683195eeff192d2e5285ce64d2a610414f37da3a3f2625dcf8517cf90"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ea52a8a0ff0229ab2920284079becd2bae0688d432fca94857ece83bb49c541"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d77449d2738cf74752bb35d75ee431af457e741124d1db5e112890023572c7c"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a8203519907a1d81d6cb00902c98e27c2d0bf25ce0323c50ca594d30f5f1fbcf"}, - {file = "jiter-0.7.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41d15ccc53931c822dd7f1aebf09faa3cda2d7b48a76ef304c7dbc19d1302e51"}, - {file = "jiter-0.7.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:febf3179b2fabf71fbd2fd52acb8594163bb173348b388649567a548f356dbf6"}, - {file = "jiter-0.7.0-cp313-none-win32.whl", hash = "sha256:4a8e2d866e7eda19f012444e01b55079d8e1c4c30346aaac4b97e80c54e2d6d3"}, - {file = "jiter-0.7.0-cp313-none-win_amd64.whl", hash = "sha256:7417c2b928062c496f381fb0cb50412eee5ad1d8b53dbc0e011ce45bb2de522c"}, - {file = "jiter-0.7.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9c62c737b5368e51e74960a08fe1adc807bd270227291daede78db24d5fbf556"}, - {file = "jiter-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e4640722b1bef0f6e342fe4606aafaae0eb4f4be5c84355bb6867f34400f6688"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f367488c3b9453eab285424c61098faa1cab37bb49425e69c8dca34f2dfe7d69"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0cf5d42beb3514236459454e3287db53d9c4d56c4ebaa3e9d0efe81b19495129"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cc5190ea1113ee6f7252fa8a5fe5a6515422e378356c950a03bbde5cafbdbaab"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63ee47a149d698796a87abe445fc8dee21ed880f09469700c76c8d84e0d11efd"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48592c26ea72d3e71aa4bea0a93454df907d80638c3046bb0705507b6704c0d7"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:79fef541199bd91cfe8a74529ecccb8eaf1aca38ad899ea582ebbd4854af1e51"}, - {file = "jiter-0.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d1ef6bb66041f2514739240568136c81b9dcc64fd14a43691c17ea793b6535c0"}, - {file = "jiter-0.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca4d950863b1c238e315bf159466e064c98743eef3bd0ff9617e48ff63a4715"}, - {file = "jiter-0.7.0-cp38-none-win32.whl", hash = "sha256:897745f230350dcedb8d1ebe53e33568d48ea122c25e6784402b6e4e88169be7"}, - {file = "jiter-0.7.0-cp38-none-win_amd64.whl", hash = "sha256:b928c76a422ef3d0c85c5e98c498ce3421b313c5246199541e125b52953e1bc0"}, - {file = "jiter-0.7.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c9b669ff6f8ba08270dee9ccf858d3b0203b42314a428a1676762f2d390fbb64"}, - {file = "jiter-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5be919bacd73ca93801c3042bce6e95cb9c555a45ca83617b9b6c89df03b9c2"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a282e1e8a396dabcea82d64f9d05acf7efcf81ecdd925b967020dcb0e671c103"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:17ecb1a578a56e97a043c72b463776b5ea30343125308f667fb8fce4b3796735"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b6045fa0527129218cdcd8a8b839f678219686055f31ebab35f87d354d9c36e"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:189cc4262a92e33c19d4fd24018f5890e4e6da5b2581f0059938877943f8298c"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c138414839effbf30d185e30475c6dc8a16411a1e3681e5fd4605ab1233ac67a"}, - 
{file = "jiter-0.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2791604acef33da6b72d5ecf885a32384bcaf9aa1e4be32737f3b8b9588eef6a"}, - {file = "jiter-0.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae60ec89037a78d60bbf3d8b127f1567769c8fa24886e0abed3f622791dea478"}, - {file = "jiter-0.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:836f03dea312967635233d826f783309b98cfd9ccc76ac776e224cfcef577862"}, - {file = "jiter-0.7.0-cp39-none-win32.whl", hash = "sha256:ebc30ae2ce4bc4986e1764c404b4ea1924f926abf02ce92516485098f8545374"}, - {file = "jiter-0.7.0-cp39-none-win_amd64.whl", hash = "sha256:abf596f951370c648f37aa9899deab296c42a3829736e598b0dd10b08f77a44d"}, - {file = "jiter-0.7.0.tar.gz", hash = "sha256:c061d9738535497b5509f8970584f20de1e900806b239a39a9994fc191dad630"}, + {file = "jiter-0.7.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:262e96d06696b673fad6f257e6a0abb6e873dc22818ca0e0600f4a1189eb334f"}, + {file = "jiter-0.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be6de02939aac5be97eb437f45cfd279b1dc9de358b13ea6e040e63a3221c40d"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935f10b802bc1ce2b2f61843e498c7720aa7f4e4bb7797aa8121eab017293c3d"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9cd3cccccabf5064e4bb3099c87bf67db94f805c1e62d1aefd2b7476e90e0ee2"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4aa919ebfc5f7b027cc368fe3964c0015e1963b92e1db382419dadb098a05192"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ae2d01e82c94491ce4d6f461a837f63b6c4e6dd5bb082553a70c509034ff3d4"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f9568cd66dbbdab67ae1b4c99f3f7da1228c5682d65913e3f5f95586b3cb9a9"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ecbf4e20ec2c26512736284dc1a3f8ed79b6ca7188e3b99032757ad48db97dc"}, + {file = "jiter-0.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1a0508fddc70ce00b872e463b387d49308ef02b0787992ca471c8d4ba1c0fa1"}, + {file = "jiter-0.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f84c9996664c460f24213ff1e5881530abd8fafd82058d39af3682d5fd2d6316"}, + {file = "jiter-0.7.1-cp310-none-win32.whl", hash = "sha256:c915e1a1960976ba4dfe06551ea87063b2d5b4d30759012210099e712a414d9f"}, + {file = "jiter-0.7.1-cp310-none-win_amd64.whl", hash = "sha256:75bf3b7fdc5c0faa6ffffcf8028a1f974d126bac86d96490d1b51b3210aa0f3f"}, + {file = "jiter-0.7.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ad04a23a91f3d10d69d6c87a5f4471b61c2c5cd6e112e85136594a02043f462c"}, + {file = "jiter-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e47a554de88dff701226bb5722b7f1b6bccd0b98f1748459b7e56acac2707a5"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e44fff69c814a2e96a20b4ecee3e2365e9b15cf5fe4e00869d18396daa91dab"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df0a1d05081541b45743c965436f8b5a1048d6fd726e4a030113a2699a6046ea"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f22cf8f236a645cb6d8ffe2a64edb5d2b66fb148bf7c75eea0cb36d17014a7bc"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:da8589f50b728ea4bf22e0632eefa125c8aa9c38ed202a5ee6ca371f05eeb3ff"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f20de711224f2ca2dbb166a8d512f6ff48c9c38cc06b51f796520eb4722cc2ce"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a9803396032117b85ec8cbf008a54590644a062fedd0425cbdb95e4b2b60479"}, + {file = "jiter-0.7.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3d8bae77c82741032e9d89a4026479061aba6e646de3bf5f2fc1ae2bbd9d06e0"}, + {file = "jiter-0.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3dc9939e576bbc68c813fc82f6620353ed68c194c7bcf3d58dc822591ec12490"}, + {file = "jiter-0.7.1-cp311-none-win32.whl", hash = "sha256:f7605d24cd6fab156ec89e7924578e21604feee9c4f1e9da34d8b67f63e54892"}, + {file = "jiter-0.7.1-cp311-none-win_amd64.whl", hash = "sha256:f3ea649e7751a1a29ea5ecc03c4ada0a833846c59c6da75d747899f9b48b7282"}, + {file = "jiter-0.7.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ad36a1155cbd92e7a084a568f7dc6023497df781adf2390c345dd77a120905ca"}, + {file = "jiter-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7ba52e6aaed2dc5c81a3d9b5e4ab95b039c4592c66ac973879ba57c3506492bb"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b7de0b6f6728b678540c7927587e23f715284596724be203af952418acb8a2d"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9463b62bd53c2fb85529c700c6a3beb2ee54fde8bef714b150601616dcb184a6"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:627164ec01d28af56e1f549da84caf0fe06da3880ebc7b7ee1ca15df106ae172"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25d0e5bf64e368b0aa9e0a559c3ab2f9b67e35fe7269e8a0d81f48bbd10e8963"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c244261306f08f8008b3087059601997016549cb8bb23cf4317a4827f07b7d74"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ded4e4b75b68b843b7cea5cd7c55f738c20e1394c68c2cb10adb655526c5f1b"}, + {file = "jiter-0.7.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:80dae4f1889b9d09e5f4de6b58c490d9c8ce7730e35e0b8643ab62b1538f095c"}, + {file = "jiter-0.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5970cf8ec943b51bce7f4b98d2e1ed3ada170c2a789e2db3cb484486591a176a"}, + {file = "jiter-0.7.1-cp312-none-win32.whl", hash = "sha256:701d90220d6ecb3125d46853c8ca8a5bc158de8c49af60fd706475a49fee157e"}, + {file = "jiter-0.7.1-cp312-none-win_amd64.whl", hash = "sha256:7824c3ecf9ecf3321c37f4e4d4411aad49c666ee5bc2a937071bdd80917e4533"}, + {file = "jiter-0.7.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:097676a37778ba3c80cb53f34abd6943ceb0848263c21bf423ae98b090f6c6ba"}, + {file = "jiter-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3298af506d4271257c0a8f48668b0f47048d69351675dd8500f22420d4eec378"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12fd88cfe6067e2199964839c19bd2b422ca3fd792949b8f44bb8a4e7d21946a"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dacca921efcd21939123c8ea8883a54b9fa7f6545c8019ffcf4f762985b6d0c8"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:de3674a5fe1f6713a746d25ad9c32cd32fadc824e64b9d6159b3b34fd9134143"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65df9dbae6d67e0788a05b4bad5706ad40f6f911e0137eb416b9eead6ba6f044"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ba9a358d59a0a55cccaa4957e6ae10b1a25ffdabda863c0343c51817610501d"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:576eb0f0c6207e9ede2b11ec01d9c2182973986514f9c60bc3b3b5d5798c8f50"}, + {file = "jiter-0.7.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e550e29cdf3577d2c970a18f3959e6b8646fd60ef1b0507e5947dc73703b5627"}, + {file = "jiter-0.7.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:81d968dbf3ce0db2e0e4dec6b0a0d5d94f846ee84caf779b07cab49f5325ae43"}, + {file = "jiter-0.7.1-cp313-none-win32.whl", hash = "sha256:f892e547e6e79a1506eb571a676cf2f480a4533675f834e9ae98de84f9b941ac"}, + {file = "jiter-0.7.1-cp313-none-win_amd64.whl", hash = "sha256:0302f0940b1455b2a7fb0409b8d5b31183db70d2b07fd177906d83bf941385d1"}, + {file = "jiter-0.7.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c65a3ce72b679958b79d556473f192a4dfc5895e8cc1030c9f4e434690906076"}, + {file = "jiter-0.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e80052d3db39f9bb8eb86d207a1be3d9ecee5e05fdec31380817f9609ad38e60"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70a497859c4f3f7acd71c8bd89a6f9cf753ebacacf5e3e799138b8e1843084e3"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c1288bc22b9e36854a0536ba83666c3b1fb066b811019d7b682c9cf0269cdf9f"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b096ca72dd38ef35675e1d3b01785874315182243ef7aea9752cb62266ad516f"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dbbd52c50b605af13dbee1a08373c520e6fcc6b5d32f17738875847fea4e2cd"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af29c5c6eb2517e71ffa15c7ae9509fa5e833ec2a99319ac88cc271eca865519"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f114a4df1e40c03c0efbf974b376ed57756a1141eb27d04baee0680c5af3d424"}, + {file = "jiter-0.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:191fbaee7cf46a9dd9b817547bf556facde50f83199d07fc48ebeff4082f9df4"}, + {file = "jiter-0.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0e2b445e5ee627fb4ee6bbceeb486251e60a0c881a8e12398dfdff47c56f0723"}, + {file = "jiter-0.7.1-cp38-none-win32.whl", hash = "sha256:47ac4c3cf8135c83e64755b7276339b26cd3c7ddadf9e67306ace4832b283edf"}, + {file = "jiter-0.7.1-cp38-none-win_amd64.whl", hash = "sha256:60b49c245cd90cde4794f5c30f123ee06ccf42fb8730a019a2870cd005653ebd"}, + {file = "jiter-0.7.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8f212eeacc7203256f526f550d105d8efa24605828382cd7d296b703181ff11d"}, + {file = "jiter-0.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9e247079d88c00e75e297e6cb3a18a039ebcd79fefc43be9ba4eb7fb43eb726"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0aacaa56360139c53dcf352992b0331f4057a0373bbffd43f64ba0c32d2d155"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc1b55314ca97dbb6c48d9144323896e9c1a25d41c65bcb9550b3e0c270ca560"}, + {file 
= "jiter-0.7.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f281aae41b47e90deb70e7386558e877a8e62e1693e0086f37d015fa1c102289"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93c20d2730a84d43f7c0b6fb2579dc54335db742a59cf9776d0b80e99d587382"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e81ccccd8069110e150613496deafa10da2f6ff322a707cbec2b0d52a87b9671"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a7d5e85766eff4c9be481d77e2226b4c259999cb6862ccac5ef6621d3c8dcce"}, + {file = "jiter-0.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f52ce5799df5b6975439ecb16b1e879d7655e1685b6e3758c9b1b97696313bfb"}, + {file = "jiter-0.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0c91a0304373fdf97d56f88356a010bba442e6d995eb7773cbe32885b71cdd8"}, + {file = "jiter-0.7.1-cp39-none-win32.whl", hash = "sha256:5c08adf93e41ce2755970e8aa95262298afe2bf58897fb9653c47cd93c3c6cdc"}, + {file = "jiter-0.7.1-cp39-none-win_amd64.whl", hash = "sha256:6592f4067c74176e5f369228fb2995ed01400c9e8e1225fb73417183a5e635f0"}, + {file = "jiter-0.7.1.tar.gz", hash = "sha256:448cf4f74f7363c34cdef26214da527e8eeffd88ba06d0b80b485ad0667baf5d"}, ] [[package]] @@ -1318,31 +1177,6 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] -[[package]] -name = "jsonpatch" -version = "1.33" -description = "Apply JSON-Patches (RFC 6902)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, - {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, -] - -[package.dependencies] -jsonpointer = ">=1.9" - -[[package]] -name = "jsonpointer" -version = "3.0.0" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, - {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, -] - [[package]] name = "jsonschema" version = "4.23.0" @@ -1379,143 +1213,114 @@ files = [ referencing = ">=0.31.0" [[package]] -name = "langchain" -version = "0.3.7" -description = "Building applications with LLMs through composability" +name = "levenshtein" +version = "0.26.1" +description = "Python extension for computing string edit distances and similarities." 
optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "langchain-0.3.7-py3-none-any.whl", hash = "sha256:cf4af1d5751dacdc278df3de1ff3cbbd8ca7eb55d39deadccdd7fb3d3ee02ac0"}, - {file = "langchain-0.3.7.tar.gz", hash = "sha256:2e4f83bf794ba38562f7ba0ede8171d7e28a583c0cec6f8595cfe72147d336b2"}, -] - -[package.dependencies] -aiohttp = ">=3.8.3,<4.0.0" -async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} -langchain-core = ">=0.3.15,<0.4.0" -langchain-text-splitters = ">=0.3.0,<0.4.0" -langsmith = ">=0.1.17,<0.2.0" -numpy = [ - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, - {version = ">=1,<2", markers = "python_version < \"3.12\""}, -] -pydantic = ">=2.7.4,<3.0.0" -PyYAML = ">=5.3" -requests = ">=2,<3" -SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10" - -[[package]] -name = "langchain-community" -version = "0.3.4" -description = "Community contributed LangChain integrations." -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "langchain_community-0.3.4-py3-none-any.whl", hash = "sha256:67a44d3db8ba14a8abae67c8f611e6dc20002446439e761f673c7dffa506fb85"}, - {file = "langchain_community-0.3.4.tar.gz", hash = "sha256:80c7e6491788449b8a6e7a31444ff8ebb5c32242f67a65aa33d56ad35a7b5b5c"}, -] - -[package.dependencies] -aiohttp = ">=3.8.3,<4.0.0" -dataclasses-json = ">=0.5.7,<0.7" -httpx-sse = ">=0.4.0,<0.5.0" -langchain = ">=0.3.6,<0.4.0" -langchain-core = ">=0.3.14,<0.4.0" -langsmith = ">=0.1.125,<0.2.0" -numpy = [ - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, - {version = ">=1,<2", markers = "python_version < \"3.12\""}, -] -pydantic-settings = ">=2.4.0,<3.0.0" -PyYAML = ">=5.3" -requests = ">=2,<3" -SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10" - -[[package]] -name = "langchain-core" -version = "0.3.15" -description = "Building applications with LLMs through composability" -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "langchain_core-0.3.15-py3-none-any.whl", hash = "sha256:3d4ca6dbb8ed396a6ee061063832a2451b0ce8c345570f7b086ffa7288e4fa29"}, - {file = "langchain_core-0.3.15.tar.gz", hash = "sha256:b1a29787a4ffb7ec2103b4e97d435287201da7809b369740dd1e32f176325aba"}, -] - -[package.dependencies] -jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.125,<0.2.0" -packaging = ">=23.2,<25" -pydantic = [ - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, - {version = ">=2.5.2,<3.0.0", markers = "python_full_version < \"3.12.4\""}, -] -PyYAML = ">=5.3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10.0.0" -typing-extensions = ">=4.7" - -[[package]] -name = "langchain-openai" -version = "0.2.6" -description = "An integration package connecting OpenAI and LangChain" -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "langchain_openai-0.2.6-py3-none-any.whl", hash = "sha256:d56e4d9183bdd1a5fb5f3ed9d287f15108e01d631ded170dd330a566f2927b95"}, - {file = "langchain_openai-0.2.6.tar.gz", hash = "sha256:7054e5f64498ad8e59d77cdc210103f5ea4f67258997edc48ae237298adeb316"}, -] - -[package.dependencies] -langchain-core = ">=0.3.15,<0.4.0" -openai = ">=1.54.0,<2.0.0" -tiktoken = ">=0.7,<1" - -[[package]] -name = "langchain-text-splitters" -version = "0.3.2" -description = "LangChain text splitting utilities" -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "langchain_text_splitters-0.3.2-py3-none-any.whl", hash = "sha256:0db28c53f41d1bc024cdb3b1646741f6d46d5371e90f31e7e7c9fbe75d01c726"}, - 
{file = "langchain_text_splitters-0.3.2.tar.gz", hash = "sha256:81e6515d9901d6dd8e35fb31ccd4f30f76d44b771890c789dc835ef9f16204df"}, -] - -[package.dependencies] -langchain-core = ">=0.3.15,<0.4.0" - -[[package]] -name = "langsmith" -version = "0.1.142" -description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." -optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = ">=3.9" files = [ - {file = "langsmith-0.1.142-py3-none-any.whl", hash = "sha256:f639ca23c9a0bb77af5fb881679b2f66ff1f21f19d0bebf4e51375e7585a8b38"}, - {file = "langsmith-0.1.142.tar.gz", hash = "sha256:f8a84d100f3052233ff0a1d66ae14c5dfc20b7e41a1601de011384f16ee6cb82"}, -] - -[package.dependencies] -httpx = ">=0.23.0,<1" -orjson = ">=3.9.14,<4.0.0" -pydantic = [ - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, -] -requests = ">=2,<3" -requests-toolbelt = ">=1.0.0,<2.0.0" + {file = "levenshtein-0.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8dc4a4aecad538d944a1264c12769c99e3c0bf8e741fc5e454cc954913befb2e"}, + {file = "levenshtein-0.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec108f368c12b25787c8b1a4537a1452bc53861c3ee4abc810cc74098278edcd"}, + {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69229d651c97ed5b55b7ce92481ed00635cdbb80fbfb282a22636e6945dc52d5"}, + {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79dcd157046d62482a7719b08ba9e3ce9ed3fc5b015af8ea989c734c702aedd4"}, + {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f53f9173ae21b650b4ed8aef1d0ad0c37821f367c221a982f4d2922b3044e0d"}, + {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3956f3c5c229257dbeabe0b6aacd2c083ebcc1e335842a6ff2217fe6cc03b6b"}, + {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1e83af732726987d2c4cd736f415dae8b966ba17b7a2239c8b7ffe70bfb5543"}, + {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4f052c55046c2a9c9b5f742f39e02fa6e8db8039048b8c1c9e9fdd27c8a240a1"}, + {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9895b3a98f6709e293615fde0dcd1bb0982364278fa2072361a1a31b3e388b7a"}, + {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a3777de1d8bfca054465229beed23994f926311ce666f5a392c8859bb2722f16"}, + {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:81c57e1135c38c5e6e3675b5e2077d8a8d3be32bf0a46c57276c092b1dffc697"}, + {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:91d5e7d984891df3eff7ea9fec8cf06fdfacc03cd074fd1a410435706f73b079"}, + {file = "levenshtein-0.26.1-cp310-cp310-win32.whl", hash = "sha256:f48abff54054b4142ad03b323e80aa89b1d15cabc48ff49eb7a6ff7621829a56"}, + {file = "levenshtein-0.26.1-cp310-cp310-win_amd64.whl", hash = "sha256:79dd6ad799784ea7b23edd56e3bf94b3ca866c4c6dee845658ee75bb4aefdabf"}, + {file = "levenshtein-0.26.1-cp310-cp310-win_arm64.whl", hash = "sha256:3351ddb105ef010cc2ce474894c5d213c83dddb7abb96400beaa4926b0b745bd"}, + {file = "levenshtein-0.26.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:44c51f5d33b3cfb9db518b36f1288437a509edd82da94c4400f6a681758e0cb6"}, + {file = 
"levenshtein-0.26.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56b93203e725f9df660e2afe3d26ba07d71871b6d6e05b8b767e688e23dfb076"}, + {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:270d36c5da04a0d89990660aea8542227cbd8f5bc34e9fdfadd34916ff904520"}, + {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:480674c05077eeb0b0f748546d4fcbb386d7c737f9fff0010400da3e8b552942"}, + {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13946e37323728695ba7a22f3345c2e907d23f4600bc700bf9b4352fb0c72a48"}, + {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ceb673f572d1d0dc9b1cd75792bb8bad2ae8eb78a7c6721e23a3867d318cb6f2"}, + {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42d6fa242e3b310ce6bfd5af0c83e65ef10b608b885b3bb69863c01fb2fcff98"}, + {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b8b68295808893a81e0a1dbc2274c30dd90880f14d23078e8eb4325ee615fc68"}, + {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b01061d377d1944eb67bc40bef5d4d2f762c6ab01598efd9297ce5d0047eb1b5"}, + {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9d12c8390f156745e533d01b30773b9753e41d8bbf8bf9dac4b97628cdf16314"}, + {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:48825c9f967f922061329d1481b70e9fee937fc68322d6979bc623f69f75bc91"}, + {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d8ec137170b95736842f99c0e7a9fd8f5641d0c1b63b08ce027198545d983e2b"}, + {file = "levenshtein-0.26.1-cp311-cp311-win32.whl", hash = "sha256:798f2b525a2e90562f1ba9da21010dde0d73730e277acaa5c52d2a6364fd3e2a"}, + {file = "levenshtein-0.26.1-cp311-cp311-win_amd64.whl", hash = "sha256:55b1024516c59df55f1cf1a8651659a568f2c5929d863d3da1ce8893753153bd"}, + {file = "levenshtein-0.26.1-cp311-cp311-win_arm64.whl", hash = "sha256:e52575cbc6b9764ea138a6f82d73d3b1bc685fe62e207ff46a963d4c773799f6"}, + {file = "levenshtein-0.26.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cc741ca406d3704dc331a69c04b061fc952509a069b79cab8287413f434684bd"}, + {file = "levenshtein-0.26.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:821ace3b4e1c2e02b43cf5dc61aac2ea43bdb39837ac890919c225a2c3f2fea4"}, + {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92694c9396f55d4c91087efacf81297bef152893806fc54c289fc0254b45384"}, + {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51ba374de7a1797d04a14a4f0ad3602d2d71fef4206bb20a6baaa6b6a502da58"}, + {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7aa5c3327dda4ef952769bacec09c09ff5bf426e07fdc94478c37955681885b"}, + {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e2517e8d3c221de2d1183f400aed64211fcfc77077b291ed9f3bb64f141cdc"}, + {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9092b622765c7649dd1d8af0f43354723dd6f4e570ac079ffd90b41033957438"}, + {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:fc16796c85d7d8b259881d59cc8b5e22e940901928c2ff6924b2c967924e8a0b"}, + {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4370733967f5994ceeed8dc211089bedd45832ee688cecea17bfd35a9eb22b9"}, + {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3535ecfd88c9b283976b5bc61265855f59bba361881e92ed2b5367b6990c93fe"}, + {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:90236e93d98bdfd708883a6767826fafd976dac8af8fc4a0fb423d4fa08e1bf0"}, + {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:04b7cabb82edf566b1579b3ed60aac0eec116655af75a3c551fee8754ffce2ea"}, + {file = "levenshtein-0.26.1-cp312-cp312-win32.whl", hash = "sha256:ae382af8c76f6d2a040c0d9ca978baf461702ceb3f79a0a3f6da8d596a484c5b"}, + {file = "levenshtein-0.26.1-cp312-cp312-win_amd64.whl", hash = "sha256:fd091209798cfdce53746f5769987b4108fe941c54fb2e058c016ffc47872918"}, + {file = "levenshtein-0.26.1-cp312-cp312-win_arm64.whl", hash = "sha256:7e82f2ea44a81ad6b30d92a110e04cd3c8c7c6034b629aca30a3067fa174ae89"}, + {file = "levenshtein-0.26.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:790374a9f5d2cbdb30ee780403a62e59bef51453ac020668c1564d1e43438f0e"}, + {file = "levenshtein-0.26.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7b05c0415c386d00efda83d48db9db68edd02878d6dbc6df01194f12062be1bb"}, + {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3114586032361722ddededf28401ce5baf1cf617f9f49fb86b8766a45a423ff"}, + {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2532f8a13b68bf09f152d906f118a88da2063da22f44c90e904b142b0a53d534"}, + {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:219c30be6aa734bf927188d1208b7d78d202a3eb017b1c5f01ab2034d2d4ccca"}, + {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397e245e77f87836308bd56305bba630010cd8298c34c4c44bd94990cdb3b7b1"}, + {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeff6ea3576f72e26901544c6c55c72a7b79b9983b6f913cba0e9edbf2f87a97"}, + {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a19862e3539a697df722a08793994e334cd12791e8144851e8a1dee95a17ff63"}, + {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:dc3b5a64f57c3c078d58b1e447f7d68cad7ae1b23abe689215d03fc434f8f176"}, + {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bb6c7347424a91317c5e1b68041677e4c8ed3e7823b5bbaedb95bffb3c3497ea"}, + {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b817376de4195a207cc0e4ca37754c0e1e1078c2a2d35a6ae502afde87212f9e"}, + {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7b50c3620ff47c9887debbb4c154aaaac3e46be7fc2e5789ee8dbe128bce6a17"}, + {file = "levenshtein-0.26.1-cp313-cp313-win32.whl", hash = "sha256:9fb859da90262eb474c190b3ca1e61dee83add022c676520f5c05fdd60df902a"}, + {file = "levenshtein-0.26.1-cp313-cp313-win_amd64.whl", hash = "sha256:8adcc90e3a5bfb0a463581d85e599d950fe3c2938ac6247b29388b64997f6e2d"}, + {file = "levenshtein-0.26.1-cp313-cp313-win_arm64.whl", hash = "sha256:c2599407e029865dc66d210b8804c7768cbdbf60f061d993bb488d5242b0b73e"}, + {file = "levenshtein-0.26.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:dc54ced948fc3feafce8ad4ba4239d8ffc733a0d70e40c0363ac2a7ab2b7251e"}, + {file = "levenshtein-0.26.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6516f69213ae393a220e904332f1a6bfc299ba22cf27a6520a1663a08eba0fb"}, + {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4cfea4eada1746d0c75a864bc7e9e63d4a6e987c852d6cec8d9cb0c83afe25b"}, + {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a323161dfeeac6800eb13cfe76a8194aec589cd948bcf1cdc03f66cc3ec26b72"}, + {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c23e749b68ebc9a20b9047317b5cd2053b5856315bc8636037a8adcbb98bed1"}, + {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f80dd7432d4b6cf493d012d22148db7af769017deb31273e43406b1fb7f091c"}, + {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ae7cd6e4312c6ef34b2e273836d18f9fff518d84d823feff5ad7c49668256e0"}, + {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dcdad740e841d791b805421c2b20e859b4ed556396d3063b3aa64cd055be648c"}, + {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e07afb1613d6f5fd99abd4e53ad3b446b4efaa0f0d8e9dfb1d6d1b9f3f884d32"}, + {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f1add8f1d83099a98ae4ac472d896b7e36db48c39d3db25adf12b373823cdeff"}, + {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1010814b1d7a60833a951f2756dfc5c10b61d09976ce96a0edae8fecdfb0ea7c"}, + {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:33fa329d1bb65ce85e83ceda281aea31cee9f2f6e167092cea54f922080bcc66"}, + {file = "levenshtein-0.26.1-cp39-cp39-win32.whl", hash = "sha256:488a945312f2f16460ab61df5b4beb1ea2254c521668fd142ce6298006296c98"}, + {file = "levenshtein-0.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:9f942104adfddd4b336c3997050121328c39479f69de702d7d144abb69ea7ab9"}, + {file = "levenshtein-0.26.1-cp39-cp39-win_arm64.whl", hash = "sha256:c1d8f85b2672939f85086ed75effcf768f6077516a3e299c2ba1f91bc4644c22"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6cf8f1efaf90ca585640c5d418c30b7d66d9ac215cee114593957161f63acde0"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d5b2953978b8c158dd5cd93af8216a5cfddbf9de66cf5481c2955f44bb20767a"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b952b3732c4631c49917d4b15d78cb4a2aa006c1d5c12e2a23ba8e18a307a055"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07227281e12071168e6ae59238918a56d2a0682e529f747b5431664f302c0b42"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8191241cd8934feaf4d05d0cc0e5e72877cbb17c53bbf8c92af9f1aedaa247e9"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9e70d7ee157a9b698c73014f6e2b160830e7d2d64d2e342fefc3079af3c356fc"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0eb3059f826f6cb0a5bca4a85928070f01e8202e7ccafcba94453470f83e49d4"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:6c389e44da12d6fb1d7ba0a709a32a96c9391e9be4160ccb9269f37e040599ee"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e9de292f2c51a7d34a0ae23bec05391b8f61f35781cd3e4c6d0533e06250c55"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d87215113259efdca8716e53b6d59ab6d6009e119d95d45eccc083148855f33"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f00a3eebf68a82fb651d8d0e810c10bfaa60c555d21dde3ff81350c74fb4c2"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b3554c1b59de63d05075577380340c185ff41b028e541c0888fddab3c259a2b4"}, + {file = "levenshtein-0.26.1.tar.gz", hash = "sha256:0d19ba22330d50609b2349021ec3cf7d905c6fe21195a2d0d876a146e7ed2575"}, +] + +[package.dependencies] +rapidfuzz = ">=3.9.0,<4.0.0" [[package]] name = "litellm" -version = "1.52.1" +version = "1.52.9" description = "Library to easily interface with LLM API providers" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "litellm-1.52.1-py3-none-any.whl", hash = "sha256:a76133fc6d14a3157275d9ae850b9f18312ae93ae313092d6cf9e9d35f2c72f2"}, - {file = "litellm-1.52.1.tar.gz", hash = "sha256:750056e0329c5c742193b8f2104133da1e69b2fcc534827e18f7b536af56315c"}, + {file = "litellm-1.52.9-py3-none-any.whl", hash = "sha256:a1ef5561d220d77059a359da497f0ab04c721205c6795f151b07be5bbe51fe45"}, + {file = "litellm-1.52.9.tar.gz", hash = "sha256:73a05fed76cfac4357ee4117f28608209db891223fb9c6e03dddfe1723666437"}, ] [package.dependencies] @@ -1662,25 +1467,6 @@ files = [ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] -[[package]] -name = "marshmallow" -version = "3.23.1" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-optional = false -python-versions = ">=3.9" -files = [ - {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, - {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"}, -] - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "simplejson"] - [[package]] name = "mdurl" version = "0.1.2" @@ -1796,110 +1582,15 @@ files = [ [package.dependencies] typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} -[[package]] -name = "multiprocess" -version = "0.70.15" -description = "better multiprocessing and multithreading in Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multiprocess-0.70.15-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:aa36c7ed16f508091438687fe9baa393a7a8e206731d321e443745e743a0d4e5"}, - {file = "multiprocess-0.70.15-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:20e024018c46d0d1602024c613007ac948f9754659e3853b0aa705e83f6931d8"}, - {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_i686.whl", hash = "sha256:e576062981c91f0fe8a463c3d52506e598dfc51320a8dd8d78b987dfca91c5db"}, - {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:e73f497e6696a0f5433ada2b3d599ae733b87a6e8b008e387c62ac9127add177"}, - {file = "multiprocess-0.70.15-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:73db2e7b32dcc7f9b0f075c2ffa45c90b6729d3f1805f27e88534c8d321a1be5"}, - {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_i686.whl", hash = "sha256:4271647bd8a49c28ecd6eb56a7fdbd3c212c45529ad5303b40b3c65fc6928e5f"}, - {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:cf981fb998d6ec3208cb14f0cf2e9e80216e834f5d51fd09ebc937c32b960902"}, - {file = "multiprocess-0.70.15-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:18f9f2c7063346d1617bd1684fdcae8d33380ae96b99427260f562e1a1228b67"}, - {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_i686.whl", hash = "sha256:0eac53214d664c49a34695e5824872db4006b1a465edd7459a251809c3773370"}, - {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1a51dd34096db47fb21fa2b839e615b051d51b97af9a67afbcdaa67186b44883"}, - {file = "multiprocess-0.70.15-py310-none-any.whl", hash = "sha256:7dd58e33235e83cf09d625e55cffd7b0f0eede7ee9223cdd666a87624f60c21a"}, - {file = "multiprocess-0.70.15-py311-none-any.whl", hash = "sha256:134f89053d82c9ed3b73edd3a2531eb791e602d4f4156fc92a79259590bd9670"}, - {file = "multiprocess-0.70.15-py37-none-any.whl", hash = "sha256:f7d4a1629bccb433114c3b4885f69eccc200994323c80f6feee73b0edc9199c5"}, - {file = "multiprocess-0.70.15-py38-none-any.whl", hash = "sha256:bee9afba476c91f9ebee7beeee0601face9eff67d822e893f9a893725fbd6316"}, - {file = "multiprocess-0.70.15-py39-none-any.whl", hash = "sha256:3e0953f5d52b4c76f1c973eaf8214554d146f2be5decb48e928e55c7a2d19338"}, - {file = "multiprocess-0.70.15.tar.gz", hash = "sha256:f20eed3036c0ef477b07a4177cf7c1ba520d9a2677870a4f47fe026f0cd6787e"}, -] - -[package.dependencies] -dill = ">=0.3.7" - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy 
type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - [[package]] name = "openai" -version = "1.54.3" +version = "1.54.4" description = "The official Python library for the openai API" optional = false python-versions = ">=3.8" files = [ - {file = "openai-1.54.3-py3-none-any.whl", hash = "sha256:f18dbaf09c50d70c4185b892a2a553f80681d1d866323a2da7f7be2f688615d5"}, - {file = "openai-1.54.3.tar.gz", hash = "sha256:7511b74eeb894ac0b0253dc71f087a15d2e4d71d22d0088767205143d880cca6"}, + {file = "openai-1.54.4-py3-none-any.whl", hash = "sha256:0d95cef99346bf9b6d7fbf57faf61a673924c3e34fa8af84c9ffe04660673a7e"}, + {file = "openai-1.54.4.tar.gz", hash = "sha256:50f3656e45401c54e973fa05dc29f3f0b0d19348d685b2f7ddb4d92bf7b1b6bf"}, ] [package.dependencies] @@ -1915,263 +1606,17 @@ typing-extensions = ">=4.11,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs 
(>=1.1.0.11)"] -[[package]] -name = "opentelemetry-api" -version = "1.24.0" -description = "OpenTelemetry Python API" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_api-1.24.0-py3-none-any.whl", hash = "sha256:0f2c363d98d10d1ce93330015ca7fd3a65f60be64e05e30f557c61de52c80ca2"}, - {file = "opentelemetry_api-1.24.0.tar.gz", hash = "sha256:42719f10ce7b5a9a73b10a4baf620574fb8ad495a9cbe5c18d76b75d8689c67e"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<=7.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-common" -version = "1.24.0" -description = "OpenTelemetry Protobuf encoding" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.24.0-py3-none-any.whl", hash = "sha256:e51f2c9735054d598ad2df5d3eca830fecfb5b0bda0a2fa742c9c7718e12f641"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.24.0.tar.gz", hash = "sha256:5d31fa1ff976cacc38be1ec4e3279a3f88435c75b38b1f7a099a1faffc302461"}, -] - -[package.dependencies] -opentelemetry-proto = "1.24.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.24.0" -description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0-py3-none-any.whl", hash = "sha256:f40d62aa30a0a43cc1657428e59fcf82ad5f7ea8fff75de0f9d9cb6f739e0a3b"}, - {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0.tar.gz", hash = "sha256:217c6e30634f2c9797999ea9da29f7300479a94a610139b9df17433f915e7baa"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -grpcio = ">=1.0.0,<2.0.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.24.0" -opentelemetry-proto = "1.24.0" -opentelemetry-sdk = ">=1.24.0,<1.25.0" - -[package.extras] -test = ["pytest-grpc"] - -[[package]] -name = "opentelemetry-proto" -version = "1.24.0" -description = "OpenTelemetry Python Proto" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_proto-1.24.0-py3-none-any.whl", hash = "sha256:bcb80e1e78a003040db71ccf83f2ad2019273d1e0828089d183b18a1476527ce"}, - {file = "opentelemetry_proto-1.24.0.tar.gz", hash = "sha256:ff551b8ad63c6cabb1845ce217a6709358dfaba0f75ea1fa21a61ceddc78cab8"}, -] - -[package.dependencies] -protobuf = ">=3.19,<5.0" - -[[package]] -name = "opentelemetry-sdk" -version = "1.24.0" -description = "OpenTelemetry Python SDK" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_sdk-1.24.0-py3-none-any.whl", hash = "sha256:fa731e24efe832e98bcd90902085b359dcfef7d9c9c00eb5b9a18587dae3eb59"}, - {file = "opentelemetry_sdk-1.24.0.tar.gz", hash = "sha256:75bc0563affffa827700e0f4f4a68e1e257db0df13372344aebc6f8a64cde2e5"}, -] - -[package.dependencies] -opentelemetry-api = "1.24.0" -opentelemetry-semantic-conventions = "0.45b0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.45b0" -description = "OpenTelemetry Semantic Conventions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_semantic_conventions-0.45b0-py3-none-any.whl", hash = "sha256:a4a6fb9a7bacd9167c082aa4681009e9acdbfa28ffb2387af50c2fef3d30c864"}, - {file = "opentelemetry_semantic_conventions-0.45b0.tar.gz", hash = "sha256:7c84215a44ac846bc4b8e32d5e78935c5c43482e491812a0bb8aaf87e4d92118"}, -] - -[[package]] -name = "orjson" -version = "3.10.11" -description = 
"Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.10.11-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6dade64687f2bd7c090281652fe18f1151292d567a9302b34c2dbb92a3872f1f"}, - {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82f07c550a6ccd2b9290849b22316a609023ed851a87ea888c0456485a7d196a"}, - {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd9a187742d3ead9df2e49240234d728c67c356516cf4db018833a86f20ec18c"}, - {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77b0fed6f209d76c1c39f032a70df2d7acf24b1812ca3e6078fd04e8972685a3"}, - {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63fc9d5fe1d4e8868f6aae547a7b8ba0a2e592929245fff61d633f4caccdcdd6"}, - {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65cd3e3bb4fbb4eddc3c1e8dce10dc0b73e808fcb875f9fab40c81903dd9323e"}, - {file = "orjson-3.10.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f67c570602300c4befbda12d153113b8974a3340fdcf3d6de095ede86c06d92"}, - {file = "orjson-3.10.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1f39728c7f7d766f1f5a769ce4d54b5aaa4c3f92d5b84817053cc9995b977acc"}, - {file = "orjson-3.10.11-cp310-none-win32.whl", hash = "sha256:1789d9db7968d805f3d94aae2c25d04014aae3a2fa65b1443117cd462c6da647"}, - {file = "orjson-3.10.11-cp310-none-win_amd64.whl", hash = "sha256:5576b1e5a53a5ba8f8df81872bb0878a112b3ebb1d392155f00f54dd86c83ff6"}, - {file = "orjson-3.10.11-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1444f9cb7c14055d595de1036f74ecd6ce15f04a715e73f33bb6326c9cef01b6"}, - {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdec57fe3b4bdebcc08a946db3365630332dbe575125ff3d80a3272ebd0ddafe"}, - {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eed32f33a0ea6ef36ccc1d37f8d17f28a1d6e8eefae5928f76aff8f1df85e67"}, - {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80df27dd8697242b904f4ea54820e2d98d3f51f91e97e358fc13359721233e4b"}, - {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:705f03cee0cb797256d54de6695ef219e5bc8c8120b6654dd460848d57a9af3d"}, - {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03246774131701de8e7059b2e382597da43144a9a7400f178b2a32feafc54bd5"}, - {file = "orjson-3.10.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8b5759063a6c940a69c728ea70d7c33583991c6982915a839c8da5f957e0103a"}, - {file = "orjson-3.10.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:677f23e32491520eebb19c99bb34675daf5410c449c13416f7f0d93e2cf5f981"}, - {file = "orjson-3.10.11-cp311-none-win32.whl", hash = "sha256:a11225d7b30468dcb099498296ffac36b4673a8398ca30fdaec1e6c20df6aa55"}, - {file = "orjson-3.10.11-cp311-none-win_amd64.whl", hash = "sha256:df8c677df2f9f385fcc85ab859704045fa88d4668bc9991a527c86e710392bec"}, - {file = "orjson-3.10.11-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:360a4e2c0943da7c21505e47cf6bd725588962ff1d739b99b14e2f7f3545ba51"}, - {file = 
"orjson-3.10.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:496e2cb45de21c369079ef2d662670a4892c81573bcc143c4205cae98282ba97"}, - {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7dfa8db55c9792d53c5952900c6a919cfa377b4f4534c7a786484a6a4a350c19"}, - {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51f3382415747e0dbda9dade6f1e1a01a9d37f630d8c9049a8ed0e385b7a90c0"}, - {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f35a1b9f50a219f470e0e497ca30b285c9f34948d3c8160d5ad3a755d9299433"}, - {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f3b7c5803138e67028dde33450e054c87e0703afbe730c105f1fcd873496d5"}, - {file = "orjson-3.10.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f91d9eb554310472bd09f5347950b24442600594c2edc1421403d7610a0998fd"}, - {file = "orjson-3.10.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dfbb2d460a855c9744bbc8e36f9c3a997c4b27d842f3d5559ed54326e6911f9b"}, - {file = "orjson-3.10.11-cp312-none-win32.whl", hash = "sha256:d4a62c49c506d4d73f59514986cadebb7e8d186ad510c518f439176cf8d5359d"}, - {file = "orjson-3.10.11-cp312-none-win_amd64.whl", hash = "sha256:f1eec3421a558ff7a9b010a6c7effcfa0ade65327a71bb9b02a1c3b77a247284"}, - {file = "orjson-3.10.11-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c46294faa4e4d0eb73ab68f1a794d2cbf7bab33b1dda2ac2959ffb7c61591899"}, - {file = "orjson-3.10.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52e5834d7d6e58a36846e059d00559cb9ed20410664f3ad156cd2cc239a11230"}, - {file = "orjson-3.10.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2fc947e5350fdce548bfc94f434e8760d5cafa97fb9c495d2fef6757aa02ec0"}, - {file = "orjson-3.10.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0efabbf839388a1dab5b72b5d3baedbd6039ac83f3b55736eb9934ea5494d258"}, - {file = "orjson-3.10.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a3f29634260708c200c4fe148e42b4aae97d7b9fee417fbdd74f8cfc265f15b0"}, - {file = "orjson-3.10.11-cp313-none-win32.whl", hash = "sha256:1a1222ffcee8a09476bbdd5d4f6f33d06d0d6642df2a3d78b7a195ca880d669b"}, - {file = "orjson-3.10.11-cp313-none-win_amd64.whl", hash = "sha256:bc274ac261cc69260913b2d1610760e55d3c0801bb3457ba7b9004420b6b4270"}, - {file = "orjson-3.10.11-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:19b3763e8bbf8ad797df6b6b5e0fc7c843ec2e2fc0621398534e0c6400098f87"}, - {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be83a13312e5e58d633580c5eb8d0495ae61f180da2722f20562974188af205"}, - {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:afacfd1ab81f46dedd7f6001b6d4e8de23396e4884cd3c3436bd05defb1a6446"}, - {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb4d0bea56bba596723d73f074c420aec3b2e5d7d30698bc56e6048066bd560c"}, - {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96ed1de70fcb15d5fed529a656df29f768187628727ee2788344e8a51e1c1350"}, - {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bfb30c891b530f3f80e801e3ad82ef150b964e5c38e1fb8482441c69c35c61c"}, - {file = 
"orjson-3.10.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d496c74fc2b61341e3cefda7eec21b7854c5f672ee350bc55d9a4997a8a95204"}, - {file = "orjson-3.10.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:655a493bac606655db9a47fe94d3d84fc7f3ad766d894197c94ccf0c5408e7d3"}, - {file = "orjson-3.10.11-cp38-none-win32.whl", hash = "sha256:b9546b278c9fb5d45380f4809e11b4dd9844ca7aaf1134024503e134ed226161"}, - {file = "orjson-3.10.11-cp38-none-win_amd64.whl", hash = "sha256:b592597fe551d518f42c5a2eb07422eb475aa8cfdc8c51e6da7054b836b26782"}, - {file = "orjson-3.10.11-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c95f2ecafe709b4e5c733b5e2768ac569bed308623c85806c395d9cca00e08af"}, - {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80c00d4acded0c51c98754fe8218cb49cb854f0f7eb39ea4641b7f71732d2cb7"}, - {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:461311b693d3d0a060439aa669c74f3603264d4e7a08faa68c47ae5a863f352d"}, - {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52ca832f17d86a78cbab86cdc25f8c13756ebe182b6fc1a97d534051c18a08de"}, - {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c57ea78a753812f528178aa2f1c57da633754c91d2124cb28991dab4c79a54"}, - {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7fcfc6f7ca046383fb954ba528587e0f9336828b568282b27579c49f8e16aad"}, - {file = "orjson-3.10.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:86b9dd983857970c29e4c71bb3e95ff085c07d3e83e7c46ebe959bac07ebd80b"}, - {file = "orjson-3.10.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4d83f87582d223e54efb2242a79547611ba4ebae3af8bae1e80fa9a0af83bb7f"}, - {file = "orjson-3.10.11-cp39-none-win32.whl", hash = "sha256:9fd0ad1c129bc9beb1154c2655f177620b5beaf9a11e0d10bac63ef3fce96950"}, - {file = "orjson-3.10.11-cp39-none-win_amd64.whl", hash = "sha256:10f416b2a017c8bd17f325fb9dee1fb5cdd7a54e814284896b7c3f2763faa017"}, - {file = "orjson-3.10.11.tar.gz", hash = "sha256:e35b6d730de6384d5b2dab5fd23f0d76fae8bbc8c353c2f78210aa5fa4beb3ef"}, -] - [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] -[[package]] -name = "pandas" -version = "2.2.3" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, - {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, - {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, - {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, - {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, - 
{file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, - {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, - {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, - {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard 
(>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - [[package]] name = "playwright" version = "1.48.0" @@ -2207,25 +1652,6 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "portalocker" -version = "2.10.1" -description = "Wraps the portalocker recipe for easy usage" -optional = false -python-versions = ">=3.8" -files = [ - {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, - {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, -] - -[package.dependencies] -pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} - -[package.extras] -docs = ["sphinx (>=1.7.1)"] -redis = ["redis"] -tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] - [[package]] name = "postgrest" version = "0.18.0" @@ -2350,80 +1776,6 @@ files = [ {file = "propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70"}, ] -[[package]] -name = "protobuf" -version = "4.25.5" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, - {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, - {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, - {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, - {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, - {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, - {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, - {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = 
"sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, - {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, - {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, - {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, -] - -[[package]] -name = "pyarrow" -version = "18.0.0" -description = "Python library for Apache Arrow" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pyarrow-18.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:2333f93260674e185cfbf208d2da3007132572e56871f451ba1a556b45dae6e2"}, - {file = "pyarrow-18.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:4c381857754da44326f3a49b8b199f7f87a51c2faacd5114352fc78de30d3aba"}, - {file = "pyarrow-18.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:603cd8ad4976568954598ef0a6d4ed3dfb78aff3d57fa8d6271f470f0ce7d34f"}, - {file = "pyarrow-18.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58a62549a3e0bc9e03df32f350e10e1efb94ec6cf63e3920c3385b26663948ce"}, - {file = "pyarrow-18.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bc97316840a349485fbb137eb8d0f4d7057e1b2c1272b1a20eebbbe1848f5122"}, - {file = "pyarrow-18.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:2e549a748fa8b8715e734919923f69318c953e077e9c02140ada13e59d043310"}, - {file = "pyarrow-18.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:606e9a3dcb0f52307c5040698ea962685fb1c852d72379ee9412be7de9c5f9e2"}, - {file = "pyarrow-18.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d5795e37c0a33baa618c5e054cd61f586cf76850a251e2b21355e4085def6280"}, - {file = "pyarrow-18.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:5f0510608ccd6e7f02ca8596962afb8c6cc84c453e7be0da4d85f5f4f7b0328a"}, - {file = "pyarrow-18.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616ea2826c03c16e87f517c46296621a7c51e30400f6d0a61be645f203aa2b93"}, - {file = "pyarrow-18.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1824f5b029ddd289919f354bc285992cb4e32da518758c136271cf66046ef22"}, - {file = "pyarrow-18.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd1b52d0d58dd8f685ced9971eb49f697d753aa7912f0a8f50833c7a7426319"}, - {file = "pyarrow-18.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:320ae9bd45ad7ecc12ec858b3e8e462578de060832b98fc4d671dee9f10d9954"}, - {file = "pyarrow-18.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:2c992716cffb1088414f2b478f7af0175fd0a76fea80841b1706baa8fb0ebaad"}, - {file = "pyarrow-18.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:e7ab04f272f98ebffd2a0661e4e126036f6936391ba2889ed2d44c5006237802"}, - {file = "pyarrow-18.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:03f40b65a43be159d2f97fd64dc998f769d0995a50c00f07aab58b0b3da87e1f"}, - {file = "pyarrow-18.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be08af84808dff63a76860847c48ec0416928a7b3a17c2f49a072cac7c45efbd"}, - {file = "pyarrow-18.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c70c1965cde991b711a98448ccda3486f2a336457cf4ec4dca257a926e149c9"}, - {file = "pyarrow-18.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:00178509f379415a3fcf855af020e3340254f990a8534294ec3cf674d6e255fd"}, - {file = 
"pyarrow-18.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:a71ab0589a63a3e987beb2bc172e05f000a5c5be2636b4b263c44034e215b5d7"}, - {file = "pyarrow-18.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe92efcdbfa0bcf2fa602e466d7f2905500f33f09eb90bf0bcf2e6ca41b574c8"}, - {file = "pyarrow-18.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:907ee0aa8ca576f5e0cdc20b5aeb2ad4d3953a3b4769fc4b499e00ef0266f02f"}, - {file = "pyarrow-18.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:66dcc216ebae2eb4c37b223feaf82f15b69d502821dde2da138ec5a3716e7463"}, - {file = "pyarrow-18.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc1daf7c425f58527900876354390ee41b0ae962a73ad0959b9d829def583bb1"}, - {file = "pyarrow-18.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871b292d4b696b09120ed5bde894f79ee2a5f109cb84470546471df264cae136"}, - {file = "pyarrow-18.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:082ba62bdcb939824ba1ce10b8acef5ab621da1f4c4805e07bfd153617ac19d4"}, - {file = "pyarrow-18.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:2c664ab88b9766413197733c1720d3dcd4190e8fa3bbdc3710384630a0a7207b"}, - {file = "pyarrow-18.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc892be34dbd058e8d189b47db1e33a227d965ea8805a235c8a7286f7fd17d3a"}, - {file = "pyarrow-18.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:28f9c39a56d2c78bf6b87dcc699d520ab850919d4a8c7418cd20eda49874a2ea"}, - {file = "pyarrow-18.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:f1a198a50c409ab2d009fbf20956ace84567d67f2c5701511d4dd561fae6f32e"}, - {file = "pyarrow-18.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5bd7fd32e3ace012d43925ea4fc8bd1b02cc6cc1e9813b518302950e89b5a22"}, - {file = "pyarrow-18.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336addb8b6f5208be1b2398442c703a710b6b937b1a046065ee4db65e782ff5a"}, - {file = "pyarrow-18.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:45476490dd4adec5472c92b4d253e245258745d0ccaabe706f8d03288ed60a79"}, - {file = "pyarrow-18.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:b46591222c864e7da7faa3b19455196416cd8355ff6c2cc2e65726a760a3c420"}, - {file = "pyarrow-18.0.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:eb7e3abcda7e1e6b83c2dc2909c8d045881017270a119cc6ee7fdcfe71d02df8"}, - {file = "pyarrow-18.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:09f30690b99ce34e0da64d20dab372ee54431745e4efb78ac938234a282d15f9"}, - {file = "pyarrow-18.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d5ca5d707e158540312e09fd907f9f49bacbe779ab5236d9699ced14d2293b8"}, - {file = "pyarrow-18.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6331f280c6e4521c69b201a42dd978f60f7e129511a55da9e0bfe426b4ebb8d"}, - {file = "pyarrow-18.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3ac24b2be732e78a5a3ac0b3aa870d73766dd00beba6e015ea2ea7394f8b4e55"}, - {file = "pyarrow-18.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b30a927c6dff89ee702686596f27c25160dd6c99be5bcc1513a763ae5b1bfc03"}, - {file = "pyarrow-18.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:8f40ec677e942374e3d7f2fad6a67a4c2811a8b975e8703c6fd26d3b168a90e2"}, - {file = "pyarrow-18.0.0.tar.gz", hash = "sha256:a6aa027b1a9d2970cf328ccd6dbe4a996bc13c39fd427f502782f5bdb9ca20f5"}, -] - -[package.extras] -test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] - [[package]] 
name = "pydantic" version = "2.9.2" @@ -2439,8 +1791,8 @@ files = [ annotated-types = ">=0.6.0" pydantic-core = "2.23.4" typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, ] [package.extras] @@ -2616,16 +1968,6 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] -[[package]] -name = "pysbd" -version = "0.3.4" -description = "pysbd (Python Sentence Boundary Disambiguation) is a rule-based sentence boundary detection that works out-of-the-box across many languages." -optional = false -python-versions = ">=3" -files = [ - {file = "pysbd-0.3.4-py3-none-any.whl", hash = "sha256:cd838939b7b0b185fcf86b0baf6636667dfb6e474743beeff878e9f42e022953"}, -] - [[package]] name = "pytest" version = "8.3.3" @@ -2648,20 +1990,6 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] -[[package]] -name = "pytest-repeat" -version = "0.9.3" -description = "pytest plugin for repeating tests" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest_repeat-0.9.3-py3-none-any.whl", hash = "sha256:26ab2df18226af9d5ce441c858f273121e92ff55f5bb311d25755b8d7abdd8ed"}, - {file = "pytest_repeat-0.9.3.tar.gz", hash = "sha256:ffd3836dfcd67bb270bec648b330e20be37d2966448c4148c4092d1e8aba8185"}, -] - -[package.dependencies] -pytest = "*" - [[package]] name = "pytest-xdist" version = "3.6.1" @@ -2765,44 +2093,6 @@ text-unidecode = ">=1.3" [package.extras] unidecode = ["Unidecode (>=1.1.1)"] -[[package]] -name = "pytz" -version = "2024.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, -] - -[[package]] -name = "pywin32" -version = "308" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, - {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, - {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, - {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, - {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, - {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, - {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, - {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, - {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, - {file = "pywin32-308-cp313-cp313-win32.whl", hash = 
"sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, - {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, - {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, - {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, - {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, - {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, - {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, - {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, - {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, -] - [[package]] name = "pyyaml" version = "6.0.2" @@ -2866,33 +2156,104 @@ files = [ ] [[package]] -name = "ragas" -version = "0.2.4" -description = "" +name = "rapidfuzz" +version = "3.10.1" +description = "rapid fuzzy string matching" optional = false -python-versions = "*" +python-versions = ">=3.9" files = [ - {file = "ragas-0.2.4-py3-none-any.whl", hash = "sha256:015f5974098cc8de9096143f6883f2b7f70c3566f0b60b9d71854e7964849c48"}, - {file = "ragas-0.2.4.tar.gz", hash = "sha256:b4edfeac5942bc4e19580a91e04d51f6037f863905ff49fb2c9daad40cfbb476"}, -] - -[package.dependencies] -appdirs = "*" -datasets = "*" -langchain = "*" -langchain-community = "*" -langchain-core = "*" -langchain-openai = "*" -nest-asyncio = "*" -numpy = "*" -openai = ">1" -pydantic = ">=2" -pysbd = ">=0.3.4" -tiktoken = "*" - -[package.extras] -all = ["datacompy", "llama-index", "nltk", "pandas", "rapidfuzz", "rouge-score", "sentence-transformers", "transformers"] -docs = ["mkdocs (>=1.6.1)", "mkdocs-autorefs", "mkdocs-gen-files", "mkdocs-git-committers-plugin-2", "mkdocs-git-revision-date-localized-plugin", "mkdocs-glightbox", "mkdocs-literate-nav", "mkdocs-material", "mkdocs-material[imaging]", "mkdocs-section-index", "mkdocstrings[python]"] + {file = "rapidfuzz-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f17d9f21bf2f2f785d74f7b0d407805468b4c173fa3e52c86ec94436b338e74a"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b31f358a70efc143909fb3d75ac6cd3c139cd41339aa8f2a3a0ead8315731f2b"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4f43f2204b56a61448ec2dd061e26fd344c404da99fb19f3458200c5874ba2"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d81bf186a453a2757472133b24915768abc7c3964194406ed93e170e16c21cb"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3611c8f45379a12063d70075c75134f2a8bd2e4e9b8a7995112ddae95ca1c982"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c3b537b97ac30da4b73930fa8a4fe2f79c6d1c10ad535c5c09726612cd6bed9"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231ef1ec9cf7b59809ce3301006500b9d564ddb324635f4ea8f16b3e2a1780da"}, + {file = 
"rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed4f3adc1294834955b7e74edd3c6bd1aad5831c007f2d91ea839e76461a5879"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7b6015da2e707bf632a71772a2dbf0703cff6525732c005ad24987fe86e8ec32"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1b35a118d61d6f008e8e3fb3a77674d10806a8972c7b8be433d6598df4d60b01"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:bc308d79a7e877226f36bdf4e149e3ed398d8277c140be5c1fd892ec41739e6d"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f017dbfecc172e2d0c37cf9e3d519179d71a7f16094b57430dffc496a098aa17"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win32.whl", hash = "sha256:36c0e1483e21f918d0f2f26799fe5ac91c7b0c34220b73007301c4f831a9c4c7"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:10746c1d4c8cd8881c28a87fd7ba0c9c102346dfe7ff1b0d021cdf093e9adbff"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win_arm64.whl", hash = "sha256:dfa64b89dcb906835e275187569e51aa9d546a444489e97aaf2cc84011565fbe"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:92958ae075c87fef393f835ed02d4fe8d5ee2059a0934c6c447ea3417dfbf0e8"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba7521e072c53e33c384e78615d0718e645cab3c366ecd3cc8cb732befd94967"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d02cbd75d283c287471b5b3738b3e05c9096150f93f2d2dfa10b3d700f2db9"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efa1582a397da038e2f2576c9cd49b842f56fde37d84a6b0200ffebc08d82350"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f12912acee1f506f974f58de9fdc2e62eea5667377a7e9156de53241c05fdba8"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666d5d8b17becc3f53447bcb2b6b33ce6c2df78792495d1fa82b2924cd48701a"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26f71582c0d62445067ee338ddad99b655a8f4e4ed517a90dcbfbb7d19310474"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8a2ef08b27167bcff230ffbfeedd4c4fa6353563d6aaa015d725dd3632fc3de7"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:365e4fc1a2b95082c890f5e98489b894e6bf8c338c6ac89bb6523c2ca6e9f086"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1996feb7a61609fa842e6b5e0c549983222ffdedaf29644cc67e479902846dfe"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:cf654702f144beaa093103841a2ea6910d617d0bb3fccb1d1fd63c54dde2cd49"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec108bf25de674781d0a9a935030ba090c78d49def3d60f8724f3fc1e8e75024"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win32.whl", hash = "sha256:031f8b367e5d92f7a1e27f7322012f3c321c3110137b43cc3bf678505583ef48"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:f98f36c6a1bb9a6c8bbec99ad87c8c0e364f34761739b5ea9adf7b48129ae8cf"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win_arm64.whl", hash = "sha256:f1da2028cb4e41be55ee797a82d6c1cf589442504244249dfeb32efc608edee7"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:1340b56340896bede246f612b6ecf685f661a56aabef3d2512481bfe23ac5835"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2316515169b7b5a453f0ce3adbc46c42aa332cae9f2edb668e24d1fc92b2f2bb"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e06fe6a12241ec1b72c0566c6b28cda714d61965d86569595ad24793d1ab259"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d99c1cd9443b19164ec185a7d752f4b4db19c066c136f028991a480720472e23"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1d9aa156ed52d3446388ba4c2f335e312191d1ca9d1f5762ee983cf23e4ecf6"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:54bcf4efaaee8e015822be0c2c28214815f4f6b4f70d8362cfecbd58a71188ac"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0c955e32afdbfdf6e9ee663d24afb25210152d98c26d22d399712d29a9b976b"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:191633722203f5b7717efcb73a14f76f3b124877d0608c070b827c5226d0b972"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:195baad28057ec9609e40385991004e470af9ef87401e24ebe72c064431524ab"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0fff4a6b87c07366662b62ae994ffbeadc472e72f725923f94b72a3db49f4671"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4ffed25f9fdc0b287f30a98467493d1e1ce5b583f6317f70ec0263b3c97dbba6"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d02cf8e5af89a9ac8f53c438ddff6d773f62c25c6619b29db96f4aae248177c0"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win32.whl", hash = "sha256:f3bb81d4fe6a5d20650f8c0afcc8f6e1941f6fecdb434f11b874c42467baded0"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:aaf83e9170cb1338922ae42d320699dccbbdca8ffed07faeb0b9257822c26e24"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win_arm64.whl", hash = "sha256:c5da802a0d085ad81b0f62828fb55557996c497b2d0b551bbdfeafd6d447892f"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fc22d69a1c9cccd560a5c434c0371b2df0f47c309c635a01a913e03bbf183710"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38b0dac2c8e057562b8f0d8ae5b663d2d6a28c5ab624de5b73cef9abb6129a24"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fde3bbb14e92ce8fcb5c2edfff72e474d0080cadda1c97785bf4822f037a309"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9141fb0592e55f98fe9ac0f3ce883199b9c13e262e0bf40c5b18cdf926109d16"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:237bec5dd1bfc9b40bbd786cd27949ef0c0eb5fab5eb491904c6b5df59d39d3c"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18123168cba156ab5794ea6de66db50f21bb3c66ae748d03316e71b27d907b95"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b75fe506c8e02769cc47f5ab21ce3e09b6211d3edaa8f8f27331cb6988779be"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da82aa4b46973aaf9e03bb4c3d6977004648c8638febfc0f9d237e865761270"}, + 
{file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c34c022d5ad564f1a5a57a4a89793bd70d7bad428150fb8ff2760b223407cdcf"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e96c84d6c2a0ca94e15acb5399118fff669f4306beb98a6d8ec6f5dccab4412"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e8e154b84a311263e1aca86818c962e1fa9eefdd643d1d5d197fcd2738f88cb9"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:335fee93188f8cd585552bb8057228ce0111bd227fa81bfd40b7df6b75def8ab"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win32.whl", hash = "sha256:6729b856166a9e95c278410f73683957ea6100c8a9d0a8dbe434c49663689255"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:0e06d99ad1ad97cb2ef7f51ec6b1fedd74a3a700e4949353871cf331d07b382a"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win_arm64.whl", hash = "sha256:8d1b7082104d596a3eb012e0549b2634ed15015b569f48879701e9d8db959dbb"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:779027d3307e1a2b1dc0c03c34df87a470a368a1a0840a9d2908baf2d4067956"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:440b5608ab12650d0390128d6858bc839ae77ffe5edf0b33a1551f2fa9860651"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cac41a411e07a6f3dc80dfbd33f6be70ea0abd72e99c59310819d09f07d945"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:958473c9f0bca250590200fd520b75be0dbdbc4a7327dc87a55b6d7dc8d68552"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ef60dfa73749ef91cb6073be1a3e135f4846ec809cc115f3cbfc6fe283a5584"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7fbac18f2c19fc983838a60611e67e3262e36859994c26f2ee85bb268de2355"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a0d519ff39db887cd73f4e297922786d548f5c05d6b51f4e6754f452a7f4296"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bebb7bc6aeb91cc57e4881b222484c26759ca865794187217c9dcea6c33adae6"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe07f8b9c3bb5c5ad1d2c66884253e03800f4189a60eb6acd6119ebaf3eb9894"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bfa48a4a2d45a41457f0840c48e579db157a927f4e97acf6e20df8fc521c79de"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2cf44d01bfe8ee605b7eaeecbc2b9ca64fc55765f17b304b40ed8995f69d7716"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e6bbca9246d9eedaa1c84e04a7f555493ba324d52ae4d9f3d9ddd1b740dcd87"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win32.whl", hash = "sha256:567f88180f2c1423b4fe3f3ad6e6310fc97b85bdba574801548597287fc07028"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6b2cd7c29d6ecdf0b780deb587198f13213ac01c430ada6913452fd0c40190fc"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win_arm64.whl", hash = "sha256:9f912d459e46607ce276128f52bea21ebc3e9a5ccf4cccfef30dd5bddcf47be8"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac4452f182243cfab30ba4668ef2de101effaedc30f9faabb06a095a8c90fd16"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:565c2bd4f7d23c32834652b27b51dd711814ab614b4e12add8476be4e20d1cf5"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d9747149321607be4ccd6f9f366730078bed806178ec3eeb31d05545e9e8f"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:616290fb9a8fa87e48cb0326d26f98d4e29f17c3b762c2d586f2b35c1fd2034b"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073a5b107e17ebd264198b78614c0206fa438cce749692af5bc5f8f484883f50"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39c4983e2e2ccb9732f3ac7d81617088822f4a12291d416b09b8a1eadebb3e29"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac7adee6bcf0c6fee495d877edad1540a7e0f5fc208da03ccb64734b43522d7a"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:425f4ac80b22153d391ee3f94bc854668a0c6c129f05cf2eaf5ee74474ddb69e"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65a2fa13e8a219f9b5dcb9e74abe3ced5838a7327e629f426d333dfc8c5a6e66"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75561f3df9a906aaa23787e9992b228b1ab69007932dc42070f747103e177ba8"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edd062490537e97ca125bc6c7f2b7331c2b73d21dc304615afe61ad1691e15d5"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfcc8feccf63245a22dfdd16e222f1a39771a44b870beb748117a0e09cbb4a62"}, + {file = "rapidfuzz-3.10.1.tar.gz", hash = "sha256:5a15546d847a915b3f42dc79ef9b0c78b998b4e2c53b252e7166284066585979"}, +] + +[package.extras] +all = ["numpy"] [[package]] name = "realtime" @@ -3050,20 +2411,6 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "requests-toolbelt" -version = "1.0.0" -description = "A utility belt for advanced users of python-requests" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, - {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, -] - -[package.dependencies] -requests = ">=2.0.1,<3.0.0" - [[package]] name = "rich" version = "13.9.4" @@ -3184,29 +2531,29 @@ files = [ [[package]] name = "ruff" -version = "0.7.3" +version = "0.7.4" description = "An extremely fast Python linter and code formatter, written in Rust." 
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"},
-    {file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"},
-    {file = "ruff-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:37d0b619546103274e7f62643d14e1adcbccb242efda4e4bdb9544d7764782e9"},
-    {file = "ruff-0.7.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59f0c3ee4d1a6787614e7135b72e21024875266101142a09a61439cb6e38a5"},
-    {file = "ruff-0.7.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44eb93c2499a169d49fafd07bc62ac89b1bc800b197e50ff4633aed212569299"},
-    {file = "ruff-0.7.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d0242ce53f3a576c35ee32d907475a8d569944c0407f91d207c8af5be5dae4e"},
-    {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6b6224af8b5e09772c2ecb8dc9f3f344c1aa48201c7f07e7315367f6dd90ac29"},
-    {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c50f95a82b94421c964fae4c27c0242890a20fe67d203d127e84fbb8013855f5"},
-    {file = "ruff-0.7.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f3eff9961b5d2644bcf1616c606e93baa2d6b349e8aa8b035f654df252c8c67"},
-    {file = "ruff-0.7.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8963cab06d130c4df2fd52c84e9f10d297826d2e8169ae0c798b6221be1d1d2"},
-    {file = "ruff-0.7.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:61b46049d6edc0e4317fb14b33bd693245281a3007288b68a3f5b74a22a0746d"},
-    {file = "ruff-0.7.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:10ebce7696afe4644e8c1a23b3cf8c0f2193a310c18387c06e583ae9ef284de2"},
-    {file = "ruff-0.7.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3f36d56326b3aef8eeee150b700e519880d1aab92f471eefdef656fd57492aa2"},
-    {file = "ruff-0.7.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5d024301109a0007b78d57ab0ba190087b43dce852e552734ebf0b0b85e4fb16"},
-    {file = "ruff-0.7.3-py3-none-win32.whl", hash = "sha256:4ba81a5f0c5478aa61674c5a2194de8b02652f17addf8dfc40c8937e6e7d79fc"},
-    {file = "ruff-0.7.3-py3-none-win_amd64.whl", hash = "sha256:588a9ff2fecf01025ed065fe28809cd5a53b43505f48b69a1ac7707b1b7e4088"},
-    {file = "ruff-0.7.3-py3-none-win_arm64.whl", hash = "sha256:1713e2c5545863cdbfe2cbce21f69ffaf37b813bfd1fb3b90dc9a6f1963f5a8c"},
-    {file = "ruff-0.7.3.tar.gz", hash = "sha256:e1d1ba2e40b6e71a61b063354d04be669ab0d39c352461f3d789cac68b54a313"},
+    {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
+    {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
+    {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
+    {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
+    {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
+    {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
+    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
+    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
+    {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
+    {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
+    {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
+    {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
+    {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
+    {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
+    {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
+    {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
+    {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
+    {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
 ]
 
 [[package]]
@@ -3226,60 +2573,6 @@ botocore = ">=1.33.2,<2.0a.0"
 
 [package.extras]
 crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
 
-[[package]]
-name = "sentry-sdk"
-version = "2.18.0"
-description = "Python client for Sentry (https://sentry.io)"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "sentry_sdk-2.18.0-py2.py3-none-any.whl", hash = "sha256:ee70e27d1bbe4cd52a38e1bd28a5fadb9b17bc29d91b5f2b97ae29c0a7610442"},
-    {file = "sentry_sdk-2.18.0.tar.gz", hash = "sha256:0dc21febd1ab35c648391c664df96f5f79fb0d92d7d4225cd9832e53a617cafd"},
-]
-
-[package.dependencies]
-certifi = "*"
-urllib3 = ">=1.26.11"
-
-[package.extras]
-aiohttp = ["aiohttp (>=3.5)"]
-anthropic = ["anthropic (>=0.16)"]
-arq = ["arq (>=0.23)"]
-asyncpg = ["asyncpg (>=0.23)"]
-beam = ["apache-beam (>=2.12)"]
-bottle = ["bottle (>=0.12.13)"]
-celery = ["celery (>=3)"]
-celery-redbeat = ["celery-redbeat (>=2)"]
-chalice = ["chalice (>=1.16.0)"]
-clickhouse-driver = ["clickhouse-driver (>=0.2.0)"]
-django = ["django (>=1.8)"]
-falcon = ["falcon (>=1.4)"]
-fastapi = ["fastapi (>=0.79.0)"]
-flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"]
-grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"]
-http2 = ["httpcore[http2] (==1.*)"]
-httpx = ["httpx (>=0.16.0)"]
-huey = ["huey (>=2)"]
-huggingface-hub = ["huggingface-hub (>=0.22)"]
-langchain = ["langchain (>=0.0.210)"]
-launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"]
-litestar = ["litestar (>=2.0.0)"]
-loguru = ["loguru (>=0.5)"]
-openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
-openfeature = ["openfeature-sdk (>=0.7.1)"]
-opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
-opentelemetry-experimental = ["opentelemetry-distro"]
-pure-eval = ["asttokens", "executing", "pure-eval"]
-pymongo = ["pymongo (>=3.1)"]
-pyspark = ["pyspark (>=2.4.4)"]
-quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
-rq = ["rq (>=0.6)"]
-sanic = ["sanic (>=0.8)"]
-sqlalchemy = ["sqlalchemy (>=1.2)"]
-starlette = ["starlette (>=0.19.1)"]
-starlite = ["starlite (>=1.48)"]
-tornado = ["tornado (>=6)"]
-
 [[package]]
 name = "setuptools"
 version = "72.2.0"
@@ -3346,6 +2639,17 @@ files = [
 [package.extras]
 optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<14)"]
 
+[[package]]
+name = "smmap"
+version = "5.0.1"
+description = "A pure Python implementation of a sliding window memory map manager"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"},
+    {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"},
+]
+
 [[package]]
 name = "sniffio"
 version = "1.3.1"
@@ -3369,100 +2673,16 @@ files = [
 ]
 
 [[package]]
-name = "sqlalchemy"
-version = "2.0.36"
-description = "Database Abstraction Library"
+name = "sseclient-py"
+version = "1.8.0"
+description = "SSE client for Python"
 optional = false
-python-versions = ">=3.7"
+python-versions = "*"
 files = [
-    {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"},
-    {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"},
-    {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"},
-    {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"},
-    {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"},
-    {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"},
-    {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"},
-    {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"},
-    {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"},
-    {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"},
-    {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"},
-    {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"},
-    {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"},
-    {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"},
-    {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"},
-    {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"},
-    {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"},
-    {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"},
-    {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"},
-    {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"},
-    {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"},
-    {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"},
-    {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"},
-    {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"},
-    {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"},
-    {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"},
-    {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"},
-    {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"},
-    {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"},
-    {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"},
-    {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"},
-    {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"},
-    {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"},
-    {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"},
-    {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"},
-    {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"},
-    {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"},
-    {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"},
-    {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"},
-    {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"},
-    {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"},
-    {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"},
-    {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"},
-    {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"},
-    {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"},
-    {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"},
-    {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"},
-    {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"},
-    {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"},
-    {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"},
-    {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"},
-    {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"},
-    {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"},
-    {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"},
-    {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"},
-    {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"},
-    {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"},
+    {file = "sseclient-py-1.8.0.tar.gz", hash = "sha256:c547c5c1a7633230a38dc599a21a2dc638f9b5c297286b48b46b935c71fac3e8"},
+    {file = "sseclient_py-1.8.0-py2.py3-none-any.whl", hash = "sha256:4ecca6dc0b9f963f8384e9d7fd529bf93dd7d708144c4fb5da0e0a1a926fee83"},
 ]
 
-[package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
-typing-extensions = ">=4.6.0"
-
-[package.extras]
-aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
-aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
-aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
-asyncio = ["greenlet (!=0.4.17)"]
-asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"]
-mssql = ["pyodbc"]
-mssql-pymssql = ["pymssql"]
-mssql-pyodbc = ["pyodbc"]
-mypy = ["mypy (>=0.910)"]
-mysql = ["mysqlclient (>=1.4.0)"]
-mysql-connector = ["mysql-connector-python"]
-oracle = ["cx_oracle (>=8)"]
-oracle-oracledb = ["oracledb (>=1.0.1)"]
-postgresql = ["psycopg2 (>=2.7)"]
-postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
-postgresql-psycopg = ["psycopg (>=3.0.7)"]
-postgresql-psycopg2binary = ["psycopg2-binary"]
-postgresql-psycopg2cffi = ["psycopg2cffi"]
-postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
-pymysql = ["pymysql"]
-sqlcipher = ["sqlcipher3_binary"]
-
 [[package]]
 name = "starlette"
 version = "0.40.0"
@@ -3545,35 +2765,6 @@ files = [
 [package.dependencies]
 httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
 
-[[package]]
-name = "tabulate"
-version = "0.9.0"
-description = "Pretty-print tabular data"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
-    {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
-]
-
-[package.extras]
-widechars = ["wcwidth"]
-
-[[package]]
-name = "tenacity"
-version = "8.4.2"
-description = "Retry code until it succeeds"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "tenacity-8.4.2-py3-none-any.whl", hash = "sha256:9e6f7cf7da729125c7437222f8a522279751cdfbe6b67bfe64f75d3a348661b2"},
-    {file = "tenacity-8.4.2.tar.gz", hash = "sha256:cd80a53a79336edba8489e767f729e4f391c896956b57140b5d7511a64bbd3ef"},
-]
-
-[package.extras]
-doc = ["reno", "sphinx"]
-test = ["pytest", "tornado (>=4.5)", "typeguard"]
-
 [[package]]
 name = "text-unidecode"
 version = "1.3"
@@ -3763,13 +2954,13 @@ testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"]
 
 [[package]]
 name = "tomli"
-version = "2.0.2"
+version = "2.1.0"
 description = "A lil' TOML parser"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
-    {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
+    {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
+    {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
 ]
 
 [[package]]
@@ -3821,32 +3012,6 @@ files = [
     {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
 ]
 
-[[package]]
-name = "typing-inspect"
-version = "0.9.0"
-description = "Runtime inspection utilities for typing module."
-optional = false
-python-versions = "*"
-files = [
-    {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"},
-    {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"},
-]
-
-[package.dependencies]
-mypy-extensions = ">=0.3.0"
-typing-extensions = ">=3.7.4"
-
-[[package]]
-name = "tzdata"
-version = "2024.2"
-description = "Provider of IANA time zone data"
-optional = false
-python-versions = ">=2"
-files = [
-    {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"},
-    {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"},
-]
-
 [[package]]
 name = "urllib3"
 version = "1.26.20"
@@ -3977,306 +3142,95 @@ files = [
     {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"},
 ]
 
-[[package]]
-name = "wrapt"
-version = "1.16.0"
-description = "Module for decorators, wrappers and monkey patching."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
-    {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
-    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
-    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
-    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
-    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
-    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
-    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
-    {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
-    {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
-    {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
-    {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
-    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
-    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
-    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
-    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
-    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
-    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
-    {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
-    {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
-    {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
-    {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
-    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
-    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
-    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
-    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
-    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
-    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
-    {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
-    {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
-    {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
-    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
-    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
-    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
-    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
-    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
-    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
-    {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
-    {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
-    {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
-    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
-    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
-    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
-    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
-    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
-    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
-    {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
-    {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
-    {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
-    {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
-    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
-    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
-    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
-    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
-    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
-    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
-    {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
-    {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
-    {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
-    {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
-    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
-    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
-    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
-    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
-    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
-    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
-    {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
-    {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
-    {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
-    {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
-]
-
-[[package]]
-name = "xxhash"
-version = "3.5.0"
-description = "Python binding for xxHash"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "xxhash-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ece616532c499ee9afbb83078b1b952beffef121d989841f7f4b3dc5ac0fd212"},
-    {file = "xxhash-3.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3171f693dbc2cef6477054a665dc255d996646b4023fe56cb4db80e26f4cc520"},
-    {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c5d3e570ef46adaf93fc81b44aca6002b5a4d8ca11bd0580c07eac537f36680"},
-    {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7cb29a034301e2982df8b1fe6328a84f4b676106a13e9135a0d7e0c3e9f806da"},
-    {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0d307d27099bb0cbeea7260eb39ed4fdb99c5542e21e94bb6fd29e49c57a23"},
-    {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0342aafd421795d740e514bc9858ebddfc705a75a8c5046ac56d85fe97bf196"},
-    {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dbbd9892c5ebffeca1ed620cf0ade13eb55a0d8c84e0751a6653adc6ac40d0c"},
-    {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4cc2d67fdb4d057730c75a64c5923abfa17775ae234a71b0200346bfb0a7f482"},
-    {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ec28adb204b759306a3d64358a5e5c07d7b1dd0ccbce04aa76cb9377b7b70296"},
-    {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1328f6d8cca2b86acb14104e381225a3d7b42c92c4b86ceae814e5c400dbb415"},
-    {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8d47ebd9f5d9607fd039c1fbf4994e3b071ea23eff42f4ecef246ab2b7334198"},
-    {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b96d559e0fcddd3343c510a0fe2b127fbff16bf346dd76280b82292567523442"},
-    {file = "xxhash-3.5.0-cp310-cp310-win32.whl", hash = "sha256:61c722ed8d49ac9bc26c7071eeaa1f6ff24053d553146d5df031802deffd03da"},
-    {file = "xxhash-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:9bed5144c6923cc902cd14bb8963f2d5e034def4486ab0bbe1f58f03f042f9a9"},
-    {file = "xxhash-3.5.0-cp310-cp310-win_arm64.whl", hash = "sha256:893074d651cf25c1cc14e3bea4fceefd67f2921b1bb8e40fcfeba56820de80c6"},
-    {file = "xxhash-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02c2e816896dc6f85922ced60097bcf6f008dedfc5073dcba32f9c8dd786f3c1"},
-    {file = "xxhash-3.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6027dcd885e21581e46d3c7f682cfb2b870942feeed58a21c29583512c3f09f8"},
-    {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1308fa542bbdbf2fa85e9e66b1077eea3a88bef38ee8a06270b4298a7a62a166"},
-    {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c28b2fdcee797e1c1961cd3bcd3d545cab22ad202c846235197935e1df2f8ef7"},
-    {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:924361811732ddad75ff23e90efd9ccfda4f664132feecb90895bade6a1b4623"},
-    {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89997aa1c4b6a5b1e5b588979d1da048a3c6f15e55c11d117a56b75c84531f5a"},
-    {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c4f4e8c59837de103344eb1c8a3851f670309eb5c361f746805c5471b8c88"},
-    {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbd2ecfbfee70bc1a4acb7461fa6af7748ec2ab08ac0fa298f281c51518f982c"},
-    {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25b5a51dc3dfb20a10833c8eee25903fd2e14059e9afcd329c9da20609a307b2"},
-    {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a8fb786fb754ef6ff8c120cb96629fb518f8eb5a61a16aac3a979a9dbd40a084"},
-    {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a905ad00ad1e1c34fe4e9d7c1d949ab09c6fa90c919860c1534ff479f40fd12d"},
-    {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:963be41bcd49f53af6d795f65c0da9b4cc518c0dd9c47145c98f61cb464f4839"},
-    {file = "xxhash-3.5.0-cp311-cp311-win32.whl", hash = "sha256:109b436096d0a2dd039c355fa3414160ec4d843dfecc64a14077332a00aeb7da"},
-    {file = "xxhash-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:b702f806693201ad6c0a05ddbbe4c8f359626d0b3305f766077d51388a6bac58"},
-    {file = "xxhash-3.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:c4dcb4120d0cc3cc448624147dba64e9021b278c63e34a38789b688fd0da9bf3"},
-    {file = "xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00"},
-    {file = "xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9"},
-    {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84"},
-    {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793"},
-    {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be"},
-    {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6"},
-    {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90"},
-    {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27"},
-    {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2"},
-    {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d"},
-    {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab"},
-    {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e"},
-    {file = "xxhash-3.5.0-cp312-cp312-win32.whl", hash = "sha256:f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8"},
-    {file = "xxhash-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e"},
-    {file = "xxhash-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2"},
-    {file = "xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6"},
-    {file = "xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5"},
-    {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc"},
-    {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3"},
-    {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c"},
-    {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb"},
-    {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f"},
-    {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7"},
-    {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326"},
-    {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf"},
-    {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7"},
-    {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c"},
-    {file = "xxhash-3.5.0-cp313-cp313-win32.whl", hash = "sha256:53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637"},
-    {file = "xxhash-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43"},
-    {file = "xxhash-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b"},
-    {file = "xxhash-3.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6e5f70f6dca1d3b09bccb7daf4e087075ff776e3da9ac870f86ca316736bb4aa"},
-    {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e76e83efc7b443052dd1e585a76201e40b3411fe3da7af4fe434ec51b2f163b"},
-    {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33eac61d0796ca0591f94548dcfe37bb193671e0c9bcf065789b5792f2eda644"},
-    {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ec70a89be933ea49222fafc3999987d7899fc676f688dd12252509434636622"},
-    {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86b8e7f703ec6ff4f351cfdb9f428955859537125904aa8c963604f2e9d3e7"},
-    {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0adfbd36003d9f86c8c97110039f7539b379f28656a04097e7434d3eaf9aa131"},
-    {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:63107013578c8a730419adc05608756c3fa640bdc6abe806c3123a49fb829f43"},
-    {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:683b94dbd1ca67557850b86423318a2e323511648f9f3f7b1840408a02b9a48c"},
-    {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5d2a01dcce81789cf4b12d478b5464632204f4c834dc2d064902ee27d2d1f0ee"},
-    {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:a9d360a792cbcce2fe7b66b8d51274ec297c53cbc423401480e53b26161a290d"},
-    {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:f0b48edbebea1b7421a9c687c304f7b44d0677c46498a046079d445454504737"},
-    {file = "xxhash-3.5.0-cp37-cp37m-win32.whl", hash = "sha256:7ccb800c9418e438b44b060a32adeb8393764da7441eb52aa2aa195448935306"},
-    {file = "xxhash-3.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c3bc7bf8cb8806f8d1c9bf149c18708cb1c406520097d6b0a73977460ea03602"},
-    {file = "xxhash-3.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:74752ecaa544657d88b1d1c94ae68031e364a4d47005a90288f3bab3da3c970f"},
-    {file = "xxhash-3.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dee1316133c9b463aa81aca676bc506d3f80d8f65aeb0bba2b78d0b30c51d7bd"},
-    {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:602d339548d35a8579c6b013339fb34aee2df9b4e105f985443d2860e4d7ffaa"},
-    {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:695735deeddfb35da1677dbc16a083445360e37ff46d8ac5c6fcd64917ff9ade"},
-    {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1030a39ba01b0c519b1a82f80e8802630d16ab95dc3f2b2386a0b5c8ed5cbb10"},
-    {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5bc08f33c4966f4eb6590d6ff3ceae76151ad744576b5fc6c4ba8edd459fdec"},
-    {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160e0c19ee500482ddfb5d5570a0415f565d8ae2b3fd69c5dcfce8a58107b1c3"},
-    {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f1abffa122452481a61c3551ab3c89d72238e279e517705b8b03847b1d93d738"},
-    {file =
"xxhash-3.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d5e9db7ef3ecbfc0b4733579cea45713a76852b002cf605420b12ef3ef1ec148"}, - {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:23241ff6423378a731d84864bf923a41649dc67b144debd1077f02e6249a0d54"}, - {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:82b833d5563fefd6fceafb1aed2f3f3ebe19f84760fdd289f8b926731c2e6e91"}, - {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a80ad0ffd78bef9509eee27b4a29e56f5414b87fb01a888353e3d5bda7038bd"}, - {file = "xxhash-3.5.0-cp38-cp38-win32.whl", hash = "sha256:50ac2184ffb1b999e11e27c7e3e70cc1139047e7ebc1aa95ed12f4269abe98d4"}, - {file = "xxhash-3.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:392f52ebbb932db566973693de48f15ce787cabd15cf6334e855ed22ea0be5b3"}, - {file = "xxhash-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc8cdd7f33d57f0468b0614ae634cc38ab9202c6957a60e31d285a71ebe0301"}, - {file = "xxhash-3.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0c48b6300cd0b0106bf49169c3e0536408dfbeb1ccb53180068a18b03c662ab"}, - {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1a92cfbaa0a1253e339ccec42dbe6db262615e52df591b68726ab10338003f"}, - {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33513d6cc3ed3b559134fb307aae9bdd94d7e7c02907b37896a6c45ff9ce51bd"}, - {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eefc37f6138f522e771ac6db71a6d4838ec7933939676f3753eafd7d3f4c40bc"}, - {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a606c8070ada8aa2a88e181773fa1ef17ba65ce5dd168b9d08038e2a61b33754"}, - {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42eca420c8fa072cc1dd62597635d140e78e384a79bb4944f825fbef8bfeeef6"}, - {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:604253b2143e13218ff1ef0b59ce67f18b8bd1c4205d2ffda22b09b426386898"}, - {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6e93a5ad22f434d7876665444a97e713a8f60b5b1a3521e8df11b98309bff833"}, - {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7a46e1d6d2817ba8024de44c4fd79913a90e5f7265434cef97026215b7d30df6"}, - {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:30eb2efe6503c379b7ab99c81ba4a779748e3830241f032ab46bd182bf5873af"}, - {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c8aa771ff2c13dd9cda8166d685d7333d389fae30a4d2bb39d63ab5775de8606"}, - {file = "xxhash-3.5.0-cp39-cp39-win32.whl", hash = "sha256:5ed9ebc46f24cf91034544b26b131241b699edbfc99ec5e7f8f3d02d6eb7fba4"}, - {file = "xxhash-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:220f3f896c6b8d0316f63f16c077d52c412619e475f9372333474ee15133a558"}, - {file = "xxhash-3.5.0-cp39-cp39-win_arm64.whl", hash = "sha256:a7b1d8315d9b5e9f89eb2933b73afae6ec9597a258d52190944437158b49d38e"}, - {file = "xxhash-3.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2014c5b3ff15e64feecb6b713af12093f75b7926049e26a580e94dcad3c73d8c"}, - {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fab81ef75003eda96239a23eda4e4543cedc22e34c373edcaf744e721a163986"}, - {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4e2febf914ace002132aa09169cc572e0d8959d0f305f93d5828c4836f9bc5a6"}, - {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d3a10609c51da2a1c0ea0293fc3968ca0a18bd73838455b5bca3069d7f8e32b"}, - {file = "xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a74f23335b9689b66eb6dbe2a931a88fcd7a4c2cc4b1cb0edba8ce381c7a1da"}, - {file = "xxhash-3.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2b4154c00eb22e4d543f472cfca430e7962a0f1d0f3778334f2e08a7ba59363c"}, - {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d30bbc1644f726b825b3278764240f449d75f1a8bdda892e641d4a688b1494ae"}, - {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa0b72f2423e2aa53077e54a61c28e181d23effeaafd73fcb9c494e60930c8e"}, - {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13de2b76c1835399b2e419a296d5b38dc4855385d9e96916299170085ef72f57"}, - {file = "xxhash-3.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0691bfcc4f9c656bcb96cc5db94b4d75980b9d5589f2e59de790091028580837"}, - {file = "xxhash-3.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:297595fe6138d4da2c8ce9e72a04d73e58725bb60f3a19048bc96ab2ff31c692"}, - {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1276d369452040cbb943300dc8abeedab14245ea44056a2943183822513a18"}, - {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2061188a1ba352fc699c82bff722f4baacb4b4b8b2f0c745d2001e56d0dfb514"}, - {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c384c434021e4f62b8d9ba0bc9467e14d394893077e2c66d826243025e1f81"}, - {file = "xxhash-3.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e6a4dd644d72ab316b580a1c120b375890e4c52ec392d4aef3c63361ec4d77d1"}, - {file = "xxhash-3.5.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:531af8845aaadcadf951b7e0c1345c6b9c68a990eeb74ff9acd8501a0ad6a1c9"}, - {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce379bcaa9fcc00f19affa7773084dd09f5b59947b3fb47a1ceb0179f91aaa1"}, - {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd1b2281d01723f076df3c8188f43f2472248a6b63118b036e641243656b1b0f"}, - {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c770750cc80e8694492244bca7251385188bc5597b6a39d98a9f30e8da984e0"}, - {file = "xxhash-3.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b150b8467852e1bd844387459aa6fbe11d7f38b56e901f9f3b3e6aba0d660240"}, - {file = "xxhash-3.5.0.tar.gz", hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f"}, -] - [[package]] name = "yarl" -version = "1.17.1" +version = "1.17.2" description = "Yet another URL library" optional = false python-versions = ">=3.9" files = [ - {file = "yarl-1.17.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1794853124e2f663f0ea54efb0340b457f08d40a1cef78edfa086576179c91"}, - {file = "yarl-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fbea1751729afe607d84acfd01efd95e3b31db148a181a441984ce9b3d3469da"}, - {file = 
"yarl-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ee427208c675f1b6e344a1f89376a9613fc30b52646a04ac0c1f6587c7e46ec"}, - {file = "yarl-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b74ff4767d3ef47ffe0cd1d89379dc4d828d4873e5528976ced3b44fe5b0a21"}, - {file = "yarl-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62a91aefff3d11bf60e5956d340eb507a983a7ec802b19072bb989ce120cd948"}, - {file = "yarl-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:846dd2e1243407133d3195d2d7e4ceefcaa5f5bf7278f0a9bda00967e6326b04"}, - {file = "yarl-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e844be8d536afa129366d9af76ed7cb8dfefec99f5f1c9e4f8ae542279a6dc3"}, - {file = "yarl-1.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc7c92c1baa629cb03ecb0c3d12564f172218fb1739f54bf5f3881844daadc6d"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae3476e934b9d714aa8000d2e4c01eb2590eee10b9d8cd03e7983ad65dfbfcba"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c7e177c619342e407415d4f35dec63d2d134d951e24b5166afcdfd1362828e17"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64cc6e97f14cf8a275d79c5002281f3040c12e2e4220623b5759ea7f9868d6a5"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:84c063af19ef5130084db70ada40ce63a84f6c1ef4d3dbc34e5e8c4febb20822"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:482c122b72e3c5ec98f11457aeb436ae4aecca75de19b3d1de7cf88bc40db82f"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:380e6c38ef692b8fd5a0f6d1fa8774d81ebc08cfbd624b1bca62a4d4af2f9931"}, - {file = "yarl-1.17.1-cp310-cp310-win32.whl", hash = "sha256:16bca6678a83657dd48df84b51bd56a6c6bd401853aef6d09dc2506a78484c7b"}, - {file = "yarl-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:561c87fea99545ef7d692403c110b2f99dced6dff93056d6e04384ad3bc46243"}, - {file = "yarl-1.17.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cbad927ea8ed814622305d842c93412cb47bd39a496ed0f96bfd42b922b4a217"}, - {file = "yarl-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fca4b4307ebe9c3ec77a084da3a9d1999d164693d16492ca2b64594340999988"}, - {file = "yarl-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff5c6771c7e3511a06555afa317879b7db8d640137ba55d6ab0d0c50425cab75"}, - {file = "yarl-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b29beab10211a746f9846baa39275e80034e065460d99eb51e45c9a9495bcca"}, - {file = "yarl-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a52a1ffdd824fb1835272e125385c32fd8b17fbdefeedcb4d543cc23b332d74"}, - {file = "yarl-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58c8e9620eb82a189c6c40cb6b59b4e35b2ee68b1f2afa6597732a2b467d7e8f"}, - {file = "yarl-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d216e5d9b8749563c7f2c6f7a0831057ec844c68b4c11cb10fc62d4fd373c26d"}, - {file = "yarl-1.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:881764d610e3269964fc4bb3c19bb6fce55422828e152b885609ec176b41cf11"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:8c79e9d7e3d8a32d4824250a9c6401194fb4c2ad9a0cec8f6a96e09a582c2cc0"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:299f11b44d8d3a588234adbe01112126010bd96d9139c3ba7b3badd9829261c3"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cc7d768260f4ba4ea01741c1b5fe3d3a6c70eb91c87f4c8761bbcce5181beafe"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:de599af166970d6a61accde358ec9ded821234cbbc8c6413acfec06056b8e860"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2b24ec55fad43e476905eceaf14f41f6478780b870eda5d08b4d6de9a60b65b4"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9fb815155aac6bfa8d86184079652c9715c812d506b22cfa369196ef4e99d1b4"}, - {file = "yarl-1.17.1-cp311-cp311-win32.whl", hash = "sha256:7615058aabad54416ddac99ade09a5510cf77039a3b903e94e8922f25ed203d7"}, - {file = "yarl-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:14bc88baa44e1f84164a392827b5defb4fa8e56b93fecac3d15315e7c8e5d8b3"}, - {file = "yarl-1.17.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:327828786da2006085a4d1feb2594de6f6d26f8af48b81eb1ae950c788d97f61"}, - {file = "yarl-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cc353841428d56b683a123a813e6a686e07026d6b1c5757970a877195f880c2d"}, - {file = "yarl-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c73df5b6e8fabe2ddb74876fb82d9dd44cbace0ca12e8861ce9155ad3c886139"}, - {file = "yarl-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bdff5e0995522706c53078f531fb586f56de9c4c81c243865dd5c66c132c3b5"}, - {file = "yarl-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:06157fb3c58f2736a5e47c8fcbe1afc8b5de6fb28b14d25574af9e62150fcaac"}, - {file = "yarl-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1654ec814b18be1af2c857aa9000de7a601400bd4c9ca24629b18486c2e35463"}, - {file = "yarl-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6595c852ca544aaeeb32d357e62c9c780eac69dcd34e40cae7b55bc4fb1147"}, - {file = "yarl-1.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:459e81c2fb920b5f5df744262d1498ec2c8081acdcfe18181da44c50f51312f7"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7e48cdb8226644e2fbd0bdb0a0f87906a3db07087f4de77a1b1b1ccfd9e93685"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d9b6b28a57feb51605d6ae5e61a9044a31742db557a3b851a74c13bc61de5172"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e594b22688d5747b06e957f1ef822060cb5cb35b493066e33ceac0cf882188b7"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5f236cb5999ccd23a0ab1bd219cfe0ee3e1c1b65aaf6dd3320e972f7ec3a39da"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a2a64e62c7a0edd07c1c917b0586655f3362d2c2d37d474db1a509efb96fea1c"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d0eea830b591dbc68e030c86a9569826145df485b2b4554874b07fea1275a199"}, - {file = "yarl-1.17.1-cp312-cp312-win32.whl", hash = "sha256:46ddf6e0b975cd680eb83318aa1d321cb2bf8d288d50f1754526230fcf59ba96"}, - {file = "yarl-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:117ed8b3732528a1e41af3aa6d4e08483c2f0f2e3d3d7dca7cf538b3516d93df"}, - {file = 
"yarl-1.17.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5d1d42556b063d579cae59e37a38c61f4402b47d70c29f0ef15cee1acaa64488"}, - {file = "yarl-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c0167540094838ee9093ef6cc2c69d0074bbf84a432b4995835e8e5a0d984374"}, - {file = "yarl-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2f0a6423295a0d282d00e8701fe763eeefba8037e984ad5de44aa349002562ac"}, - {file = "yarl-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b078134f48552c4d9527db2f7da0b5359abd49393cdf9794017baec7506170"}, - {file = "yarl-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d401f07261dc5aa36c2e4efc308548f6ae943bfff20fcadb0a07517a26b196d8"}, - {file = "yarl-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5f1ac7359e17efe0b6e5fec21de34145caef22b260e978336f325d5c84e6938"}, - {file = "yarl-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f63d176a81555984e91f2c84c2a574a61cab7111cc907e176f0f01538e9ff6e"}, - {file = "yarl-1.17.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e275792097c9f7e80741c36de3b61917aebecc08a67ae62899b074566ff8556"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:81713b70bea5c1386dc2f32a8f0dab4148a2928c7495c808c541ee0aae614d67"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:aa46dce75078fceaf7cecac5817422febb4355fbdda440db55206e3bd288cfb8"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1ce36ded585f45b1e9bb36d0ae94765c6608b43bd2e7f5f88079f7a85c61a4d3"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:2d374d70fdc36f5863b84e54775452f68639bc862918602d028f89310a034ab0"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2d9f0606baaec5dd54cb99667fcf85183a7477f3766fbddbe3f385e7fc253299"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b0341e6d9a0c0e3cdc65857ef518bb05b410dbd70d749a0d33ac0f39e81a4258"}, - {file = "yarl-1.17.1-cp313-cp313-win32.whl", hash = "sha256:2e7ba4c9377e48fb7b20dedbd473cbcbc13e72e1826917c185157a137dac9df2"}, - {file = "yarl-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:949681f68e0e3c25377462be4b658500e85ca24323d9619fdc41f68d46a1ffda"}, - {file = "yarl-1.17.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8994b29c462de9a8fce2d591028b986dbbe1b32f3ad600b2d3e1c482c93abad6"}, - {file = "yarl-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f9cbfbc5faca235fbdf531b93aa0f9f005ec7d267d9d738761a4d42b744ea159"}, - {file = "yarl-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b40d1bf6e6f74f7c0a567a9e5e778bbd4699d1d3d2c0fe46f4b717eef9e96b95"}, - {file = "yarl-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5efe0661b9fcd6246f27957f6ae1c0eb29bc60552820f01e970b4996e016004"}, - {file = "yarl-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5c4804e4039f487e942c13381e6c27b4b4e66066d94ef1fae3f6ba8b953f383"}, - {file = "yarl-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5d6a6c9602fd4598fa07e0389e19fe199ae96449008d8304bf5d47cb745462e"}, - {file = "yarl-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4c9156c4d1eb490fe374fb294deeb7bc7eaccda50e23775b2354b6a6739934"}, - {file = 
"yarl-1.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6324274b4e0e2fa1b3eccb25997b1c9ed134ff61d296448ab8269f5ac068c4c"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d8a8b74d843c2638f3864a17d97a4acda58e40d3e44b6303b8cc3d3c44ae2d29"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:7fac95714b09da9278a0b52e492466f773cfe37651cf467a83a1b659be24bf71"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c180ac742a083e109c1a18151f4dd8675f32679985a1c750d2ff806796165b55"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578d00c9b7fccfa1745a44f4eddfdc99d723d157dad26764538fbdda37209857"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1a3b91c44efa29e6c8ef8a9a2b583347998e2ba52c5d8280dbd5919c02dfc3b5"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a7ac5b4984c468ce4f4a553df281450df0a34aefae02e58d77a0847be8d1e11f"}, - {file = "yarl-1.17.1-cp39-cp39-win32.whl", hash = "sha256:7294e38f9aa2e9f05f765b28ffdc5d81378508ce6dadbe93f6d464a8c9594473"}, - {file = "yarl-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:eb6dce402734575e1a8cc0bb1509afca508a400a57ce13d306ea2c663bad1138"}, - {file = "yarl-1.17.1-py3-none-any.whl", hash = "sha256:f1790a4b1e8e8e028c391175433b9c8122c39b46e1663228158e61e6f915bf06"}, - {file = "yarl-1.17.1.tar.gz", hash = "sha256:067a63fcfda82da6b198fa73079b1ca40b7c9b7994995b6ee38acda728b64d47"}, + {file = "yarl-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:93771146ef048b34201bfa382c2bf74c524980870bb278e6df515efaf93699ff"}, + {file = "yarl-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8281db240a1616af2f9c5f71d355057e73a1409c4648c8949901396dc0a3c151"}, + {file = "yarl-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:170ed4971bf9058582b01a8338605f4d8c849bd88834061e60e83b52d0c76870"}, + {file = "yarl-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc61b005f6521fcc00ca0d1243559a5850b9dd1e1fe07b891410ee8fe192d0c0"}, + {file = "yarl-1.17.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:871e1b47eec7b6df76b23c642a81db5dd6536cbef26b7e80e7c56c2fd371382e"}, + {file = "yarl-1.17.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a58a2f2ca7aaf22b265388d40232f453f67a6def7355a840b98c2d547bd037f"}, + {file = "yarl-1.17.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:736bb076f7299c5c55dfef3eb9e96071a795cb08052822c2bb349b06f4cb2e0a"}, + {file = "yarl-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fd51299e21da709eabcd5b2dd60e39090804431292daacbee8d3dabe39a6bc0"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:358dc7ddf25e79e1cc8ee16d970c23faee84d532b873519c5036dbb858965795"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:50d866f7b1a3f16f98603e095f24c0eeba25eb508c85a2c5939c8b3870ba2df8"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8b9c4643e7d843a0dca9cd9d610a0876e90a1b2cbc4c5ba7930a0d90baf6903f"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d63123bfd0dce5f91101e77c8a5427c3872501acece8c90df457b486bc1acd47"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:4e76381be3d8ff96a4e6c77815653063e87555981329cf8f85e5be5abf449021"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:734144cd2bd633a1516948e477ff6c835041c0536cef1d5b9a823ae29899665b"}, + {file = "yarl-1.17.2-cp310-cp310-win32.whl", hash = "sha256:26bfb6226e0c157af5da16d2d62258f1ac578d2899130a50433ffee4a5dfa673"}, + {file = "yarl-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:76499469dcc24759399accd85ec27f237d52dec300daaca46a5352fcbebb1071"}, + {file = "yarl-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:792155279dc093839e43f85ff7b9b6493a8eaa0af1f94f1f9c6e8f4de8c63500"}, + {file = "yarl-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:38bc4ed5cae853409cb193c87c86cd0bc8d3a70fd2268a9807217b9176093ac6"}, + {file = "yarl-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4a8c83f6fcdc327783bdc737e8e45b2e909b7bd108c4da1892d3bc59c04a6d84"}, + {file = "yarl-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6d5fed96f0646bfdf698b0a1cebf32b8aae6892d1bec0c5d2d6e2df44e1e2d"}, + {file = "yarl-1.17.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:782ca9c58f5c491c7afa55518542b2b005caedaf4685ec814fadfcee51f02493"}, + {file = "yarl-1.17.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff6af03cac0d1a4c3c19e5dcc4c05252411bf44ccaa2485e20d0a7c77892ab6e"}, + {file = "yarl-1.17.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a3f47930fbbed0f6377639503848134c4aa25426b08778d641491131351c2c8"}, + {file = "yarl-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1fa68a3c921365c5745b4bd3af6221ae1f0ea1bf04b69e94eda60e57958907f"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:187df91395c11e9f9dc69b38d12406df85aa5865f1766a47907b1cc9855b6303"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:93d1c8cc5bf5df401015c5e2a3ce75a5254a9839e5039c881365d2a9dcfc6dc2"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:11d86c6145ac5c706c53d484784cf504d7d10fa407cb73b9d20f09ff986059ef"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c42774d1d1508ec48c3ed29e7b110e33f5e74a20957ea16197dbcce8be6b52ba"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8e589379ef0407b10bed16cc26e7392ef8f86961a706ade0a22309a45414d7"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1056cadd5e850a1c026f28e0704ab0a94daaa8f887ece8dfed30f88befb87bb0"}, + {file = "yarl-1.17.2-cp311-cp311-win32.whl", hash = "sha256:be4c7b1c49d9917c6e95258d3d07f43cfba2c69a6929816e77daf322aaba6628"}, + {file = "yarl-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:ac8eda86cc75859093e9ce390d423aba968f50cf0e481e6c7d7d63f90bae5c9c"}, + {file = "yarl-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:dd90238d3a77a0e07d4d6ffdebc0c21a9787c5953a508a2231b5f191455f31e9"}, + {file = "yarl-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c74f0b0472ac40b04e6d28532f55cac8090e34c3e81f118d12843e6df14d0909"}, + {file = "yarl-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d486ddcaca8c68455aa01cf53d28d413fb41a35afc9f6594a730c9779545876"}, + {file = "yarl-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25b7e93f5414b9a983e1a6c1820142c13e1782cc9ed354c25e933aebe97fcf2"}, + {file = 
"yarl-1.17.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a0baff7827a632204060f48dca9e63fbd6a5a0b8790c1a2adfb25dc2c9c0d50"}, + {file = "yarl-1.17.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:460024cacfc3246cc4d9f47a7fc860e4fcea7d1dc651e1256510d8c3c9c7cde0"}, + {file = "yarl-1.17.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5870d620b23b956f72bafed6a0ba9a62edb5f2ef78a8849b7615bd9433384171"}, + {file = "yarl-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2941756754a10e799e5b87e2319bbec481ed0957421fba0e7b9fb1c11e40509f"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9611b83810a74a46be88847e0ea616794c406dbcb4e25405e52bff8f4bee2d0a"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:cd7e35818d2328b679a13268d9ea505c85cd773572ebb7a0da7ccbca77b6a52e"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6b981316fcd940f085f646b822c2ff2b8b813cbd61281acad229ea3cbaabeb6b"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:688058e89f512fb7541cb85c2f149c292d3fa22f981d5a5453b40c5da49eb9e8"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56afb44a12b0864d17b597210d63a5b88915d680f6484d8d202ed68ade38673d"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:17931dfbb84ae18b287279c1f92b76a3abcd9a49cd69b92e946035cff06bcd20"}, + {file = "yarl-1.17.2-cp312-cp312-win32.whl", hash = "sha256:ff8d95e06546c3a8c188f68040e9d0360feb67ba8498baf018918f669f7bc39b"}, + {file = "yarl-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:4c840cc11163d3c01a9d8aad227683c48cd3e5be5a785921bcc2a8b4b758c4f3"}, + {file = "yarl-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3294f787a437cb5d81846de3a6697f0c35ecff37a932d73b1fe62490bef69211"}, + {file = "yarl-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f1e7fedb09c059efee2533119666ca7e1a2610072076926fa028c2ba5dfeb78c"}, + {file = "yarl-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:da9d3061e61e5ae3f753654813bc1cd1c70e02fb72cf871bd6daf78443e9e2b1"}, + {file = "yarl-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91c012dceadc695ccf69301bfdccd1fc4472ad714fe2dd3c5ab4d2046afddf29"}, + {file = "yarl-1.17.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f11fd61d72d93ac23718d393d2a64469af40be2116b24da0a4ca6922df26807e"}, + {file = "yarl-1.17.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46c465ad06971abcf46dd532f77560181387b4eea59084434bdff97524444032"}, + {file = "yarl-1.17.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef6eee1a61638d29cd7c85f7fd3ac7b22b4c0fabc8fd00a712b727a3e73b0685"}, + {file = "yarl-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4434b739a8a101a837caeaa0137e0e38cb4ea561f39cb8960f3b1e7f4967a3fc"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:752485cbbb50c1e20908450ff4f94217acba9358ebdce0d8106510859d6eb19a"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:17791acaa0c0f89323c57da7b9a79f2174e26d5debbc8c02d84ebd80c2b7bff8"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:5c6ea72fe619fee5e6b5d4040a451d45d8175f560b11b3d3e044cd24b2720526"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db5ac3871ed76340210fe028f535392f097fb31b875354bcb69162bba2632ef4"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7a1606ba68e311576bcb1672b2a1543417e7e0aa4c85e9e718ba6466952476c0"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9bc27dd5cfdbe3dc7f381b05e6260ca6da41931a6e582267d5ca540270afeeb2"}, + {file = "yarl-1.17.2-cp313-cp313-win32.whl", hash = "sha256:52492b87d5877ec405542f43cd3da80bdcb2d0c2fbc73236526e5f2c28e6db28"}, + {file = "yarl-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:8e1bf59e035534ba4077f5361d8d5d9194149f9ed4f823d1ee29ef3e8964ace3"}, + {file = "yarl-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c556fbc6820b6e2cda1ca675c5fa5589cf188f8da6b33e9fc05b002e603e44fa"}, + {file = "yarl-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f2f44a4247461965fed18b2573f3a9eb5e2c3cad225201ee858726cde610daca"}, + {file = "yarl-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a3ede8c248f36b60227eb777eac1dbc2f1022dc4d741b177c4379ca8e75571a"}, + {file = "yarl-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2654caaf5584449d49c94a6b382b3cb4a246c090e72453493ea168b931206a4d"}, + {file = "yarl-1.17.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d41c684f286ce41fa05ab6af70f32d6da1b6f0457459a56cf9e393c1c0b2217"}, + {file = "yarl-1.17.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2270d590997445a0dc29afa92e5534bfea76ba3aea026289e811bf9ed4b65a7f"}, + {file = "yarl-1.17.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18662443c6c3707e2fc7fad184b4dc32dd428710bbe72e1bce7fe1988d4aa654"}, + {file = "yarl-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75ac158560dec3ed72f6d604c81090ec44529cfb8169b05ae6fcb3e986b325d9"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1fee66b32e79264f428dc8da18396ad59cc48eef3c9c13844adec890cd339db5"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:585ce7cd97be8f538345de47b279b879e091c8b86d9dbc6d98a96a7ad78876a3"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c019abc2eca67dfa4d8fb72ba924871d764ec3c92b86d5b53b405ad3d6aa56b0"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c6e659b9a24d145e271c2faf3fa6dd1fcb3e5d3f4e17273d9e0350b6ab0fe6e2"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:d17832ba39374134c10e82d137e372b5f7478c4cceeb19d02ae3e3d1daed8721"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bc3003710e335e3f842ae3fd78efa55f11a863a89a72e9a07da214db3bf7e1f8"}, + {file = "yarl-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f5ffc6b7ace5b22d9e73b2a4c7305740a339fbd55301d52735f73e21d9eb3130"}, + {file = "yarl-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:48e424347a45568413deec6f6ee2d720de2cc0385019bedf44cd93e8638aa0ed"}, + {file = "yarl-1.17.2-py3-none-any.whl", hash = "sha256:dd7abf4f717e33b7487121faf23560b3a50924f80e4bef62b22dab441ded8f3b"}, + {file = "yarl-1.17.2.tar.gz", hash = "sha256:753eaaa0c7195244c84b5cc159dc8204b7fd99f716f11198f999f2332a86b178"}, ] [package.dependencies] @@ -4286,13 +3240,13 @@ propcache = ">=0.2.0" [[package]] name = "zipp" -version = "3.20.2" +version = "3.21.0" 
description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, - {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] @@ -4306,4 +3260,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "406c67a3ed8f0e33970736bff3a0c0356c6f350a576dafd9acca500bdc171604" +content-hash = "c6579904b41ba69dee0f53d1351661405122c8f3e1a5305d15a7df08d8251fcf" diff --git a/pyproject.toml b/pyproject.toml index 14480103..28f01f21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,9 +42,10 @@ starlette = "^0.40" [tool.poetry.group.dev.dependencies] pytest = "^8.3.3" -deepeval = "^1.4.8" pytest-xdist = "^3.6.1" ruff = "^0.7.3" +braintrust = "^0.0.168" +autoevals = "^0.0.103" [build-system] requires = ["poetry-core"] diff --git a/server.py b/server.py index 98422da5..aed1d909 100644 --- a/server.py +++ b/server.py @@ -1,4 +1,5 @@ import os +from holmes.core import investigation from holmes.utils.cert_utils import add_custom_certificate ADDITIONAL_CERTIFICATE: str = os.environ.get("CERTIFICATE", "") @@ -75,42 +76,14 @@ def init_logging(): @app.post("/api/investigate") def investigate_issues(investigate_request: InvestigateRequest): try: - load_robusta_api_key(dal=dal, config=config) - context = dal.get_issue_data( - investigate_request.context.get("robusta_issue_id") - ) - - resource_instructions = dal.get_resource_instructions( - "alert", investigate_request.context.get("issue_type") - ) - raw_data = investigate_request.model_dump() - if context: - raw_data["extra_context"] = context - - ai = config.create_issue_investigator( - console, allowed_toolsets=ALLOWED_TOOLSETS, dal=dal - ) - issue = Issue( - id=context["id"] if context else "", - name=investigate_request.title, - source_type=investigate_request.source, - source_instance_id=investigate_request.source_instance_id, - raw=raw_data, + result = investigation.investigate_issues( + investigate_request=investigate_request, + dal=dal, + config=config, + console=console ) + return result - investigation = ai.investigate( - issue, - prompt=investigate_request.prompt_template, - console=console, - post_processing_prompt=HOLMES_POST_PROCESSING_PROMPT, - instructions=resource_instructions, - ) - - return InvestigationResult( - analysis=investigation.result, - tool_calls=investigation.tool_calls, - instructions=investigation.instructions, - ) except AuthenticationError as e: raise HTTPException(status_code=401, detail=e.message) diff --git a/tests/llm/__init__.py b/tests/llm/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/test_chat/1_how_many_pods/kubectl_find_resource_pod_by_keyword.txt b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource.txt similarity index 100% rename from tests/fixtures/test_chat/1_how_many_pods/kubectl_find_resource_pod_by_keyword.txt rename to tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource.txt diff --git a/tests/fixtures/test_chat/1_how_many_pods/kubectl_find_resource_node.txt 
diff --git a/tests/llm/__init__.py b/tests/llm/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/fixtures/test_chat/1_how_many_pods/kubectl_find_resource_pod_by_keyword.txt b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource.txt
similarity index 100%
rename from tests/fixtures/test_chat/1_how_many_pods/kubectl_find_resource_pod_by_keyword.txt
rename to tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource.txt
diff --git a/tests/fixtures/test_chat/1_how_many_pods/kubectl_find_resource_node.txt b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource_node.txt
similarity index 100%
rename from tests/fixtures/test_chat/1_how_many_pods/kubectl_find_resource_node.txt
rename to tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource_node.txt
diff --git a/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource_pod_by_keyword.txt b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource_pod_by_keyword.txt
new file mode 100644
index 00000000..fd0a5a8a
--- /dev/null
+++ b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource_pod_by_keyword.txt
@@ -0,0 +1,23 @@
+{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","keyword":"ip-172-31-8-128.us-east-2.compute.internal"}}
+stdout:
+default alertmanager-robusta-kube-prometheus-st-alertmanager-0 2/2 Running 0 3d22h 172.31.5.200 ip-172-31-8-128.us-east-2.compute.internal alertmanager=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/instance=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=alertmanager,app.kubernetes.io/version=0.26.0,apps.kubernetes.io/pod-index=0,controller-revision-hash=alertmanager-robusta-kube-prometheus-st-alertmanager-57cd7fb46f,statefulset.kubernetes.io/pod-name=alertmanager-robusta-kube-prometheus-st-alertmanager-0
+default analytics-exporter-fast-8cf8c9446-6rqwc 0/1 CrashLoopBackOff 1061 (2m51s ago) 3d18h 172.31.15.122 ip-172-31-8-128.us-east-2.compute.internal app=analytics-exporter-fast,pod-template-hash=8cf8c9446
+default customer-relations-webapp-5d98ffcfd-nj5gs 0/1 ImagePullBackOff 0 3d18h 172.31.14.171 ip-172-31-8-128.us-east-2.compute.internal app=customer-relations,pod-template-hash=5d98ffcfd,visualize=true
+default db-certs-authenticator-7ffd769f48-d9pxl 0/1 CrashLoopBackOff 886 (69s ago) 3d18h 172.31.3.214 ip-172-31-8-128.us-east-2.compute.internal app=flask,pod-template-hash=7ffd769f48
+default java-api-checker-9pj7k 0/1 Error 0 3d18h 172.31.12.200 ip-172-31-8-128.us-east-2.compute.internal batch.kubernetes.io/controller-uid=ea3f2c52-3382-4cbc-8958-41832511a3e7,batch.kubernetes.io/job-name=java-api-checker,controller-uid=ea3f2c52-3382-4cbc-8958-41832511a3e7,job-name=java-api-checker
+default java-api-checker-vzm7z 0/1 Error 0 3d18h 172.31.13.205 ip-172-31-8-128.us-east-2.compute.internal batch.kubernetes.io/controller-uid=ea3f2c52-3382-4cbc-8958-41832511a3e7,batch.kubernetes.io/job-name=java-api-checker,controller-uid=ea3f2c52-3382-4cbc-8958-41832511a3e7,job-name=java-api-checker
+default logging-agent 0/1 Init:CrashLoopBackOff 1067 (15s ago) 3d18h 172.31.1.249 ip-172-31-8-128.us-east-2.compute.internal
+default prometheus-robusta-kube-prometheus-st-prometheus-0 2/2 Running 0 3d22h 172.31.11.168 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=robusta-kube-prometheus-st-prometheus,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=prometheus,app.kubernetes.io/version=2.48.1,apps.kubernetes.io/pod-index=0,controller-revision-hash=prometheus-robusta-kube-prometheus-st-prometheus-55d87c869b,operator.prometheus.io/name=robusta-kube-prometheus-st-prometheus,operator.prometheus.io/shard=0,prometheus=robusta-kube-prometheus-st-prometheus,statefulset.kubernetes.io/pod-name=prometheus-robusta-kube-prometheus-st-prometheus-0
+default robusta-forwarder-89f44d49b-fxtrh 1/1 Running 0 3d22h 172.31.3.106 ip-172-31-8-128.us-east-2.compute.internal app=robusta-forwarder,pod-template-hash=89f44d49b
+default robusta-kube-prometheus-st-operator-7fc5db7f4d-dr46l 1/1 Running 0 3d22h 172.31.6.195 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/part-of=kube-prometheus-stack,app.kubernetes.io/version=55.7.0,app=kube-prometheus-stack-operator,chart=kube-prometheus-stack-55.7.0,heritage=Helm,pod-template-hash=7fc5db7f4d,release=robusta
+default robusta-prometheus-node-exporter-t2b5k 1/1 Running 0 3d22h 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta
+default search-engine-service 0/1 Running 0 3d18h 172.31.11.151 ip-172-31-8-128.us-east-2.compute.internal
+kube-system aws-node-m47xg 2/2 Running 0 25d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1
+kube-system ebs-csi-controller-7bb676b68d-cs2gx 6/6 Running 0 25d 172.31.12.254 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d
+kube-system ebs-csi-node-pgrvq 3/3 Running 0 25d 172.31.2.194 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1
+kube-system eks-pod-identity-agent-vgz8h 1/1 Running 0 25d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1
+kube-system kube-proxy-l7vqp 1/1 Running 0 25d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1
+sock-shop user-5bd96d75fb-ld8xv 1/1 Running 0 3d18h 172.31.0.106 ip-172-31-8-128.us-east-2.compute.internal name=user,pod-template-hash=5bd96d75fb
+sock-shop user-db-5dc5c5f488-dw6xw 1/1 Running 0 3d18h 172.31.0.66 ip-172-31-8-128.us-east-2.compute.internal name=user-db,pod-template-hash=5dc5c5f488
+
+stderr:
diff --git a/tests/fixtures/test_chat/1_how_many_pods/kubectl_find_resource_pod_by_name.txt b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource_pod_by_name.txt
similarity index 100%
rename from tests/fixtures/test_chat/1_how_many_pods/kubectl_find_resource_pod_by_name.txt
rename to tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_find_resource_pod_by_name.txt
diff --git a/tests/fixtures/test_chat/1_how_many_pods/kubectl_get.txt b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get.txt
similarity index 100%
rename from tests/fixtures/test_chat/1_how_many_pods/kubectl_get.txt
rename to tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get.txt
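Each new fixture above follows the same two-part layout: the first line is a JSON matcher naming the toolset, the tool, and the parameters a call must carry, and everything after it is the canned output the mocked tool should return. The harness that consumes these files is not shown in this patch, so the following is only a minimal sketch of the implied contract, with hypothetical function names:

```python
import json
from pathlib import Path

def load_mock_fixture(path: Path) -> tuple[dict, str]:
    """Split a fixture into its JSON matcher line and the canned tool output."""
    header, _, output = path.read_text().partition("\n")
    return json.loads(header), output

def fixture_matches(spec: dict, toolset: str, tool: str, params: dict) -> bool:
    # A fixture applies when the toolset and tool names agree and every
    # parameter pinned in match_params equals the actual call's parameter.
    return (
        spec["toolset_name"] == toolset
        and spec["tool_name"] == tool
        and all(params.get(k) == v for k, v in spec["match_params"].items())
    )
```

Because match_params is compared key by key, several fixtures for the same tool can coexist in one test case directory (as the three kubectl_find_resource variants here do), each answering a different invocation.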
diff --git a/tests/fixtures/test_chat/1_how_many_pods/kubectl_get_all.txt b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get_all.txt
similarity index 100%
rename from tests/fixtures/test_chat/1_how_many_pods/kubectl_get_all.txt
rename to tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get_all.txt
diff --git a/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get_node.txt b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get_node.txt
new file mode 100644
index 00000000..3c951eea
--- /dev/null
+++ b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get_node.txt
@@ -0,0 +1,3 @@
+{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"node","keyword":"ip-172-31-8-128.us-east-2.compute.internal"}}
+NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME LABELS
+ip-172-31-8-128.us-east-2.compute.internal Ready 25d v1.30.4-eks-a737599 172.31.8.128 3.147.70.176 Amazon Linux 2 5.10.225-213.878.amzn2.x86_64 containerd://1.7.11 beta.kubernetes.io/arch=amd64,beta.kubernetes.io/instance-type=t3.medium,beta.kubernetes.io/os=linux,eks.amazonaws.com/capacityType=ON_DEMAND,eks.amazonaws.com/nodegroup-image=ami-067ed4d12a282fb31,eks.amazonaws.com/nodegroup=nicolas-node-group,failure-domain.beta.kubernetes.io/region=us-east-2,failure-domain.beta.kubernetes.io/zone=us-east-2a,k8s.io/cloud-provider-aws=02bcd7cbb8e774ede4606ab79260ae31,kubernetes.io/arch=amd64,kubernetes.io/hostname=ip-172-31-8-128.us-east-2.compute.internal,kubernetes.io/os=linux,node.kubernetes.io/instance-type=t3.medium,topology.ebs.csi.aws.com/zone=us-east-2a,topology.k8s.aws/zone-id=use2-az1,topology.kubernetes.io/region=us-east-2,topology.kubernetes.io/zone=us-east-2a
diff --git a/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get_pod.txt b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get_pod.txt
new file mode 100644
index 00000000..ebe27926
--- /dev/null
+++ b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_get_pod.txt
@@ -0,0 +1,2 @@
+{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","name":"ip-172-31-8-128.us-east-2.compute.internal"}}
+Error from server (NotFound): pods "ip-172-31-8-128.us-east-2.compute.internal" not found
diff --git a/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_top_pods.txt b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_top_pods.txt
new file mode 100644
index 00000000..a29a1535
--- /dev/null
+++ b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/kubectl_top_pods.txt
@@ -0,0 +1,6 @@
+{"toolset_name":"kubernetes/live-metrics","tool_name":"kubectl_top_pods","match_params":{}}
+Command `kubectl top pods -A` failed with return code 1
+stdout:
+
+stderr:
+error: Metrics API not available
diff --git a/tests/fixtures/test_chat/1_how_many_pods/test_case.yaml b/tests/llm/fixtures/test_ask_holmes/1_how_many_pods/test_case.yaml
similarity index 100%
rename from tests/fixtures/test_chat/1_how_many_pods/test_case.yaml
rename to tests/llm/fixtures/test_ask_holmes/1_how_many_pods/test_case.yaml
diff --git a/tests/fixtures/test_chat/2_what_is_wrong_with_pod/kubectl_describe.txt b/tests/llm/fixtures/test_ask_holmes/2_what_is_wrong_with_pod/kubectl_describe.txt
similarity index 100%
rename from tests/fixtures/test_chat/2_what_is_wrong_with_pod/kubectl_describe.txt
rename to tests/llm/fixtures/test_ask_holmes/2_what_is_wrong_with_pod/kubectl_describe.txt
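Fixtures can also record failing tool runs: kubectl_top_pods.txt above pins a non-zero return code, an empty stdout, and the "Metrics API not available" stderr. Purely as an illustration of that layout (the patch defines no such class; the name is hypothetical), a failed invocation could be modelled as:

```python
from dataclasses import dataclass

@dataclass
class MockToolResult:  # hypothetical; mirrors the failure fixtures above
    command: str
    return_code: int
    stdout: str
    stderr: str

    def render(self) -> str:
        # Same shape as kubectl_top_pods.txt: a failure banner, then the
        # captured stdout and stderr sections.
        return (
            f"Command `{self.command}` failed with return code {self.return_code}\n"
            f"stdout:\n{self.stdout}\n"
            f"stderr:\n{self.stderr}"
        )
```

Recording failures as first-class fixtures matters for these evals: the LLM under test must cope with broken clusters (no Metrics API, missing pods) exactly as it would in production.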
diff --git a/tests/fixtures/test_chat/2_what_is_wrong_with_pod/kubectl_find_resource.txt b/tests/llm/fixtures/test_ask_holmes/2_what_is_wrong_with_pod/kubectl_find_resource.txt
similarity index 100%
rename from tests/fixtures/test_chat/2_what_is_wrong_with_pod/kubectl_find_resource.txt
rename to tests/llm/fixtures/test_ask_holmes/2_what_is_wrong_with_pod/kubectl_find_resource.txt
diff --git a/tests/fixtures/test_chat/2_what_is_wrong_with_pod/kubectl_logs.txt b/tests/llm/fixtures/test_ask_holmes/2_what_is_wrong_with_pod/kubectl_logs.txt
similarity index 100%
rename from tests/fixtures/test_chat/2_what_is_wrong_with_pod/kubectl_logs.txt
rename to tests/llm/fixtures/test_ask_holmes/2_what_is_wrong_with_pod/kubectl_logs.txt
diff --git a/tests/fixtures/test_chat/2_what_is_wrong_with_pod/kubectl_previous_logs.txt b/tests/llm/fixtures/test_ask_holmes/2_what_is_wrong_with_pod/kubectl_previous_logs.txt
similarity index 100%
rename from tests/fixtures/test_chat/2_what_is_wrong_with_pod/kubectl_previous_logs.txt
rename to tests/llm/fixtures/test_ask_holmes/2_what_is_wrong_with_pod/kubectl_previous_logs.txt
diff --git a/tests/fixtures/test_chat/2_what_is_wrong_with_pod/test_case.yaml b/tests/llm/fixtures/test_ask_holmes/2_what_is_wrong_with_pod/test_case.yaml
similarity index 100%
rename from tests/fixtures/test_chat/2_what_is_wrong_with_pod/test_case.yaml
rename to tests/llm/fixtures/test_ask_holmes/2_what_is_wrong_with_pod/test_case.yaml
diff --git a/tests/fixtures/test_chat/3_what_is_the_command_to_port_forward/kubectl_describe.txt b/tests/llm/fixtures/test_ask_holmes/3_what_is_the_command_to_port_forward/kubectl_describe.txt
similarity index 100%
rename from tests/fixtures/test_chat/3_what_is_the_command_to_port_forward/kubectl_describe.txt
rename to tests/llm/fixtures/test_ask_holmes/3_what_is_the_command_to_port_forward/kubectl_describe.txt
diff --git a/tests/fixtures/test_chat/3_what_is_the_command_to_port_forward/kubectl_get_all.txt b/tests/llm/fixtures/test_ask_holmes/3_what_is_the_command_to_port_forward/kubectl_get_all.txt
similarity index 100%
rename from tests/fixtures/test_chat/3_what_is_the_command_to_port_forward/kubectl_get_all.txt
rename to tests/llm/fixtures/test_ask_holmes/3_what_is_the_command_to_port_forward/kubectl_get_all.txt
diff --git a/tests/fixtures/test_chat/3_what_is_the_command_to_port_forward/kubectl_get_pod.txt b/tests/llm/fixtures/test_ask_holmes/3_what_is_the_command_to_port_forward/kubectl_get_pod.txt
similarity index 100%
rename from tests/fixtures/test_chat/3_what_is_the_command_to_port_forward/kubectl_get_pod.txt
rename to tests/llm/fixtures/test_ask_holmes/3_what_is_the_command_to_port_forward/kubectl_get_pod.txt
diff --git a/tests/fixtures/test_chat/3_what_is_the_command_to_port_forward/test_case.yaml b/tests/llm/fixtures/test_ask_holmes/3_what_is_the_command_to_port_forward/test_case.yaml
similarity index 100%
rename from tests/fixtures/test_chat/3_what_is_the_command_to_port_forward/test_case.yaml
rename to tests/llm/fixtures/test_ask_holmes/3_what_is_the_command_to_port_forward/test_case.yaml
diff --git a/tests/fixtures/test_chat/4_related_k8s_events/kubectl_describe.txt b/tests/llm/fixtures/test_ask_holmes/4_related_k8s_events/kubectl_describe.txt
similarity index 100%
rename from tests/fixtures/test_chat/4_related_k8s_events/kubectl_describe.txt
rename to tests/llm/fixtures/test_ask_holmes/4_related_k8s_events/kubectl_describe.txt
diff --git a/tests/fixtures/test_chat/4_related_k8s_events/kubectl_events.txt b/tests/llm/fixtures/test_ask_holmes/4_related_k8s_events/kubectl_events.txt
similarity index 100%
rename from tests/fixtures/test_chat/4_related_k8s_events/kubectl_events.txt
rename to tests/llm/fixtures/test_ask_holmes/4_related_k8s_events/kubectl_events.txt
diff --git a/tests/fixtures/test_chat/4_related_k8s_events/kubectl_find_resource.txt b/tests/llm/fixtures/test_ask_holmes/4_related_k8s_events/kubectl_find_resource.txt
similarity index 100%
rename from tests/fixtures/test_chat/4_related_k8s_events/kubectl_find_resource.txt
rename to tests/llm/fixtures/test_ask_holmes/4_related_k8s_events/kubectl_find_resource.txt
diff --git a/tests/fixtures/test_chat/4_related_k8s_events/kubectl_lineage_parents.txt b/tests/llm/fixtures/test_ask_holmes/4_related_k8s_events/kubectl_lineage_parents.txt
similarity index 100%
rename from tests/fixtures/test_chat/4_related_k8s_events/kubectl_lineage_parents.txt
rename to tests/llm/fixtures/test_ask_holmes/4_related_k8s_events/kubectl_lineage_parents.txt
diff --git a/tests/fixtures/test_chat/4_related_k8s_events/test_case.yaml b/tests/llm/fixtures/test_ask_holmes/4_related_k8s_events/test_case.yaml
similarity index 100%
rename from tests/fixtures/test_chat/4_related_k8s_events/test_case.yaml
rename to tests/llm/fixtures/test_ask_holmes/4_related_k8s_events/test_case.yaml
diff --git a/tests/fixtures/test_chat/5_image_version/kubectl_find_resource.txt b/tests/llm/fixtures/test_ask_holmes/5_image_version/kubectl_find_resource.txt
similarity index 100%
rename from tests/fixtures/test_chat/5_image_version/kubectl_find_resource.txt
rename to tests/llm/fixtures/test_ask_holmes/5_image_version/kubectl_find_resource.txt
diff --git a/tests/fixtures/test_chat/5_image_version/kubectl_get_yaml.txt b/tests/llm/fixtures/test_ask_holmes/5_image_version/kubectl_get_yaml.txt
similarity index 100%
rename from tests/fixtures/test_chat/5_image_version/kubectl_get_yaml.txt
rename to tests/llm/fixtures/test_ask_holmes/5_image_version/kubectl_get_yaml.txt
diff --git a/tests/fixtures/test_chat/5_image_version/test_case.yaml b/tests/llm/fixtures/test_ask_holmes/5_image_version/test_case.yaml
similarity index 100%
rename from tests/fixtures/test_chat/5_image_version/test_case.yaml
rename to tests/llm/fixtures/test_ask_holmes/5_image_version/test_case.yaml
diff --git a/tests/fixtures/test_chat/6_explain_issue/fetch_finding_by_id.txt b/tests/llm/fixtures/test_ask_holmes/6_explain_issue/fetch_finding_by_id.txt
similarity index 100%
rename from tests/fixtures/test_chat/6_explain_issue/fetch_finding_by_id.txt
rename to tests/llm/fixtures/test_ask_holmes/6_explain_issue/fetch_finding_by_id.txt
diff --git a/tests/fixtures/test_chat/6_explain_issue/test_case.yaml b/tests/llm/fixtures/test_ask_holmes/6_explain_issue/test_case.yaml
similarity index 100%
rename from tests/fixtures/test_chat/6_explain_issue/test_case.yaml
rename to tests/llm/fixtures/test_ask_holmes/6_explain_issue/test_case.yaml
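The test_investigate fixtures that follow capture a complete /api/investigate request, so the refactored endpoint can be replayed end-to-end against mocked tools. One hedged way such a fixture might drive a test, assuming server.py exposes a FastAPI app named app (the actual test wiring is not shown in this patch, and the test name here is invented):

```python
import json
from pathlib import Path

from fastapi.testclient import TestClient  # server.py appears to use FastAPI (@app.post, HTTPException)

from server import app  # note: importing server.py runs its module-level setup

def test_investigate_oom_kill():
    fixture_dir = Path("tests/llm/fixtures/test_investigate/01_oom_kill")
    payload = json.loads((fixture_dir / "investigate_request.json").read_text())
    response = TestClient(app).post("/api/investigate", json=payload)
    assert response.status_code == 200
```

The include_tool_calls and include_tool_call_results flags in the fixture ask the server to echo back the tool activity, which is what the eval needs in order to score not just the final analysis but the investigation steps taken.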
+ "labels": { + "x": "y", + "p": "q" + }, + "annotations": {} + }, + "context": { + "robusta_issue_id": "5b3e2fb1-cb83-45ea-82ec-318c94718e44" + }, + "include_tool_calls": true, + "include_tool_call_results": true +} diff --git a/tests/llm/fixtures/test_investigate/01_oom_kill/issue_data.json b/tests/llm/fixtures/test_investigate/01_oom_kill/issue_data.json new file mode 100644 index 00000000..0967ef42 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/01_oom_kill/issue_data.json @@ -0,0 +1 @@ +{} diff --git a/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_describe.txt b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_describe.txt new file mode 100644 index 00000000..3e6d413c --- /dev/null +++ b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_describe.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"pod","name":"oomkill-deployment-696dbdbf67-d47z6","namespace":"default"}} +Command `kubectl describe pod oomkill-deployment-696dbdbf67-d47z6 -n default` failed with return code 1 +stdout: + +stderr: +Error from server (NotFound): pods "oomkill-deployment-696dbdbf67-d47z6" not found diff --git a/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_1.txt b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_1.txt new file mode 100644 index 00000000..8a7a971a --- /dev/null +++ b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_1.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","keyword":"oomkill-deployment-696dbdbf67"}} +Command `kubectl get -A --show-labels -o wide pod | grep oomkill-deployment-696dbdbf67` failed with return code 1 +stdout: + +stderr: diff --git a/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_2.txt b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_2.txt new file mode 100644 index 00000000..d7cb687f --- /dev/null +++ b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_2.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","keyword":"oomkill-deployment"}} +Command `kubectl get -A --show-labels -o wide pod | grep oomkill-deployment` failed with return code 1 +stdout: + +stderr: diff --git a/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_3.txt b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_3.txt new file mode 100644 index 00000000..a44b8632 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_find_resource_3.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","keyword":"oomkill-deployment","namespace":"default"}} +Command `kubectl get -A --show-labels -o wide pod | grep oomkill-deployment` failed with return code 1 +stdout: + +stderr: diff --git a/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_get_all.txt b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_get_all.txt new file mode 100644 index 00000000..0dd2b90e --- /dev/null +++ b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_get_all.txt @@ -0,0 +1,47 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get_all","match_params":{"kind":"pod"}} +stdout: +NAMESPACE NAME READY STATUS RESTARTS AGE IP NODE NOMINATED NODE READINESS GATES LABELS +default alertmanager-robusta-kube-prometheus-st-alertmanager-0 2/2 Running 0 4d4h 
172.31.3.106 ip-172-31-8-128.us-east-2.compute.internal alertmanager=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/instance=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=alertmanager,app.kubernetes.io/version=0.26.0,apps.kubernetes.io/pod-index=0,controller-revision-hash=alertmanager-robusta-kube-prometheus-st-alertmanager-57cd7fb46f,statefulset.kubernetes.io/pod-name=alertmanager-robusta-kube-prometheus-st-alertmanager-0 +default krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58-n68zx 0/1 Pending 0 4d4h app=krr.robusta.dev,batch.kubernetes.io/controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,batch.kubernetes.io/job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58,controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58 +default prometheus-robusta-kube-prometheus-st-prometheus-0 2/2 Running 0 4d4h 172.31.1.249 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=robusta-kube-prometheus-st-prometheus,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=prometheus,app.kubernetes.io/version=2.48.1,apps.kubernetes.io/pod-index=0,controller-revision-hash=prometheus-robusta-kube-prometheus-st-prometheus-55d87c869b,operator.prometheus.io/name=robusta-kube-prometheus-st-prometheus,operator.prometheus.io/shard=0,prometheus=robusta-kube-prometheus-st-prometheus,statefulset.kubernetes.io/pod-name=prometheus-robusta-kube-prometheus-st-prometheus-0 +default robusta-forwarder-89f44d49b-7ccwf 1/1 Running 0 4d4h 172.31.6.195 ip-172-31-8-128.us-east-2.compute.internal app=robusta-forwarder,pod-template-hash=89f44d49b +default robusta-grafana-597597b88-2wrpx 3/3 Running 0 4d4h 172.31.25.172 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/name=grafana,pod-template-hash=597597b88 +default robusta-holmes-fd87d8997-xxvpz 1/1 Running 0 3d9h 172.31.37.22 ip-172-31-40-111.us-east-2.compute.internal app=holmes,pod-template-hash=fd87d8997 +default robusta-kube-prometheus-st-operator-7fc5db7f4d-pkv8m 1/1 Running 0 4d4h 172.31.38.193 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/part-of=kube-prometheus-stack,app.kubernetes.io/version=55.7.0,app=kube-prometheus-stack-operator,chart=kube-prometheus-stack-55.7.0,heritage=Helm,pod-template-hash=7fc5db7f4d,release=robusta +default robusta-kube-state-metrics-7964495ff6-mszjx 1/1 Running 0 4d4h 172.31.23.28 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=kube-state-metrics,app.kubernetes.io/part-of=kube-state-metrics,app.kubernetes.io/version=2.10.1,helm.sh/chart=kube-state-metrics-5.15.3,pod-template-hash=7964495ff6,release=robusta +default robusta-prometheus-node-exporter-22ncm 1/1 Running 0 4d4h 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-hsf46 1/1 Running 0 4d4h 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal 
app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-r9ss7 1/1 Running 0 4d4h 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-runner-68d76784b4-bqm9r 1/1 Running 0 4d4h 172.31.45.91 ip-172-31-40-111.us-east-2.compute.internal app=robusta-runner,pod-template-hash=68d76784b4,robustaComponent=runner +kube-system aws-node-475cb 2/2 Running 0 32d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-87tjm 2/2 Running 0 32d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-m47xg 2/2 Running 0 32d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system coredns-858457f4f6-9vksl 1/1 Running 0 32d 172.31.43.91 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system coredns-858457f4f6-d6kvv 1/1 Running 0 32d 172.31.44.154 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system ebs-csi-controller-7bb676b68d-cs2gx 6/6 Running 0 32d 172.31.12.254 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-controller-7bb676b68d-dz5wd 6/6 Running 0 32d 172.31.37.38 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-node-j7r2p 3/3 Running 0 32d 172.31.30.77 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-pgrvq 3/3 Running 0 32d 172.31.2.194 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system 
ebs-csi-node-snr4c 3/3 Running 0 32d 172.31.36.149 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system eks-pod-identity-agent-mbfpx 1/1 Running 0 32d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-nfqc6 1/1 Running 0 32d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-vgz8h 1/1 Running 0 32d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system kube-proxy-24klf 1/1 Running 0 32d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-l7vqp 1/1 Running 0 32d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-tkz9f 1/1 Running 0 32d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +sock-shop carts-db-6548bf479b-49wbw 1/1 Running 0 11d 172.31.38.83 ip-172-31-40-111.us-east-2.compute.internal name=carts-db,pod-template-hash=6548bf479b +sock-shop carts-f5db95987-hvh8w 1/1 Running 0 11d 172.31.35.216 ip-172-31-40-111.us-east-2.compute.internal name=carts,pod-template-hash=f5db95987 +sock-shop catalogue-58dc79c975-qrxkj 1/1 Running 0 11d 172.31.28.250 ip-172-31-21-139.us-east-2.compute.internal name=catalogue,pod-template-hash=58dc79c975 +sock-shop catalogue-db-f659c4c59-x94xs 1/1 Running 0 11d 172.31.38.49 ip-172-31-40-111.us-east-2.compute.internal name=catalogue-db,pod-template-hash=f659c4c59 +sock-shop front-end-84cb7d7d7c-4s2cp 1/1 Running 0 11d 172.31.47.71 ip-172-31-40-111.us-east-2.compute.internal name=front-end,pod-template-hash=84cb7d7d7c +sock-shop orders-68dc698477-b6g8x 1/1 Running 0 11d 172.31.16.203 ip-172-31-21-139.us-east-2.compute.internal name=orders,pod-template-hash=68dc698477 +sock-shop orders-db-85b469d778-wf7sh 1/1 Running 0 11d 172.31.27.164 ip-172-31-21-139.us-east-2.compute.internal name=orders-db,pod-template-hash=85b469d778 +sock-shop payment-d6fd65b86-zmjt5 1/1 Running 0 11d 172.31.40.230 ip-172-31-40-111.us-east-2.compute.internal name=payment,pod-template-hash=d6fd65b86 +sock-shop queue-master-ff4874c99-kxgg9 1/1 Running 0 11d 172.31.31.177 ip-172-31-21-139.us-east-2.compute.internal name=queue-master,pod-template-hash=ff4874c99 +sock-shop rabbitmq-c9fd5c694-6g9xm 2/2 Running 0 11d 172.31.29.7 ip-172-31-21-139.us-east-2.compute.internal name=rabbitmq,pod-template-hash=c9fd5c694 +sock-shop session-db-686f69d474-qzhcx 1/1 Running 0 11d 172.31.23.242 ip-172-31-21-139.us-east-2.compute.internal name=session-db,pod-template-hash=686f69d474 +sock-shop shipping-84b9f9b465-pg8kc 1/1 Running 0 11d 172.31.18.149 ip-172-31-21-139.us-east-2.compute.internal 
name=shipping,pod-template-hash=84b9f9b465 +sock-shop user-5bd96d75fb-ld8xv 1/1 Running 0 11d 172.31.0.106 ip-172-31-8-128.us-east-2.compute.internal name=user,pod-template-hash=5bd96d75fb +sock-shop user-db-5dc5c5f488-dw6xw 1/1 Running 0 11d 172.31.0.66 ip-172-31-8-128.us-east-2.compute.internal name=user-db,pod-template-hash=5dc5c5f488 + +stderr: diff --git a/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_logs.txt b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_logs.txt new file mode 100644 index 00000000..a89f4249 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_logs.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"oomkill-deployment-696dbdbf67-d47z6","namespace":"default"}} +Command `kubectl logs oomkill-deployment-696dbdbf67-d47z6 -n default` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "oomkill-deployment-696dbdbf67-d47z6" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_previous_logs.txt b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_previous_logs.txt new file mode 100644 index 00000000..e3a1e988 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/01_oom_kill/kubectl_previous_logs.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_previous_logs","match_params":{"name":"oomkill-deployment-696dbdbf67-d47z6","namespace":"default"}} +Command `kubectl logs oomkill-deployment-696dbdbf67-d47z6 -n default --previous` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "oomkill-deployment-696dbdbf67-d47z6" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/01_oom_kill/resource_instructions.json b/tests/llm/fixtures/test_investigate/01_oom_kill/resource_instructions.json new file mode 100644 index 00000000..0967ef42 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/01_oom_kill/resource_instructions.json @@ -0,0 +1 @@ +{} diff --git a/tests/llm/fixtures/test_investigate/01_oom_kill/test_case.yaml b/tests/llm/fixtures/test_investigate/01_oom_kill/test_case.yaml new file mode 100644 index 00000000..8a04435d --- /dev/null +++ b/tests/llm/fixtures/test_investigate/01_oom_kill/test_case.yaml @@ -0,0 +1,37 @@ +expected_output: | + The alert indicates that the pod `oomkill-deployment-696dbdbf67-d47z6` in the `default` namespace was experiencing a `CrashLoopBackOff` state, which typically means the pod was repeatedly crashing and restarting. + + # Investigation + I attempted to gather more information by describing the pod and fetching its logs, but the pod `oomkill-deployment-696dbdbf67-d47z6` could not be found in the cluster. This suggests that the pod may have been deleted or the deployment was scaled down after the alert was triggered. + + # Conclusions and Possible Root causes + 1. *Pod Deletion*: The pod might have been manually deleted or automatically removed by a scaling operation or deployment update. + 2. *Deployment Update*: A new deployment or update might have replaced the pod, leading to its removal. + 3. *Resource Constraints*: If the pod was indeed crashing due to resource constraints (e.g., OOMKilled), it might have been removed as part of a cleanup process. + + # Next Steps + 1. Verify if the deployment `oomkill-deployment` is still present and check its current status using: + ```bash + kubectl get deployment oomkill-deployment -n default + ``` + 2. 
If the deployment exists, check the replica set and any new pods created: + ```bash + kubectl get rs -n default + kubectl get pods -n default + ``` + 3. Review any recent changes or events related to the deployment: + ```bash + kubectl describe deployment oomkill-deployment -n default + ``` + 4. If resource constraints were suspected, consider increasing the resources allocated to the pods in the deployment configuration. + +retrieval_context: + - There is a total of 12 pods on node ip-172-31-8-128.us-east-2.compute.internal + - There are 5 pods in running state + - 7 pods are not running as indicated by the STATUS column +evaluation: + answer_relevancy: 0 + faithfulness: 0 + contextual_precision: 0 + contextual_recall: 0 + contextual_relevancy: 0 diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/fetch_finding.txt b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/fetch_finding.txt new file mode 100644 index 00000000..ebad0c2e --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/fetch_finding.txt @@ -0,0 +1,47 @@ +{"toolset_name":"findings","tool_name":"fetch_finding_by_id","match_params":{"id":"cb37003b-9c88-440b-9f6b-7af1239d8da0"}} +account_id: debe17e5-0fe2-4c9c-9a7b-5dba04b83f89 +aggregation_key: CrashLoopBackoff +annotations: + kubectl.kubernetes.io/last-applied-configuration: '{"apiVersion":"v1","kind":"Pod","metadata":{"annotations":{},"name":"inventory-management-api","namespace":"default"},"spec":{"containers":[{"command":["wge","-O","/work-dir/index.html","https://home.robusta.dev"],"image":"nginx","name":"nginx","ports":[{"containerPort":80}]}]}} + + ' +category: null +cluster: test_cluster +creation_date: '2024-11-15T15:42:04.661407' +description: null +ends_at: null +evidence: +- account_id: debe17e5-0fe2-4c9c-9a7b-5dba04b83f89 + collection_timestamp: null + creation_date: '2024-11-15T15:42:04.561292' + data: '[{"type": "markdown", "data": "**Crash Info**"}, {"type": "table", "data": + {"headers": ["label", "value"], "rows": [["Container", "nginx"], ["Restarts", + 2], ["Status", "WAITING"], ["Reason", "CrashLoopBackOff"]], "column_renderers": + {}}, "metadata": {"format": "vertical"}}, {"type": "markdown", "data": "**Previous + Container**"}, {"type": "table", "data": {"headers": ["label", "value"], "rows": + [["Status", "TERMINATED"], ["Reason", "StartError"], ["Started at", "1970-01-01T00:00:00Z"], + ["Finished at", "2024-11-15T15:41:51Z"]], "column_renderers": {}}, "metadata": + {"format": "vertical"}}]' + enrichment_type: crash_info + file_type: structured_data + id: c87a789e-d469-432b-8fcb-63f53652e1f4 + issue_id: cb37003b-9c88-440b-9f6b-7af1239d8da0 + title: Container Crash information +failure: true +finding_type: issue +fingerprint: 169254c759440b85e6bf18192474e7fa8d16316fdb349d5c972b2a1f5ddd0695 +group_id: null +id: cb37003b-9c88-440b-9f6b-7af1239d8da0 +labels: {} +priority: HIGH +service_key: default/Pod/inventory-management-api +service_kind: null +source: kubernetes_api_server +starts_at: '2024-11-15T15:42:04.455427+00:00' +subject_name: inventory-management-api +subject_namespace: default +subject_node: ip-172-31-21-139.us-east-2.compute.internal +subject_type: pod +title: Crashing pod inventory-management-api in namespace default +updated_at: '2024-11-15T15:42:04.630262+00:00' +video_links: [] diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/investigate_request.json b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/investigate_request.json new file mode 100644 index 
00000000..b273e103 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/investigate_request.json @@ -0,0 +1,20 @@ +{ + "source": "prometheus", + "source_instance_id": "some-instance", + "title": "CrashLoopBackoff.", + "description": "Crashing pod inventory-management-api in namespace default", + "subject": { + "name": "inventory-management-api", + "subject_type": "pod", + "namespace": "default", + "node": "ip-172-31-21-139.us-east-2.compute.internal", + "container": "inventory-management-api", + "labels": {}, + "annotations": {} + }, + "context": { + "robusta_issue_id": "cb37003b-9c88-440b-9f6b-7af1239d8da0" + }, + "include_tool_calls": true, + "include_tool_call_results": true +} diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/issue_data.json b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/issue_data.json new file mode 100644 index 00000000..0967ef42 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/issue_data.json @@ -0,0 +1 @@ +{} diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_describe.txt b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_describe.txt new file mode 100644 index 00000000..338d4855 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_describe.txt @@ -0,0 +1,72 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"pod","name":"oomkill-deployment-696dbdbf67-d47z6","namespace":"default"}} +stdout: +Name: inventory-management-api +Namespace: default +Priority: 0 +Service Account: default +Node: ip-172-31-21-139.us-east-2.compute.internal/172.31.21.139 +Start Time: Fri, 15 Nov 2024 15:41:31 +0000 +Labels: +Annotations: +Status: Running +IP: 172.31.23.45 +IPs: + IP: 172.31.23.45 +Containers: + nginx: + Container ID: containerd://dcd66290c2c48eb9e97d3b6c61258d6b58b392640d1dbad653d4e9947d69e7fb + Image: nginx + Image ID: docker.io/library/nginx@sha256:bc5eac5eafc581aeda3008b4b1f07ebba230de2f27d47767129a6a905c84f470 + Port: 80/TCP + Host Port: 0/TCP + Command: + wge + -O + /work-dir/index.html + https://home.robusta.dev + State: Waiting + Reason: CrashLoopBackOff + Last State: Terminated + Reason: StartError + Message: failed to create containerd task: failed to create shim task: OCI runtime create failed: runc create failed: unable to start container process: exec: "wge": executable file not found in $PATH: unknown + Exit Code: 128 + Started: Thu, 01 Jan 1970 00:00:00 +0000 + Finished: Fri, 15 Nov 2024 15:43:02 +0000 + Ready: False + Restart Count: 4 + Environment: + Mounts: + /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-jffrr (ro) +Conditions: + Type Status + PodReadyToStartContainers True + Initialized True + Ready False + ContainersReady False + PodScheduled True +Volumes: + kube-api-access-jffrr: + Type: Projected (a volume that contains injected data from multiple sources) + TokenExpirationSeconds: 3607 + ConfigMapName: kube-root-ca.crt + ConfigMapOptional: + DownwardAPI: true +QoS Class: BestEffort +Node-Selectors: +Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s + node.kubernetes.io/unreachable:NoExecute op=Exists for 300s +Events: + Type Reason Age From Message + ---- ------ ---- ---- ------- + Normal Scheduled 2m57s default-scheduler Successfully assigned default/inventory-management-api to ip-172-31-21-139.us-east-2.compute.internal + Normal Pulled 2m52s kubelet Successfully pulled image "nginx" in 4.534s (4.534s including 
waiting). Image size: 72955450 bytes. + Normal Pulled 2m51s kubelet Successfully pulled image "nginx" in 229ms (229ms including waiting). Image size: 72955450 bytes. + Normal Pulled 2m37s kubelet Successfully pulled image "nginx" in 212ms (212ms including waiting). Image size: 72955450 bytes. + Normal Created 2m9s (x4 over 2m52s) kubelet Created container nginx + Warning Failed 2m9s (x4 over 2m52s) kubelet Error: failed to create containerd task: failed to create shim task: OCI runtime create failed: runc create failed: unable to start container process: exec: "wge": executable file not found in $PATH: unknown + Normal Pulled 2m9s kubelet Successfully pulled image "nginx" in 190ms (190ms including waiting). Image size: 72955450 bytes. + Warning BackOff 102s (x7 over 2m50s) kubelet Back-off restarting failed container nginx in pod inventory-management-api_default(735cc85c-9bae-4b2a-b626-4066b0661c59) + Normal Pulling 87s (x5 over 2m57s) kubelet Pulling image "nginx" + Normal Pulled 87s kubelet Successfully pulled image "nginx" in 276ms (276ms including waiting). Image size: 72955450 bytes. + +stderr: diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_describe_pod.txt b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_describe_pod.txt new file mode 100644 index 00000000..6b8ef066 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_describe_pod.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"pod","name":"inventory-management-api","namespace":"default"}} +Command `kubectl describe pod inventory-management-api -n default` failed with return code 1 +stdout: + +stderr: +Error from server (NotFound): pods "inventory-management-api" not found diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_find_resource.txt b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_find_resource.txt new file mode 100644 index 00000000..16c2aa7a --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_find_resource.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","keyword":"inventory-management-api"}} +Command `kubectl get -A --show-labels -o wide pod | grep inventory-management-api` failed with return code 1 +stdout: + +stderr: diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_get_all.txt b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_get_all.txt new file mode 100644 index 00000000..1875b780 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_get_all.txt @@ -0,0 +1,65 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get_all","match_params":{"kind":"pod"}} +stdout: +NAMESPACE NAME READY STATUS RESTARTS AGE IP NODE NOMINATED NODE READINESS GATES LABELS +default alertmanager-robusta-kube-prometheus-st-alertmanager-0 2/2 Running 0 7d3h 172.31.3.106 ip-172-31-8-128.us-east-2.compute.internal alertmanager=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/instance=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=alertmanager,app.kubernetes.io/version=0.26.0,apps.kubernetes.io/pod-index=0,controller-revision-hash=alertmanager-robusta-kube-prometheus-st-alertmanager-57cd7fb46f,statefulset.kubernetes.io/pod-name=alertmanager-robusta-kube-prometheus-st-alertmanager-0 +default 
customer-orders-c88c44547-sxg6h 2/2 Running 0 94m 172.31.5.200 ip-172-31-8-128.us-east-2.compute.internal app=customer-orders,pod-template-hash=c88c44547 +default krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58-n68zx 0/1 Pending 0 7d2h app=krr.robusta.dev,batch.kubernetes.io/controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,batch.kubernetes.io/job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58,controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58 +default krr-job-99dbe7b7-898d-4062-8187-34b499525000-9fzjp 0/1 Pending 0 3h9m app=krr.robusta.dev,batch.kubernetes.io/controller-uid=703fffa3-98c4-4f15-861a-5523231ecc54,batch.kubernetes.io/job-name=krr-job-99dbe7b7-898d-4062-8187-34b499525000,controller-uid=703fffa3-98c4-4f15-861a-5523231ecc54,job-name=krr-job-99dbe7b7-898d-4062-8187-34b499525000 +default nginx-deployment-688794cb9c-5cqqz 0/1 Pending 0 5h8m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-8j7g6 0/1 Pending 0 5h8m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-kbfbf 1/1 Running 0 5h8m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-mz8pm 0/1 Pending 0 5h8m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-ng4mg 0/1 Pending 0 5h8m app=nginx,pod-template-hash=688794cb9c +default order-processor 1/1 Running 3 (48m ago) 5h 172.31.23.45 ip-172-31-21-139.us-east-2.compute.internal +default prod-endpoint-84b57fcff-2zl4x 1/1 Running 0 5h39m 172.31.40.222 ip-172-31-40-111.us-east-2.compute.internal app=nginx,pod-template-hash=84b57fcff +default product-data-scraper-2fqrh 0/1 Error 0 5h7m 172.31.21.29 ip-172-31-21-139.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-5fdj6 0/1 Error 0 5h10m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-bg7vg 0/1 Error 0 5h10m 172.31.37.22 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-n8bhx 0/1 Error 0 5h9m 172.31.39.19 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-vr29r 0/1 Error 0 5h9m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default prometheus-robusta-kube-prometheus-st-prometheus-0 2/2 Running 0 7d3h 172.31.1.249 ip-172-31-8-128.us-east-2.compute.internal 
app.kubernetes.io/instance=robusta-kube-prometheus-st-prometheus,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=prometheus,app.kubernetes.io/version=2.48.1,apps.kubernetes.io/pod-index=0,controller-revision-hash=prometheus-robusta-kube-prometheus-st-prometheus-55d87c869b,operator.prometheus.io/name=robusta-kube-prometheus-st-prometheus,operator.prometheus.io/shard=0,prometheus=robusta-kube-prometheus-st-prometheus,statefulset.kubernetes.io/pod-name=prometheus-robusta-kube-prometheus-st-prometheus-0 +default robusta-forwarder-89f44d49b-7ccwf 1/1 Running 0 7d3h 172.31.6.195 ip-172-31-8-128.us-east-2.compute.internal app=robusta-forwarder,pod-template-hash=89f44d49b +default robusta-grafana-597597b88-2wrpx 3/3 Running 0 7d3h 172.31.25.172 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/name=grafana,pod-template-hash=597597b88 +default robusta-holmes-54799fc97b-wpv45 1/1 Running 0 6h15m 172.31.23.36 ip-172-31-21-139.us-east-2.compute.internal app=holmes,pod-template-hash=54799fc97b +default robusta-holmes-678999dfd-ln8n5 0/1 Error 0 6h23m 172.31.23.36 ip-172-31-21-139.us-east-2.compute.internal app=holmes,pod-template-hash=678999dfd +default robusta-kube-prometheus-st-operator-7fc5db7f4d-pkv8m 1/1 Running 0 7d3h 172.31.38.193 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/part-of=kube-prometheus-stack,app.kubernetes.io/version=55.7.0,app=kube-prometheus-stack-operator,chart=kube-prometheus-stack-55.7.0,heritage=Helm,pod-template-hash=7fc5db7f4d,release=robusta +default robusta-kube-state-metrics-7964495ff6-mszjx 1/1 Running 0 7d3h 172.31.23.28 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=kube-state-metrics,app.kubernetes.io/part-of=kube-state-metrics,app.kubernetes.io/version=2.10.1,helm.sh/chart=kube-state-metrics-5.15.3,pod-template-hash=7964495ff6,release=robusta +default robusta-prometheus-node-exporter-22ncm 1/1 Running 0 7d3h 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-hsf46 1/1 Running 0 7d3h 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-r9ss7 1/1 Running 0 7d3h 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal 
app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-runner-68d76784b4-bqm9r 1/1 Running 0 7d3h 172.31.45.91 ip-172-31-40-111.us-east-2.compute.internal app=robusta-runner,pod-template-hash=68d76784b4,robustaComponent=runner +default stress-test-7b596f4759-62lpg 0/1 Pending 0 5h6m app=stress-test,pod-template-hash=7b596f4759 +default stress-test-7b596f4759-f7vmc 0/1 Pending 0 5h6m app=stress-test,pod-template-hash=7b596f4759 +default stress-test-7b596f4759-s8m84 0/1 Pending 0 5h6m app=stress-test,pod-template-hash=7b596f4759 +kube-system aws-node-475cb 2/2 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-87tjm 2/2 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-m47xg 2/2 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system coredns-858457f4f6-9vksl 1/1 Running 0 35d 172.31.43.91 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system coredns-858457f4f6-d6kvv 1/1 Running 0 35d 172.31.44.154 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system ebs-csi-controller-7bb676b68d-cs2gx 6/6 Running 0 35d 172.31.12.254 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-controller-7bb676b68d-dz5wd 6/6 Running 0 35d 172.31.37.38 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-node-j7r2p 3/3 Running 0 35d 172.31.30.77 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-pgrvq 3/3 Running 0 35d 172.31.2.194 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-snr4c 3/3 Running 0 35d 172.31.36.149 ip-172-31-40-111.us-east-2.compute.internal 
app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system eks-pod-identity-agent-mbfpx 1/1 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-nfqc6 1/1 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-vgz8h 1/1 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system kube-proxy-24klf 1/1 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-l7vqp 1/1 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-tkz9f 1/1 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +sock-shop carts-db-6548bf479b-49wbw 1/1 Running 0 14d 172.31.38.83 ip-172-31-40-111.us-east-2.compute.internal name=carts-db,pod-template-hash=6548bf479b +sock-shop carts-f5db95987-hvh8w 1/1 Running 0 14d 172.31.35.216 ip-172-31-40-111.us-east-2.compute.internal name=carts,pod-template-hash=f5db95987 +sock-shop catalogue-58dc79c975-qrxkj 1/1 Running 0 14d 172.31.28.250 ip-172-31-21-139.us-east-2.compute.internal name=catalogue,pod-template-hash=58dc79c975 +sock-shop catalogue-db-f659c4c59-x94xs 1/1 Running 0 14d 172.31.38.49 ip-172-31-40-111.us-east-2.compute.internal name=catalogue-db,pod-template-hash=f659c4c59 +sock-shop front-end-84cb7d7d7c-4s2cp 1/1 Running 0 14d 172.31.47.71 ip-172-31-40-111.us-east-2.compute.internal name=front-end,pod-template-hash=84cb7d7d7c +sock-shop orders-68dc698477-b6g8x 1/1 Running 0 14d 172.31.16.203 ip-172-31-21-139.us-east-2.compute.internal name=orders,pod-template-hash=68dc698477 +sock-shop orders-db-85b469d778-wf7sh 1/1 Running 0 14d 172.31.27.164 ip-172-31-21-139.us-east-2.compute.internal name=orders-db,pod-template-hash=85b469d778 +sock-shop payment-d6fd65b86-zmjt5 1/1 Running 0 14d 172.31.40.230 ip-172-31-40-111.us-east-2.compute.internal name=payment,pod-template-hash=d6fd65b86 +sock-shop queue-master-ff4874c99-kxgg9 1/1 Running 0 14d 172.31.31.177 ip-172-31-21-139.us-east-2.compute.internal name=queue-master,pod-template-hash=ff4874c99 +sock-shop rabbitmq-c9fd5c694-6g9xm 2/2 Running 0 14d 172.31.29.7 ip-172-31-21-139.us-east-2.compute.internal name=rabbitmq,pod-template-hash=c9fd5c694 +sock-shop session-db-686f69d474-qzhcx 1/1 Running 0 14d 172.31.23.242 ip-172-31-21-139.us-east-2.compute.internal name=session-db,pod-template-hash=686f69d474 +sock-shop shipping-84b9f9b465-pg8kc 1/1 Running 0 14d 172.31.18.149 ip-172-31-21-139.us-east-2.compute.internal name=shipping,pod-template-hash=84b9f9b465 +sock-shop user-5bd96d75fb-ld8xv 1/1 Running 0 14d 172.31.0.106 
ip-172-31-8-128.us-east-2.compute.internal name=user,pod-template-hash=5bd96d75fb +sock-shop user-db-5dc5c5f488-dw6xw 1/1 Running 0 14d 172.31.0.66 ip-172-31-8-128.us-east-2.compute.internal name=user-db,pod-template-hash=5dc5c5f488 + +stderr: diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_logs.txt b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_logs.txt new file mode 100644 index 00000000..21ede759 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_logs.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"inventory-management-api","namespace":"default"}} +Command `kubectl logs inventory-management-api -n default` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "inventory-management-api" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_previous_logs.txt b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_previous_logs.txt new file mode 100644 index 00000000..2568d4f5 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/kubectl_previous_logs.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_previous_logs","match_params":{"name":"inventory-management-api","namespace":"default"}} +Command `kubectl logs inventory-management-api -n default --previous` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "inventory-management-api" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/resource_instructions.json b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/resource_instructions.json new file mode 100644 index 00000000..0967ef42 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/resource_instructions.json @@ -0,0 +1 @@ +{} diff --git a/tests/llm/fixtures/test_investigate/02_crashloop_backoff/test_case.yaml b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/test_case.yaml new file mode 100644 index 00000000..b9780e87 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/02_crashloop_backoff/test_case.yaml @@ -0,0 +1,32 @@ +expected_output: | + Alert Explanation + + The pod inventory-management-api in the default namespace is experiencing a CrashLoopBackOff due to repeated failures in starting its container. + + Investigation + + - The pod inventory-management-api is using the nginx image and attempts to execute a command wge which is not found in the container's $PATH. This results in a StartError with an exit code 128. + - The pod has restarted 4 times and is currently in a Waiting state with the reason CrashLoopBackOff. + - The error message indicates: exec: "wge": executable file not found in $PATH. + + Conclusions and Possible Root Causes + + - Incorrect Command: The command wge is likely a typo or misconfiguration. The intended command might be wget, which is a common utility for downloading files. + - Missing Executable: The required executable is not present in the container's image, leading to the failure to start the container. + + Next Steps + + - Correct the command in the pod's configuration to use the correct executable, likely wget, if that was the intended command. + - Verify the container image includes all necessary executables and dependencies. + - Update the pod configuration and redeploy the pod with the corrected command. 
You can do this by editing the pod's YAML configuration and applying the changes: + + ``` + kubectl edit pod inventory-management-api -n default + ``` + +evaluation: + answer_relevancy: 0 + faithfulness: 0 + contextual_precision: 0 + contextual_recall: 0 + contextual_relevancy: 0 diff --git a/tests/llm/fixtures/test_investigate/03_cpu_throttling/fetch_finding_by_id.txt b/tests/llm/fixtures/test_investigate/03_cpu_throttling/fetch_finding_by_id.txt new file mode 100644 index 00000000..c5536c99 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/03_cpu_throttling/fetch_finding_by_id.txt @@ -0,0 +1,3 @@ +{"toolset_name":"findings","tool_name":"fetch_finding_by_id","match_params":{"id":"cb37003b-9c88-440b-9f6b-7af1239d8da0"}} +error: 'Failed to find a finding with finding_id=cb37003b-9c88-440b-9f6b-7af1239d8da0: + Holmes'' data access layer is not enabled.' diff --git a/tests/llm/fixtures/test_investigate/03_cpu_throttling/investigate_request.json b/tests/llm/fixtures/test_investigate/03_cpu_throttling/investigate_request.json new file mode 100644 index 00000000..b273e103 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/03_cpu_throttling/investigate_request.json @@ -0,0 +1,20 @@ +{ + "source": "prometheus", + "source_instance_id": "some-instance", + "title": "CrashLoopBackoff.", + "description": "Crashing pod inventory-management-api in namespace default", + "subject": { + "name": "inventory-management-api", + "subject_type": "pod", + "namespace": "default", + "node": "ip-172-31-21-139.us-east-2.compute.internal", + "container": "inventory-management-api", + "labels": {}, + "annotations": {} + }, + "context": { + "robusta_issue_id": "cb37003b-9c88-440b-9f6b-7af1239d8da0" + }, + "include_tool_calls": true, + "include_tool_call_results": true +} diff --git a/tests/llm/fixtures/test_investigate/03_cpu_throttling/issue_data.json b/tests/llm/fixtures/test_investigate/03_cpu_throttling/issue_data.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/03_cpu_throttling/issue_data.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_describe.txt b/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_describe.txt new file mode 100644 index 00000000..6b8ef066 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_describe.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"pod","name":"inventory-management-api","namespace":"default"}} +Command `kubectl describe pod inventory-management-api -n default` failed with return code 1 +stdout: + +stderr: +Error from server (NotFound): pods "inventory-management-api" not found diff --git a/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_find_resource.txt b/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_find_resource.txt new file mode 100644 index 00000000..16c2aa7a --- /dev/null +++ b/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_find_resource.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","keyword":"inventory-management-api"}} +Command `kubectl get -A --show-labels -o wide pod | grep inventory-management-api` failed with return code 1 +stdout: + +stderr: diff --git a/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_get_all.txt b/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_get_all.txt new 
file mode 100644 index 00000000..7e04761d --- /dev/null +++ b/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_get_all.txt @@ -0,0 +1,65 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get_all","match_params":{"kind":"pod"}} +stdout: +NAMESPACE NAME READY STATUS RESTARTS AGE IP NODE NOMINATED NODE READINESS GATES LABELS +default alertmanager-robusta-kube-prometheus-st-alertmanager-0 2/2 Running 0 7d2h 172.31.3.106 ip-172-31-8-128.us-east-2.compute.internal alertmanager=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/instance=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=alertmanager,app.kubernetes.io/version=0.26.0,apps.kubernetes.io/pod-index=0,controller-revision-hash=alertmanager-robusta-kube-prometheus-st-alertmanager-57cd7fb46f,statefulset.kubernetes.io/pod-name=alertmanager-robusta-kube-prometheus-st-alertmanager-0 +default customer-orders-c88c44547-sxg6h 2/2 Running 0 83m 172.31.5.200 ip-172-31-8-128.us-east-2.compute.internal app=customer-orders,pod-template-hash=c88c44547 +default krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58-n68zx 0/1 Pending 0 7d2h app=krr.robusta.dev,batch.kubernetes.io/controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,batch.kubernetes.io/job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58,controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58 +default krr-job-99dbe7b7-898d-4062-8187-34b499525000-9fzjp 0/1 Pending 0 178m app=krr.robusta.dev,batch.kubernetes.io/controller-uid=703fffa3-98c4-4f15-861a-5523231ecc54,batch.kubernetes.io/job-name=krr-job-99dbe7b7-898d-4062-8187-34b499525000,controller-uid=703fffa3-98c4-4f15-861a-5523231ecc54,job-name=krr-job-99dbe7b7-898d-4062-8187-34b499525000 +default nginx-deployment-688794cb9c-5cqqz 0/1 Pending 0 4h57m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-8j7g6 0/1 Pending 0 4h57m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-kbfbf 1/1 Running 0 4h57m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-mz8pm 0/1 Pending 0 4h57m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-ng4mg 0/1 Pending 0 4h57m app=nginx,pod-template-hash=688794cb9c +default order-processor 1/1 Running 3 (37m ago) 4h49m 172.31.23.45 ip-172-31-21-139.us-east-2.compute.internal +default prod-endpoint-84b57fcff-2zl4x 1/1 Running 0 5h29m 172.31.40.222 ip-172-31-40-111.us-east-2.compute.internal app=nginx,pod-template-hash=84b57fcff +default product-data-scraper-2fqrh 0/1 Error 0 4h57m 172.31.21.29 ip-172-31-21-139.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-5fdj6 0/1 Error 0 4h59m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-bg7vg 0/1 Error 0 4h59m 172.31.37.22 ip-172-31-40-111.us-east-2.compute.internal 
batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-n8bhx 0/1 Error 0 4h59m 172.31.39.19 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-vr29r 0/1 Error 0 4h58m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default prometheus-robusta-kube-prometheus-st-prometheus-0 2/2 Running 0 7d2h 172.31.1.249 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=robusta-kube-prometheus-st-prometheus,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=prometheus,app.kubernetes.io/version=2.48.1,apps.kubernetes.io/pod-index=0,controller-revision-hash=prometheus-robusta-kube-prometheus-st-prometheus-55d87c869b,operator.prometheus.io/name=robusta-kube-prometheus-st-prometheus,operator.prometheus.io/shard=0,prometheus=robusta-kube-prometheus-st-prometheus,statefulset.kubernetes.io/pod-name=prometheus-robusta-kube-prometheus-st-prometheus-0 +default robusta-forwarder-89f44d49b-7ccwf 1/1 Running 0 7d2h 172.31.6.195 ip-172-31-8-128.us-east-2.compute.internal app=robusta-forwarder,pod-template-hash=89f44d49b +default robusta-grafana-597597b88-2wrpx 3/3 Running 0 7d2h 172.31.25.172 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/name=grafana,pod-template-hash=597597b88 +default robusta-holmes-54799fc97b-wpv45 1/1 Running 0 6h4m 172.31.23.36 ip-172-31-21-139.us-east-2.compute.internal app=holmes,pod-template-hash=54799fc97b +default robusta-holmes-678999dfd-ln8n5 0/1 Error 0 6h12m 172.31.23.36 ip-172-31-21-139.us-east-2.compute.internal app=holmes,pod-template-hash=678999dfd +default robusta-kube-prometheus-st-operator-7fc5db7f4d-pkv8m 1/1 Running 0 7d2h 172.31.38.193 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/part-of=kube-prometheus-stack,app.kubernetes.io/version=55.7.0,app=kube-prometheus-stack-operator,chart=kube-prometheus-stack-55.7.0,heritage=Helm,pod-template-hash=7fc5db7f4d,release=robusta +default robusta-kube-state-metrics-7964495ff6-mszjx 1/1 Running 0 7d2h 172.31.23.28 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=kube-state-metrics,app.kubernetes.io/part-of=kube-state-metrics,app.kubernetes.io/version=2.10.1,helm.sh/chart=kube-state-metrics-5.15.3,pod-template-hash=7964495ff6,release=robusta +default robusta-prometheus-node-exporter-22ncm 1/1 Running 0 7d2h 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta 
+default robusta-prometheus-node-exporter-hsf46 1/1 Running 0 7d2h 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-r9ss7 1/1 Running 0 7d2h 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-runner-68d76784b4-bqm9r 1/1 Running 0 7d2h 172.31.45.91 ip-172-31-40-111.us-east-2.compute.internal app=robusta-runner,pod-template-hash=68d76784b4,robustaComponent=runner +default stress-test-7b596f4759-62lpg 0/1 Pending 0 4h55m app=stress-test,pod-template-hash=7b596f4759 +default stress-test-7b596f4759-f7vmc 0/1 Pending 0 4h55m app=stress-test,pod-template-hash=7b596f4759 +default stress-test-7b596f4759-s8m84 0/1 Pending 0 4h55m app=stress-test,pod-template-hash=7b596f4759 +kube-system aws-node-475cb 2/2 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-87tjm 2/2 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-m47xg 2/2 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system coredns-858457f4f6-9vksl 1/1 Running 0 35d 172.31.43.91 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system coredns-858457f4f6-d6kvv 1/1 Running 0 35d 172.31.44.154 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system ebs-csi-controller-7bb676b68d-cs2gx 6/6 Running 0 35d 172.31.12.254 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-controller-7bb676b68d-dz5wd 6/6 Running 0 35d 172.31.37.38 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-node-j7r2p 3/3 Running 0 35d 172.31.30.77 ip-172-31-21-139.us-east-2.compute.internal 
app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-pgrvq 3/3 Running 0 35d 172.31.2.194 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-snr4c 3/3 Running 0 35d 172.31.36.149 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system eks-pod-identity-agent-mbfpx 1/1 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-nfqc6 1/1 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-vgz8h 1/1 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system kube-proxy-24klf 1/1 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-l7vqp 1/1 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-tkz9f 1/1 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +sock-shop carts-db-6548bf479b-49wbw 1/1 Running 0 14d 172.31.38.83 ip-172-31-40-111.us-east-2.compute.internal name=carts-db,pod-template-hash=6548bf479b +sock-shop carts-f5db95987-hvh8w 1/1 Running 0 14d 172.31.35.216 ip-172-31-40-111.us-east-2.compute.internal name=carts,pod-template-hash=f5db95987 +sock-shop catalogue-58dc79c975-qrxkj 1/1 Running 0 14d 172.31.28.250 ip-172-31-21-139.us-east-2.compute.internal name=catalogue,pod-template-hash=58dc79c975 +sock-shop catalogue-db-f659c4c59-x94xs 1/1 Running 0 14d 172.31.38.49 ip-172-31-40-111.us-east-2.compute.internal name=catalogue-db,pod-template-hash=f659c4c59 +sock-shop front-end-84cb7d7d7c-4s2cp 1/1 Running 0 14d 172.31.47.71 ip-172-31-40-111.us-east-2.compute.internal name=front-end,pod-template-hash=84cb7d7d7c +sock-shop orders-68dc698477-b6g8x 1/1 Running 0 14d 172.31.16.203 ip-172-31-21-139.us-east-2.compute.internal name=orders,pod-template-hash=68dc698477 +sock-shop orders-db-85b469d778-wf7sh 1/1 Running 0 14d 172.31.27.164 ip-172-31-21-139.us-east-2.compute.internal name=orders-db,pod-template-hash=85b469d778 +sock-shop payment-d6fd65b86-zmjt5 1/1 Running 0 14d 172.31.40.230 ip-172-31-40-111.us-east-2.compute.internal name=payment,pod-template-hash=d6fd65b86 +sock-shop 
queue-master-ff4874c99-kxgg9 1/1 Running 0 14d 172.31.31.177 ip-172-31-21-139.us-east-2.compute.internal name=queue-master,pod-template-hash=ff4874c99 +sock-shop rabbitmq-c9fd5c694-6g9xm 2/2 Running 0 14d 172.31.29.7 ip-172-31-21-139.us-east-2.compute.internal name=rabbitmq,pod-template-hash=c9fd5c694 +sock-shop session-db-686f69d474-qzhcx 1/1 Running 0 14d 172.31.23.242 ip-172-31-21-139.us-east-2.compute.internal name=session-db,pod-template-hash=686f69d474 +sock-shop shipping-84b9f9b465-pg8kc 1/1 Running 0 14d 172.31.18.149 ip-172-31-21-139.us-east-2.compute.internal name=shipping,pod-template-hash=84b9f9b465 +sock-shop user-5bd96d75fb-ld8xv 1/1 Running 0 14d 172.31.0.106 ip-172-31-8-128.us-east-2.compute.internal name=user,pod-template-hash=5bd96d75fb +sock-shop user-db-5dc5c5f488-dw6xw 1/1 Running 0 14d 172.31.0.66 ip-172-31-8-128.us-east-2.compute.internal name=user-db,pod-template-hash=5dc5c5f488 + +stderr: diff --git a/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_logs.txt b/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_logs.txt new file mode 100644 index 00000000..21ede759 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_logs.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"inventory-management-api","namespace":"default"}} +Command `kubectl logs inventory-management-api -n default` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "inventory-management-api" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_previous_logs.txt b/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_previous_logs.txt new file mode 100644 index 00000000..2568d4f5 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/03_cpu_throttling/kubectl_previous_logs.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_previous_logs","match_params":{"name":"inventory-management-api","namespace":"default"}} +Command `kubectl logs inventory-management-api -n default --previous` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "inventory-management-api" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/03_cpu_throttling/resource_instructions.json b/tests/llm/fixtures/test_investigate/03_cpu_throttling/resource_instructions.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/03_cpu_throttling/resource_instructions.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/03_cpu_throttling/test_case.yaml b/tests/llm/fixtures/test_investigate/03_cpu_throttling/test_case.yaml new file mode 100644 index 00000000..d4110b12 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/03_cpu_throttling/test_case.yaml @@ -0,0 +1,22 @@ +expected_output: | + Alert Explanation + + The frontend-service pod in the default namespace is experiencing 100% CPU throttling for the stress container. This means the container is being limited by its CPU quota, potentially affecting performance. + Investigation + + - The frontend-service pod is running on node ip-172-31-21-139.us-east-2.compute.internal. + - The stress container is configured with CPU limits and requests set to 10m. + - The container is running a command to stress the CPU with 100 threads, which is likely causing the CPU throttling. 
+ - The pod is in a Running state with no restarts, indicating it hasn't crashed or been restarted recently. + - Current logs show the stress command is actively running, but there are no previous logs available, suggesting the container hasn't been restarted. + + Conclusions and Possible Root Causes + + - CPU Limit Configuration: The stress container has a CPU limit of 10m, which is insufficient for the workload it is trying to perform (100 CPU threads). This is causing the CPU throttling. + - Application Design: The application is designed to use a high number of CPU threads, which is not compatible with the current CPU limits. + + Next Steps + + - Consider removing or increasing the CPU limit for the stress container if the application requires more CPU resources. This can be done by editing the pod's resource configuration: + `kubectl edit pod frontend-service -n default` + - Adjust the resources.limits.cpu value to a higher number or remove it entirely if appropriate. diff --git a/tests/llm/fixtures/test_investigate/04_image_pull_backoff/fetch_finding_by_id.txt b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/fetch_finding_by_id.txt new file mode 100644 index 00000000..4a7dc087 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/fetch_finding_by_id.txt @@ -0,0 +1,42 @@ +{"toolset_name":"findings","tool_name":"fetch_finding_by_id","match_params":{"id":"5fa9728c-4dbe-4c10-81ce-48ecfa7c4ce1"}} +{ + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "aggregation_key": "KubernetesWarningEvent", + "annotations": {}, + "category": null, + "cluster": "test_cluster", + "creation_date": "2024-11-18T08:21:32.144684", + "description": "Failed to pull image \\\"yourcompany/crw:latest\\\": failed to pull and unpack image \\\"docker.io/yourcompany/crw:latest\\\": failed to resolve reference \\\"docker.io/yourcompany/crw:latest\\\": pull access denied, repository does not exist or may require authorization: server message: insufficient_scope: authorization failed", + "ends_at": null, + "evidence": [ + { + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "collection_timestamp": null, + "creation_date": "2024-11-18T08:21:32.061455", + "data": "[{\\\"type\\\": \\\"markdown\\\", \\\"data\\\": \\\"**Related Events**\\\"}, {\\\"type\\\": \\\"table\\\", \\\"data\\\": {\\\"headers\\\": [\\\"reason\\\", \\\"type\\\", \\\"time\\\", \\\"message\\\"], \\\"rows\\\": [[\\\"Scheduled\\\", \\\"Normal\\\", 1731918091183.75, \\\"Successfully assigned default/customer-relations-webapp-5d98ffcfd-tz4nc to ip-172-31-40-111.us-east-2.compute.internal\\\"], [\\\"Pulling\\\", \\\"Normal\\\", 1731918091000.0, \\\"Pulling image \\\\\\\"yourcompany/crw:latest\\\\\\\"\\\"], [\\\"Failed\\\", \\\"Warning\\\", 1731918091000.0, \\\"Failed to pull image \\\\\\\"yourcompany/crw:latest\\\\\\\": failed to pull and unpack image \\\\\\\"docker.io/yourcompany/crw:latest\\\\\\\": failed to resolve reference \\\\\\\"docker.io/yourcompany/crw:latest\\\\\\\": pull access denied, repository does not exist or may require authorization: server message: insufficient_scope: authorization failed\\\"], [\\\"Failed\\\", \\\"Warning\\\", 1731918091000.0, \\\"Error: ErrImagePull\\\"]], \\\"column_renderers\\\": {\\\"time\\\": \\\"DATETIME\\\"}}, \\\"metadata\\\": {}}]", + "enrichment_type": "k8s_events", + "file_type": "structured_data", + "id": "a2284f9e-11aa-4a1c-b72a-1985c1b1e145", + "issue_id": "5fa9728c-4dbe-4c10-81ce-48ecfa7c4ce1", + "title": "Related Events" + } + ], + "failure": true, + 
"finding_type": "issue", + "fingerprint": "4396bc99d320067445030e77f6d2b9c9798a1835565f88e7c60fd31339e34486", + "group_id": null, + "id": "5fa9728c-4dbe-4c10-81ce-48ecfa7c4ce1", + "labels": {}, + "priority": "DEBUG", + "service_key": "default/Deployment/customer-relations-webapp", + "service_kind": null, + "source": "kubernetes_api_server", + "starts_at": "2024-11-18T08:21:31.842217+00:00", + "subject_name": "customer-relations-webapp-5d98ffcfd-tz4nc", + "subject_namespace": "default", + "subject_node": null, + "subject_type": "pod", + "title": "Failed Warning for Pod default/customer-relations-webapp-5d98ffcfd-tz4nc", + "updated_at": "2024-11-18T08:21:32.119862+00:00", + "video_links": [] +} diff --git a/tests/llm/fixtures/test_investigate/04_image_pull_backoff/investigate_request.json b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/investigate_request.json new file mode 100644 index 00000000..62927a4d --- /dev/null +++ b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/investigate_request.json @@ -0,0 +1,22 @@ +{ + "source": "kubernetes_api_server", + "title": "KubernetesWarningEvent", + "description": "", + "subject": { + "name": "customer-relations-webapp-5d98ffcfd-tz4nc", + "namespace": "default", + "kind": "pod", + "node": null, + "container": null, + "cluster": null + }, + "context": { + "issue_type": "KubernetesWarningEvent", + "robusta_issue_id": "5fa9728c-4dbe-4c10-81ce-48ecfa7c4ce1", + "source": "kubernetes_api_server" + }, + "source_instance_id": "ApiRequest", + "include_tool_calls": true, + "include_tool_call_results": true, + "prompt_template": "builtin://generic_investigation.jinja2" +} diff --git a/tests/llm/fixtures/test_investigate/04_image_pull_backoff/issue_data.json b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/issue_data.json new file mode 100644 index 00000000..32164efd --- /dev/null +++ b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/issue_data.json @@ -0,0 +1,41 @@ +{ + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "aggregation_key": "KubernetesWarningEvent", + "annotations": {}, + "category": null, + "cluster": "test_cluster", + "creation_date": "2024-11-18T08:21:32.144684", + "description": "Failed to pull image \\\"yourcompany/crw:latest\\\": failed to pull and unpack image \\\"docker.io/yourcompany/crw:latest\\\": failed to resolve reference \\\"docker.io/yourcompany/crw:latest\\\": pull access denied, repository does not exist or may require authorization: server message: insufficient_scope: authorization failed", + "ends_at": null, + "evidence": [ + { + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "collection_timestamp": null, + "creation_date": "2024-11-18T08:21:32.061455", + "data": "[{\\\"type\\\": \\\"markdown\\\", \\\"data\\\": \\\"**Related Events**\\\"}, {\\\"type\\\": \\\"table\\\", \\\"data\\\": {\\\"headers\\\": [\\\"reason\\\", \\\"type\\\", \\\"time\\\", \\\"message\\\"], \\\"rows\\\": [[\\\"Scheduled\\\", \\\"Normal\\\", 1731918091183.75, \\\"Successfully assigned default/customer-relations-webapp-5d98ffcfd-tz4nc to ip-172-31-40-111.us-east-2.compute.internal\\\"], [\\\"Pulling\\\", \\\"Normal\\\", 1731918091000.0, \\\"Pulling image \\\\\\\"yourcompany/crw:latest\\\\\\\"\\\"], [\\\"Failed\\\", \\\"Warning\\\", 1731918091000.0, \\\"Failed to pull image \\\\\\\"yourcompany/crw:latest\\\\\\\": failed to pull and unpack image \\\\\\\"docker.io/yourcompany/crw:latest\\\\\\\": failed to resolve reference \\\\\\\"docker.io/yourcompany/crw:latest\\\\\\\": pull access denied, repository 
does not exist or may require authorization: server message: insufficient_scope: authorization failed\\\"], [\\\"Failed\\\", \\\"Warning\\\", 1731918091000.0, \\\"Error: ErrImagePull\\\"]], \\\"column_renderers\\\": {\\\"time\\\": \\\"DATETIME\\\"}}, \\\"metadata\\\": {}}]", + "enrichment_type": "k8s_events", + "file_type": "structured_data", + "id": "a2284f9e-11aa-4a1c-b72a-1985c1b1e145", + "issue_id": "5fa9728c-4dbe-4c10-81ce-48ecfa7c4ce1", + "title": "Related Events" + } + ], + "failure": true, + "finding_type": "issue", + "fingerprint": "4396bc99d320067445030e77f6d2b9c9798a1835565f88e7c60fd31339e34486", + "group_id": null, + "id": "5fa9728c-4dbe-4c10-81ce-48ecfa7c4ce1", + "labels": {}, + "priority": "DEBUG", + "service_key": "default/Deployment/customer-relations-webapp", + "service_kind": null, + "source": "kubernetes_api_server", + "starts_at": "2024-11-18T08:21:31.842217+00:00", + "subject_name": "customer-relations-webapp-5d98ffcfd-tz4nc", + "subject_namespace": "default", + "subject_node": null, + "subject_type": "pod", + "title": "Failed Warning for Pod default/customer-relations-webapp-5d98ffcfd-tz4nc", + "updated_at": "2024-11-18T08:21:32.119862+00:00", + "video_links": [] +} diff --git a/tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_describe.txt b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_describe.txt new file mode 100644 index 00000000..c18268e6 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_describe.txt @@ -0,0 +1,61 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"pod","name":"customer-relations-webapp-5d98ffcfd-tz4nc","namespace":"default"}} +stdout: +Name: customer-relations-webapp-5d98ffcfd-tz4nc +Namespace: default +Priority: 0 +Service Account: default +Node: ip-172-31-40-111.us-east-2.compute.internal/172.31.40.111 +Start Time: Mon, 18 Nov 2024 08:21:31 +0000 +Labels: app=customer-relations + pod-template-hash=5d98ffcfd + visualize=true +Annotations: +Status: Pending +IP: 172.31.40.222 +IPs: + IP: 172.31.40.222 +Controlled By: ReplicaSet/customer-relations-webapp-5d98ffcfd +Containers: + crw-main-container: + Container ID: + Image: yourcompany/crw:latest + Image ID: + Port: + Host Port: + State: Waiting + Reason: ErrImagePull + Ready: False + Restart Count: 0 + Environment: + Mounts: + /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-tkq5c (ro) +Conditions: + Type Status + PodReadyToStartContainers True + Initialized True + Ready False + ContainersReady False + PodScheduled True +Volumes: + kube-api-access-tkq5c: + Type: Projected (a volume that contains injected data from multiple sources) + TokenExpirationSeconds: 3607 + ConfigMapName: kube-root-ca.crt + ConfigMapOptional: + DownwardAPI: true +QoS Class: BestEffort +Node-Selectors: +Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s + node.kubernetes.io/unreachable:NoExecute op=Exists for 300s +Events: + Type Reason Age From Message + ---- ------ ---- ---- ------- + Normal Scheduled 61s default-scheduler Successfully assigned default/customer-relations-webapp-5d98ffcfd-tz4nc to ip-172-31-40-111.us-east-2.compute.internal + Normal Pulling 21s (x3 over 61s) kubelet Pulling image \"yourcompany/crw:latest\" + Warning Failed 21s (x3 over 61s) kubelet Failed to pull image \"yourcompany/crw:latest\": failed to pull and unpack image \"docker.io/yourcompany/crw:latest\": failed to resolve reference \"docker.io/yourcompany/crw:latest\": pull access denied, 
repository does not exist or may require authorization: server message: insufficient_scope: authorization failed + Warning Failed 21s (x3 over 61s) kubelet Error: ErrImagePull + Normal BackOff 6s (x3 over 60s) kubelet Back-off pulling image \"yourcompany/crw:latest\" + Warning Failed 6s (x3 over 60s) kubelet Error: ImagePullBackOff + +stderr: +" diff --git a/tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_logs.txt b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_logs.txt new file mode 100644 index 00000000..081a3a5b --- /dev/null +++ b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_logs.txt @@ -0,0 +1,3 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"customer-relations-webapp-5d98ffcfd-tz4nc","namespace":"default"}} +Command `kubectl logs customer-relations-webapp-5d98ffcfd-tz4nc -n default` failed with return code 1 +Error from server (BadRequest): container "crw-main-container" in pod "customer-relations-webapp-5d98ffcfd-tz4nc" is waiting to start: image can't be pulled diff --git a/tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_previous_logs.txt b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_previous_logs.txt new file mode 100644 index 00000000..c6231b57 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/kubectl_previous_logs.txt @@ -0,0 +1,3 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_previous_logs","match_params":{"name":"customer-relations-webapp-5d98ffcfd-tz4nc","namespace":"default"}} +Command `kubectl logs customer-relations-webapp-5d98ffcfd-tz4nc -n default --previous` failed with return code 1 +Error from server (BadRequest): previous terminated container "crw-main-container" in pod "customer-relations-webapp-5d98ffcfd-tz4nc" not found diff --git a/tests/llm/fixtures/test_investigate/04_image_pull_backoff/resource_instructions.json b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/resource_instructions.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/resource_instructions.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/04_image_pull_backoff/test_case.yaml b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/test_case.yaml new file mode 100644 index 00000000..3785c84a --- /dev/null +++ b/tests/llm/fixtures/test_investigate/04_image_pull_backoff/test_case.yaml @@ -0,0 +1,18 @@ +expected_output: | + # Alert Explanation + The pod `customer-relations-webapp-5d98ffcfd-tz4nc` in the `default` namespace is experiencing a warning event due to a failure in pulling the Docker image `yourcompany/crw:latest`. + + # Investigation + - The pod is in a `Pending` state with the container `crw-main-container` waiting due to `ErrImagePull`. + - The events indicate repeated failures to pull the image `yourcompany/crw:latest` due to "pull access denied" and "insufficient_scope: authorization failed". + - The logs for the current and previous container states are unavailable because the container has not started successfully. + + # Conclusions and Possible Root Causes + 1. *Image Pull Authorization Failure*: The image `yourcompany/crw:latest` cannot be pulled because of authorization issues. This could be due to missing or incorrect credentials for accessing the Docker registry. + 2. 
*Repository Access Issues*: The repository might not exist or the image tag `latest` might be incorrect, leading to the failure in resolving the image reference. + + # Next Steps + - Verify the credentials used for accessing the Docker registry and ensure they have the necessary permissions. + - Check if the image `yourcompany/crw:latest` exists in the Docker registry and is accessible. + - Update the Kubernetes secret used for Docker registry credentials if necessary. + - Consider specifying a specific image tag instead of `latest` to avoid potential issues with image resolution. diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/fetch_finding_by_id.txt b/tests/llm/fixtures/test_investigate/05_crashpod/fetch_finding_by_id.txt new file mode 100644 index 00000000..86177a86 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/fetch_finding_by_id.txt @@ -0,0 +1,3 @@ +{"toolset_name":"findings","tool_name":"fetch_finding_by_id","match_params":{"id":"46001a4e-ce6a-4995-a025-89dbb846b821"}} +error: 'Failed to find a finding with finding_id=46001a4e-ce6a-4995-a025-89dbb846b821: + Holmes'' data access layer is not enabled.' diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/investigate_request.json b/tests/llm/fixtures/test_investigate/05_crashpod/investigate_request.json new file mode 100644 index 00000000..044320da --- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/investigate_request.json @@ -0,0 +1,22 @@ +{ + "source": "prometheus", + "title": "KubePodCrashLooping", + "description": "", + "subject": { + "name": "db-certs-authenticator-7ffd769f48-vndgd", + "namespace": "default", + "kind": "pod", + "node": null, + "container": null, + "cluster": null + }, + "context": { + "issue_type": "KubePodCrashLooping", + "robusta_issue_id": "46001a4e-ce6a-4995-a025-89dbb846b821", + "source": "prometheus" + }, + "source_instance_id": "ApiRequest", + "include_tool_calls": true, + "include_tool_call_results": true, + "prompt_template": "builtin://generic_investigation.jinja2" +} diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/issue_data.json b/tests/llm/fixtures/test_investigate/05_crashpod/issue_data.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/issue_data.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_describe.txt b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_describe.txt new file mode 100644 index 00000000..36931e81 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_describe.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"pod","name":"db-certs-authenticator-7ffd769f48-vndgd","namespace":"default"}} +Command `kubectl describe pod db-certs-authenticator-7ffd769f48-vndgd -n default` failed with return code 1 +stdout: + +stderr: +Error from server (NotFound): pods "db-certs-authenticator-7ffd769f48-vndgd" not found diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_find_resource.txt b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_find_resource.txt new file mode 100644 index 00000000..86203dfa --- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_find_resource.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","keyword":"db-certs-authenticator"}} +Command `kubectl get -A --show-labels -o 
wide pod | grep db-certs-authenticator` failed with return code 1 +stdout: + +stderr: diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_find_resource_w_namespace.txt b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_find_resource_w_namespace.txt new file mode 100644 index 00000000..6aeea630 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_find_resource_w_namespace.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","keyword":"db-certs-authenticator","namespace":"default"}} +Command `kubectl get -A --show-labels -o wide pod | grep db-certs-authenticator` failed with return code 1 +stdout: + +stderr: diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_get_all.txt b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_get_all.txt new file mode 100644 index 00000000..4ff71a1d --- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_get_all.txt @@ -0,0 +1,65 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get_all","match_params":{"kind":"pod"}} +stdout: +NAMESPACE NAME READY STATUS RESTARTS AGE IP NODE NOMINATED NODE READINESS GATES LABELS +default alertmanager-robusta-kube-prometheus-st-alertmanager-0 2/2 Running 0 7d2h 172.31.3.106 ip-172-31-8-128.us-east-2.compute.internal alertmanager=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/instance=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=alertmanager,app.kubernetes.io/version=0.26.0,apps.kubernetes.io/pod-index=0,controller-revision-hash=alertmanager-robusta-kube-prometheus-st-alertmanager-57cd7fb46f,statefulset.kubernetes.io/pod-name=alertmanager-robusta-kube-prometheus-st-alertmanager-0 +default customer-orders-c88c44547-sxg6h 2/2 Running 0 71m 172.31.5.200 ip-172-31-8-128.us-east-2.compute.internal app=customer-orders,pod-template-hash=c88c44547 +default krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58-n68zx 0/1 Pending 0 7d2h app=krr.robusta.dev,batch.kubernetes.io/controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,batch.kubernetes.io/job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58,controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58 +default krr-job-99dbe7b7-898d-4062-8187-34b499525000-9fzjp 0/1 Pending 0 166m app=krr.robusta.dev,batch.kubernetes.io/controller-uid=703fffa3-98c4-4f15-861a-5523231ecc54,batch.kubernetes.io/job-name=krr-job-99dbe7b7-898d-4062-8187-34b499525000,controller-uid=703fffa3-98c4-4f15-861a-5523231ecc54,job-name=krr-job-99dbe7b7-898d-4062-8187-34b499525000 +default nginx-deployment-688794cb9c-5cqqz 0/1 Pending 0 4h45m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-8j7g6 0/1 Pending 0 4h45m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-kbfbf 1/1 Running 0 4h45m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-mz8pm 0/1 Pending 0 4h45m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-ng4mg 0/1 Pending 0 4h45m app=nginx,pod-template-hash=688794cb9c +default order-processor 1/1 Running 3 (25m ago) 4h37m 172.31.23.45 ip-172-31-21-139.us-east-2.compute.internal +default prod-endpoint-84b57fcff-2zl4x 1/1 Running 0 5h16m 172.31.40.222 ip-172-31-40-111.us-east-2.compute.internal app=nginx,pod-template-hash=84b57fcff +default product-data-scraper-2fqrh 0/1 
Error 0 4h44m 172.31.21.29 ip-172-31-21-139.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-5fdj6 0/1 Error 0 4h47m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-bg7vg 0/1 Error 0 4h47m 172.31.37.22 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-n8bhx 0/1 Error 0 4h46m 172.31.39.19 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-vr29r 0/1 Error 0 4h46m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default prometheus-robusta-kube-prometheus-st-prometheus-0 2/2 Running 0 7d2h 172.31.1.249 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=robusta-kube-prometheus-st-prometheus,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=prometheus,app.kubernetes.io/version=2.48.1,apps.kubernetes.io/pod-index=0,controller-revision-hash=prometheus-robusta-kube-prometheus-st-prometheus-55d87c869b,operator.prometheus.io/name=robusta-kube-prometheus-st-prometheus,operator.prometheus.io/shard=0,prometheus=robusta-kube-prometheus-st-prometheus,statefulset.kubernetes.io/pod-name=prometheus-robusta-kube-prometheus-st-prometheus-0 +default robusta-forwarder-89f44d49b-7ccwf 1/1 Running 0 7d2h 172.31.6.195 ip-172-31-8-128.us-east-2.compute.internal app=robusta-forwarder,pod-template-hash=89f44d49b +default robusta-grafana-597597b88-2wrpx 3/3 Running 0 7d2h 172.31.25.172 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/name=grafana,pod-template-hash=597597b88 +default robusta-holmes-54799fc97b-wpv45 1/1 Running 0 5h52m 172.31.23.36 ip-172-31-21-139.us-east-2.compute.internal app=holmes,pod-template-hash=54799fc97b +default robusta-holmes-678999dfd-ln8n5 0/1 Error 0 6h 172.31.23.36 ip-172-31-21-139.us-east-2.compute.internal app=holmes,pod-template-hash=678999dfd +default robusta-kube-prometheus-st-operator-7fc5db7f4d-pkv8m 1/1 Running 0 7d2h 172.31.38.193 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/part-of=kube-prometheus-stack,app.kubernetes.io/version=55.7.0,app=kube-prometheus-stack-operator,chart=kube-prometheus-stack-55.7.0,heritage=Helm,pod-template-hash=7fc5db7f4d,release=robusta +default robusta-kube-state-metrics-7964495ff6-mszjx 1/1 Running 0 7d2h 172.31.23.28 ip-172-31-21-139.us-east-2.compute.internal 
app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=kube-state-metrics,app.kubernetes.io/part-of=kube-state-metrics,app.kubernetes.io/version=2.10.1,helm.sh/chart=kube-state-metrics-5.15.3,pod-template-hash=7964495ff6,release=robusta +default robusta-prometheus-node-exporter-22ncm 1/1 Running 0 7d2h 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-hsf46 1/1 Running 0 7d2h 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-r9ss7 1/1 Running 0 7d2h 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-runner-68d76784b4-bqm9r 1/1 Running 0 7d2h 172.31.45.91 ip-172-31-40-111.us-east-2.compute.internal app=robusta-runner,pod-template-hash=68d76784b4,robustaComponent=runner +default stress-test-7b596f4759-62lpg 0/1 Pending 0 4h43m app=stress-test,pod-template-hash=7b596f4759 +default stress-test-7b596f4759-f7vmc 0/1 Pending 0 4h43m app=stress-test,pod-template-hash=7b596f4759 +default stress-test-7b596f4759-s8m84 0/1 Pending 0 4h43m app=stress-test,pod-template-hash=7b596f4759 +kube-system aws-node-475cb 2/2 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-87tjm 2/2 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-m47xg 2/2 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system coredns-858457f4f6-9vksl 1/1 Running 0 35d 172.31.43.91 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system coredns-858457f4f6-d6kvv 1/1 Running 0 35d 172.31.44.154 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system ebs-csi-controller-7bb676b68d-cs2gx 6/6 Running 0 35d 
172.31.12.254 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-controller-7bb676b68d-dz5wd 6/6 Running 0 35d 172.31.37.38 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-node-j7r2p 3/3 Running 0 35d 172.31.30.77 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-pgrvq 3/3 Running 0 35d 172.31.2.194 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-snr4c 3/3 Running 0 35d 172.31.36.149 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system eks-pod-identity-agent-mbfpx 1/1 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-nfqc6 1/1 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-vgz8h 1/1 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system kube-proxy-24klf 1/1 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-l7vqp 1/1 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-tkz9f 1/1 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +sock-shop carts-db-6548bf479b-49wbw 1/1 Running 0 14d 172.31.38.83 ip-172-31-40-111.us-east-2.compute.internal name=carts-db,pod-template-hash=6548bf479b +sock-shop carts-f5db95987-hvh8w 1/1 Running 0 14d 172.31.35.216 ip-172-31-40-111.us-east-2.compute.internal name=carts,pod-template-hash=f5db95987 +sock-shop catalogue-58dc79c975-qrxkj 1/1 Running 0 14d 172.31.28.250 ip-172-31-21-139.us-east-2.compute.internal name=catalogue,pod-template-hash=58dc79c975 +sock-shop catalogue-db-f659c4c59-x94xs 1/1 Running 0 14d 172.31.38.49 ip-172-31-40-111.us-east-2.compute.internal 
name=catalogue-db,pod-template-hash=f659c4c59 +sock-shop front-end-84cb7d7d7c-4s2cp 1/1 Running 0 14d 172.31.47.71 ip-172-31-40-111.us-east-2.compute.internal name=front-end,pod-template-hash=84cb7d7d7c +sock-shop orders-68dc698477-b6g8x 1/1 Running 0 14d 172.31.16.203 ip-172-31-21-139.us-east-2.compute.internal name=orders,pod-template-hash=68dc698477 +sock-shop orders-db-85b469d778-wf7sh 1/1 Running 0 14d 172.31.27.164 ip-172-31-21-139.us-east-2.compute.internal name=orders-db,pod-template-hash=85b469d778 +sock-shop payment-d6fd65b86-zmjt5 1/1 Running 0 14d 172.31.40.230 ip-172-31-40-111.us-east-2.compute.internal name=payment,pod-template-hash=d6fd65b86 +sock-shop queue-master-ff4874c99-kxgg9 1/1 Running 0 14d 172.31.31.177 ip-172-31-21-139.us-east-2.compute.internal name=queue-master,pod-template-hash=ff4874c99 +sock-shop rabbitmq-c9fd5c694-6g9xm 2/2 Running 0 14d 172.31.29.7 ip-172-31-21-139.us-east-2.compute.internal name=rabbitmq,pod-template-hash=c9fd5c694 +sock-shop session-db-686f69d474-qzhcx 1/1 Running 0 14d 172.31.23.242 ip-172-31-21-139.us-east-2.compute.internal name=session-db,pod-template-hash=686f69d474 +sock-shop shipping-84b9f9b465-pg8kc 1/1 Running 0 14d 172.31.18.149 ip-172-31-21-139.us-east-2.compute.internal name=shipping,pod-template-hash=84b9f9b465 +sock-shop user-5bd96d75fb-ld8xv 1/1 Running 0 14d 172.31.0.106 ip-172-31-8-128.us-east-2.compute.internal name=user,pod-template-hash=5bd96d75fb +sock-shop user-db-5dc5c5f488-dw6xw 1/1 Running 0 14d 172.31.0.66 ip-172-31-8-128.us-east-2.compute.internal name=user-db,pod-template-hash=5dc5c5f488 + +stderr: diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_logs.txt b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_logs.txt new file mode 100644 index 00000000..f674bb76 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_logs.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"db-certs-authenticator-7ffd769f48-vndgd","namespace":"default"}} +Command `kubectl logs db-certs-authenticator-7ffd769f48-vndgd -n default` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "db-certs-authenticator-7ffd769f48-vndgd" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_previous_logs.txt b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_previous_logs.txt new file mode 100644 index 00000000..0b770066 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/kubectl_previous_logs.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_previous_logs","match_params":{"name":"db-certs-authenticator-7ffd769f48-vndgd","namespace":"default"}} +Command `kubectl logs db-certs-authenticator-7ffd769f48-vndgd -n default --previous` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "db-certs-authenticator-7ffd769f48-vndgd" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/resource_instructions.json b/tests/llm/fixtures/test_investigate/05_crashpod/resource_instructions.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/resource_instructions.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/05_crashpod/test_case.yaml b/tests/llm/fixtures/test_investigate/05_crashpod/test_case.yaml new file mode 100644 index 00000000..84d24e13 
--- /dev/null +++ b/tests/llm/fixtures/test_investigate/05_crashpod/test_case.yaml @@ -0,0 +1,27 @@ +expected_output: | + The pod `db-certs-authenticator-7ffd769f48-vndgd` in the `default` namespace is experiencing a `CrashLoopBackOff`, indicating that the application within the pod is repeatedly crashing and Kubernetes is attempting to restart it. + + # Investigation + 1. The runbook for `KubePodCrashLooping` suggests checking pod events, logs, and configuration for issues such as resource constraints, misconfigurations, or missing dependencies. + 2. Attempts to describe the pod and fetch logs failed because the pod `db-certs-authenticator-7ffd769f48-vndgd` is not currently found in the cluster. + 3. A search for the pod across all namespaces confirmed that it is not present. + + # Conclusions and Possible Root Causes + 1. *Pod Deletion*: The pod `db-certs-authenticator-7ffd769f48-vndgd` may have been deleted after the alert was triggered, which is why it is not found in the cluster. + 2. *Transient Issue*: The crash loop might have been a transient issue that resolved itself, leading to the pod being replaced or removed. + 3. *Configuration or Resource Issue*: If the pod was deleted due to a configuration or resource issue, it might recur when the pod is redeployed. + + # Next Steps + 1. **Verify Deployment**: Check the deployment `db-certs-authenticator` to ensure it is correctly configured and running. + ```bash + kubectl get deployment db-certs-authenticator -n default + ``` + 2. **Check for New Pods**: Look for any new pods created by the deployment to replace the crashed pod. + ```bash + kubectl get pods -n default -l app=db-certs-authenticator + ``` + 3. **Review Deployment Configuration**: Ensure that the deployment configuration does not have issues that could cause pods to crash. + ```bash + kubectl describe deployment db-certs-authenticator -n default + ``` + 4. **Monitor for Recurrence**: Keep an eye on the deployment for any new crash loop alerts to identify if the issue persists. 
diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/fetch_finding_by_id.txt b/tests/llm/fixtures/test_investigate/06_job_failure/fetch_finding_by_id.txt new file mode 100644 index 00000000..3cf23edb --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/fetch_finding_by_id.txt @@ -0,0 +1,77 @@ +{"toolset_name":"findings","tool_name":"fetch_finding_by_id","match_params":{"id":"1eb606c4-46af-430e-a116-a92c201ee1d0"}} +{ + "category": null, + "cluster": "test_cluster", + "creation_date": "2024-11-18T09:19:27.025638", + "description": null, + "ends_at": null, + "evidence": [ + { + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "collection_timestamp": null, + "creation_date": "2024-11-18T09:19:26.85989", + "data": "[{\\\"type\\\": \\\"markdown\\\", \\\"data\\\": \\\"**Job events:**\\\"}, {\\\"type\\\": \\\"table\\\", \\\"data\\\": {\\\"headers\\\": [\\\"reason\\\", \\\"type\\\", \\\"time\\\", \\\"message\\\"], \\\"rows\\\": [[\\\"BackoffLimitExceeded\\\", \\\"Warning\\\", 1731921566000.0, \\\"Job has reached the specified backoff limit\\\"], [\\\"SuccessfulCreate\\\", \\\"Normal\\\", 1731921503000.0, \\\"Created pod: java-api-checker-tgmn7\\\"], [\\\"SuccessfulCreate\\\", \\\"Normal\\\", 1731921432000.0, \\\"Created pod: java-api-checker-slp89\\\"]], \\\"column_renderers\\\": {\\\"time\\\": \\\"DATETIME\\\"}}, \\\"metadata\\\": {}}]", + "enrichment_type": "k8s_events", + "file_type": "structured_data", + "id": "22615573-cf16-4bc6-aa7c-b5a888648274", + "issue_id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "title": "Job Events" + }, + { + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "collection_timestamp": null, + "creation_date": "2024-11-18T09:19:26.808787", + "data": "[{\\\"type\\\": \\\"markdown\\\", \\\"data\\\": \\\"**Job information**\\\"}, {\\\"type\\\": \\\"table\\\", \\\"data\\\": {\\\"headers\\\": [\\\"description\\\", \\\"value\\\"], \\\"rows\\\": [[\\\"status\\\", \\\"Failed\\\"], [\\\"message\\\", \\\"Job has reached the specified backoff limit\\\"], [\\\"completions\\\", \\\"0/1\\\"], [\\\"failures\\\", \\\"2\\\"], [\\\"backoffLimit\\\", \\\"1\\\"], [\\\"duration\\\", \\\"2024-11-18T09:17:12Z - None\\\"], [\\\"containers\\\", \\\"------------------\\\"], [\\\"name\\\", \\\"java-beans\\\"], [\\\"image\\\", \\\"busybox\\\"], [\\\"cpu (request/limit)\\\", \\\"None/None\\\"], [\\\"memory MB (request/limit)\\\", \\\"None/None\\\"]], \\\"column_renderers\\\": {}}, \\\"metadata\\\": {}}]", + "enrichment_type": null, + "file_type": "structured_data", + "id": "e4c22ca3-4bcd-4acd-91b6-bbd0c3d15337", + "issue_id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "title": null + }, + { + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "collection_timestamp": null, + "creation_date": "2024-11-18T09:19:26.925522", + "data": "[{\\\"type\\\": \\\"markdown\\\", \\\"data\\\": \\\"**Job pod events:**\\\"}, {\\\"type\\\": \\\"table\\\", \\\"data\\\": {\\\"headers\\\": [\\\"reason\\\", \\\"type\\\", \\\"time\\\", \\\"message\\\"], \\\"rows\\\": [[\\\"Scheduled\\\", \\\"Normal\\\", 1731921503047.968, \\\"Successfully assigned default/java-api-checker-tgmn7 to ip-172-31-40-111.us-east-2.compute.internal\\\"], [\\\"Pulling\\\", \\\"Normal\\\", 1731921503000.0, \\\"Pulling image \\\\\\\"busybox\\\\\\\"\\\"], [\\\"Pulled\\\", \\\"Normal\\\", 1731921503000.0, \\\"Successfully pulled image \\\\\\\"busybox\\\\\\\" in 191ms (192ms including waiting). 
Image size: 2166802 bytes.\\\"], [\\\"Created\\\", \\\"Normal\\\", 1731921503000.0, \\\"Created container java-beans\\\"], [\\\"Started\\\", \\\"Normal\\\", 1731921503000.0, \\\"Started container java-beans\\\"]], \\\"column_renderers\\\": {\\\"time\\\": \\\"DATETIME\\\"}}, \\\"metadata\\\": {}}]", + "enrichment_type": null, + "file_type": "structured_data", + "id": "314d4247-975d-4792-b54d-3b028c5ffe1a", + "issue_id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "title": null + }, + { + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "collection_timestamp": null, + "creation_date": "2024-11-18T09:19:26.971606", + "data": "[{\\\"type\\\": \\\"gz\\\", \\\"data\\\": \\\"b'H4sIAJ4GO2cC/+3OMQ7CMBAEwD6v2DIp0oCgSEdBQ8EfzvZFtjC+YF9C4PU4vIKCLXel0V5oIVxZn5JvOK+WJw2SBjSnGOGlaFs6aA7sMEqGM3jMnF8YKcTatd9pwJTF9c4M+5oOPZKgwiGSiQwrKbHdXFByUM8bMjM8FWQm66sUtOBOK0p4M3aH4//BLzxoPrhQ8yUgAgAA'\\\"}]", + "enrichment_type": null, + "file_type": "structured_data", + "id": "0f5c4a5b-d110-4dec-b55c-fb5e005ee180", + "issue_id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "title": null + } + ], + "failure": true, + "finding_type": "issue", + "fingerprint": "01261151cfe344ff0a66f1df2aa82c90652f0787459b385b5cd7c1c3cdc40ab5", + "group_id": null, + "id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "labels": { + "batch.kubernetes.io/controller-uid": "9872d2b2-9be8-489b-8b90-7c94f2d01e14", + "batch.kubernetes.io/job-name": "java-api-checker", + "controller-uid": "9872d2b2-9be8-489b-8b90-7c94f2d01e14", + "job-name": "java-api-checker" + }, + "priority": "HIGH", + "service_key": "default/Job/java-api-checker", + "service_kind": null, + "source": "kubernetes_api_server", + "starts_at": "2024-11-18T09:19:26.562228+00:00", + "subject_name": "java-api-checker", + "subject_namespace": "default", + "subject_node": null, + "subject_type": "job", + "title": "Job java-api-checker on namespace default failed", + "updated_at": "2024-11-18T09:19:26.994827+00:00", + "video_links": [] +} diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/investigate_request.json b/tests/llm/fixtures/test_investigate/06_job_failure/investigate_request.json new file mode 100644 index 00000000..291ccea5 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/investigate_request.json @@ -0,0 +1,22 @@ +{ + "source": "kubernetes_api_server", + "title": "JobFailure", + "description": "", + "subject": { + "name": "java-api-checker", + "namespace": "default", + "kind": "job", + "node": null, + "container": null, + "cluster": null + }, + "context": { + "issue_type": "JobFailure", + "robusta_issue_id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "source": "kubernetes_api_server" + }, + "source_instance_id": "ApiRequest", + "include_tool_calls": true, + "include_tool_call_results": true, + "prompt_template": "builtin://generic_investigation.jinja2" +} diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/issue_data.json b/tests/llm/fixtures/test_investigate/06_job_failure/issue_data.json new file mode 100644 index 00000000..68ca3209 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/issue_data.json @@ -0,0 +1,76 @@ +{ + "category": null, + "cluster": "test_cluster", + "creation_date": "2024-11-18T09:19:27.025638", + "description": null, + "ends_at": null, + "evidence": [ + { + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "collection_timestamp": null, + "creation_date": "2024-11-18T09:19:26.85989", + "data": "[{\\\"type\\\": \\\"markdown\\\", \\\"data\\\": \\\"**Job events:**\\\"}, {\\\"type\\\": 
\\\"table\\\", \\\"data\\\": {\\\"headers\\\": [\\\"reason\\\", \\\"type\\\", \\\"time\\\", \\\"message\\\"], \\\"rows\\\": [[\\\"BackoffLimitExceeded\\\", \\\"Warning\\\", 1731921566000.0, \\\"Job has reached the specified backoff limit\\\"], [\\\"SuccessfulCreate\\\", \\\"Normal\\\", 1731921503000.0, \\\"Created pod: java-api-checker-tgmn7\\\"], [\\\"SuccessfulCreate\\\", \\\"Normal\\\", 1731921432000.0, \\\"Created pod: java-api-checker-slp89\\\"]], \\\"column_renderers\\\": {\\\"time\\\": \\\"DATETIME\\\"}}, \\\"metadata\\\": {}}]", + "enrichment_type": "k8s_events", + "file_type": "structured_data", + "id": "22615573-cf16-4bc6-aa7c-b5a888648274", + "issue_id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "title": "Job Events" + }, + { + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "collection_timestamp": null, + "creation_date": "2024-11-18T09:19:26.808787", + "data": "[{\\\"type\\\": \\\"markdown\\\", \\\"data\\\": \\\"**Job information**\\\"}, {\\\"type\\\": \\\"table\\\", \\\"data\\\": {\\\"headers\\\": [\\\"description\\\", \\\"value\\\"], \\\"rows\\\": [[\\\"status\\\", \\\"Failed\\\"], [\\\"message\\\", \\\"Job has reached the specified backoff limit\\\"], [\\\"completions\\\", \\\"0/1\\\"], [\\\"failures\\\", \\\"2\\\"], [\\\"backoffLimit\\\", \\\"1\\\"], [\\\"duration\\\", \\\"2024-11-18T09:17:12Z - None\\\"], [\\\"containers\\\", \\\"------------------\\\"], [\\\"name\\\", \\\"java-beans\\\"], [\\\"image\\\", \\\"busybox\\\"], [\\\"cpu (request/limit)\\\", \\\"None/None\\\"], [\\\"memory MB (request/limit)\\\", \\\"None/None\\\"]], \\\"column_renderers\\\": {}}, \\\"metadata\\\": {}}]", + "enrichment_type": null, + "file_type": "structured_data", + "id": "e4c22ca3-4bcd-4acd-91b6-bbd0c3d15337", + "issue_id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "title": null + }, + { + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "collection_timestamp": null, + "creation_date": "2024-11-18T09:19:26.925522", + "data": "[{\\\"type\\\": \\\"markdown\\\", \\\"data\\\": \\\"**Job pod events:**\\\"}, {\\\"type\\\": \\\"table\\\", \\\"data\\\": {\\\"headers\\\": [\\\"reason\\\", \\\"type\\\", \\\"time\\\", \\\"message\\\"], \\\"rows\\\": [[\\\"Scheduled\\\", \\\"Normal\\\", 1731921503047.968, \\\"Successfully assigned default/java-api-checker-tgmn7 to ip-172-31-40-111.us-east-2.compute.internal\\\"], [\\\"Pulling\\\", \\\"Normal\\\", 1731921503000.0, \\\"Pulling image \\\\\\\"busybox\\\\\\\"\\\"], [\\\"Pulled\\\", \\\"Normal\\\", 1731921503000.0, \\\"Successfully pulled image \\\\\\\"busybox\\\\\\\" in 191ms (192ms including waiting). 
Image size: 2166802 bytes.\\\"], [\\\"Created\\\", \\\"Normal\\\", 1731921503000.0, \\\"Created container java-beans\\\"], [\\\"Started\\\", \\\"Normal\\\", 1731921503000.0, \\\"Started container java-beans\\\"]], \\\"column_renderers\\\": {\\\"time\\\": \\\"DATETIME\\\"}}, \\\"metadata\\\": {}}]", + "enrichment_type": null, + "file_type": "structured_data", + "id": "314d4247-975d-4792-b54d-3b028c5ffe1a", + "issue_id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "title": null + }, + { + "account_id": "debe17e5-0fe2-4c9c-9a7b-5dba04b83f89", + "collection_timestamp": null, + "creation_date": "2024-11-18T09:19:26.971606", + "data": "[{\\\"type\\\": \\\"gz\\\", \\\"data\\\": \\\"b'H4sIAJ4GO2cC/+3OMQ7CMBAEwD6v2DIp0oCgSEdBQ8EfzvZFtjC+YF9C4PU4vIKCLXel0V5oIVxZn5JvOK+WJw2SBjSnGOGlaFs6aA7sMEqGM3jMnF8YKcTatd9pwJTF9c4M+5oOPZKgwiGSiQwrKbHdXFByUM8bMjM8FWQm66sUtOBOK0p4M3aH4//BLzxoPrhQ8yUgAgAA'\\\"}]", + "enrichment_type": null, + "file_type": "structured_data", + "id": "0f5c4a5b-d110-4dec-b55c-fb5e005ee180", + "issue_id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "title": null + } + ], + "failure": true, + "finding_type": "issue", + "fingerprint": "01261151cfe344ff0a66f1df2aa82c90652f0787459b385b5cd7c1c3cdc40ab5", + "group_id": null, + "id": "1eb606c4-46af-430e-a116-a92c201ee1d0", + "labels": { + "batch.kubernetes.io/controller-uid": "9872d2b2-9be8-489b-8b90-7c94f2d01e14", + "batch.kubernetes.io/job-name": "java-api-checker", + "controller-uid": "9872d2b2-9be8-489b-8b90-7c94f2d01e14", + "job-name": "java-api-checker" + }, + "priority": "HIGH", + "service_key": "default/Job/java-api-checker", + "service_kind": null, + "source": "kubernetes_api_server", + "starts_at": "2024-11-18T09:19:26.562228+00:00", + "subject_name": "java-api-checker", + "subject_namespace": "default", + "subject_node": null, + "subject_type": "job", + "title": "Job java-api-checker on namespace default failed", + "updated_at": "2024-11-18T09:19:26.994827+00:00", + "video_links": [] +} diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_describe.txt b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_describe.txt new file mode 100644 index 00000000..ca6f4dc8 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_describe.txt @@ -0,0 +1,45 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"job","name":"java-api-checker","namespace":"default"}} +stdout: +Name: java-api-checker +Namespace: default +Selector: batch.kubernetes.io/controller-uid=9872d2b2-9be8-489b-8b90-7c94f2d01e14 +Labels: batch.kubernetes.io/controller-uid=9872d2b2-9be8-489b-8b90-7c94f2d01e14 + batch.kubernetes.io/job-name=java-api-checker + controller-uid=9872d2b2-9be8-489b-8b90-7c94f2d01e14 + job-name=java-api-checker +Annotations: +Parallelism: 1 +Completions: 1 +Completion Mode: NonIndexed +Start Time: Mon, 18 Nov 2024 09:17:12 +0000 +Pods Statuses: 0 Active (0 Ready) / 0 Succeeded / 2 Failed +Pod Template: + Labels: batch.kubernetes.io/controller-uid=9872d2b2-9be8-489b-8b90-7c94f2d01e14 + batch.kubernetes.io/job-name=java-api-checker + controller-uid=9872d2b2-9be8-489b-8b90-7c94f2d01e14 + job-name=java-api-checker + Containers: + java-beans: + Image: busybox + Port: + Host Port: + Command: + /bin/sh + -c + Args: + echo 'Java Network Exception: + All host(s) tried for db query failed (tried: prod-db:3333) - no available connection and the queue has reached its max size 256 + All host(s) tried for db query failed (tried: prod-db:3333) - no available connection and the queue has 
reached its max size 256 + All host(s) tried for db query failed (tried: prod-db:3333) - no available connection and the queue has reached its max size 256 + All host(s) tried for db query failed (tried: prod-db:3333) - no available connection and the queue has reached its max size 256'; sleep 60; exit 1 + Environment: + Mounts: + Volumes: +Events: + Type Reason Age From Message + ---- ------ ---- ---- ------- + Normal SuccessfulCreate 5m45s job-controller Created pod: java-api-checker-slp89 + Normal SuccessfulCreate 4m34s job-controller Created pod: java-api-checker-tgmn7 + Warning BackoffLimitExceeded 3m31s job-controller Job has reached the specified backoff limit + +stderr: diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_get.txt b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_get.txt new file mode 100644 index 00000000..750d5cd8 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_get.txt @@ -0,0 +1,3 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get","match_params":{"kind":"job","name":"java-api-checker","namespace":"default"}} +NAME READY STATUS RESTARTS AGE IP NODE NOMINATED NODE READINESS GATES LABELS +java-api-checker-slp89 0/1 Error 0 5m46s 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=9872d2b2-9be8-489b-8b90-7c94f2d01e14,batch.kubernetes.io/job-name=java-api-checker,controller-uid=9872d2b2-9be8-489b-8b90-7c94f2d01e14,job-name=java-api-checker diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_logs.txt b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_logs.txt new file mode 100644 index 00000000..acc438a4 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_logs.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"java-api-checker-slp89","namespace":"default"}} +Java Network Exception: +All host(s) tried for db query failed (tried: prod-db:3333) - no available connection and the queue has reached its max size 256 +All host(s) tried for db query failed (tried: prod-db:3333) - no available connection and the queue has reached its max size 256 +All host(s) tried for db query failed (tried: prod-db:3333) - no available connection and the queue has reached its max size 256 +All host(s) tried for db query failed (tried: prod-db:3333) - no available connection and the queue has reached its max size 256 diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_logs_tgmn7.txt b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_logs_tgmn7.txt new file mode 100644 index 00000000..c28a6ec2 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_logs_tgmn7.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"java-api-checker-tgmn7","namespace":"default"}} +Command `kubectl logs java-api-checker-tgmn7 -n default` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "java-api-checker-tgmn7" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_previous_logs.txt b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_previous_logs.txt new file mode 100644 index 00000000..35abae4d --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_previous_logs.txt @@ -0,0 +1,6 @@ 
+{"toolset_name":"kubernetes/core","tool_name":"kubectl_previous_logs","match_params":{"name":"java-api-checker-slp89","namespace":"default"}} +Command `kubectl logs java-api-checker-slp89 -n default --previous` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "java-api-checker-slp89" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_previous_logs_tgmn7.txt b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_previous_logs_tgmn7.txt new file mode 100644 index 00000000..08c17c5c --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/kubectl_previous_logs_tgmn7.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_previous_logs","match_params":{"name":"java-api-checker-tgmn7","namespace":"default"}} +Command `kubectl logs java-api-checker-tgmn7 -n default --previous` failed with return code 1 +stdout: + +stderr: +error: error from server (NotFound): pods "java-api-checker-tgmn7" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/resource_instructions.json b/tests/llm/fixtures/test_investigate/06_job_failure/resource_instructions.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/resource_instructions.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/06_job_failure/test_case.yaml b/tests/llm/fixtures/test_investigate/06_job_failure/test_case.yaml new file mode 100644 index 00000000..b87cf837 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/06_job_failure/test_case.yaml @@ -0,0 +1,16 @@ +expected_output: | + The job `java-api-checker` in the `default` namespace has failed because it reached the specified backoff limit. + + # Investigation + The job `java-api-checker` was configured with a `backoffLimit` of 1, meaning it would retry once upon failure. The job failed twice, as indicated by the events and logs. The logs from the pod `java-api-checker-slp89` show repeated network exceptions indicating that all attempts to connect to the database at `prod-db:3333` failed due to no available connections and a full queue. + + # Conclusions and Possible Root Causes + 1. *Database Connectivity Issues*: The primary issue is a failure to connect to the database at `prod-db:3333`. This could be due to network issues, the database being down, or the database being overloaded. + 2. *Configuration Error*: The job's configuration might not be suitable for the current database load, leading to connection failures. + 3. *Resource Limitations*: The database might be hitting its connection limits or queue size, preventing new connections. + + # Next Steps + 1. Verify the status and availability of the database at `prod-db:3333`. + 2. Check network connectivity between the Kubernetes cluster and the database. + 3. Review and possibly increase the database's connection limits or queue size. + 4. Consider adjusting the job's retry logic or backoff limit to allow more retries if transient issues are expected. 
diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/fetch_finding_by_id.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/fetch_finding_by_id.txt new file mode 100644 index 00000000..a452d7cc --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/fetch_finding_by_id.txt @@ -0,0 +1,3 @@ +{"toolset_name":"findings","tool_name":"fetch_finding_by_id","match_params":{"id":"4a25f1ef-2206-43d2-9601-1eb4254b5904"}} +error: 'Failed to find a finding with finding_id=4a25f1ef-2206-43d2-9601-1eb4254b5904: + Holmes'' data access layer is not enabled.' diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/investigate_request.json b/tests/llm/fixtures/test_investigate/07_job_syntax_error/investigate_request.json new file mode 100644 index 00000000..297b27e5 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/investigate_request.json @@ -0,0 +1,22 @@ +{ + "source": "prometheus", + "title": "KubeJobFailed", + "description": "", + "subject": { + "name": "product-data-scraper", + "namespace": "default", + "kind": "job", + "node": null, + "container": null, + "cluster": null + }, + "context": { + "issue_type": "KubeJobFailed", + "robusta_issue_id": "4a25f1ef-2206-43d2-9601-1eb4254b5904", + "source": "prometheus" + }, + "source_instance_id": "ApiRequest", + "include_tool_calls": true, + "include_tool_call_results": true, + "prompt_template": "builtin://generic_investigation.jinja2" +} diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/issue_data.json b/tests/llm/fixtures/test_investigate/07_job_syntax_error/issue_data.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/issue_data.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_describe_job.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_describe_job.txt new file mode 100644 index 00000000..c71e29d8 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_describe_job.txt @@ -0,0 +1,40 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"job","name":"product-data-scraper","namespace":"default"}} +stdout: +Name: product-data-scraper +Namespace: default +Selector: batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c +Labels: batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c + batch.kubernetes.io/job-name=product-data-scraper + controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c + job-name=product-data-scraper +Annotations: +Parallelism: 1 +Completions: 5 +Completion Mode: NonIndexed +Suspend: false +Backoff Limit: 4 +Start Time: Mon, 18 Nov 2024 10:24:17 +0100 +Pods Statuses: 0 Active (0 Ready) / 0 Succeeded / 5 Failed +Pod Template: + Labels: batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c + batch.kubernetes.io/job-name=product-data-scraper + controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c + job-name=product-data-scraper + Containers: + run-forever: + Image: bash + Port: + Host Port: + Command: + /bin/sh + Args: + -c + wget -O - https://gist.githubusercontent.com/odyssomay/1078370/raw/35c5981f8c139bc9dc02186f187ebee61f5b9eb9/gistfile1.txt 2>/dev/null; while true; do sleep 10;done; fi + Environment: + Mounts: + Volumes: + Node-Selectors: + Tolerations: +Events: + +stderr: diff --git 
a/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_describe_pod.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_describe_pod.txt new file mode 100644 index 00000000..8e4e4464 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_describe_pod.txt @@ -0,0 +1,61 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"pod","name":"product-data-scraper-bg7vg","namespace":"default"}} +stdout: +Name: product-data-scraper-bg7vg +Namespace: default +Priority: 0 +Service Account: default +Node: ip-172-31-40-111.us-east-2.compute.internal/172.31.40.111 +Start Time: Mon, 18 Nov 2024 10:24:29 +0100 +Labels: batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c + batch.kubernetes.io/job-name=product-data-scraper + controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c + job-name=product-data-scraper +Annotations: +Status: Failed +IP: 172.31.37.22 +IPs: + IP: 172.31.37.22 +Controlled By: Job/product-data-scraper +Containers: + run-forever: + Container ID: containerd://1a8a95c96821711196ac0bfb8bb76e859dfc516d4ca5bf90c93c81afe9e5eba6 + Image: bash + Image ID: docker.io/library/bash@sha256:4bbfbe07eceeed5ab9136b37faf4f5cff3c28a339087ce068a76f2c1733054e8 + Port: + Host Port: + Command: + /bin/sh + Args: + -c + wget -O - https://gist.githubusercontent.com/odyssomay/1078370/raw/35c5981f8c139bc9dc02186f187ebee61f5b9eb9/gistfile1.txt 2>/dev/null; while true; do sleep 10;done; fi + State: Terminated + Reason: Error + Exit Code: 2 + Started: Mon, 18 Nov 2024 10:24:30 +0100 + Finished: Mon, 18 Nov 2024 10:24:30 +0100 + Ready: False + Restart Count: 0 + Environment: + Mounts: + /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-tjb7n (ro) +Conditions: + Type Status + PodReadyToStartContainers False + Initialized True + Ready False + ContainersReady False + PodScheduled True +Volumes: + kube-api-access-tjb7n: + Type: Projected (a volume that contains injected data from multiple sources) + TokenExpirationSeconds: 3607 + ConfigMapName: kube-root-ca.crt + ConfigMapOptional: + DownwardAPI: true +QoS Class: BestEffort +Node-Selectors: +Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s + node.kubernetes.io/unreachable:NoExecute op=Exists for 300s +Events: + +stderr: diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_find_resource.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_find_resource.txt new file mode 100644 index 00000000..aa284f3a --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_find_resource.txt @@ -0,0 +1,9 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","keyword":"product-data-scraper"}} +stdout: +default product-data-scraper-2fqrh 0/1 Error 0 118m 172.31.21.29 ip-172-31-21-139.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-5fdj6 0/1 Error 0 120m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-bg7vg 0/1 Error 0 120m 172.31.37.22 ip-172-31-40-111.us-east-2.compute.internal 
batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-n8bhx 0/1 Error 0 120m 172.31.39.19 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-vr29r 0/1 Error 0 119m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper + +stderr: diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_get.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_get.txt new file mode 100644 index 00000000..78042977 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_get.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get","match_params":{"kind":"pod","name":"product-data-scraper","namespace":"default"}} +Command `kubectl get --show-labels -o wide pod product-data-scraper -n default` failed with return code 1 +stdout: + +stderr: +Error from server (NotFound): pods "product-data-scraper" not found diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_get_all.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_get_all.txt new file mode 100644 index 00000000..d8d2f0ea --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_get_all.txt @@ -0,0 +1,65 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get_all","match_params":{"kind":"pod"}} +stdout: +NAMESPACE NAME READY STATUS RESTARTS AGE IP NODE NOMINATED NODE READINESS GATES LABELS +default alertmanager-robusta-kube-prometheus-st-alertmanager-0 2/2 Running 0 7d2h 172.31.3.106 ip-172-31-8-128.us-east-2.compute.internal alertmanager=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/instance=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=alertmanager,app.kubernetes.io/version=0.26.0,apps.kubernetes.io/pod-index=0,controller-revision-hash=alertmanager-robusta-kube-prometheus-st-alertmanager-57cd7fb46f,statefulset.kubernetes.io/pod-name=alertmanager-robusta-kube-prometheus-st-alertmanager-0 +default customer-orders-c88c44547-sxg6h 2/2 Running 0 91m 172.31.5.200 ip-172-31-8-128.us-east-2.compute.internal app=customer-orders,pod-template-hash=c88c44547 +default krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58-n68zx 0/1 Pending 0 7d2h app=krr.robusta.dev,batch.kubernetes.io/controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,batch.kubernetes.io/job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58,controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58 +default krr-job-99dbe7b7-898d-4062-8187-34b499525000-9fzjp 0/1 Pending 0 3h6m app=krr.robusta.dev,batch.kubernetes.io/controller-uid=703fffa3-98c4-4f15-861a-5523231ecc54,batch.kubernetes.io/job-name=krr-job-99dbe7b7-898d-4062-8187-34b499525000,controller-uid=703fffa3-98c4-4f15-861a-5523231ecc54,job-name=krr-job-99dbe7b7-898d-4062-8187-34b499525000 +default nginx-deployment-688794cb9c-5cqqz 0/1 Pending 0 5h4m 
app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-8j7g6 0/1 Pending 0 5h4m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-kbfbf 1/1 Running 0 5h4m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-mz8pm 0/1 Pending 0 5h4m app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-ng4mg 0/1 Pending 0 5h4m app=nginx,pod-template-hash=688794cb9c +default order-processor 1/1 Running 3 (45m ago) 4h56m 172.31.23.45 ip-172-31-21-139.us-east-2.compute.internal +default prod-endpoint-84b57fcff-2zl4x 1/1 Running 0 5h36m 172.31.40.222 ip-172-31-40-111.us-east-2.compute.internal app=nginx,pod-template-hash=84b57fcff +default product-data-scraper-2fqrh 0/1 Error 0 5h4m 172.31.21.29 ip-172-31-21-139.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-5fdj6 0/1 Error 0 5h7m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-bg7vg 0/1 Error 0 5h6m 172.31.37.22 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-n8bhx 0/1 Error 0 5h6m 172.31.39.19 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-vr29r 0/1 Error 0 5h5m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default prometheus-robusta-kube-prometheus-st-prometheus-0 2/2 Running 0 7d2h 172.31.1.249 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=robusta-kube-prometheus-st-prometheus,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=prometheus,app.kubernetes.io/version=2.48.1,apps.kubernetes.io/pod-index=0,controller-revision-hash=prometheus-robusta-kube-prometheus-st-prometheus-55d87c869b,operator.prometheus.io/name=robusta-kube-prometheus-st-prometheus,operator.prometheus.io/shard=0,prometheus=robusta-kube-prometheus-st-prometheus,statefulset.kubernetes.io/pod-name=prometheus-robusta-kube-prometheus-st-prometheus-0 +default robusta-forwarder-89f44d49b-7ccwf 1/1 Running 0 7d2h 172.31.6.195 ip-172-31-8-128.us-east-2.compute.internal app=robusta-forwarder,pod-template-hash=89f44d49b +default robusta-grafana-597597b88-2wrpx 3/3 Running 0 7d2h 172.31.25.172 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/name=grafana,pod-template-hash=597597b88 +default robusta-holmes-54799fc97b-wpv45 1/1 Running 0 6h11m 172.31.23.36 ip-172-31-21-139.us-east-2.compute.internal app=holmes,pod-template-hash=54799fc97b +default 
robusta-holmes-678999dfd-ln8n5 0/1 Error 0 6h20m 172.31.23.36 ip-172-31-21-139.us-east-2.compute.internal app=holmes,pod-template-hash=678999dfd +default robusta-kube-prometheus-st-operator-7fc5db7f4d-pkv8m 1/1 Running 0 7d2h 172.31.38.193 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/part-of=kube-prometheus-stack,app.kubernetes.io/version=55.7.0,app=kube-prometheus-stack-operator,chart=kube-prometheus-stack-55.7.0,heritage=Helm,pod-template-hash=7fc5db7f4d,release=robusta +default robusta-kube-state-metrics-7964495ff6-mszjx 1/1 Running 0 7d2h 172.31.23.28 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=kube-state-metrics,app.kubernetes.io/part-of=kube-state-metrics,app.kubernetes.io/version=2.10.1,helm.sh/chart=kube-state-metrics-5.15.3,pod-template-hash=7964495ff6,release=robusta +default robusta-prometheus-node-exporter-22ncm 1/1 Running 0 7d2h 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-hsf46 1/1 Running 0 7d2h 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-r9ss7 1/1 Running 0 7d2h 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-runner-68d76784b4-bqm9r 1/1 Running 0 7d2h 172.31.45.91 ip-172-31-40-111.us-east-2.compute.internal app=robusta-runner,pod-template-hash=68d76784b4,robustaComponent=runner +default stress-test-7b596f4759-62lpg 0/1 Pending 0 5h3m app=stress-test,pod-template-hash=7b596f4759 +default stress-test-7b596f4759-f7vmc 0/1 Pending 0 5h3m app=stress-test,pod-template-hash=7b596f4759 +default stress-test-7b596f4759-s8m84 0/1 Pending 0 5h3m app=stress-test,pod-template-hash=7b596f4759 +kube-system aws-node-475cb 2/2 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-87tjm 2/2 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-m47xg 
2/2 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system coredns-858457f4f6-9vksl 1/1 Running 0 35d 172.31.43.91 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system coredns-858457f4f6-d6kvv 1/1 Running 0 35d 172.31.44.154 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system ebs-csi-controller-7bb676b68d-cs2gx 6/6 Running 0 35d 172.31.12.254 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-controller-7bb676b68d-dz5wd 6/6 Running 0 35d 172.31.37.38 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-node-j7r2p 3/3 Running 0 35d 172.31.30.77 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-pgrvq 3/3 Running 0 35d 172.31.2.194 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-snr4c 3/3 Running 0 35d 172.31.36.149 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system eks-pod-identity-agent-mbfpx 1/1 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-nfqc6 1/1 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-vgz8h 1/1 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system kube-proxy-24klf 1/1 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-l7vqp 1/1 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-tkz9f 1/1 Running 0 35d 172.31.40.111 
ip-172-31-40-111.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +sock-shop carts-db-6548bf479b-49wbw 1/1 Running 0 14d 172.31.38.83 ip-172-31-40-111.us-east-2.compute.internal name=carts-db,pod-template-hash=6548bf479b +sock-shop carts-f5db95987-hvh8w 1/1 Running 0 14d 172.31.35.216 ip-172-31-40-111.us-east-2.compute.internal name=carts,pod-template-hash=f5db95987 +sock-shop catalogue-58dc79c975-qrxkj 1/1 Running 0 14d 172.31.28.250 ip-172-31-21-139.us-east-2.compute.internal name=catalogue,pod-template-hash=58dc79c975 +sock-shop catalogue-db-f659c4c59-x94xs 1/1 Running 0 14d 172.31.38.49 ip-172-31-40-111.us-east-2.compute.internal name=catalogue-db,pod-template-hash=f659c4c59 +sock-shop front-end-84cb7d7d7c-4s2cp 1/1 Running 0 14d 172.31.47.71 ip-172-31-40-111.us-east-2.compute.internal name=front-end,pod-template-hash=84cb7d7d7c +sock-shop orders-68dc698477-b6g8x 1/1 Running 0 14d 172.31.16.203 ip-172-31-21-139.us-east-2.compute.internal name=orders,pod-template-hash=68dc698477 +sock-shop orders-db-85b469d778-wf7sh 1/1 Running 0 14d 172.31.27.164 ip-172-31-21-139.us-east-2.compute.internal name=orders-db,pod-template-hash=85b469d778 +sock-shop payment-d6fd65b86-zmjt5 1/1 Running 0 14d 172.31.40.230 ip-172-31-40-111.us-east-2.compute.internal name=payment,pod-template-hash=d6fd65b86 +sock-shop queue-master-ff4874c99-kxgg9 1/1 Running 0 14d 172.31.31.177 ip-172-31-21-139.us-east-2.compute.internal name=queue-master,pod-template-hash=ff4874c99 +sock-shop rabbitmq-c9fd5c694-6g9xm 2/2 Running 0 14d 172.31.29.7 ip-172-31-21-139.us-east-2.compute.internal name=rabbitmq,pod-template-hash=c9fd5c694 +sock-shop session-db-686f69d474-qzhcx 1/1 Running 0 14d 172.31.23.242 ip-172-31-21-139.us-east-2.compute.internal name=session-db,pod-template-hash=686f69d474 +sock-shop shipping-84b9f9b465-pg8kc 1/1 Running 0 14d 172.31.18.149 ip-172-31-21-139.us-east-2.compute.internal name=shipping,pod-template-hash=84b9f9b465 +sock-shop user-5bd96d75fb-ld8xv 1/1 Running 0 14d 172.31.0.106 ip-172-31-8-128.us-east-2.compute.internal name=user,pod-template-hash=5bd96d75fb +sock-shop user-db-5dc5c5f488-dw6xw 1/1 Running 0 14d 172.31.0.66 ip-172-31-8-128.us-east-2.compute.internal name=user-db,pod-template-hash=5dc5c5f488 + +stderr: diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_lineage_children.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_lineage_children.txt new file mode 100644 index 00000000..3cd9d880 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_lineage_children.txt @@ -0,0 +1,44 @@ +{"toolset_name":"kubernetes/extras","tool_name":"kubectl_lineage_children","match_params":{"kind":"job","name":"product-data-scraper","namespace":"default"}} +NAME READY STATUS AGE +Job/product-data-scraper - 21m +├── Event/product-data-scraper.180905c0dd171cd7 - SuccessfulCreate: Created pod: product-data-scraper-5fdj6 21m +├── Event/product-data-scraper.180905c3856b374f - SuccessfulCreate: Created pod: product-data-scraper-bg7vg 21m +├── Event/product-data-scraper.180905c8693f3398 - SuccessfulCreate: Created pod: product-data-scraper-n8bhx 21m +├── Event/product-data-scraper.180905d1bb080b33 - SuccessfulCreate: Created pod: product-data-scraper-vr29r 20m +├── Event/product-data-scraper.180905e4954bde82 - SuccessfulCreate: Created pod: product-data-scraper-2fqrh 19m +├── Event/product-data-scraper.180905e5868ff48d - BackoffLimitExceeded: Job has reached the specified backoff limit 
19m +├── Pod/product-data-scraper-2fqrh 0/1 Error 19m +│ ├── Event/product-data-scraper-2fqrh.180905e49616dcad - Scheduled: Successfully assigned default/product-data-scraper-2fqrh to ip-172-31-21-139.us-east-2.compute.internal 19m +│ ├── Event/product-data-scraper-2fqrh.180905e4b3684858 - Pulling: Pulling image \"bash\" 19m +│ ├── Event/product-data-scraper-2fqrh.180905e4ee7548d7 - Pulled: Successfully pulled image \"bash\" in 990ms (990ms including waiting). Image size: 6335174 bytes. 19m +│ ├── Event/product-data-scraper-2fqrh.180905e4f03c1a59 - Created: Created container run-forever 19m +│ ├── Event/product-data-scraper-2fqrh.180905e4f543acde - Started: Started container run-forever 19m +│ └── Service/kubernetes - 6d22h +├── Pod/product-data-scraper-5fdj6 0/1 Error 21m +│ ├── Event/product-data-scraper-5fdj6.180905c0ddf7c940 - Scheduled: Successfully assigned default/product-data-scraper-5fdj6 to ip-172-31-40-111.us-east-2.compute.internal 21m +│ ├── Event/product-data-scraper-5fdj6.180905c0fc1d7dd1 - Pulling: Pulling image \"bash\" 21m +│ ├── Event/product-data-scraper-5fdj6.180905c131ad5155 - Pulled: Successfully pulled image \"bash\" in 898ms (898ms including waiting). Image size: 6335174 bytes. 21m +│ ├── Event/product-data-scraper-5fdj6.180905c13369495d - Created: Created container run-forever 21m +│ ├── Event/product-data-scraper-5fdj6.180905c13b29868f - Started: Started container run-forever 21m +│ └── Service/kubernetes - 6d22h +├── Pod/product-data-scraper-bg7vg 0/1 Error 21m +│ ├── Event/product-data-scraper-bg7vg.180905c386455311 - Scheduled: Successfully assigned default/product-data-scraper-bg7vg to ip-172-31-40-111.us-east-2.compute.internal 21m +│ ├── Event/product-data-scraper-bg7vg.180905c3a5239cc3 - Pulling: Pulling image \"bash\" 21m +│ ├── Event/product-data-scraper-bg7vg.180905c3b7018c10 - Pulled: Successfully pulled image \"bash\" in 299ms (299ms including waiting). Image size: 6335174 bytes. 21m +│ ├── Event/product-data-scraper-bg7vg.180905c3b9556b37 - Created: Created container run-forever 21m +│ ├── Event/product-data-scraper-bg7vg.180905c3c16b1328 - Started: Started container run-forever 21m +│ └── Service/kubernetes - 6d22h +├── Pod/product-data-scraper-n8bhx 0/1 Error 21m +│ ├── Event/product-data-scraper-n8bhx.180905c86a4979e8 - Scheduled: Successfully assigned default/product-data-scraper-n8bhx to ip-172-31-40-111.us-east-2.compute.internal 21m +│ ├── Event/product-data-scraper-n8bhx.180905c888c31ae6 - Pulling: Pulling image \"bash\" 21m +│ ├── Event/product-data-scraper-n8bhx.180905c89696fe5a - Pulled: Successfully pulled image \"bash\" in 231ms (231ms including waiting). Image size: 6335174 bytes. 21m +│ ├── Event/product-data-scraper-n8bhx.180905c898214f1d - Created: Created container run-forever 21m +│ ├── Event/product-data-scraper-n8bhx.180905c89c4b407a - Started: Started container run-forever 21m +│ └── Service/kubernetes - 6d22h +└── Pod/product-data-scraper-vr29r 0/1 Error 20m + ├── Event/product-data-scraper-vr29r.180905d1bb7f81de - Scheduled: Successfully assigned default/product-data-scraper-vr29r to ip-172-31-40-111.us-east-2.compute.internal 20m + ├── Event/product-data-scraper-vr29r.180905d1dbadf7e0 - Pulling: Pulling image \"bash\" 20m + ├── Event/product-data-scraper-vr29r.180905d1e7f2d101 - Pulled: Successfully pulled image \"bash\" in 205ms (205ms including waiting). Image size: 6335174 bytes. 
20m + ├── Event/product-data-scraper-vr29r.180905d1ea398b00 - Created: Created container run-forever 20m + ├── Event/product-data-scraper-vr29r.180905d1f4d38ea8 - Started: Started container run-forever 20m + └── Service/kubernetes - 6d22h diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs.txt new file mode 100644 index 00000000..8fc49c31 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"product-data-scraper-2fqrh","namespace":"default"}} +stdout: +/bin/sh: syntax error: unexpected "fi" + +stderr: diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs_5fdj6.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs_5fdj6.txt new file mode 100644 index 00000000..68f836e6 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs_5fdj6.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"product-data-scraper-5fdj6","namespace":"default"}} +stdout: +/bin/sh: syntax error: unexpected "fi" + +stderr: diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs_bg7vg.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs_bg7vg.txt new file mode 100644 index 00000000..d1b4f7a5 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_logs_bg7vg.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"product-data-scraper-bg7vg","namespace":"default"}} +stdout: +/bin/sh: syntax error: unexpected "fi" + +stderr: diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_previous_logs.txt b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_previous_logs.txt new file mode 100644 index 00000000..66d7819f --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/kubectl_previous_logs.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_previous_logs","match_params":{"name":"product-data-scraper-2fqrh","namespace":"default"}} +stdout: +/bin/sh: syntax error: unexpected "fi" + +stderr: diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/resource_instructions.json b/tests/llm/fixtures/test_investigate/07_job_syntax_error/resource_instructions.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/resource_instructions.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/07_job_syntax_error/test_case.yaml b/tests/llm/fixtures/test_investigate/07_job_syntax_error/test_case.yaml new file mode 100644 index 00000000..567f39da --- /dev/null +++ b/tests/llm/fixtures/test_investigate/07_job_syntax_error/test_case.yaml @@ -0,0 +1,26 @@ +expected_output: | + The `KubeJobFailed` alert indicates that the Kubernetes job `product-data-scraper` in the `default` namespace failed to complete successfully. + + # Investigation + 1. The job `product-data-scraper` was configured to complete 5 times but failed all 5 attempts, reaching the backoff limit of 4. + 2. The job's pods (`product-data-scraper-2fqrh`, `product-data-scraper-5fdj6`, etc.) were created successfully but encountered errors during execution. + 3. 
The logs from one of the pods, `product-data-scraper-2fqrh`, show a syntax error: `/bin/sh: syntax error: unexpected "fi"`. + + # Conclusions and Possible Root Causes + 1. *Syntax Error in Command*: The job's container command has a syntax error, specifically an unexpected "fi", which is causing the job to fail. + 2. *Job Configuration*: The job's backoff limit was reached due to repeated failures, indicating that the job's command needs correction. + + # Next Steps + 1. **Fix the Command**: Correct the syntax error in the job's command. Ensure that the shell script logic is correct. + 2. **Redeploy the Job**: After fixing the command, delete the failed job and redeploy it to clear the alert. + 3. **Verify**: Monitor the job to ensure it completes successfully without hitting the backoff limit. + + To fix the job, you can edit the job's YAML to correct the command and then apply the changes: + ```bash + kubectl edit job product-data-scraper -n default + ``` + After editing, delete the existing job and create a new one: + ```bash + kubectl delete job product-data-scraper -n default + kubectl apply -f .yaml + ``` diff --git a/tests/llm/fixtures/test_investigate/08_memory_pressure/fetch_finding_by_id.txt b/tests/llm/fixtures/test_investigate/08_memory_pressure/fetch_finding_by_id.txt new file mode 100644 index 00000000..ed42d125 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/08_memory_pressure/fetch_finding_by_id.txt @@ -0,0 +1,3 @@ +{"toolset_name":"findings","tool_name":"fetch_finding_by_id","match_params":{"id":"aff5113d-1310-4fa8-a24d-6838171273ee"}} +error: 'Failed to find a finding with finding_id=aff5113d-1310-4fa8-a24d-6838171273ee: + Holmes'' data access layer is not enabled.' diff --git a/tests/llm/fixtures/test_investigate/08_memory_pressure/investigate_request.json b/tests/llm/fixtures/test_investigate/08_memory_pressure/investigate_request.json new file mode 100644 index 00000000..ed20d5f8 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/08_memory_pressure/investigate_request.json @@ -0,0 +1,22 @@ +{ + "source": "prometheus", + "title": "KubeletTooManyPods", + "description": "", + "subject": { + "name": "ip-172-31-40-111.us-east-2.compute.internal", + "namespace": null, + "kind": "node", + "node": null, + "container": null, + "cluster": null + }, + "context": { + "issue_type": "KubeletTooManyPods", + "robusta_issue_id": "aff5113d-1310-4fa8-a24d-6838171273ee", + "source": "prometheus" + }, + "source_instance_id": "ApiRequest", + "include_tool_calls": true, + "include_tool_call_results": true, + "prompt_template": "builtin://generic_investigation.jinja2" +} diff --git a/tests/llm/fixtures/test_investigate/08_memory_pressure/issue_data.json b/tests/llm/fixtures/test_investigate/08_memory_pressure/issue_data.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/08_memory_pressure/issue_data.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_describe.txt b/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_describe.txt new file mode 100644 index 00000000..6d900501 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_describe.txt @@ -0,0 +1,103 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"node","name":"ip-172-31-40-111.us-east-2.compute.internal","namespace":"default"}} +stdout: +Name: ip-172-31-40-111.us-east-2.compute.internal +Roles: +Labels: 
beta.kubernetes.io/arch=amd64 + beta.kubernetes.io/instance-type=t3.medium + beta.kubernetes.io/os=linux + eks.amazonaws.com/capacityType=ON_DEMAND + eks.amazonaws.com/nodegroup=nicolas-node-group + eks.amazonaws.com/nodegroup-image=ami-067ed4d12a282fb31 + failure-domain.beta.kubernetes.io/region=us-east-2 + failure-domain.beta.kubernetes.io/zone=us-east-2c + k8s.io/cloud-provider-aws=02bcd7cbb8e774ede4606ab79260ae31 + kubernetes.io/arch=amd64 + kubernetes.io/hostname=ip-172-31-40-111.us-east-2.compute.internal + kubernetes.io/os=linux + node.kubernetes.io/instance-type=t3.medium + topology.ebs.csi.aws.com/zone=us-east-2c + topology.k8s.aws/zone-id=use2-az3 + topology.kubernetes.io/region=us-east-2 + topology.kubernetes.io/zone=us-east-2c +Annotations: alpha.kubernetes.io/provided-node-ip: 172.31.40.111 + csi.volume.kubernetes.io/nodeid: {"ebs.csi.aws.com":"i-0e6de6538ed7dd6a1"} + node.alpha.kubernetes.io/ttl: 0 + volumes.kubernetes.io/controller-managed-attach-detach: true +CreationTimestamp: Mon, 14 Oct 2024 08:10:44 +0200 +Taints: +Unschedulable: false +Lease: + HolderIdentity: ip-172-31-40-111.us-east-2.compute.internal + AcquireTime: + RenewTime: Mon, 18 Nov 2024 11:30:53 +0100 +Conditions: + Type Status LastHeartbeatTime LastTransitionTime Reason Message + ---- ------ ----------------- ------------------ ------ ------- + MemoryPressure False Mon, 18 Nov 2024 11:27:57 +0100 Mon, 14 Oct 2024 08:10:44 +0200 KubeletHasSufficientMemory kubelet has sufficient memory available + DiskPressure False Mon, 18 Nov 2024 11:27:57 +0100 Mon, 14 Oct 2024 08:10:44 +0200 KubeletHasNoDiskPressure kubelet has no disk pressure + PIDPressure False Mon, 18 Nov 2024 11:27:57 +0100 Mon, 14 Oct 2024 08:10:44 +0200 KubeletHasSufficientPID kubelet has sufficient PID available + Ready True Mon, 18 Nov 2024 11:27:57 +0100 Mon, 14 Oct 2024 08:10:57 +0200 KubeletReady kubelet is posting ready status +Addresses: + InternalIP: 172.31.40.111 + ExternalIP: 18.191.138.60 + InternalDNS: ip-172-31-40-111.us-east-2.compute.internal + Hostname: ip-172-31-40-111.us-east-2.compute.internal + ExternalDNS: ec2-18-191-138-60.us-east-2.compute.amazonaws.com +Capacity: + cpu: 2 + ephemeral-storage: 20959212Ki + hugepages-1Gi: 0 + hugepages-2Mi: 0 + memory: 3943312Ki + pods: 17 +Allocatable: + cpu: 1930m + ephemeral-storage: 18242267924 + hugepages-1Gi: 0 + hugepages-2Mi: 0 + memory: 3388304Ki + pods: 17 +System Info: + Machine ID: ec2479e827097f9137436ffc1e256e23 + System UUID: ec2479e8-2709-7f91-3743-6ffc1e256e23 + Boot ID: 490eb300-a8b1-4c5a-a0a3-2d243837b65c + Kernel Version: 5.10.225-213.878.amzn2.x86_64 + OS Image: Amazon Linux 2 + Operating System: linux + Architecture: amd64 + Container Runtime Version: containerd://1.7.11 + Kubelet Version: v1.30.4-eks-a737599 + Kube-Proxy Version: v1.30.4-eks-a737599 +ProviderID: aws:///us-east-2c/i-0e6de6538ed7dd6a1 +Non-terminated Pods: (17 in total) + Namespace Name CPU Requests CPU Limits Memory Requests Memory Limits Age + --------- ---- ------------ ---------- --------------- ------------- --- + default nginx-deployment-688794cb9c-kbfbf 0 (0%) 0 (0%) 0 (0%) 0 (0%) 63m + default prod-endpoint-84b57fcff-2zl4x 0 (0%) 0 (0%) 0 (0%) 0 (0%) 95m + default robusta-kube-prometheus-st-operator-7fc5db7f4d-pkv8m 100m (5%) 0 (0%) 0 (0%) 0 (0%) 6d22h + default robusta-prometheus-node-exporter-22ncm 50m (2%) 0 (0%) 50Mi (1%) 50Mi (1%) 6d22h + default robusta-runner-68d76784b4-bqm9r 250m (12%) 0 (0%) 1Gi (30%) 1Gi (30%) 6d22h + kube-system aws-node-87tjm 50m (2%) 0 (0%) 0 (0%) 0 (0%) 35d + 
kube-system coredns-858457f4f6-9vksl 100m (5%) 0 (0%) 70Mi (2%) 170Mi (5%) 35d + kube-system coredns-858457f4f6-d6kvv 100m (5%) 0 (0%) 70Mi (2%) 170Mi (5%) 35d + kube-system ebs-csi-controller-7bb676b68d-dz5wd 60m (3%) 0 (0%) 240Mi (7%) 1536Mi (46%) 35d + kube-system ebs-csi-node-snr4c 30m (1%) 0 (0%) 120Mi (3%) 768Mi (23%) 35d + kube-system eks-pod-identity-agent-mbfpx 0 (0%) 0 (0%) 0 (0%) 0 (0%) 35d + kube-system kube-proxy-tkz9f 100m (5%) 0 (0%) 0 (0%) 0 (0%) 35d + sock-shop carts-db-6548bf479b-49wbw 0 (0%) 0 (0%) 0 (0%) 0 (0%) 13d + sock-shop carts-f5db95987-hvh8w 100m (5%) 300m (15%) 200Mi (6%) 500Mi (15%) 13d + sock-shop catalogue-db-f659c4c59-x94xs 0 (0%) 0 (0%) 0 (0%) 0 (0%) 13d + sock-shop front-end-84cb7d7d7c-4s2cp 100m (5%) 300m (15%) 300Mi (9%) 1000Mi (30%) 13d + sock-shop payment-d6fd65b86-zmjt5 99m (5%) 200m (10%) 100Mi (3%) 200Mi (6%) 13d +Allocated resources: + (Total limits may be over 100 percent, i.e., overcommitted.) + Resource Requests Limits + -------- -------- ------ + cpu 1139m (59%) 800m (41%) + memory 2174Mi (65%) 5418Mi (163%) + ephemeral-storage 0 (0%) 0 (0%) + hugepages-1Gi 0 (0%) 0 (0%) + hugepages-2Mi 0 (0%) 0 (0%) +Events: + +stderr: diff --git a/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_get.txt b/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_get.txt new file mode 100644 index 00000000..cb1ec825 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_get.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get","match_params":{"kind":"node","name":"ip-172-31-40-111.us-east-2.compute.internal","namespace":"default"}} +stdout: +NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME LABELS +ip-172-31-40-111.us-east-2.compute.internal Ready 35d v1.30.4-eks-a737599 172.31.40.111 18.191.138.60 Amazon Linux 2 5.10.225-213.878.amzn2.x86_64 containerd://1.7.11 beta.kubernetes.io/arch=amd64,beta.kubernetes.io/instance-type=t3.medium,beta.kubernetes.io/os=linux,eks.amazonaws.com/capacityType=ON_DEMAND,eks.amazonaws.com/nodegroup-image=ami-067ed4d12a282fb31,eks.amazonaws.com/nodegroup=nicolas-node-group,failure-domain.beta.kubernetes.io/region=us-east-2,failure-domain.beta.kubernetes.io/zone=us-east-2c,k8s.io/cloud-provider-aws=02bcd7cbb8e774ede4606ab79260ae31,kubernetes.io/arch=amd64,kubernetes.io/hostname=ip-172-31-40-111.us-east-2.compute.internal,kubernetes.io/os=linux,node.kubernetes.io/instance-type=t3.medium,topology.ebs.csi.aws.com/zone=us-east-2c,topology.k8s.aws/zone-id=use2-az3,topology.kubernetes.io/region=us-east-2,topology.kubernetes.io/zone=us-east-2c + +stderr: diff --git a/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_get_all.txt b/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_get_all.txt new file mode 100644 index 00000000..501a54e6 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_get_all.txt @@ -0,0 +1,8 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get_all","match_params":{"kind":"node"}} +stdout: +NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME LABELS +ip-172-31-21-139.us-east-2.compute.internal Ready 35d v1.30.4-eks-a737599 172.31.21.139 18.221.191.202 Amazon Linux 2 5.10.225-213.878.amzn2.x86_64 containerd://1.7.11 
beta.kubernetes.io/arch=amd64,beta.kubernetes.io/instance-type=t3.medium,beta.kubernetes.io/os=linux,eks.amazonaws.com/capacityType=ON_DEMAND,eks.amazonaws.com/nodegroup-image=ami-067ed4d12a282fb31,eks.amazonaws.com/nodegroup=nicolas-node-group,failure-domain.beta.kubernetes.io/region=us-east-2,failure-domain.beta.kubernetes.io/zone=us-east-2b,k8s.io/cloud-provider-aws=02bcd7cbb8e774ede4606ab79260ae31,kubernetes.io/arch=amd64,kubernetes.io/hostname=ip-172-31-21-139.us-east-2.compute.internal,kubernetes.io/os=linux,node.kubernetes.io/instance-type=t3.medium,topology.ebs.csi.aws.com/zone=us-east-2b,topology.k8s.aws/zone-id=use2-az2,topology.kubernetes.io/region=us-east-2,topology.kubernetes.io/zone=us-east-2b +ip-172-31-40-111.us-east-2.compute.internal Ready 35d v1.30.4-eks-a737599 172.31.40.111 18.191.138.60 Amazon Linux 2 5.10.225-213.878.amzn2.x86_64 containerd://1.7.11 beta.kubernetes.io/arch=amd64,beta.kubernetes.io/instance-type=t3.medium,beta.kubernetes.io/os=linux,eks.amazonaws.com/capacityType=ON_DEMAND,eks.amazonaws.com/nodegroup-image=ami-067ed4d12a282fb31,eks.amazonaws.com/nodegroup=nicolas-node-group,failure-domain.beta.kubernetes.io/region=us-east-2,failure-domain.beta.kubernetes.io/zone=us-east-2c,k8s.io/cloud-provider-aws=02bcd7cbb8e774ede4606ab79260ae31,kubernetes.io/arch=amd64,kubernetes.io/hostname=ip-172-31-40-111.us-east-2.compute.internal,kubernetes.io/os=linux,node.kubernetes.io/instance-type=t3.medium,topology.ebs.csi.aws.com/zone=us-east-2c,topology.k8s.aws/zone-id=use2-az3,topology.kubernetes.io/region=us-east-2,topology.kubernetes.io/zone=us-east-2c +ip-172-31-8-128.us-east-2.compute.internal Ready 35d v1.30.4-eks-a737599 172.31.8.128 3.147.70.176 Amazon Linux 2 5.10.225-213.878.amzn2.x86_64 containerd://1.7.11 beta.kubernetes.io/arch=amd64,beta.kubernetes.io/instance-type=t3.medium,beta.kubernetes.io/os=linux,eks.amazonaws.com/capacityType=ON_DEMAND,eks.amazonaws.com/nodegroup-image=ami-067ed4d12a282fb31,eks.amazonaws.com/nodegroup=nicolas-node-group,failure-domain.beta.kubernetes.io/region=us-east-2,failure-domain.beta.kubernetes.io/zone=us-east-2a,k8s.io/cloud-provider-aws=02bcd7cbb8e774ede4606ab79260ae31,kubernetes.io/arch=amd64,kubernetes.io/hostname=ip-172-31-8-128.us-east-2.compute.internal,kubernetes.io/os=linux,node.kubernetes.io/instance-type=t3.medium,topology.ebs.csi.aws.com/zone=us-east-2a,topology.k8s.aws/zone-id=use2-az1,topology.kubernetes.io/region=us-east-2,topology.kubernetes.io/zone=us-east-2a + +stderr: diff --git a/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_top_pods.txt b/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_top_pods.txt new file mode 100644 index 00000000..a29a1535 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/08_memory_pressure/kubectl_top_pods.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/live-metrics","tool_name":"kubectl_top_pods","match_params":{}} +Command `kubectl top pods -A` failed with return code 1 +stdout: + +stderr: +error: Metrics API not available diff --git a/tests/llm/fixtures/test_investigate/08_memory_pressure/resource_instructions.json b/tests/llm/fixtures/test_investigate/08_memory_pressure/resource_instructions.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/08_memory_pressure/resource_instructions.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/08_memory_pressure/test_case.yaml b/tests/llm/fixtures/test_investigate/08_memory_pressure/test_case.yaml new 
file mode 100644 index 00000000..6f4e312f --- /dev/null +++ b/tests/llm/fixtures/test_investigate/08_memory_pressure/test_case.yaml @@ -0,0 +1,19 @@ +expected_output: | + The node `ip-172-31-40-111.us-east-2.compute.internal` is running at 100% of its pod capacity, which is set to 17 pods. This alert indicates that the node cannot schedule any additional pods. + + # Investigation + - The node `ip-172-31-40-111.us-east-2.compute.internal` is currently running 17 pods, which is its maximum capacity. + - Several pods are pending, indicating they cannot be scheduled due to the node's capacity limit. + - The node is labeled as `t3.medium`, which typically has a lower pod capacity compared to larger instance types. + - Some pods on this node do not have CPU or memory requests defined, which can lead to inefficient resource utilization. + + # Conclusions and Possible Root Causes + 1. *Node Capacity Limit Reached*: The node has reached its maximum pod capacity of 17, preventing new pods from being scheduled. + 2. *Resource Requests Not Defined*: The absence of CPU and memory requests for some pods may lead to suboptimal scheduling and resource allocation. + 3. *Instance Type Limitation*: The `t3.medium` instance type may inherently support fewer pods due to its resource constraints. + + # Next Steps + 1. **Redistribute Pods**: Consider redistributing pods to other nodes with available capacity. + 2. **Increase Node Capacity**: Add more nodes to the cluster or upgrade existing nodes to a larger instance type to accommodate more pods. + 3. **Define Resource Requests**: Ensure all pods have defined CPU and memory requests to improve scheduling efficiency. + 4. **Adjust Kubelet Configuration**: If feasible, increase the `maxPods` setting in the Kubelet configuration for nodes that can handle more pods. 
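The capacity claims in this expected output can be checked with standard kubectl queries. A minimal sketch; the node name comes from this fixture, and note the count includes completed pods unless filtered by phase:
```bash
# Show each node's advertised pod capacity (17 here for t3.medium)
kubectl get nodes -o custom-columns=NAME:.metadata.name,PODS:.status.capacity.pods

# Count the pods scheduled on the saturated node from this fixture
kubectl get pods -A --no-headers \
  --field-selector spec.nodeName=ip-172-31-40-111.us-east-2.compute.internal | wc -l
```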
diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/fetch_finding_by_id.txt b/tests/llm/fixtures/test_investigate/09_high_latency/fetch_finding_by_id.txt new file mode 100644 index 00000000..972f4d27 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/fetch_finding_by_id.txt @@ -0,0 +1,21 @@ +{"toolset_name":"findings","tool_name":"fetch_finding_by_id","match_params":{"id":"5fedd1a3-83b7-45dc-9649-708323bd50f5"}} +{ + "id": "5fedd1a3-83b7-45dc-9649-708323bd50f5", + "description": "HTTP Requests to the '/checkout' endpoint in customer-orders-app are taking longer than 3 seconds", + "source": "customer-orders", + "priority": "DEBUG", + "subject_type": "Deployment", + "subject_name": "customer-orders", + "subject_namespace": "default", + "subject_node": null, + "service_key": "", + "cluster": "test_cluster", + "creation_date": "2024-11-18 14:03:55.635001", + "title": "HighLatencyForCustomerCheckout", + "aggregation_key": "HighLatencyForCustomerCheckout", + "finding_type": "issue", + "failure": true, + "labels": {}, + "annotations": {}, + "evidence": [] +} diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/investigate_request.json b/tests/llm/fixtures/test_investigate/09_high_latency/investigate_request.json new file mode 100644 index 00000000..fdad0ad0 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/investigate_request.json @@ -0,0 +1,22 @@ +{ + "source": "prometheus", + "title": "HighLatencyForCustomerCheckout", + "description": "", + "subject": { + "name": "customer-orders", + "namespace": "default", + "kind": "deployment", + "node": null, + "container": null, + "cluster": null + }, + "context": { + "issue_type": "HighLatencyForCustomerCheckout", + "robusta_issue_id": "5fedd1a3-83b7-45dc-9649-708323bd50f5", + "source": "prometheus" + }, + "source_instance_id": "ApiRequest", + "include_tool_calls": true, + "include_tool_call_results": true, + "prompt_template": "builtin://generic_investigation.jinja2" +} diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/issue_data.json b/tests/llm/fixtures/test_investigate/09_high_latency/issue_data.json new file mode 100644 index 00000000..7014fd74 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/issue_data.json @@ -0,0 +1,20 @@ +{ + "id": "5fedd1a3-83b7-45dc-9649-708323bd50f5", + "description": "HTTP Requests to the '/checkout' endpoint in customer-orders-app are taking longer than 3 seconds", + "source": "customer-orders", + "priority": "DEBUG", + "subject_type": "Deployment", + "subject_name": "customer-orders", + "subject_namespace": "default", + "subject_node": null, + "service_key": "", + "cluster": "test_cluster", + "creation_date": "2024-11-18 14:03:55.635001", + "title": "HighLatencyForCustomerCheckout", + "aggregation_key": "HighLatencyForCustomerCheckout", + "finding_type": "issue", + "failure": true, + "labels": {}, + "annotations": {}, + "evidence": [] +} diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_describe_deployment.txt b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_describe_deployment.txt new file mode 100644 index 00000000..5546a4a9 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_describe_deployment.txt @@ -0,0 +1,47 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"deployment","name":"customer-orders","namespace":"default"}} +stdout: +Name: customer-orders +Namespace: default +CreationTimestamp: Mon, 18 Nov 2024 
14:00:06 +0100 +Labels: +Annotations: deployment.kubernetes.io/revision: 1 +Selector: app=customer-orders +Replicas: 1 desired | 1 updated | 1 total | 1 available | 0 unavailable +StrategyType: RollingUpdate +MinReadySeconds: 0 +RollingUpdateStrategy: 25% max unavailable, 25% max surge +Pod Template: + Labels: app=customer-orders + Containers: + fastapi-app: + Image: us-central1-docker.pkg.dev/genuine-flight-317411/devel/rds-demo:nicolas-no-db + Ports: 8000/TCP, 8001/TCP + Host Ports: 0/TCP, 0/TCP + Environment: + Mounts: + curl-sidecar: + Image: curlimages/curl + Port: + Host Port: + Args: + /bin/sh + -c + while true; do curl -s http://localhost:8000; sleep 60; done + Environment: + Mounts: + Volumes: + Node-Selectors: + Tolerations: +Conditions: + Type Status Reason + ---- ------ ------ + Available True MinimumReplicasAvailable + Progressing True NewReplicaSetAvailable +OldReplicaSets: +NewReplicaSet: customer-orders-c88c44547 (1/1 replicas created) +Events: + Type Reason Age From Message + ---- ------ ---- ---- ------- + Normal ScalingReplicaSet 16m deployment-controller Scaled up replica set customer-orders-c88c44547 to 1 + +stderr: diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_describe_pod.txt b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_describe_pod.txt new file mode 100644 index 00000000..ff811949 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_describe_pod.txt @@ -0,0 +1,68 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_describe","match_params":{"kind":"pod","name":"customer-orders-c88c44547-sxg6h","namespace":"default"}} +stdout: +Name: customer-orders-c88c44547-sxg6h +Namespace: default +Priority: 0 +Service Account: default +Node: ip-172-31-8-128.us-east-2.compute.internal/172.31.8.128 +Start Time: Mon, 18 Nov 2024 14:00:06 +0100 +Labels: app=customer-orders + pod-template-hash=c88c44547 +Annotations: +Status: Running +IP: 172.31.5.200 +IPs: + IP: 172.31.5.200 +Controlled By: ReplicaSet/customer-orders-c88c44547 +Containers: + fastapi-app: + Container ID: containerd://cdf17f6b5bf5e8ba5007dff4dd71be5f2ad45562776ece098f2d67f2ce1de46f + Image: us-central1-docker.pkg.dev/genuine-flight-317411/devel/rds-demo:nicolas-no-db + Image ID: us-central1-docker.pkg.dev/genuine-flight-317411/devel/rds-demo@sha256:4fb95fef92b4a3bb59b28fd50feb2abfe51e1cfab7376edeee72c06c3f3b9b8c + Ports: 8000/TCP, 8001/TCP + Host Ports: 0/TCP, 0/TCP + State: Running + Started: Mon, 18 Nov 2024 14:00:13 +0100 + Ready: True + Restart Count: 0 + Environment: + Mounts: + /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-zrggv (ro) + curl-sidecar: + Container ID: containerd://4290436349a40b333d469ade52dfc23499648c59ded642b1550ef5ce1bcf484c + Image: curlimages/curl + Image ID: docker.io/curlimages/curl@sha256:83a505ba2ba62f208ed6e410c268b7b9aa48f0f7b403c8108b9773b44199dbba + Port: + Host Port: + Args: + /bin/sh + -c + while true; do curl -s http://localhost:8000; sleep 60; done + State: Running + Started: Mon, 18 Nov 2024 14:00:14 +0100 + Ready: True + Restart Count: 0 + Environment: + Mounts: + /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-zrggv (ro) +Conditions: + Type Status + PodReadyToStartContainers True + Initialized True + Ready True + ContainersReady True + PodScheduled True +Volumes: + kube-api-access-zrggv: + Type: Projected (a volume that contains injected data from multiple sources) + TokenExpirationSeconds: 3607 + ConfigMapName: kube-root-ca.crt + ConfigMapOptional: + DownwardAPI: 
true +QoS Class: BestEffort +Node-Selectors: +Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s + node.kubernetes.io/unreachable:NoExecute op=Exists for 300s +Events: + +stderr: diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_find_resource.txt b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_find_resource.txt new file mode 100644 index 00000000..fde187fc --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_find_resource.txt @@ -0,0 +1,5 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_find_resource","match_params":{"kind":"pod","keyword":"customer-orders"}} +stdout: +default customer-orders-c88c44547-sxg6h 2/2 Running 0 128m 172.31.5.200 ip-172-31-8-128.us-east-2.compute.internal app=customer-orders,pod-template-hash=c88c44547 + +stderr: diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_get.txt b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_get.txt new file mode 100644 index 00000000..b1903379 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_get.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get","match_params":{"kind":"deployment","name":"customer-orders","namespace":"default"}} +stdout: +NAME READY UP-TO-DATE AVAILABLE AGE CONTAINERS IMAGES SELECTOR LABELS +customer-orders 1/1 1 1 17m fastapi-app,curl-sidecar us-central1-docker.pkg.dev/genuine-flight-317411/devel/rds-demo:nicolas-no-db,curlimages/curl app=customer-orders + +stderr: diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_get_all.txt b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_get_all.txt new file mode 100644 index 00000000..1d51fe96 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_get_all.txt @@ -0,0 +1,65 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_get_all","match_params":{"kind":"pod"}} +stdout: +NAMESPACE NAME READY STATUS RESTARTS AGE IP NODE NOMINATED NODE READINESS GATES LABELS +default alertmanager-robusta-kube-prometheus-st-alertmanager-0 2/2 Running 0 7d1h 172.31.3.106 ip-172-31-8-128.us-east-2.compute.internal alertmanager=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/instance=robusta-kube-prometheus-st-alertmanager,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=alertmanager,app.kubernetes.io/version=0.26.0,apps.kubernetes.io/pod-index=0,controller-revision-hash=alertmanager-robusta-kube-prometheus-st-alertmanager-57cd7fb46f,statefulset.kubernetes.io/pod-name=alertmanager-robusta-kube-prometheus-st-alertmanager-0 +default customer-orders-c88c44547-sxg6h 2/2 Running 0 27m 172.31.5.200 ip-172-31-8-128.us-east-2.compute.internal app=customer-orders,pod-template-hash=c88c44547 +default krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58-n68zx 0/1 Pending 0 7d1h app=krr.robusta.dev,batch.kubernetes.io/controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,batch.kubernetes.io/job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58,controller-uid=d9d8bfd2-914e-4e32-9a1a-3f96ebe4d5dd,job-name=krr-job-8e4112dc-6c9c-4de8-ab0d-0a4fcd6ffa58 +default krr-job-99dbe7b7-898d-4062-8187-34b499525000-9fzjp 0/1 Pending 0 122m app=krr.robusta.dev,batch.kubernetes.io/controller-uid=703fffa3-98c4-4f15-861a-5523231ecc54,batch.kubernetes.io/job-name=krr-job-99dbe7b7-898d-4062-8187-34b499525000,controller-uid=703fffa3-98c4-4f15-861a-5523231ecc54,job-name=krr-job-99dbe7b7-898d-4062-8187-34b499525000 +default nginx-deployment-688794cb9c-5cqqz 
0/1 Pending 0 4h app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-8j7g6 0/1 Pending 0 4h app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-kbfbf 1/1 Running 0 4h 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-mz8pm 0/1 Pending 0 4h app=nginx,pod-template-hash=688794cb9c +default nginx-deployment-688794cb9c-ng4mg 0/1 Pending 0 4h app=nginx,pod-template-hash=688794cb9c +default order-processor 1/1 Running 2 (65m ago) 3h53m 172.31.23.45 ip-172-31-21-139.us-east-2.compute.internal +default prod-endpoint-84b57fcff-2zl4x 1/1 Running 0 4h32m 172.31.40.222 ip-172-31-40-111.us-east-2.compute.internal app=nginx,pod-template-hash=84b57fcff +default product-data-scraper-2fqrh 0/1 Error 0 4h 172.31.21.29 ip-172-31-21-139.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-5fdj6 0/1 Error 0 4h3m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-bg7vg 0/1 Error 0 4h2m 172.31.37.22 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-n8bhx 0/1 Error 0 4h2m 172.31.39.19 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default product-data-scraper-vr29r 0/1 Error 0 4h1m 172.31.37.156 ip-172-31-40-111.us-east-2.compute.internal batch.kubernetes.io/controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,batch.kubernetes.io/job-name=product-data-scraper,controller-uid=5f522d60-db91-4642-a719-00bdd7bb8c8c,job-name=product-data-scraper +default prometheus-robusta-kube-prometheus-st-prometheus-0 2/2 Running 0 7d1h 172.31.1.249 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=robusta-kube-prometheus-st-prometheus,app.kubernetes.io/managed-by=prometheus-operator,app.kubernetes.io/name=prometheus,app.kubernetes.io/version=2.48.1,apps.kubernetes.io/pod-index=0,controller-revision-hash=prometheus-robusta-kube-prometheus-st-prometheus-55d87c869b,operator.prometheus.io/name=robusta-kube-prometheus-st-prometheus,operator.prometheus.io/shard=0,prometheus=robusta-kube-prometheus-st-prometheus,statefulset.kubernetes.io/pod-name=prometheus-robusta-kube-prometheus-st-prometheus-0 +default robusta-forwarder-89f44d49b-7ccwf 1/1 Running 0 7d1h 172.31.6.195 ip-172-31-8-128.us-east-2.compute.internal app=robusta-forwarder,pod-template-hash=89f44d49b +default robusta-grafana-597597b88-2wrpx 3/3 Running 0 7d1h 172.31.25.172 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/name=grafana,pod-template-hash=597597b88 +default robusta-holmes-54799fc97b-wpv45 1/1 Running 0 5h7m 172.31.23.36 ip-172-31-21-139.us-east-2.compute.internal app=holmes,pod-template-hash=54799fc97b 
+default robusta-holmes-678999dfd-ln8n5 0/1 Error 0 5h16m 172.31.23.36 ip-172-31-21-139.us-east-2.compute.internal app=holmes,pod-template-hash=678999dfd +default robusta-kube-prometheus-st-operator-7fc5db7f4d-pkv8m 1/1 Running 0 7d1h 172.31.38.193 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/part-of=kube-prometheus-stack,app.kubernetes.io/version=55.7.0,app=kube-prometheus-stack-operator,chart=kube-prometheus-stack-55.7.0,heritage=Helm,pod-template-hash=7fc5db7f4d,release=robusta +default robusta-kube-state-metrics-7964495ff6-mszjx 1/1 Running 0 7d1h 172.31.23.28 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=kube-state-metrics,app.kubernetes.io/part-of=kube-state-metrics,app.kubernetes.io/version=2.10.1,helm.sh/chart=kube-state-metrics-5.15.3,pod-template-hash=7964495ff6,release=robusta +default robusta-prometheus-node-exporter-22ncm 1/1 Running 0 7d1h 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-hsf46 1/1 Running 0 7d1h 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-prometheus-node-exporter-r9ss7 1/1 Running 0 7d1h 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=metrics,app.kubernetes.io/instance=robusta,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=prometheus-node-exporter,app.kubernetes.io/part-of=prometheus-node-exporter,app.kubernetes.io/version=1.7.0,controller-revision-hash=7bf445876b,helm.sh/chart=prometheus-node-exporter-4.24.0,jobLabel=node-exporter,pod-template-generation=1,release=robusta +default robusta-runner-68d76784b4-bqm9r 1/1 Running 0 7d1h 172.31.45.91 ip-172-31-40-111.us-east-2.compute.internal app=robusta-runner,pod-template-hash=68d76784b4,robustaComponent=runner +default stress-test-7b596f4759-62lpg 0/1 Pending 0 3h59m app=stress-test,pod-template-hash=7b596f4759 +default stress-test-7b596f4759-f7vmc 0/1 Pending 0 3h59m app=stress-test,pod-template-hash=7b596f4759 +default stress-test-7b596f4759-s8m84 0/1 Pending 0 3h59m app=stress-test,pod-template-hash=7b596f4759 +kube-system aws-node-475cb 2/2 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system aws-node-87tjm 2/2 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system 
aws-node-m47xg 2/2 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=aws-vpc-cni,app.kubernetes.io/name=aws-node,controller-revision-hash=54f5998898,k8s-app=aws-node,pod-template-generation=1 +kube-system coredns-858457f4f6-9vksl 1/1 Running 0 35d 172.31.43.91 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system coredns-858457f4f6-d6kvv 1/1 Running 0 35d 172.31.44.154 ip-172-31-40-111.us-east-2.compute.internal eks.amazonaws.com/component=coredns,k8s-app=kube-dns,pod-template-hash=858457f4f6 +kube-system ebs-csi-controller-7bb676b68d-cs2gx 6/6 Running 0 35d 172.31.12.254 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-controller-7bb676b68d-dz5wd 6/6 Running 0 35d 172.31.37.38 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-controller,pod-template-hash=7bb676b68d +kube-system ebs-csi-node-j7r2p 3/3 Running 0 35d 172.31.30.77 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-pgrvq 3/3 Running 0 35d 172.31.2.194 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system ebs-csi-node-snr4c 3/3 Running 0 35d 172.31.36.149 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/component=csi-driver,app.kubernetes.io/managed-by=EKS,app.kubernetes.io/name=aws-ebs-csi-driver,app.kubernetes.io/version=1.35.0,app=ebs-csi-node,controller-revision-hash=6bc69bc4b9,pod-template-generation=1 +kube-system eks-pod-identity-agent-mbfpx 1/1 Running 0 35d 172.31.40.111 ip-172-31-40-111.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-nfqc6 1/1 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system eks-pod-identity-agent-vgz8h 1/1 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal app.kubernetes.io/instance=eks-pod-identity-agent,app.kubernetes.io/name=eks-pod-identity-agent,controller-revision-hash=74bcb67854,pod-template-generation=1 +kube-system kube-proxy-24klf 1/1 Running 0 35d 172.31.21.139 ip-172-31-21-139.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-l7vqp 1/1 Running 0 35d 172.31.8.128 ip-172-31-8-128.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +kube-system kube-proxy-tkz9f 1/1 Running 0 35d 172.31.40.111 
ip-172-31-40-111.us-east-2.compute.internal controller-revision-hash=6b64cc6947,k8s-app=kube-proxy,pod-template-generation=1 +sock-shop carts-db-6548bf479b-49wbw 1/1 Running 0 14d 172.31.38.83 ip-172-31-40-111.us-east-2.compute.internal name=carts-db,pod-template-hash=6548bf479b +sock-shop carts-f5db95987-hvh8w 1/1 Running 0 14d 172.31.35.216 ip-172-31-40-111.us-east-2.compute.internal name=carts,pod-template-hash=f5db95987 +sock-shop catalogue-58dc79c975-qrxkj 1/1 Running 0 14d 172.31.28.250 ip-172-31-21-139.us-east-2.compute.internal name=catalogue,pod-template-hash=58dc79c975 +sock-shop catalogue-db-f659c4c59-x94xs 1/1 Running 0 14d 172.31.38.49 ip-172-31-40-111.us-east-2.compute.internal name=catalogue-db,pod-template-hash=f659c4c59 +sock-shop front-end-84cb7d7d7c-4s2cp 1/1 Running 0 14d 172.31.47.71 ip-172-31-40-111.us-east-2.compute.internal name=front-end,pod-template-hash=84cb7d7d7c +sock-shop orders-68dc698477-b6g8x 1/1 Running 0 14d 172.31.16.203 ip-172-31-21-139.us-east-2.compute.internal name=orders,pod-template-hash=68dc698477 +sock-shop orders-db-85b469d778-wf7sh 1/1 Running 0 14d 172.31.27.164 ip-172-31-21-139.us-east-2.compute.internal name=orders-db,pod-template-hash=85b469d778 +sock-shop payment-d6fd65b86-zmjt5 1/1 Running 0 14d 172.31.40.230 ip-172-31-40-111.us-east-2.compute.internal name=payment,pod-template-hash=d6fd65b86 +sock-shop queue-master-ff4874c99-kxgg9 1/1 Running 0 13d 172.31.31.177 ip-172-31-21-139.us-east-2.compute.internal name=queue-master,pod-template-hash=ff4874c99 +sock-shop rabbitmq-c9fd5c694-6g9xm 2/2 Running 0 13d 172.31.29.7 ip-172-31-21-139.us-east-2.compute.internal name=rabbitmq,pod-template-hash=c9fd5c694 +sock-shop session-db-686f69d474-qzhcx 1/1 Running 0 13d 172.31.23.242 ip-172-31-21-139.us-east-2.compute.internal name=session-db,pod-template-hash=686f69d474 +sock-shop shipping-84b9f9b465-pg8kc 1/1 Running 0 13d 172.31.18.149 ip-172-31-21-139.us-east-2.compute.internal name=shipping,pod-template-hash=84b9f9b465 +sock-shop user-5bd96d75fb-ld8xv 1/1 Running 0 13d 172.31.0.106 ip-172-31-8-128.us-east-2.compute.internal name=user,pod-template-hash=5bd96d75fb +sock-shop user-db-5dc5c5f488-dw6xw 1/1 Running 0 13d 172.31.0.66 ip-172-31-8-128.us-east-2.compute.internal name=user-db,pod-template-hash=5dc5c5f488 + +stderr: diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_lineage_children.txt b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_lineage_children.txt new file mode 100644 index 00000000..dddcc176 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_lineage_children.txt @@ -0,0 +1,19 @@ +{"toolset_name":"kubernetes/extras","tool_name":"kubectl_lineage_children","match_params":{"kind":"deployment","name":"customer-orders","namespace":"default"}} +NAME READY STATUS AGE +Deployment/customer-orders 1/1 18m +├── Event/customer-orders.18091187b032cbf6 - ScalingReplicaSet: Scaled up replica set customer-orders-c88c44547 to 1 18m +└── ReplicaSet/customer-orders-c88c44547 1/1 18m + ├── Event/customer-orders-c88c44547.18091187b6c90993 - SuccessfulCreate: Created pod: customer-orders-c88c44547-sxg6h 18m + └── Pod/customer-orders-c88c44547-sxg6h 2/2 Running 18m + ├── Event/customer-orders-c88c44547-sxg6h.18091187b75ccb2f - Scheduled: Successfully assigned default/customer-orders-c88c44547-sxg6h to ip-172-31-8-128.us-east-2.compute.internal 18m + ├── Event/customer-orders-c88c44547-sxg6h.18091187db54aa9a - Pulling: Pulling image 
"us-central1-docker.pkg.dev/genuine-flight-317411/devel/rds-demo:nicolas-no-db" 18m + ├── Event/customer-orders-c88c44547-sxg6h.1809118978f62a1a - Pulled: Successfully pulled image "us-central1-docker.pkg.dev/genuine-flight-317411/devel/rds-demo:nicolas-no-db" in 6.939s (6.939s including waiting). Image size: 67301774 bytes. 18m + ├── Event/customer-orders-c88c44547-sxg6h.180911897a178f04 - Created: Created container fastapi-app 18m + ├── Event/customer-orders-c88c44547-sxg6h.180911897f55f265 - Started: Started container fastapi-app 18m + ├── Event/customer-orders-c88c44547-sxg6h.180911897f64816a - Pulling: Pulling image "curlimages/curl" 18m + ├── Event/customer-orders-c88c44547-sxg6h.180911898fbbdbe8 - Pulled: Successfully pulled image "curlimages/curl" in 274ms (274ms including waiting). Image size: 11928059 bytes. 18m + ├── Event/customer-orders-c88c44547-sxg6h.18091189924769e7 - Created: Created container curl-sidecar 18m + ├── Event/customer-orders-c88c44547-sxg6h.180911899ab0ded2 - Started: Started container curl-sidecar 18m + ├── Service/customer-orders-service - 18m + │ └── EndpointSlice/customer-orders-service-npl28 - 18m + └── Service/kubernetes - 7d1h diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_logs.txt b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_logs.txt new file mode 100644 index 00000000..b971fe12 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_logs.txt @@ -0,0 +1,247 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"customer-orders-c88c44547-sxg6h","namespace":"default"}} +stdout: +INFO: Started server process [1] +INFO: Waiting for application startup. +INFO: Application startup complete. +INFO: Uvicorn running on http://0.0.0.0:8000 (Press CTRL+C to quit) +INFO: 172.31.1.249:37648 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:41184 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:52694 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO: 172.31.1.249:37688 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Database call completed in 10.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 10.01 seconds. +INFO: 127.0.0.1:57854 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:53568 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:36558 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:37284 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:44580 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO:app:Database call completed in 5.00 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 5.00 seconds. +INFO: 127.0.0.1:49894 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:52262 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:53888 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:44244 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:50682 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. 
+INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO: 172.31.1.249:34058 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Database call completed in 7.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 7.01 seconds. +INFO: 127.0.0.1:47096 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:52178 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:50542 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:53894 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:55848 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO:app:Database call completed in 10.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 10.01 seconds. +INFO: 127.0.0.1:33682 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:45348 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:51026 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:44558 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:55882 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO: 172.31.1.249:35828 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Database call completed in 10.00 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 10.00 seconds. +INFO: 127.0.0.1:34256 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:55100 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:50840 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:54038 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:50354 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO: 172.31.1.249:54560 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Database call completed in 9.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 9.01 seconds. +INFO: 127.0.0.1:33940 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:33810 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:36200 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:33664 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:47906 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO:app:Database call completed in 7.00 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 7.00 seconds. +INFO: 127.0.0.1:37862 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:51658 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:47682 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:37456 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:48484 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. 
+INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO: 172.31.1.249:48106 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Database call completed in 9.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 9.01 seconds. +INFO: 127.0.0.1:51438 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:49996 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:44972 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:37910 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:44894 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO:app:Database call completed in 8.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 8.12 seconds. +INFO: 127.0.0.1:39676 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:38644 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:45972 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:60316 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:44092 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO: 172.31.1.249:41348 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Database call completed in 6.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 6.01 seconds. +INFO: 127.0.0.1:52826 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:58958 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:59168 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:46936 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:38788 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO:app:Database call completed in 9.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 9.01 seconds. +INFO: 127.0.0.1:35066 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:42412 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:57514 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:46342 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:39226 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO: 172.31.1.249:33332 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Database call completed in 5.00 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 5.00 seconds. +INFO: 127.0.0.1:42878 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:38596 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:36630 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:51618 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:33904 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. 
+INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO:app:Database call completed in 9.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 9.01 seconds. +INFO: 127.0.0.1:51464 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:57692 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:60304 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:50526 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:56900 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO: 172.31.1.249:47230 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Database call completed in 9.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 9.01 seconds. +INFO: 127.0.0.1:47332 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:34966 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:55612 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:52442 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:41190 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO:app:Database call completed in 9.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 9.01 seconds. +INFO: 127.0.0.1:32824 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:56400 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:57886 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:51362 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:51674 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO: 172.31.1.249:33884 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Database call completed in 10.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 10.01 seconds. +INFO: 127.0.0.1:51586 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:33032 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:58260 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:33630 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:33390 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO:app:Database call completed in 5.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 5.01 seconds. +INFO: 127.0.0.1:47184 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:44284 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:53232 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:41306 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:57394 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. 
+INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO: 172.31.1.249:45920 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Database call completed in 8.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 8.01 seconds. +INFO: 127.0.0.1:37884 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:59450 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:50548 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:53928 - "GET /metrics HTTP/1.1" 200 OK +INFO: 172.31.1.249:54492 - "GET /metrics HTTP/1.1" 200 OK +INFO:app:Received request for checkout page. +INFO:app:Connecting to promotions database to see if we should try to upsell user +INFO:app:Connecting to database at None +INFO:app:Fetching data using stored procedure: sp_CheckUserNotifications +INFO:app:Database call completed in 9.01 seconds. +INFO:app:Promotions result: True +INFO:app:Page rendered in 9.01 seconds. +INFO: 127.0.0.1:41566 - "GET / HTTP/1.1" 200 OK +INFO: 172.31.1.249:47816 - "GET /metrics HTTP/1.1" 200 OK + +stderr: +Defaulted container "fastapi-app" out of: fastapi-app, curl-sidecar diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_logs_incorrect.txt b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_logs_incorrect.txt new file mode 100644 index 00000000..313690da --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_logs_incorrect.txt @@ -0,0 +1,2 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_logs","match_params":{"name":"customer-orders","namespace":"default"}} +error: error from server (NotFound): pods "customer-orders" not found in namespace "default" diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_previous_logs.txt b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_previous_logs.txt new file mode 100644 index 00000000..a25d2434 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_previous_logs.txt @@ -0,0 +1,7 @@ +{"toolset_name":"kubernetes/core","tool_name":"kubectl_previous_logs","match_params":{"name":"customer-orders-c88c44547-sxg6h","namespace":"default"}} +Command `kubectl logs customer-orders-c88c44547-sxg6h -n default --previous` failed with return code 1 +stdout: + +stderr: +Defaulted container "fastapi-app" out of: fastapi-app, curl-sidecar +Error from server (BadRequest): previous terminated container "fastapi-app" in pod "customer-orders-c88c44547-sxg6h" not found diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_top_pods.txt b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_top_pods.txt new file mode 100644 index 00000000..a29a1535 --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/kubectl_top_pods.txt @@ -0,0 +1,6 @@ +{"toolset_name":"kubernetes/live-metrics","tool_name":"kubectl_top_pods","match_params":{}} +Command `kubectl top pods -A` failed with return code 1 +stdout: + +stderr: +error: Metrics API not available diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/resource_instructions.json b/tests/llm/fixtures/test_investigate/09_high_latency/resource_instructions.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/resource_instructions.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/llm/fixtures/test_investigate/09_high_latency/test_case.yaml 
b/tests/llm/fixtures/test_investigate/09_high_latency/test_case.yaml new file mode 100644 index 00000000..b16e2e6e --- /dev/null +++ b/tests/llm/fixtures/test_investigate/09_high_latency/test_case.yaml @@ -0,0 +1,16 @@ +expected_output: | + The `HighLatencyForCustomerCheckout` alert is triggered because HTTP requests to the `/checkout` endpoint in the `customer-orders` application are taking longer than 3 seconds to complete. + + # Investigation + 1. **Deployment Details**: The `customer-orders` deployment in the `default` namespace is running with one replica. The deployment uses the `fastapi-app` container from the image `us-central1-docker.pkg.dev/genuine-flight-317411/devel/rds-demo:nicolas-no-db`. + 2. **Pod Logs**: The logs from the `fastapi-app` container show that database calls are taking a significant amount of time, ranging from 5 to 10 seconds, which is contributing to the high latency of the checkout page rendering. + + # Conclusions and Possible Root Causes + 1. *Database Latency*: The primary cause of the high latency appears to be the time taken for database calls. The logs indicate that the database call `sp_CheckUserNotifications` is taking between 5 and 10 seconds to complete, which directly impacts the response time of the `/checkout` endpoint. + 2. *Database Connection Issues*: The logs mention connecting to a database at `None`, which might indicate a misconfiguration or an issue with the database connection string. + + # Next Steps + 1. **Database Optimization**: Investigate the stored procedure `sp_CheckUserNotifications` for potential optimizations to reduce execution time. + 2. **Configuration Check**: Verify the database connection settings in the application configuration to ensure they are correct and pointing to the intended database. + 3. **Monitoring and Metrics**: Implement detailed monitoring on the database to identify any bottlenecks or resource constraints that could be causing delays. + 4. **Scaling**: Consider scaling the database resources or the application if the load is higher than expected.
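Each mock `.txt` fixture above follows one convention: the first line is a JSON header naming the toolset, the tool, and the `match_params` the mock answers for, and everything after it is the canned output handed back to the LLM. A minimal sketch of reading such a file (the helper name is illustrative; the real parsing lives in `parse_mock_metadata` in `tests/llm/utils/mock_utils.py` further down):

```python
import json
from pathlib import Path

def split_tool_mock(path: Path):
    """Separate a fixture's one-line JSON header from the canned tool output."""
    text = path.read_text(encoding="utf-8").strip()
    header, _, body = text.partition("\n")
    return json.loads(header), body

# Run from the repository root against one of the fixtures added in this patch:
meta, output = split_tool_mock(
    Path("tests/llm/fixtures/test_investigate/09_high_latency/kubectl_logs.txt"))
assert meta["tool_name"] == "kubectl_logs"  # the header identifies the mocked tool
```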
diff --git a/tests/llm/test_ask_holmes.py b/tests/llm/test_ask_holmes.py new file mode 100644 index 00000000..b53d11fb --- /dev/null +++ b/tests/llm/test_ask_holmes.py @@ -0,0 +1,108 @@ +from pathlib import Path +from pydantic import TypeAdapter +import os +import pytest + +from holmes.core.conversations import build_chat_messages +from holmes.core.llm import DefaultLLM +from holmes.core.models import ChatRequest +from holmes.core.tool_calling_llm import LLMResult, ToolCallingLLM +from holmes.core.tools import ToolExecutor +from tests.llm.utils.classifiers import get_context_classifier +from tests.llm.utils.constants import PROJECT +from tests.llm.utils.system import readable_timestamp +from tests.llm.utils.mock_toolset import MockToolsets +from braintrust import Experiment, ReadonlyExperiment + +from autoevals.llm import Factuality +import braintrust +from tests.llm.utils.mock_utils import AskHolmesTestCase, MockHelper, upload_dataset +from tests.llm.utils.system import get_machine_state_tags +from os import path + +TEST_CASES_FOLDER = Path(path.abspath(path.join( + path.dirname(__file__), + "fixtures", "test_ask_holmes" +))) + +DATASET_NAME = "ask_holmes" + +@pytest.mark.llm +@pytest.mark.skipif(not os.environ.get('BRAINTRUST_API_KEY'), reason="BRAINTRUST_API_KEY must be set to run LLM evaluations") +def test_ask_holmes(): + + mh = MockHelper(TEST_CASES_FOLDER) + # upload_dataset( + # test_cases=mh.load_investigate_test_cases(), + # project_name=PROJECT, + # dataset_name=DATASET_NAME + # ) + + # dataset = braintrust.init_dataset(project=PROJECT, name=DATASET_NAME) + # experiment:Experiment|ReadonlyExperiment = braintrust.init( + # project=PROJECT, + # experiment=f"ask_holmes_{readable_timestamp()}", + # dataset=dataset, + # open=False, + # update=False, + # metadata=get_machine_state_tags()) + + # if isinstance(experiment, ReadonlyExperiment): + # raise Exception("Experiment must be writable. 
The above options open=False and update=False ensure this is the case so this exception should never be raised") + + + eval_factuality = Factuality() + test_cases = mh.load_ask_holmes_test_cases() + for test_case in test_cases: + # for dataset_row in dataset: + # test_case = TypeAdapter(AskHolmesTestCase).validate_python(dataset_row["metadata"]) + + # span = experiment.start_span(name=f"ask_holmes:{test_case.id}", span_attributes={"test_case_id": test_case.id}) + result = ask_holmes(test_case) + # span.end() + + input = test_case.user_prompt + output = result.result + expected = test_case.expected_output + + scores = { + "faithfulness": eval_factuality(output, expected, input=input).score, + } + + if len(test_case.retrieval_context) > 0: + evaluate_context_usage = get_context_classifier(test_case.retrieval_context) + scores["context"] = evaluate_context_usage(output, expected, input=input).score + + # span.log( + # input=input, + # output=output, + # expected=expected, + # dataset_record_id=dataset_row["id"], + # scores=scores + # ) + + # experiment.flush() + + +def ask_holmes(test_case:AskHolmesTestCase) -> LLMResult: + + mock = MockToolsets(tools_passthrough=test_case.mocks_passthrough, test_case_folder=test_case.folder) + + expected_tools = [] + for tool_mock in test_case.tool_mocks: + mock.mock_tool(tool_mock) + expected_tools.append(tool_mock.tool_name) + + tool_executor = ToolExecutor(mock.mocked_toolsets) + ai = ToolCallingLLM( + tool_executor=tool_executor, + max_steps=10, + llm=DefaultLLM("gpt-4o") + ) + + chat_request = ChatRequest(ask=test_case.user_prompt) + + messages = build_chat_messages( + chat_request.ask, [], ai=ai + ) + return ai.messages_call(messages=messages) diff --git a/tests/llm/test_investigate.py b/tests/llm/test_investigate.py new file mode 100644 index 00000000..414ae5aa --- /dev/null +++ b/tests/llm/test_investigate.py @@ -0,0 +1,258 @@ + +import json +import logging +import os +from pathlib import Path +from typing import Optional + +from autoevals import Factuality +import braintrust +import pytest +from rich.console import Console + +from holmes.config import Config +from holmes.core.investigation import investigate_issues +from holmes.core.supabase_dal import SupabaseDal +from holmes.core.tools import ToolExecutor, ToolsetPattern +from tests.llm.utils.classifiers import get_context_classifier, get_logs_explanation_classifier +from tests.llm.utils.constants import PROJECT +from tests.llm.utils.system import get_machine_state_tags, readable_timestamp +from tests.llm.utils.mock_dal import MockSupabaseDal +from tests.llm.utils.mock_toolset import MockToolsets +from tests.llm.utils.mock_utils import InvestigateTestCase, MockHelper, upload_dataset +from os import path + + +TEST_CASES_FOLDER = Path(path.abspath(path.join( + path.dirname(__file__), + "fixtures", "test_investigate" +))) + +DATASET_NAME = "investigate" + +class MockConfig(Config): + def __init__(self, test_case:InvestigateTestCase): + super().__init__() + self._test_case = test_case + + def create_tool_executor( + self, console: Console, allowed_toolsets: ToolsetPattern, dal:Optional[SupabaseDal] + ) -> ToolExecutor: + + mock = MockToolsets(tools_passthrough=self._test_case.mocks_passthrough, test_case_folder=self._test_case.folder) + + expected_tools = [] + for tool_mock in self._test_case.tool_mocks: + mock.mock_tool(tool_mock) + expected_tools.append(tool_mock.tool_name) + + return ToolExecutor(mock.mocked_toolsets) + +@pytest.mark.llm +@pytest.mark.skipif(not 
os.environ.get('BRAINTRUST_API_KEY'), reason="BRAINTRUST_API_KEY must be set to run LLM evaluations") +def test_investigate(): + + mh = MockHelper(TEST_CASES_FOLDER) + upload_dataset( + test_cases=mh.load_investigate_test_cases(), + project_name=PROJECT, + dataset_name=DATASET_NAME + ) + dataset = braintrust.init_dataset(project=PROJECT, name=DATASET_NAME) + experiment:braintrust.Experiment|braintrust.ReadonlyExperiment = braintrust.init( + project=PROJECT, + experiment=f"investigate_{readable_timestamp()}", + dataset=dataset, + open=False, + update=False, + metadata=get_machine_state_tags()) + + if isinstance(experiment, braintrust.ReadonlyExperiment): + raise Exception("Experiment must be writable. The above options open=False and update=False ensure this is the case so this exception should never be raised") + + + eval_factuality = Factuality() + failed_runs = [] + runs = [] + + mh = MockHelper(TEST_CASES_FOLDER) + test_cases = mh.load_investigate_test_cases() + for test_case in test_cases: + # for dataset_row in dataset: + + # test_case = TypeAdapter(InvestigateTestCase).validate_python(dataset_row["metadata"]) + + config = MockConfig(test_case) + mock_dal = MockSupabaseDal( + test_case_folder=Path(test_case.folder), + dal_passthrough=test_case.mocks_passthrough, + issue_data=test_case.issue_data, + resource_instructions=test_case.resource_instructions + ) + # span = experiment.start_span(name=f"investigate:{test_case.id}", span_attributes={"test_case_id": test_case.id}) + + input = test_case.investigate_request + expected = test_case.expected_output + result = None + try: + result = investigate_issues( + investigate_request=test_case.investigate_request, + config=config, + dal=mock_dal, + console=Console() + ) + except Exception: + logging.exception(f"Failed to run test case {test_case.id}") + finally: + # span.end() + pass + + if result is None: + # span.log( + # input=input, + # output="", + # expected=expected, + # dataset_record_id=dataset_row["id"], + # scores={"runs_successfully": 0} + # ) + failed_runs.append(test_case.id) + continue + + output = result.analysis + + evaluate_logs_explanation = get_logs_explanation_classifier() + scores = { + "runs_successfully": 1, + "faithfulness": eval_factuality(output, expected, input=input).score, + "previous_logs": evaluate_logs_explanation(output, expected, input=input).score + } + + if len(test_case.retrieval_context) > 0: + evaluate_context_usage = get_context_classifier(test_case.retrieval_context) + scores["context"] = evaluate_context_usage(output, expected, input=input).score + + # span.log( + # input=input, + # output=output, + # expected=expected, + # dataset_record_id=dataset_row["id"], + # scores=scores + # ) + print(output) + runs.append({ + "id": test_case.id, + "scores":scores + }) + + # experiment.flush() + + print(json.dumps(runs, indent=2)) + assert failed_runs == [], f"The following llm runs failed: {str(failed_runs)}" + + + +# @pytest.mark.skipif(not os.environ.get('BRAINTRUST_API_KEY'), reason="BRAINTRUST_API_KEY must be set to run LLM evaluations") +# def _test_investigate_langfuse(): + +# mh = MockHelper(TEST_CASES_FOLDER) +# upload_dataset( +# test_cases=mh.load_investigate_test_cases(), +# project_name=PROJECT, +# dataset_name=DATASET_NAME +# ) +# experiment_name = f"investigate_{readable_timestamp()}" +# dataset = braintrust.init_dataset(project=PROJECT, name=DATASET_NAME) +# experiment:braintrust.Experiment|braintrust.ReadonlyExperiment = braintrust.init( +# project=PROJECT, +# experiment=experiment_name, +# 
dataset=dataset, +# open=False, +# update=False, +# metadata=get_machine_state_tags()) + +# lf_dataset = None +# try: +# lf_dataset = langfuse.get_dataset(DATASET_NAME) +# except Exception: +# pass +# if not lf_dataset: +# lf_dataset = langfuse.create_dataset( +# name=DATASET_NAME, +# # optional description +# description=DATASET_NAME +# ) +# for dataset_row in dataset: +# test_case = TypeAdapter(InvestigateTestCase).validate_python(dataset_row["metadata"]) +# langfuse.create_dataset_item( +# dataset_name=DATASET_NAME, +# input={ +# "text": test_case.id +# }, +# expected_output={ +# "text": test_case.expected_output +# }, +# metadata=test_case.model_dump() +# ) +# assert False, "Dataset created" + + +# if isinstance(experiment, braintrust.ReadonlyExperiment): +# raise Exception("Experiment must be writable. The above options open=False and update=False ensure this is the case so this exception should never be raised") + + +# eval_factuality = Factuality() +# failed_runs = [] + + + + +# for item in lf_dataset.items: +# # Make sure your application function is decorated with @observe decorator to automatically link the trace +# test_case = TypeAdapter(InvestigateTestCase).validate_python(item.metadata) + + +# config = MockConfig(test_case) +# mock_dal = MockSupabaseDal( +# test_case_folder=Path(test_case.folder), +# dal_passthrough=test_case.mocks_passthrough, +# issue_data=test_case.issue_data, +# resource_instructions=test_case.resource_instructions +# ) +# # span = experiment.start_span(name=f"investigate:{test_case.id}", span_attributes={"test_case_id": test_case.id}) + +# input = test_case.investigate_request +# expected = test_case.expected_output + +# trace = langfuse.trace( +# name = test_case.id, +# input={"text": test_case.id} +# ) + + +# result = investigate_issues( +# investigate_request=test_case.investigate_request, +# config=config, +# dal=mock_dal, +# console=Console() +# ) +# # update span and sets end_time +# trace.update( +# output=result.analysis, +# ) +# item.link( +# trace, +# experiment_name, +# run_description=experiment_name, # optional +# run_metadata={ "model": "gpt-4o" } # optional +# ) +# # optionally, evaluate the output to compare different runs more easily +# # langfuse.score( +# # trace_id=trace_id, +# # name="", +# # # any float value +# # value=my_eval_fn(item.input, output, item.expected_output), +# # comment="This is a comment", # optional, useful to add reasoning +# # ) + +# # Flush the langfuse client to ensure all data is sent to the server at the end of the experiment run +# langfuse.flush() diff --git a/tests/test_mocks.py b/tests/llm/test_mocks.py similarity index 97% rename from tests/test_mocks.py rename to tests/llm/test_mocks.py index 772be18f..a1c1e7b8 100644 --- a/tests/test_mocks.py +++ b/tests/llm/test_mocks.py @@ -1,12 +1,8 @@ - - - from holmes.core.tools import ToolExecutor -from tests.mock_toolset import MockToolsets, ToolMock +from tests.llm.utils.mock_toolset import MockToolsets, ToolMock import pytest import tempfile - @pytest.mark.parametrize("params", [ ({"field1": "1", "field2": "2"}), ({"field1": "1", "field2": "2", "field3": "3"}) diff --git a/tests/llm/utils/__init__.py b/tests/llm/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/llm/utils/classifiers.py b/tests/llm/utils/classifiers.py new file mode 100644 index 00000000..32570675 --- /dev/null +++ b/tests/llm/utils/classifiers.py @@ -0,0 +1,60 @@ +from typing import List +from autoevals import LLMClassifier + +def 
get_context_classifier(context_items:List[str]): + context = "\n- ".join(context_items) + prompt_prefix = f""" +CONTEXT +------- +{context} + + +QUESTION +-------- +{{{{input}}}} + + +ANSWER +------ +{{{{output}}}} + + +Verify whether the ANSWER to the QUESTION refers to all items mentioned in the CONTEXT. +Then evaluate which of the following statements matches most closely and return the corresponding letter: + +A. No item mentioned in the CONTEXT is mentioned in the ANSWER +B. Less than half of the items present in the CONTEXT are mentioned in the ANSWER +C. More than half of the items present in the CONTEXT are mentioned in the ANSWER +D. All items present in the CONTEXT are mentioned in the ANSWER + """ + + return LLMClassifier( + name="ContextPrecision", + prompt_template=prompt_prefix, + choice_scores={"A": 0, "B": 0.33, "C": 0.67, "D": 1}, + use_cot=True, + ) + + +def get_logs_explanation_classifier(): + prompt_prefix = """ + +OUTPUT +====== +{{output}} + + +Evaluate which of the following describes the OUTPUT best and return the corresponding letter: + +A. OUTPUT makes no mention of "previous logs" +B. OUTPUT makes no mention of logs +C. OUTPUT mentions "previous logs" and differentiates the analysis from the "logs" +D. OUTPUT mentions both "logs" and "previous logs" but presents both as having the same meaning + """ + + return LLMClassifier( + name="LogsExplanation", + prompt_template=prompt_prefix, + choice_scores={"A": 1, "B": 1, "C": 0, "D": 1}, + use_cot=True, + ) diff --git a/tests/llm/utils/constants.py b/tests/llm/utils/constants.py new file mode 100644 index 00000000..af28ed6f --- /dev/null +++ b/tests/llm/utils/constants.py @@ -0,0 +1,4 @@ + +PROJECT="HolmesGPT" + +AUTO_GENERATED_FILE_SUFFIX = ".AUTOGENERATED" diff --git a/tests/llm/utils/mock_dal.py b/tests/llm/utils/mock_dal.py new file mode 100644 index 00000000..714fcf12 --- /dev/null +++ b/tests/llm/utils/mock_dal.py @@ -0,0 +1,68 @@ +import json +import logging +from pathlib import Path +from typing import Dict, Optional + +from pydantic import TypeAdapter + +from holmes.core.supabase_dal import SupabaseDal +from holmes.core.tool_calling_llm import ResourceInstructions +from tests.llm.utils.constants import AUTO_GENERATED_FILE_SUFFIX +from tests.llm.utils.mock_utils import load_issue_data, load_resource_instructions, read_file + +class MockSupabaseDal(SupabaseDal): + + def __init__(self, test_case_folder:Path, issue_data:Optional[Dict], resource_instructions:Optional[ResourceInstructions], dal_passthrough:bool): + super().__init__() + self._issue_data = issue_data + self._resource_instructions = resource_instructions + self._test_case_folder = test_case_folder + self._dal_passthrough = dal_passthrough + + def get_issue_data(self, issue_id: Optional[str]) -> Optional[Dict]: + if self._issue_data is not None: + return self._issue_data + else: + data = super().get_issue_data(issue_id) + file_path = self._get_mock_file_path("issue_data") + + with open(file_path, 'w') as f: + f.write(json.dumps(data or {}, indent=2)) + + logging.warning(f"A mock file was generated for you at {file_path} with the content of dal.get_issue_data({issue_id})") + if self._dal_passthrough: + return data + else: + raise Exception(f"dal.get_issue_data({issue_id}) was invoked and is not mocked. A mock file was generated for you at {file_path}. Remove the '{AUTO_GENERATED_FILE_SUFFIX}' suffix to enable that file") + + def get_resource_instructions(self, type: str, name: Optional[str]) -> Optional[ResourceInstructions]: + if self._resource_instructions is not None: + return self._resource_instructions + else: + data = super().get_resource_instructions(type, name) + file_path = self._get_mock_file_path("resource_instructions") + + with open(file_path, 'w') as f: + f.write(json.dumps(data or {}, indent=2)) + + logging.warning(f"A mock file was generated for you at {file_path} with the content of dal.get_resource_instructions({type}, {name})") + if self._dal_passthrough: + return data + else: + raise Exception(f"dal.get_resource_instructions({type}, {name}) was invoked and is not mocked. A mock file was generated for you at {file_path}. Remove the '{AUTO_GENERATED_FILE_SUFFIX}' suffix to enable that file") + + def _get_mock_file_path(self, entity_type:str): + return f"{self._test_case_folder}/{entity_type}.json{AUTO_GENERATED_FILE_SUFFIX}" + +pydantic_resource_instructions = TypeAdapter(ResourceInstructions) + +def load_mock_dal(test_case_folder:Path, dal_passthrough:bool = False): + issue_data = load_issue_data(test_case_folder) + resource_instructions = load_resource_instructions(test_case_folder) + + return MockSupabaseDal( + test_case_folder=test_case_folder, + issue_data=issue_data, + resource_instructions=resource_instructions, + dal_passthrough=dal_passthrough + ) diff --git a/tests/mock_toolset.py b/tests/llm/utils/mock_toolset.py similarity index 98% rename from tests/mock_toolset.py rename to tests/llm/utils/mock_toolset.py index 21f7eb87..5b792aa0 100644 --- a/tests/mock_toolset.py +++ b/tests/llm/utils/mock_toolset.py @@ -7,7 +7,7 @@ import logging import re -AUTO_GENERATED_FILE_SUFFIX = ".AUTOGENERATED" +from tests.llm.utils.constants import AUTO_GENERATED_FILE_SUFFIX ansi_escape = re.compile(r'\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]') def strip_ansi(text): diff --git a/tests/llm/utils/mock_utils.py b/tests/llm/utils/mock_utils.py new file mode 100644 index 00000000..d7e7c999 --- /dev/null +++ b/tests/llm/utils/mock_utils.py @@ -0,0 +1,206 @@ + +import json +import braintrust +from typing_extensions import Dict +import yaml +import logging +import os +import re +from pathlib import Path +from typing import Generic, List, Optional, TypeVar, Union, cast + +from pydantic import BaseModel, TypeAdapter +from holmes.core.models import InvestigateRequest +from holmes.core.tool_calling_llm import ResourceInstructions +from tests.llm.utils.constants import AUTO_GENERATED_FILE_SUFFIX +from tests.llm.utils.mock_toolset import MockMetadata, ToolMock + +def read_file(file_path:Path): + with open(file_path, 'r', encoding='utf-8') as file: + return file.read().strip() + + +TEST_CASE_ID_PATTERN = r'^\d+_(?:[a-z]+_)*[a-z]+$' +CONFIG_FILE_NAME = "test_case.yaml" + +class LLMEvaluation(BaseModel): + answer_relevancy: float = 0.5 + faithfulness: float = 0.5 + contextual_precision: float = 0.5 + contextual_recall: float = 0 + contextual_relevancy: float = 0 + +class Message(BaseModel): + message: str + +T = TypeVar('T') + +class HolmesTestCase(BaseModel): + id: str + folder: str + mocks_passthrough: bool = False # If True, unmocked tools and dal can be invoked by the LLM without error + expected_output: str # The expected answer, used to evaluate the LLM's response + evaluation: LLMEvaluation = LLMEvaluation() + retrieval_context: List[str] = [] # Elements helping to evaluate the correctness of the LLM response + tool_mocks: List[ToolMock] = [] + +class 
+class AskHolmesTestCase(HolmesTestCase, BaseModel):
+    user_prompt: str  # The user's question to ask holmes
+
+class InvestigateTestCase(HolmesTestCase, BaseModel):
+    investigate_request: InvestigateRequest
+    issue_data: Optional[Dict]
+    resource_instructions: Optional[ResourceInstructions]
+
+pydantic_tool_mock = TypeAdapter(MockMetadata)
+
+def parse_mock_metadata(text) -> Optional[MockMetadata]:
+    """
+    Expects the mock metadata to be the first line of the text and be a JSON string.
+    The remaining lines constitute the mocked tool's return value.
+    """
+    try:
+        match = re.match(r'^(.*)$', text, re.MULTILINE)
+        if match:
+            first_line = match.group(0)
+            metadata = json.loads(first_line)
+            return pydantic_tool_mock.validate_python(metadata)
+        return None
+    except Exception as e:
+        logging.error(e)
+        return None
+
+
+class MockHelper():
+
+    def __init__(self, test_cases_folder:Path) -> None:
+        super().__init__()
+        self._test_cases_folder = test_cases_folder
+
+
+    def load_investigate_test_cases(self) -> List[InvestigateTestCase]:
+        return cast(List[InvestigateTestCase], self._load_test_cases())
+
+    def load_ask_holmes_test_cases(self) -> List[AskHolmesTestCase]:
+        return cast(List[AskHolmesTestCase], self._load_test_cases())
+
+    def _load_test_cases(self) -> List[HolmesTestCase]:
+
+        test_cases:List[HolmesTestCase] = []
+        test_cases_ids:List[str] = os.listdir(self._test_cases_folder)
+        for test_case_id in test_cases_ids:
+            test_case_folder = self._test_cases_folder.joinpath(test_case_id)
+            logging.info(f"Evaluating potential test case folder: {test_case_folder}")
+            try:
+                config_dict = yaml.safe_load(read_file(test_case_folder.joinpath(CONFIG_FILE_NAME)))
+                config_dict["id"] = test_case_id
+                config_dict["folder"] = str(test_case_folder)
+
+                if config_dict.get("user_prompt"):
+                    test_case = TypeAdapter(AskHolmesTestCase).validate_python(config_dict)
+                else:
+                    config_dict["investigate_request"] = load_investigate_request(test_case_folder)
+                    config_dict["issue_data"] = load_issue_data(test_case_folder)
+                    config_dict["resource_instructions"] = load_resource_instructions(test_case_folder)
+                    test_case = TypeAdapter(InvestigateTestCase).validate_python(config_dict)
+
+                logging.info(f"Successfully loaded test case {test_case_id}")
+            except FileNotFoundError:
+                logging.info(f"Folder {self._test_cases_folder}/{test_case_id} ignored because it is missing a {CONFIG_FILE_NAME} file.")
+                continue
+
+            mock_file_names:List[str] = os.listdir(test_case_folder)
+
+            for mock_file_name in mock_file_names:
+                if mock_file_name == CONFIG_FILE_NAME:
+                    continue
+                if mock_file_name.endswith(AUTO_GENERATED_FILE_SUFFIX):
+                    continue
+                if not mock_file_name.endswith(".txt"):
+                    continue
+                mock_file_path = test_case_folder.joinpath(mock_file_name)
+                mock_text = read_file(mock_file_path)
+
+                metadata = parse_mock_metadata(mock_text)
+                mock_value = mock_text[mock_text.find('\n') + 1:]  # remove first line
+                if not metadata:
+                    logging.warning(f"Failed to parse metadata from test case file at {str(mock_file_path)}. It will be skipped")
It will be skipped") + continue + tool_mock = ToolMock( + source_file=mock_file_name, + toolset_name= metadata.toolset_name, + tool_name= metadata.tool_name, + match_params= metadata.match_params, + return_value=mock_value + ) + logging.info(f"Successfully loaded tool mock {tool_mock}") + test_case.tool_mocks.append(tool_mock) + test_cases.append(test_case) + logging.info(f"Found {len(test_cases)} in {self._test_cases_folder}") + + return test_cases + +def find_dataset_row_by_test_case(dataset:braintrust.Dataset, test_case:HolmesTestCase): + for row in dataset: + if row.get("metadata", {}).get("id") == test_case.id: + return row + return None + + +def upload_dataset( + test_cases:Union[List[AskHolmesTestCase], List[InvestigateTestCase]], + project_name:str, + dataset_name:str): + + dataset = braintrust.init_dataset(project=project_name, name=dataset_name) + for test_case in test_cases: + + input = "" + if isinstance(test_case, AskHolmesTestCase): + input = test_case.user_prompt + elif isinstance(test_case, InvestigateTestCase): + input = test_case.investigate_request + else: + raise Exception("Unsupported test case class") + + row = find_dataset_row_by_test_case(dataset, test_case) + + if row: + dataset.update( + id=test_case.id, + input=input, + expected=test_case.expected_output, + metadata=test_case.model_dump(), + tags=[], + ) + else: + dataset.insert( + id=test_case.id, + input=input, + expected=test_case.expected_output, + metadata=test_case.model_dump(), + tags=[], + ) + logging.info("Inserted dataset record with id", id) + + logging.info(dataset.summarize()) + +def load_investigate_request(test_case_folder:Path) -> InvestigateRequest: + investigate_request_path = test_case_folder.joinpath(Path("investigate_request.json")) + if investigate_request_path.exists(): + return TypeAdapter(InvestigateRequest).validate_json(read_file(Path(investigate_request_path))) + raise Exception(f"Investigate test case declared in folder {str(test_case_folder)} should have an investigate_request.json file but none is present") + +def load_issue_data(test_case_folder:Path) -> Optional[Dict]: + + issue_data_mock_path = test_case_folder.joinpath(Path("issue_data.json")) + if issue_data_mock_path.exists(): + return json.loads(read_file(issue_data_mock_path)) + return None + + +def load_resource_instructions(test_case_folder:Path) -> Optional[ResourceInstructions]: + resource_instructions_mock_path = test_case_folder.joinpath(Path("resource_instructions.json")) + if resource_instructions_mock_path.exists(): + return TypeAdapter(ResourceInstructions).validate_json(read_file(Path(resource_instructions_mock_path))) + return None diff --git a/tests/llm/utils/system.py b/tests/llm/utils/system.py new file mode 100644 index 00000000..3b43b577 --- /dev/null +++ b/tests/llm/utils/system.py @@ -0,0 +1,31 @@ +import socket +import platform +import os +import pwd +from datetime import datetime +from typing import Dict +from pathlib import Path + + +def get_active_branch_name(): + + head_dir = Path(".",".git","HEAD") + with head_dir.open("r") as f: + content = f.read().splitlines() + + for line in content: + if line[0:4] == "ref:": + return line.partition("refs/heads/")[2] + + return "Unknown" + +def get_machine_state_tags() -> Dict[str, str]: + return { + "username": pwd.getpwuid(os.getuid()).pw_name, + "branch": get_active_branch_name(), + "platform": platform.platform(), + "hostname": socket.gethostname(), + } + +def readable_timestamp(): + return datetime.now().strftime("%Y%m%d_%H%M%S") diff --git 
a/tests/test_chat.py b/tests/test_chat.py deleted file mode 100644 index 127499ce..00000000 --- a/tests/test_chat.py +++ /dev/null @@ -1,83 +0,0 @@ -from pathlib import Path -import pytest -from holmes.core.conversations import build_chat_messages -from holmes.core.llm import DefaultLLM -from holmes.core.models import ChatRequest -from holmes.core.tool_calling_llm import ToolCallingLLM -from holmes.core.tools import ToolExecutor -from tests.mock_toolset import MockToolsets - -from deepeval import assert_test -from deepeval.test_case import LLMTestCase -from deepeval.metrics import AnswerRelevancyMetric, FaithfulnessMetric, ContextualPrecisionMetric, ContextualRecallMetric, ContextualRelevancyMetric - -from tests.utils import AskHolmesTestCase, load_ask_holmes_test_cases - -TEST_CASES_FOLDER = Path("tests/fixtures/test_chat") - -test_cases = load_ask_holmes_test_cases(TEST_CASES_FOLDER) - - -def idfn(test_case:AskHolmesTestCase): - return test_case.id - -@pytest.mark.llm -@pytest.mark.parametrize("test_case", test_cases, ids=idfn) -def test_ask_holmes_with_tags(test_case:AskHolmesTestCase): - - mock = MockToolsets(tools_passthrough=test_case.tools_passthrough, test_case_folder=test_case.folder) - expected_tools = [] - for tool_mock in test_case.tool_mocks: - mock.mock_tool(tool_mock) - expected_tools.append(tool_mock.tool_name) - - tool_executor = ToolExecutor(mock.mocked_toolsets) - ai = ToolCallingLLM( - tool_executor=tool_executor, - max_steps=10, - llm=DefaultLLM("gpt-4o") - ) - - chat_request = ChatRequest(ask=test_case.user_prompt) - - messages = build_chat_messages( - chat_request.ask, [], ai=ai - ) - llm_call = ai.messages_call(messages=messages) - - deepeval_test_case = LLMTestCase( - name=f"ask_holmes:{test_case.id}", - input=test_case.user_prompt, - actual_output=llm_call.result or "", - expected_output=test_case.expected_output, - retrieval_context=test_case.retrieval_context, - tools_called=[tool_call.tool_name for tool_call in (llm_call.tool_calls or [])], - expected_tools=expected_tools - ) - assert_test(deepeval_test_case, [ - AnswerRelevancyMetric( - threshold=test_case.evaluation.answer_relevancy, - model="gpt-4o", - include_reason=True - ), - FaithfulnessMetric( - threshold=test_case.evaluation.faithfulness, - model="gpt-4o", - include_reason=True - ), - ContextualPrecisionMetric( - threshold=test_case.evaluation.contextual_precision, - model="gpt-4o", - include_reason=True - ), - ContextualRecallMetric( - threshold=test_case.evaluation.contextual_recall, - model="gpt-4o", - include_reason=True - ), - ContextualRelevancyMetric( - threshold=test_case.evaluation.contextual_relevancy, - model="gpt-4o", - include_reason=True - ) - ]) diff --git a/tests/test_fetch_url.py b/tests/test_fetch_url.py index aaefc950..3c4eb4f8 100644 --- a/tests/test_fetch_url.py +++ b/tests/test_fetch_url.py @@ -3,9 +3,8 @@ import pytest from pathlib import Path -from tests.utils import read_file from holmes.core.tools import ToolExecutor -from holmes.plugins.toolsets.internet import InternetToolset, html_to_markdown, FetchWebpage +from holmes.plugins.toolsets.internet import InternetToolset, html_to_markdown THIS_DIR = os.path.dirname(__file__) FIXTURES_DIR = os.path.join(THIS_DIR, 'fixtures', 'test_fetch_url') @@ -24,6 +23,10 @@ More information... 
""".strip() +def read_file(file_path:Path): + with open(file_path, 'r', encoding='utf-8') as file: + return file.read().strip() + def parse_fixture_id(file_name:str) -> str: match = re.match(r'fixture(\d+)', file_name) if match: @@ -63,7 +66,7 @@ def test_internet_toolset_prerequisites(): toolset = InternetToolset() toolset.check_prerequisites() - assert toolset.is_enabled() == True, ("" if toolset.is_enabled() else toolset.get_disabled_reason() + ". Make sure playwright is installed by running `playwright install`.") + assert toolset.is_enabled(), ("" if toolset.is_enabled() else toolset.get_disabled_reason() + ". Make sure playwright is installed by running `playwright install`.") def test_fetch_webpage(): toolset = InternetToolset() diff --git a/tests/utils.py b/tests/utils.py deleted file mode 100644 index 9be42084..00000000 --- a/tests/utils.py +++ /dev/null @@ -1,105 +0,0 @@ - -import json -import yaml -import logging -import os -import re -from pathlib import Path -from typing import List, Optional - -from pydantic import BaseModel, TypeAdapter -from tests.mock_toolset import AUTO_GENERATED_FILE_SUFFIX, MockMetadata, ToolMock - -def read_file(file_path:Path): - with open(file_path, 'r', encoding='utf-8') as file: - return file.read().strip() - - -TEST_CASE_ID_PATTERN = r'^[\d+]_(?:[a-z]+_)*[a-z]+$' -CONFIG_FILE_NAME = "test_case.yaml" - -class LLMEvaluation(BaseModel): - answer_relevancy: float = 0.5 - faithfulness: float = 0.5 - contextual_precision: float = 0.5 - contextual_recall: float = 0 - contextual_relevancy: float = 0 - -class Message(BaseModel): - message: str - -class AskHolmesTestCase(BaseModel): - id: str - folder: str - tools_passthrough: bool = False # If True, unmocked tools can be invoked by the LLM without error - user_prompt: str # The user's question to ask holmes - expected_output: str # Whether an output is expected - evaluation: LLMEvaluation = LLMEvaluation() - retrieval_context: List[str] = [] # Elements helping to evaluate the correctness of the LLM response - tool_mocks: List[ToolMock] = [] - -pydantic_test_case = TypeAdapter(AskHolmesTestCase) -pydantic_tool_mock = TypeAdapter(MockMetadata) - -def parse_mock_metadata(text) -> Optional[MockMetadata]: - """ - Expects the mock metadata to be the first line of the text and be a JSON string. 
- """ - try: - match = re.match(r'^(.*)$', text, re.MULTILINE) - if match: - first_line = match.group(0) - metadata = json.loads(first_line) - return pydantic_tool_mock.validate_python(metadata) - return None - except Exception as e: - logging.error(e) - return None - -def load_ask_holmes_test_cases(test_cases_folder:Path, expected_number_of_test_cases=-1) -> List[AskHolmesTestCase]: - - test_cases = [] - test_cases_ids:List[str] = os.listdir(test_cases_folder) - for test_case_id in test_cases_ids: - test_case_folder = test_cases_folder.joinpath(test_case_id) - logging.info("Evaluating potential test case folder: {test_case_folder}") - try: - config_dict = yaml.safe_load(read_file(test_case_folder.joinpath(CONFIG_FILE_NAME))) - config_dict["id"] = test_case_id - config_dict["folder"] = str(test_case_folder) - test_case:AskHolmesTestCase = pydantic_test_case.validate_python(config_dict) - logging.info(f"Successfully loaded test case {test_case_id}") - except FileNotFoundError: - logging.info(f"Folder {test_cases_folder}/{test_case_id} ignored because it is missing a {CONFIG_FILE_NAME} file.") - continue - - mock_file_names:List[str] = os.listdir(test_case_folder) - - for mock_file_name in mock_file_names: - if mock_file_name == CONFIG_FILE_NAME: - continue - if mock_file_name.endswith(AUTO_GENERATED_FILE_SUFFIX): - continue - mock_file_path = test_case_folder.joinpath(mock_file_name) - mock_text = read_file(mock_file_path) - - metadata = parse_mock_metadata(mock_text) - mock_value = mock_text[mock_text.find('\n') + 1:] # remove first line - if not metadata: - logging.warning(f"Failed to parse metadata from test case file at {str(mock_file_path)}. It will be skipped") - continue - tool_mock = ToolMock( - source_file=str(mock_file_path), - toolset_name= metadata.toolset_name, - tool_name= metadata.tool_name, - match_params= metadata.match_params, - return_value=mock_value - ) - logging.info(f"Successfully loaded tool mock {tool_mock}") - test_case.tool_mocks.append(tool_mock) - test_cases.append(test_case) - logging.info(f"Found {len(test_cases)} in {test_cases_folder}") - - if expected_number_of_test_cases > 0: - assert len(test_cases) == expected_number_of_test_cases - return test_cases From 19be349c2d91689057aba69ecdb08bd628644b53 Mon Sep 17 00:00:00 2001 From: Natan Yellin Date: Mon, 25 Nov 2024 19:08:04 +0200 Subject: [PATCH 4/5] Update README.md (#213) --- README.md | 39 +++++++++++++++++++-------------------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index 5d1f9f05..d2a7c565 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,18 @@
-

Solve Prometheus alerts faster with an AI assistant

+

Solve cloud alerts faster with an AI assistant

HolmesGPT - AI Agent for On-Call Engineers 🔥

- Examples | + Examples | Key Features | Installation | YouTube Demo

-Transforms your existing cloud alerts from this 👇 +Improve developer experience and reduce mean-time-to-respond (MTTR) by transforming alerts from this 👇 ![Screenshot 2024-10-31 at 12 01 12 2](https://github.com/user-attachments/assets/931ebd71-ccd2-4b7b-969d-a061a99cec2d) - To this 👇 ![Screenshot 2024-10-31 at 11 40 09](https://github.com/user-attachments/assets/9e2c7a23-b942-4720-8a98-488323e092ca) @@ -34,7 +33,8 @@ To this 👇 ## Ways to Use HolmesGPT
- AI analysis in Robusta UI + Analyze your alerts in a free UI + Includes free use of the Robusta AI model. ![Screenshot 2024-10-31 at 11 40 09](https://github.com/user-attachments/assets/2e90cc7b-4b0a-4386-ab4f-0d36692b549c) @@ -44,7 +44,7 @@ Includes free use of the Robusta AI model.
-Root cause for Prometheus alerts in Slack +Add root-cause-analysis to Prometheus alerts in Slack Investigate Prometheus alerts right from Slack with the official [Robusta integration](https://docs.robusta.dev/holmes_chart_dependency/configuration/ai-analysis.html). @@ -62,7 +62,9 @@ Note - if on Mac OS and using the Docker image, you will need to use `http://doc
-Free-text questions (CLI) +Query observability data in human language + +Via the Holmes CLI or [a free UI (video)](https://www.loom.com/share/3cdcd94ed6bc458888b338493b108d1d?t=0) ```bash holmes ask "what pods are in crashloopbackoff in my cluster and why?" @@ -164,19 +166,16 @@ plugins: ```
- -### Bring your own LLM
-Bring your own LLM +Importing Holmes as a Python library and bringing your own LLM You can use Holmes as a library and pass in your own LLM implementation. This is particularly useful if LiteLLM or the default Holmes implementation does not suit you. See an example implementation [here](examples/custom_llm.py). -
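For orientation, a minimal custom LLM might look like the sketch below. This is an illustration only — the authoritative contract is in examples/custom_llm.py, and the `completion` signature here is an assumption modeled on `DefaultLLM` in holmes/core/llm.py:

```python
# A hedged sketch of a bring-your-own LLM, assuming Holmes calls a
# DefaultLLM-style completion(messages, tools, ...) method. Check
# examples/custom_llm.py for the real interface before relying on this.
from typing import Any, Dict, List, Optional


class MyCustomLLM:
    def completion(
        self,
        messages: List[Dict[str, Any]],
        tools: Optional[List[Any]] = None,
        **kwargs: Any,
    ) -> Any:
        # Forward the chat messages (and tool definitions, if any) to your
        # in-house model and return a LiteLLM-style ModelResponse.
        raise NotImplementedError("wire this method up to your own model endpoint")
```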
-Like what you see? Checkout [other use cases](#other-use-cases) or get started by [installing HolmesGPT](#installation). +Like what you see? Discover [more use cases](#more-use-cases) or get started by [installing HolmesGPT](#installation). ## Installation @@ -372,13 +371,6 @@ To work with Azure AI, you need to provide the below variables:
-**Trusting custom Certificate Authority (CA) certificate:** - -If your llm provider url uses a certificate from a custom CA, in order to trust it, base-64 encode the certificate, and store it in an environment variable named ``CERTIFICATE`` - - - - ### Getting an API Key HolmesGPT requires an LLM API Key to function. The most common option is OpenAI, but many [LiteLLM-compatible](https://docs.litellm.ai/docs/providers/) models are supported. To use an LLM, set `--model` (e.g. `gpt-4o` or `bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0`) and `--api-key` (if necessary). Depending on the provider, you may need to set environment variables too. @@ -493,6 +485,13 @@ In particular, note that [vLLM does not yet support function calling](https://gi +**Additional LLM Configuration:** + +
+Trusting a custom Certificate Authority (CA) certificate
+If your LLM provider's URL uses a certificate from a custom CA, base64-encode the certificate and store it in an environment variable named CERTIFICATE in order to trust it
+
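For example, a small sketch of preparing that variable from a PEM file (the `ca.pem` path is a placeholder):

```python
# Base64-encode a custom CA certificate and expose it via CERTIFICATE,
# as described above. The ca.pem path is an assumption — use your own bundle.
import base64
import os

with open("ca.pem", "rb") as f:
    os.environ["CERTIFICATE"] = base64.b64encode(f.read()).decode("ascii")
```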
+ ### Enabling Integrations
@@ -524,7 +523,7 @@ HolmesGPT can consult webpages containing runbooks or other relevant information HolmesGPT uses playwright to scrape webpages and requires playwright to be installed and working through `playwright install`.
-## Other Use Cases
+## More Use Cases

HolmesGPT was designed for incident response, but it is a general DevOps assistant too. Here are some examples:

From 743e62c8e54d65f77dff691f16a2a86da6090234 Mon Sep 17 00:00:00 2001
From: itisallgood <25401000+itisallgood@users.noreply.github.com>
Date: Mon, 25 Nov 2024 19:13:20 +0100
Subject: [PATCH 5/5] Make holmesgpt work with WatsonX (#200)

Co-authored-by: arik
---
 holmes/core/llm.py | 40 ++++++++++++++++++++++++++++++++++++----
 poetry.lock        | 10 +++++-----
 pyproject.toml     |  2 +-
 3 files changed, 42 insertions(+), 10 deletions(-)

diff --git a/holmes/core/llm.py b/holmes/core/llm.py
index 0c8d5be0..f6b4a38e 100644
--- a/holmes/core/llm.py
+++ b/holmes/core/llm.py
@@ -71,13 +71,45 @@ def check_llm(self, model:str, api_key:Optional[str]):
         if not lookup:
             raise Exception(f"Unknown provider for model {model}")
         provider = lookup[1]
-        api_key_env_var = f"{provider.upper()}_API_KEY"
-        if api_key:
-            os.environ[api_key_env_var] = api_key
-        model_requirements = litellm.validate_environment(model=model)
+        if provider == "watsonx":
+            # NOTE: LiteLLM's validate_environment does not currently include checks for IBM WatsonX.
+            # The following WatsonX-specific variables are set based on documentation from:
+            # https://docs.litellm.ai/docs/providers/watsonx
+            # Required variables for WatsonX:
+            # - WATSONX_URL: Base URL of your WatsonX instance (required)
+            # - WATSONX_APIKEY or WATSONX_TOKEN: IBM Cloud API key or IAM auth token (one is required)
+            model_requirements = {'missing_keys': [], 'keys_in_environment': True}
+            if api_key:
+                os.environ["WATSONX_APIKEY"] = api_key
+            if not "WATSONX_URL" in os.environ:
+                model_requirements['missing_keys'].append("WATSONX_URL")
+                model_requirements['keys_in_environment'] = False
+            if not "WATSONX_APIKEY" in os.environ and not "WATSONX_TOKEN" in os.environ:
+                model_requirements['missing_keys'].extend(["WATSONX_APIKEY", "WATSONX_TOKEN"])
+                model_requirements['keys_in_environment'] = False
+            # WATSONX_PROJECT_ID is required because we don't let the user pass it to the completion call directly
+            if not "WATSONX_PROJECT_ID" in os.environ:
+                model_requirements['missing_keys'].append("WATSONX_PROJECT_ID")
+                model_requirements['keys_in_environment'] = False
+            # https://docs.litellm.ai/docs/providers/watsonx#usage---models-in-deployment-spaces
+            # using custom WatsonX deployments might require setting the WATSONX_DEPLOYMENT_SPACE_ID env var
+            if "watsonx/deployment/" in self.model:
+                logging.warning(
+                    "Custom WatsonX deployment detected. You may need to set the WATSONX_DEPLOYMENT_SPACE_ID "
+                    "environment variable for proper functionality. For more information, refer to the documentation: "
+                    "https://docs.litellm.ai/docs/providers/watsonx#usage---models-in-deployment-spaces"
+                )
+        else:
+            api_key_env_var = f"{provider.upper()}_API_KEY"
+            if api_key:
+                os.environ[api_key_env_var] = api_key
+            model_requirements = litellm.validate_environment(model=model)
+
         if not model_requirements["keys_in_environment"]:
             raise Exception(f"model {model} requires the following environment variables: {model_requirements['missing_keys']}")

+
     def _strip_model_prefix(self) -> str:
         """
         Helper function to strip 'openai/' prefix from model name if it exists.
diff --git a/poetry.lock b/poetry.lock
index 2a46ae7d..6a5ec836 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]] name = "aiohappyeyeballs" @@ -1314,13 +1314,13 @@ rapidfuzz = ">=3.9.0,<4.0.0" [[package]] name = "litellm" -version = "1.52.9" +version = "1.52.6" description = "Library to easily interface with LLM API providers" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "litellm-1.52.9-py3-none-any.whl", hash = "sha256:a1ef5561d220d77059a359da497f0ab04c721205c6795f151b07be5bbe51fe45"}, - {file = "litellm-1.52.9.tar.gz", hash = "sha256:73a05fed76cfac4357ee4117f28608209db891223fb9c6e03dddfe1723666437"}, + {file = "litellm-1.52.6-py3-none-any.whl", hash = "sha256:9b3e9fb51f7e2a3cc8b50997b346c55aae9435a138d9a656f18e262750a1bfe1"}, + {file = "litellm-1.52.6.tar.gz", hash = "sha256:d67c653f97bd07f503b975c167de1e25632b7bc6bb3c008c46921e4acc81ec60"}, ] [package.dependencies] @@ -3260,4 +3260,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "c6579904b41ba69dee0f53d1351661405122c8f3e1a5305d15a7df08d8251fcf" +content-hash = "71d830808845121cdfdd0e435649f101abe541395497f70bd5aeaa30a8d9dac4" diff --git a/pyproject.toml b/pyproject.toml index 28f01f21..4d410fe9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ supabase = "^2.5" colorlog = "^6.8.2" strenum = "^0.4.15" markdown = "^3.6" -litellm = "^1.50.2" +litellm = "^1.52.6" certifi = "^2024.7.4" urllib3 = "^1.26.19" boto3 = "^1.34.145"
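As a usage note for the WatsonX support introduced in this patch: `check_llm` expects `WATSONX_URL`, one of `WATSONX_APIKEY`/`WATSONX_TOKEN`, and `WATSONX_PROJECT_ID` to be present. A minimal sketch, with placeholder values and an illustrative model id (not values taken from this patch):

```python
# Hypothetical environment setup for the new WatsonX path in check_llm().
# The URL, credentials, and model id below are placeholders.
import os

os.environ["WATSONX_URL"] = "https://us-south.ml.cloud.ibm.com"
os.environ["WATSONX_APIKEY"] = "<ibm-cloud-api-key>"
os.environ["WATSONX_PROJECT_ID"] = "<watsonx-project-id>"

from holmes.core.llm import DefaultLLM

llm = DefaultLLM("watsonx/ibm/granite-13b-chat-v2")  # illustrative model id
```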