refactor: make google llm extension
gventuri committed Oct 17, 2024
1 parent 24c2a76 commit 4e0a6a8
Showing 9 changed files with 1,155 additions and 124 deletions.
11 changes: 11 additions & 0 deletions extensions/llms/google/README.md
@@ -0,0 +1,11 @@
# Google AI Extension for PandasAI

This extension integrates Google AI with PandasAI, providing support for Google AI LLMs (Gemini and Vertex AI).

## Installation

You can install this extension using poetry:

```bash
poetry add pandasai-google
```
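
Once installed, the extension's classes can be handed to PandasAI like any other LLM. The snippet below is a minimal usage sketch, not taken from this diff: it assumes the `GoogleGemini` export shown in `pandasai_google/__init__.py` below, a constructor that accepts an `api_key`, and the `SmartDataframe` config-dict pattern from PandasAI; the key value is a placeholder.

```python
# Minimal usage sketch (assumptions: GoogleGemini takes an api_key kwarg and
# PandasAI accepts an LLM instance under the "llm" config key).
import pandas as pd

from pandasai import SmartDataframe
from pandasai_google import GoogleGemini

llm = GoogleGemini(api_key="YOUR_GOOGLE_API_KEY")  # placeholder credential

df = pd.DataFrame({"country": ["ES", "US"], "sales": [120, 340]})
sdf = SmartDataframe(df, config={"llm": llm})

print(sdf.chat("Which country has the highest sales?"))
```
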
4 changes: 4 additions & 0 deletions extensions/llms/google/pandasai_google/__init__.py
@@ -0,0 +1,4 @@
from .google_gemini import GoogleGemini
from .google_vertexai import GoogleVertexAI

__all__ = ["GoogleGemini", "GoogleVertexAI"]
95 changes: 95 additions & 0 deletions extensions/llms/google/pandasai_google/base.py
@@ -0,0 +1,95 @@
from __future__ import annotations

from abc import abstractmethod
from typing import TYPE_CHECKING, Optional

from pandasai.helpers.memory import Memory

from pandasai.exceptions import (
MethodNotImplementedError,
)
from pandasai.prompts.base import BasePrompt
from pandasai.llm.base import LLM

if TYPE_CHECKING:
from pandasai.pipelines.pipeline_context import PipelineContext



class BaseGoogle(LLM):
"""Base class to implement a new Google LLM
LLM base class is extended to be used with
"""

temperature: Optional[float] = 0
top_p: Optional[float] = 0.8
top_k: Optional[int] = 40
max_output_tokens: Optional[int] = 1000

def _valid_params(self):
return ["temperature", "top_p", "top_k", "max_output_tokens"]

def _set_params(self, **kwargs):
"""
Dynamically set Parameters for the object.
Args:
**kwargs:
Possible keyword arguments: "temperature", "top_p", "top_k",
"max_output_tokens".
Returns:
None.
"""

valid_params = self._valid_params()
for key, value in kwargs.items():
if key in valid_params:
setattr(self, key, value)

def _validate(self):
"""Validates the parameters for Google"""

if self.temperature is not None and not 0 <= self.temperature <= 1:
raise ValueError("temperature must be in the range [0.0, 1.0]")

if self.top_p is not None and not 0 <= self.top_p <= 1:
raise ValueError("top_p must be in the range [0.0, 1.0]")

if self.top_k is not None and not 0 <= self.top_k <= 100:
raise ValueError("top_k must be in the range [0.0, 100.0]")

if self.max_output_tokens is not None and self.max_output_tokens <= 0:
raise ValueError("max_output_tokens must be greater than zero")

@abstractmethod
def _generate_text(self, prompt: str, memory: Optional[Memory] = None) -> str:
"""
Generates text for a prompt; implementation-specific.
Args:
prompt (str): A string representation of the prompt.
memory (Optional[Memory]): Conversation memory, if available.
Returns:
str: LLM response.
"""
raise MethodNotImplementedError("method has not been implemented")

def call(self, instruction: BasePrompt, context: Optional[PipelineContext] = None) -> str:
"""
Call the Google LLM.
Args:
instruction (BasePrompt): Instruction to pass to the LLM.
context (Optional[PipelineContext]): Pipeline context whose memory, if set, is forwarded to the model.
Returns:
str: LLM response.
"""
self.last_prompt = instruction.to_string()
memory = context.memory if context else None
return self._generate_text(self.last_prompt, memory)
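
To make the `BaseGoogle` contract concrete, here is a hypothetical subclass sketch: it fills in the abstract `_generate_text` with a stub instead of a real Google SDK call, and assumes the PandasAI `LLM` base class expects a `type` property (that class is not shown in this diff). Names such as `MyGoogleLLM` are illustrative only.

```python
from typing import Optional

from pandasai.helpers.memory import Memory
from pandasai_google.base import BaseGoogle


class MyGoogleLLM(BaseGoogle):
    """Illustrative subclass: echoes the prompt instead of calling a real model."""

    def __init__(self, **kwargs):
        # BaseGoogle provides the parameter plumbing used here.
        self._set_params(**kwargs)  # temperature, top_p, top_k, max_output_tokens
        self._validate()            # range-checks the values set above

    @property
    def type(self) -> str:
        # Assumed convention: PandasAI LLMs expose a short type identifier.
        return "my-google-llm"

    def _generate_text(self, prompt: str, memory: Optional[Memory] = None) -> str:
        # A real implementation would call a Google SDK here; the stub simply
        # echoes the prompt (memory is accepted but unused).
        return f"[stub response to] {prompt}"
```
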
extensions/llms/google/pandasai_google/google_gemini.py
@@ -1,19 +1,8 @@
"""Google Gemini LLM
This module is to run the Google Gemini API hosted and maintained by Google.
To read more on Google Gemini follow
https://ai.google.dev/docs/gemini_api_overview.
Example:
Use below example to call GoogleGemini Model
>>> from pandasai.llm.google_gemini import GoogleGemini
"""
from typing import Any, Optional

from ..exceptions import APIKeyNotFoundError
from ..helpers.memory import Memory
from ..helpers.optional import import_dependency
from pandasai.exceptions import APIKeyNotFoundError
from pandasai.helpers.memory import Memory
from pandasai.helpers.optional import import_dependency
from .base import BaseGoogle


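
The Gemini diff is truncated at this point, so the constructor is not visible; the sketch below assumes `GoogleGemini` keeps its pre-refactor shape, taking an `api_key` plus the keyword arguments handled by `BaseGoogle._set_params`.

```python
from pandasai_google import GoogleGemini

# Assumed signature: api_key plus the generation parameters routed through
# BaseGoogle._set_params (temperature, top_p, top_k, max_output_tokens).
llm = GoogleGemini(
    api_key="YOUR_GOOGLE_API_KEY",  # placeholder credential
    temperature=0.2,
    top_k=20,
    max_output_tokens=512,
)
```
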
extensions/llms/google/pandasai_google/google_vertexai.py
@@ -1,21 +1,9 @@
"""Google VertexAI
This module is to run the Google VertexAI LLM.
To read more on VertexAI:
https://cloud.google.com/vertex-ai/docs/generative-ai/learn/generative-ai-studio.
Example:
Use below example to call Google VertexAI
>>> from pandasai.llm.google_palm import GoogleVertexAI
"""
from typing import Optional

from pandasai.helpers.memory import Memory

from ..exceptions import UnsupportedModelError
from ..helpers.optional import import_dependency
from pandasai.exceptions import UnsupportedModelError
from pandasai.helpers.optional import import_dependency
from .base import BaseGoogle


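
The Vertex AI diff is likewise truncated. The snippet below is an assumption-heavy sketch based on the pre-refactor class, where a Google Cloud project and location identify the Vertex AI endpoint; the exact parameter names are not confirmed by this diff.

```python
from pandasai_google import GoogleVertexAI

# Assumed parameters (project_id, location, model) mirror the pre-refactor
# GoogleVertexAI; adjust to the actual constructor if it differs.
llm = GoogleVertexAI(
    project_id="my-gcp-project",   # placeholder project
    location="us-central1",        # placeholder region
    model="gemini-pro",            # placeholder model name
)
```
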
