diff --git a/docs/sdk/chat.mdx b/docs/sdk/chat.mdx index ea0e54426..65719d74d 100644 --- a/docs/sdk/chat.mdx +++ b/docs/sdk/chat.mdx @@ -47,6 +47,11 @@ Implemented in `services/openai.js` - OpenAI chat completions integration - GPT-3.5-turbo implementation - Conversation threading support +- The API key can be loaded from the project's `secrets.toml` file by adding a `[data.openai]` section and setting `api_key` to your key, like so: +```toml +[data.openai] +api_key = "sk-YOUR-KEY" +``` ## Basic Usage diff --git a/frontend/src/components/DynamicComponents.jsx b/frontend/src/components/DynamicComponents.jsx index 76eea7c9f..bc99be909 100644 --- a/frontend/src/components/DynamicComponents.jsx +++ b/frontend/src/components/DynamicComponents.jsx @@ -218,6 +218,7 @@ const MemoizedComponent = memo( {...props} sourceId={component.config?.source || null} sourceData={component.config?.data || null} + apiKey={component.config?.apiKey || null} value={component.value || component.state || { messages: [] }} onChange={(value) => { handleUpdate(componentId, value); diff --git a/frontend/src/components/widgets/ChatWidget.jsx b/frontend/src/components/widgets/ChatWidget.jsx index 7400d9a1d..62d20eb73 100644 --- a/frontend/src/components/widgets/ChatWidget.jsx +++ b/frontend/src/components/widgets/ChatWidget.jsx @@ -14,6 +14,7 @@ import { createChatCompletion } from '@/services/openai'; const ChatWidget = ({ sourceId = null, sourceData = null, + apiKey = null, value = { messages: [] }, onChange, className, @@ -23,9 +24,14 @@ const ChatWidget = ({ const messagesEndRef = useRef(null); const chatContainerRef = useRef(null); + // Load API key from secrets.toml if present (runs during render so hasApiKey's first read sees it) + if (apiKey && apiKey.trim() !== '') { + sessionStorage.setItem('openai_api_key', apiKey.trim()); + } + const [inputValue, setInputValue] = useState(''); const [showSettings, setShowSettings] = useState(false); - const [apiKey, setApiKey] = useState(''); + const [defaultApiKey, setApiKey] = useState(''); const [isLoading, setIsLoading] = 
useState(false); const [error, setError] = useState(null); const hasApiKey = useMemo(() => !!sessionStorage.getItem('openai_api_key'), []); @@ -165,8 +171,8 @@ const ChatWidget = ({ // Add this function to handle API key submission const handleApiKeySubmit = (e) => { e.preventDefault(); - if (apiKey.trim()) { - sessionStorage.setItem('openai_api_key', apiKey.trim()); + if (defaultApiKey.trim()) { + sessionStorage.setItem('openai_api_key', defaultApiKey.trim()); setShowSettings(false); window.location.reload(); // Refresh to update hasApiKey state } @@ -210,7 +216,7 @@ const ChatWidget = ({

OpenAI API Key

setApiKey(e.target.value)} placeholder="sk-..." className="flex-1 transition-colors text-sm h-8" @@ -220,7 +226,7 @@ const ChatWidget = ({

-
diff --git a/preswald/interfaces/components.py b/preswald/interfaces/components.py index 943c33056..09da510dd 100644 --- a/preswald/interfaces/components.py +++ b/preswald/interfaces/components.py @@ -12,6 +12,7 @@ import matplotlib.pyplot as plt import numpy as np import pandas as pd +import tomli # from PIL import Image # try: @@ -97,6 +98,20 @@ def chat(source: str, table: Optional[str] = None) -> Dict: if current_state is None: current_state = {"messages": [], "source": source} + # Get API key from secrets.toml; a missing file, section, or key falls back to None + try: + with open("secrets.toml", "rb") as f: + secrets = tomli.load(f) + + # .get() chain avoids a KeyError when [data.openai] is absent + api_key = ( + secrets.get("data", {}).get("openai", {}).get("api_key") + or None + ) + + except (FileNotFoundError, tomli.TOMLDecodeError): + api_key = None + # Get dataframe from source df = ( service.data_manager.get_df(source) @@ -132,6 +147,7 @@ def chat(source: str, table: Optional[str] = None) -> Dict: "config": { "source": source, "data": serializable_data, + "apiKey": api_key, }, }