forked from LibreChat-AI/librechat-config-yaml
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathunify.py
More file actions
60 lines (49 loc) · 1.63 KB
/
unify.py
File metadata and controls
60 lines (49 loc) · 1.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
import json
import requests
from dotenv import load_dotenv
import os
from pathlib import Path
def get_api_key():
    """Return the Unify API key from the repo-level .env, or prompt for one.

    Looks for UNIFY_API_KEY in the .env file one directory above this
    script; falls back to an interactive prompt. Returns None when the
    user presses Enter without typing a key.
    """
    dotenv_file = Path(__file__).parent.parent / '.env'
    load_dotenv(dotenv_path=dotenv_file)
    stored_key = os.getenv('UNIFY_API_KEY')
    if stored_key:
        return stored_key
    entered = input("Please enter your Unify API key (press Enter to skip): ").strip()
    return entered if entered else None
def fetch_models(api_key):
    """Fetch the available model endpoints from the Unify API.

    Args:
        api_key: Unify API key, sent as a Bearer token.

    Returns:
        A sorted list of "model@provider" endpoint id strings, or None when
        the request fails or the response is not the expected list payload.
    """
    url = 'https://api.unify.ai/v0/endpoints'
    headers = {
        'Authorization': f'Bearer {api_key}',
        'accept': 'application/json',
    }
    try:
        # timeout prevents the script from hanging forever on a stalled
        # connection (the original call had no timeout at all).
        response = requests.get(url, headers=headers, timeout=30)
        response.raise_for_status()
        data = response.json()
    except Exception as e:
        print(f"Error fetching models: {str(e)}")
        return None
    if isinstance(data, list):
        # Endpoints already arrive as "model@provider" strings; the old
        # split-and-rejoin was a no-op for well-formed ids and crashed the
        # whole fetch on any id without an '@'. Just sort them.
        return sorted(data)
    # Unexpected payload shape (e.g. an error object instead of a list);
    # previously this fell through silently.
    print("Error fetching models: unexpected response format")
    return None
def main():
    """Entry point: resolve an API key, fetch Unify models, save to unify.txt."""
    api_key = get_api_key()
    if not api_key:
        print("No API key provided. Skipping model fetch.")
        return
    print("Fetching models from Unify API...")
    models = fetch_models(api_key)
    if not models:
        print("Failed to fetch models.")
        return
    # Persist the sorted endpoint ids as pretty-printed JSON.
    with open("unify.txt", "w") as outfile:
        json.dump(models, outfile, indent=2)
    print(f"Successfully saved {len(models)} models to unify.txt")


if __name__ == "__main__":
    main()