Skip to content

Commit

Permalink
change client name
Browse files Browse the repository at this point in the history
  • Loading branch information
phact committed Mar 15, 2024
1 parent 73aaed2 commit aad2384
Show file tree
Hide file tree
Showing 9 changed files with 104 additions and 104 deletions.
2 changes: 1 addition & 1 deletion tests/streaming-assistants/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,6 @@ async def check_for_stop():


@pytest.fixture(scope="function")
def openai_client(start_application) -> OpenAI:
def patched_openai_client(start_application) -> OpenAI:
oai = patch(OpenAI())
return oai
24 changes: 12 additions & 12 deletions tests/streaming-assistants/test_chat_completion.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,26 +35,26 @@ def print_chat_completion(model, client):
assert i > 0


def test_chat_completion_gpt4(openai_client):
def test_chat_completion_gpt4(patched_openai_client):
model="gpt-4-1106-preview"
print_chat_completion(model, openai_client)
print_chat_completion(model, patched_openai_client)

def test_chat_completion_gpt3_5(openai_client):
def test_chat_completion_gpt3_5(patched_openai_client):
model="gpt-3.5-turbo"
print_chat_completion(model, openai_client)
print_chat_completion(model, patched_openai_client)

def test_chat_completion_cohere(openai_client):
def test_chat_completion_cohere(patched_openai_client):
model="cohere/command"
print_chat_completion(model, openai_client)
print_chat_completion(model, patched_openai_client)

def test_chat_completion_perp_mixtral(openai_client):
def test_chat_completion_perp_mixtral(patched_openai_client):
model="perplexity/mixtral-8x7b-instruct"
print_chat_completion(model, openai_client)
print_chat_completion(model, patched_openai_client)

def test_chat_completion_claude(openai_client):
def test_chat_completion_claude(patched_openai_client):
model="anthropic.claude-v2"
print_chat_completion(model, openai_client)
print_chat_completion(model, patched_openai_client)

def test_chat_completion_gemini_pro(openai_client):
def test_chat_completion_gemini_pro(patched_openai_client):
model="gemini/gemini-pro"
print_chat_completion(model, openai_client)
print_chat_completion(model, patched_openai_client)
20 changes: 10 additions & 10 deletions tests/streaming-assistants/test_embedding.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,24 +25,24 @@ def print_embedding(model, client):



def test_embedding_cohere(openai_client):
def test_embedding_cohere(patched_openai_client):
model = "cohere/embed-english-v3.0"
print_embedding(model, openai_client)
print_embedding(model, patched_openai_client)

def test_embedding_titan(openai_client):
def test_embedding_titan(patched_openai_client):
model = "amazon.titan-embed-text-v1"
print_embedding(model, openai_client)
print_embedding(model, patched_openai_client)


def test_embedding_ada_002(openai_client):
def test_embedding_ada_002(patched_openai_client):
model = "text-embedding-ada-002"
print_embedding(model, openai_client)
print_embedding(model, patched_openai_client)


def test_embedding_3_small(openai_client):
def test_embedding_3_small(patched_openai_client):
model="text-embedding-3-small"
print_embedding(model, openai_client)
print_embedding(model, patched_openai_client)

def test_embedding_3_small(openai_client):
def test_embedding_3_large(patched_openai_client):
model="text-embedding-3-large"
print_embedding(model, openai_client)
print_embedding(model, patched_openai_client)
4 changes: 2 additions & 2 deletions tests/streaming-assistants/test_file_embedding.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import logging

logger = logging.getLogger(__name__)
def test_file_embedding(openai_client):
file = openai_client.files.create(
def test_file_embedding(patched_openai_client):
file = patched_openai_client.files.create(
file=open(
"./tests/fixtures/language_models_are_unsupervised_multitask_learners.pdf",
"rb",
Expand Down
32 changes: 16 additions & 16 deletions tests/streaming-assistants/test_function_calling.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,41 +4,41 @@

logger = logging.getLogger(__name__)

def test_function_calling_gpt_4(openai_client):
def test_function_calling_gpt_4(patched_openai_client):
model="gpt-4-1106-preview"
function_calling(model, openai_client)
function_calling(model, patched_openai_client)

def test_function_calling_gpt_3_5(openai_client):
def test_function_calling_gpt_3_5(patched_openai_client):
model="gpt-3.5-turbo"
function_calling(model, openai_client)
function_calling(model, patched_openai_client)

@pytest.mark.skip(reason="claude does not consistently work with function calling, skip")
def test_function_calling_cohere(openai_client):
def test_function_calling_cohere(patched_openai_client):
model="cohere/command"
function_calling(model, openai_client)
function_calling(model, patched_openai_client)

def test_function_calling_pplx_mix(openai_client):
def test_function_calling_pplx_mix(patched_openai_client):
model="perplexity/mixtral-8x7b-instruct"
function_calling(model, openai_client)
function_calling(model, patched_openai_client)

@pytest.mark.skip(reason="pplx_online just looks up the weather and doesn't do the function call")
def test_function_calling_pplx_online(openai_client):
def test_function_calling_pplx_online(patched_openai_client):
model="perplexity/pplx-70b-online"
function_calling(model, openai_client)
function_calling(model, patched_openai_client)

@pytest.mark.skip(reason="claude does not consistently work with function calling, skip")
def test_function_calling_claude(openai_client):
def test_function_calling_claude(patched_openai_client):
model="anthropic.claude-v2"
function_calling(model, openai_client)
function_calling(model, patched_openai_client)

def test_function_calling_gemini(openai_client):
def test_function_calling_gemini(patched_openai_client):
model="gemini/gemini-pro"
function_calling(model, openai_client)
function_calling(model, patched_openai_client)

@pytest.mark.skip(reason="llama does not consistently work with function calling, skip")
def test_function_calling_llama(openai_client):
def test_function_calling_llama(patched_openai_client):
model = "meta.llama2-13b-chat-v1"
function_calling(model, openai_client)
function_calling(model, patched_openai_client)


def function_calling(model, client):
Expand Down
32 changes: 16 additions & 16 deletions tests/streaming-assistants/test_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,46 +52,46 @@ def run_with_assistant(assistant, client):


instructions="You're an animal expert who gives very long winded answers with flowery prose. Keep answers below 3 sentences."
def test_run_gpt3_5(openai_client):
gpt3_assistant = openai_client.beta.assistants.create(
def test_run_gpt3_5(patched_openai_client):
gpt3_assistant = patched_openai_client.beta.assistants.create(
name="GPT3 Animal Tutor",
instructions=instructions,
model="gpt-3.5-turbo",
)

assistant = openai_client.beta.assistants.retrieve(gpt3_assistant.id)
assistant = patched_openai_client.beta.assistants.retrieve(gpt3_assistant.id)
logger.info(assistant)

run_with_assistant(gpt3_assistant, openai_client)
run_with_assistant(gpt3_assistant, patched_openai_client)

def test_run_cohere(openai_client):
cohere_assistant = openai_client.beta.assistants.create(
def test_run_cohere(patched_openai_client):
cohere_assistant = patched_openai_client.beta.assistants.create(
name="Cohere Animal Tutor",
instructions=instructions,
model="cohere/command",
)
run_with_assistant(cohere_assistant, openai_client)
run_with_assistant(cohere_assistant, patched_openai_client)

def test_run_perp(openai_client):
perplexity_assistant = openai_client.beta.assistants.create(
def test_run_perp(patched_openai_client):
perplexity_assistant = patched_openai_client.beta.assistants.create(
name="Perplexity/Mixtral Animal Tutor",
instructions=instructions,
model="perplexity/mixtral-8x7b-instruct",
)
run_with_assistant(perplexity_assistant, openai_client)
run_with_assistant(perplexity_assistant, patched_openai_client)

def test_run_claude(openai_client):
claude_assistant = openai_client.beta.assistants.create(
def test_run_claude(patched_openai_client):
claude_assistant = patched_openai_client.beta.assistants.create(
name="Claude Animal Tutor",
instructions=instructions,
model="anthropic.claude-v2",
)
run_with_assistant(claude_assistant, openai_client)
run_with_assistant(claude_assistant, patched_openai_client)

def test_run_gemini(openai_client):
gemini_assistant = openai_client.beta.assistants.create(
def test_run_gemini(patched_openai_client):
gemini_assistant = patched_openai_client.beta.assistants.create(
name="Gemini Animal Tutor",
instructions=instructions,
model="gemini/gemini-pro",
)
run_with_assistant(gemini_assistant, openai_client)
run_with_assistant(gemini_assistant, patched_openai_client)
32 changes: 16 additions & 16 deletions tests/streaming-assistants/test_run_retreival.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,58 +62,58 @@ def run_with_assistant(assistant, client):

instructions = "You are a personal math tutor. Answer thoroughly. The system will provide relevant context from files, use the context to respond."

def test_run_gpt3_5(openai_client):
def test_run_gpt3_5(patched_openai_client):
model = "gpt-3.5-turbo"
name = f"{model} Math Tutor"

gpt3_assistant = openai_client.beta.assistants.create(
gpt3_assistant = patched_openai_client.beta.assistants.create(
name=name,
instructions=instructions,
model=model,
)
run_with_assistant(gpt3_assistant, openai_client)
run_with_assistant(gpt3_assistant, patched_openai_client)

def test_run_cohere(openai_client):
def test_run_cohere(patched_openai_client):
model = "cohere/command"
name = f"{model} Math Tutor"

cohere_assistant = openai_client.beta.assistants.create(
cohere_assistant = patched_openai_client.beta.assistants.create(
name=name,
instructions=instructions,
model=model,
)
run_with_assistant(cohere_assistant, openai_client)
run_with_assistant(cohere_assistant, patched_openai_client)

def test_run_perp(openai_client):
def test_run_perp(patched_openai_client):
model = "perplexity/mixtral-8x7b-instruct"
name = f"{model} Math Tutor"

perplexity_assistant = openai_client.beta.assistants.create(
perplexity_assistant = patched_openai_client.beta.assistants.create(
name=name,
instructions=instructions,
model=model,
)
run_with_assistant(perplexity_assistant, openai_client)
run_with_assistant(perplexity_assistant, patched_openai_client)

@pytest.mark.skip(reason="fix streaming-assistants aws with openai embedding issue")
def test_run_claude(openai_client):
@pytest.mark.skip(reason="fix streaming-assistants aws with openai embedding issue")
def test_run_claude(patched_openai_client):
model = "anthropic.claude-v2"
name = f"{model} Math Tutor"

claude_assistant = openai_client.beta.assistants.create(
claude_assistant = patched_openai_client.beta.assistants.create(
name=name,
instructions=instructions,
model=model,
)
run_with_assistant(claude_assistant, openai_client)
run_with_assistant(claude_assistant, patched_openai_client)

def test_run_gemini(openai_client):
def test_run_gemini(patched_openai_client):
model = "gemini/gemini-pro"
name = f"{model} Math Tutor"

gemini_assistant = openai_client.beta.assistants.create(
gemini_assistant = patched_openai_client.beta.assistants.create(
name=name,
instructions=instructions,
model=model,
)
run_with_assistant(gemini_assistant, openai_client)
run_with_assistant(gemini_assistant, patched_openai_client)
32 changes: 16 additions & 16 deletions tests/streaming-assistants/test_streaming_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,46 +50,46 @@ def on_text_delta(self, delta, snapshot):


instructions="You're an animal expert who gives very long winded answers with flowery prose. Keep answers below 3 sentences."
def test_run_gpt3_5(openai_client):
gpt3_assistant = openai_client.beta.assistants.create(
def test_run_gpt3_5(patched_openai_client):
gpt3_assistant = patched_openai_client.beta.assistants.create(
name="GPT3 Animal Tutor",
instructions=instructions,
model="gpt-3.5-turbo",
)

assistant = openai_client.beta.assistants.retrieve(gpt3_assistant.id)
assistant = patched_openai_client.beta.assistants.retrieve(gpt3_assistant.id)
logger.info(assistant)

run_with_assistant(gpt3_assistant, openai_client)
run_with_assistant(gpt3_assistant, patched_openai_client)

def test_run_cohere(openai_client):
cohere_assistant = openai_client.beta.assistants.create(
def test_run_cohere(patched_openai_client):
cohere_assistant = patched_openai_client.beta.assistants.create(
name="Cohere Animal Tutor",
instructions=instructions,
model="cohere/command",
)
run_with_assistant(cohere_assistant, openai_client)
run_with_assistant(cohere_assistant, patched_openai_client)

def test_run_perp(openai_client):
perplexity_assistant = openai_client.beta.assistants.create(
def test_run_perp(patched_openai_client):
perplexity_assistant = patched_openai_client.beta.assistants.create(
name="Perplexity/Mixtral Animal Tutor",
instructions=instructions,
model="perplexity/mixtral-8x7b-instruct",
)
run_with_assistant(perplexity_assistant, openai_client)
run_with_assistant(perplexity_assistant, patched_openai_client)

def test_run_claude(openai_client):
claude_assistant = openai_client.beta.assistants.create(
def test_run_claude(patched_openai_client):
claude_assistant = patched_openai_client.beta.assistants.create(
name="Claude Animal Tutor",
instructions=instructions,
model="anthropic.claude-v2",
)
run_with_assistant(claude_assistant, openai_client)
run_with_assistant(claude_assistant, patched_openai_client)

def test_run_gemini(openai_client):
gemini_assistant = openai_client.beta.assistants.create(
def test_run_gemini(patched_openai_client):
gemini_assistant = patched_openai_client.beta.assistants.create(
name="Gemini Animal Tutor",
instructions=instructions,
model="gemini/gemini-pro",
)
run_with_assistant(gemini_assistant, openai_client)
run_with_assistant(gemini_assistant, patched_openai_client)
Loading

0 comments on commit aad2384

Please sign in to comment.