Skip to content

Commit

Permalink
increase token limit for generating citations
Browse files Browse the repository at this point in the history
  • Loading branch information
etwk committed Aug 16, 2024
1 parent 148b4c6 commit 4e9326e
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 3 deletions.
3 changes: 3 additions & 0 deletions src/modules/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,9 @@
llm = dspy.OpenAI(model=settings.LLM_MODEL_NAME, api_base=f"{settings.OPENAI_BASE_URL}/", max_tokens=200, stop='\n\n')
dspy.settings.configure(lm=llm)

# LM with higher token limits
llm_long = dspy.OpenAI(model=settings.LLM_MODEL_NAME, api_base=f"{settings.OPENAI_BASE_URL}/", max_tokens=500, stop='\n\n')

from .citation import Citation
from .ollama_embedding import OllamaEmbedding
from .retrieve import LlamaIndexRM
Expand Down
11 changes: 8 additions & 3 deletions src/pipeline/verdict_citation.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
import os
import dspy

from modules import llm_long
from modules import Citation, LlamaIndexRM, Verdict

"""
Expand All @@ -23,9 +26,11 @@ def get(self, statement):
rep = self.context_verdict(statement)
context = rep.context
verdict = rep.answer

rep = Citation()(statement=statement, context=context, verdict=verdict)
citation = rep.citation

# Use the LLM with a higher token limit for the citation-generation call
with dspy.context(lm=llm_long):
rep = Citation()(statement=statement, context=context, verdict=verdict)
citation = rep.citation

return verdict, citation

0 comments on commit 4e9326e

Please sign in to comment.