[Bugfix] Fix nomic max_model_len #18755
Merged
Changes from 16 commits

Commits (22)
1e0edcc (noooop): fix nomic max_model_len
994ac46 (noooop): + examples
6333d80 (noooop): fix
b2846e2 (noooop): fix
e1e920d (noooop): fix
75ed7be (noooop): fix
52d9ce7 (noooop): fix
c594729 (noooop): fix
3ab82d6 (noooop): fix
5466920 (noooop): fix
9966739 (noooop): fix
d06275a (noooop): fix
653d573 (noooop): fix
c04050a (noooop): fix
b5fa6bd (noooop): fix
c746de9 (noooop): fix
886eb32 (noooop): fix
de07c5d (noooop): fix
cc0f499 (noooop): Merge branch 'vllm-project:main' into fix_nomic
ada5fb6 (noooop): Merge branch 'vllm-project:main' into fix_nomic
961ef1a (noooop): fix
5ac1f2f (noooop): Merge branch 'vllm-project:main' into fix_nomic
46 changes: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
# SPDX-License-Identifier: Apache-2.0

from vllm import LLM, SamplingParams

rope_theta = 1000000
original_max_position_embeddings = 32768
factor = 4.0

# Use yarn to extend context
hf_overrides = {
    "rope_theta": rope_theta,
    "rope_scaling": {
        "rope_type": "yarn",
        "factor": factor,
        "original_max_position_embeddings": original_max_position_embeddings,
    },
    "max_model_len": int(original_max_position_embeddings * factor),
}

llm = LLM(model="Qwen/Qwen3-0.6B", hf_overrides=hf_overrides)

sampling_params = SamplingParams(
    temperature=0.8,
    top_p=0.95,
    max_tokens=128,
)

conversation = [
    {"role": "system", "content": "You are a helpful assistant"},
    {"role": "user", "content": "Hello"},
    {"role": "assistant", "content": "Hello! How can I assist you today?"},
]
outputs = llm.chat(conversation, sampling_params, use_tqdm=False)


def print_outputs(outputs):
    print("\nGenerated Outputs:\n" + "-" * 80)
    for output in outputs:
        prompt = output.prompt
        generated_text = output.outputs[0].text
        print(f"Prompt: {prompt!r}\n")
        print(f"Generated text: {generated_text!r}")
        print("-" * 80)


print_outputs(outputs)
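With factor = 4.0, the override above extends Qwen/Qwen3-0.6B's context window from 32768 to int(32768 * 4.0) = 131072 tokens. As a quick sanity check (a sketch, not part of this diff), the effective window can be read back through the same model_config attribute chain the tests below use:

# Sketch only: confirm the yarn override took effect.
# Assumes the `llm` object from the example above.
assert llm.llm_engine.model_config.max_model_len == 131072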
138 changes: 138 additions & 0 deletions
tests/models/language/pooling/test_nomic_max_model_len.py
@@ -0,0 +1,138 @@
# SPDX-License-Identifier: Apache-2.0
# ruff: noqa: SIM117
import pytest
import vllm.transformers_utils.config as config

from ...utils import EmbedModelInfo


def cache_clear():
    config.get_sentence_transformer_tokenizer_config.cache_clear()


MODELS = [
    EmbedModelInfo("nomic-ai/nomic-embed-text-v1"),
    #EmbedModelInfo("nomic-ai/nomic-embed-text-v1.5"),
    #EmbedModelInfo("nomic-ai/CodeRankEmbed"),
    EmbedModelInfo("nomic-ai/nomic-embed-text-v2-moe"),
    #EmbedModelInfo("Snowflake/snowflake-arctic-embed-m-long"),
]

rope_theta = 1000
factor = 4.0
original_max_position_embeddings = 2048
max_model_len = int(original_max_position_embeddings * factor)


@pytest.mark.parametrize("model_info", MODELS)
def test_default(model_info, vllm_runner):
    with vllm_runner(model_info.name, task="embed",
                     max_model_len=None) as vllm_model:
        model_config = vllm_model.model.llm_engine.model_config
        if model_info.name == "nomic-ai/nomic-embed-text-v2-moe":
            # For nomic-embed-text-v2-moe the length is set to 512
            # by sentence_bert_config.json.
            assert model_config.max_model_len == 512
        else:
            assert (
                model_config.max_model_len == original_max_position_embeddings)


@pytest.mark.parametrize("model_info", MODELS)
def test_set_max_model_len_legal1(model_info, vllm_runner):
    # set max_model_len <= 512
    with vllm_runner(model_info.name, task="embed",
                     max_model_len=256) as vllm_model:
        model_config = vllm_model.model.llm_engine.model_config
        assert model_config.max_model_len == 256

    # set 512 < max_model_len <= 2048
    if model_info.name == "nomic-ai/nomic-embed-text-v2-moe":
        cache_clear()

        # For nomic-embed-text-v2-moe the length is set to 512
        # by sentence_bert_config.json.
        with pytest.raises(ValueError):
            with vllm_runner(model_info.name, task="embed",
                             max_model_len=1024):
                pass
    else:
        with vllm_runner(model_info.name, task="embed",
                         max_model_len=1024) as vllm_model:
            model_config = vllm_model.model.llm_engine.model_config
            assert model_config.max_model_len == 1024


@pytest.mark.parametrize("model_info", MODELS)
def test_set_max_model_len_illegal(model_info, vllm_runner):
    # set max_model_len > 2048
    with pytest.raises(ValueError):
        with vllm_runner(model_info.name, task="embed", max_model_len=4096):
            pass

    # set max_model_len > 2048 by hf_overrides
    hf_overrides = {"max_model_len": 4096}
    with pytest.raises(ValueError):
        with vllm_runner(model_info.name,
                         task="embed",
                         max_model_len=None,
                         hf_overrides=hf_overrides):
            pass


@pytest.mark.parametrize("model_info", MODELS)
def test_use_rope_scaling_legal(model_info, vllm_runner):
    hf_overrides = {
        "rope_theta": rope_theta,
        "rope_scaling": {
            "rope_type": "yarn",
            "factor": factor,
            "original_max_position_embeddings":
            original_max_position_embeddings
        },
        "max_model_len": max_model_len
    }

    with vllm_runner(model_info.name,
                     task="embed",
                     max_model_len=None,
                     hf_overrides=hf_overrides):
        pass


@pytest.mark.parametrize("model_info", MODELS)
def test_use_rope_scaling_illegal(model_info, vllm_runner):
    hf_overrides = {
        "rope_theta": rope_theta,
        "rope_scaling": {
            "rope_type": "yarn",
            "factor": factor,
            "original_max_position_embeddings":
            original_max_position_embeddings
        }
    }
    # illegal max_model_len
    with pytest.raises(ValueError):
        with vllm_runner(model_info.name,
                         task="embed",
                         max_model_len=max_model_len + 1,
                         hf_overrides=hf_overrides):
            pass

    hf_overrides = {
        "rope_theta": rope_theta,
        "rope_scaling": {
            "rope_type": "yarn",
            "factor": factor,
            "original_max_position_embeddings":
            original_max_position_embeddings
        },
        "max_model_len": max_model_len + 1
    }
    # illegal max_model_len by hf_overrides
    with pytest.raises(ValueError):
        with vllm_runner(model_info.name,
                         task="embed",
                         max_model_len=None,
                         hf_overrides=hf_overrides):
            pass
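The legal path exercised by test_use_rope_scaling_legal corresponds to the following standalone usage. This is a hedged sketch rather than part of the PR: it reuses the test constants (rope_theta = 1000, factor = 4.0, a native 2048-token window) and assumes a vLLM build where task="embed" models expose LLM.embed():

from vllm import LLM

# Sketch: extend a nomic embedding model past its native 2048-token
# window with the same yarn override the tests validate above.
llm = LLM(
    model="nomic-ai/nomic-embed-text-v1",
    task="embed",
    hf_overrides={
        "rope_theta": 1000,
        "rope_scaling": {
            "rope_type": "yarn",
            "factor": 4.0,
            "original_max_position_embeddings": 2048,
        },
        # 2048 * 4.0; anything larger is rejected with a ValueError,
        # as test_use_rope_scaling_illegal checks.
        "max_model_len": 8192,
    },
)
(output,) = llm.embed(["an example document"])
print(len(output.outputs.embedding))  # embedding dimensionality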