
Commit 195a77d

Add a prompt for the print() function.

1 parent: d43e9da
1 file changed: +2 −2

llm/inference/janus_pro/generation.py (+2, −2)
@@ -78,7 +78,7 @@ def generate(
 
     generated_tokens = ops.zeros(parallel_size, image_token_num_per_image, dtype=ms.int32)
 
-    print()
+    print("Generating tokens: ")
     for i in range(image_token_num_per_image):
         outputs = mmgpt.language_model.model(inputs_embeds=inputs_embeds, use_cache=True, past_key_values=outputs.past_key_values if i != 0 else None)
         hidden_states = outputs.last_hidden_state  # (parallel_size*2, len(input_ids), 2048)
@@ -123,4 +123,4 @@ def generate(
         vl_gpt,
         vl_chat_processor,
         prompt,
-    )
+    )
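
For context, the loop touched by the first hunk follows the standard autoregressive decoding pattern: on the first step the prompt embeddings are fed to the language model, and on every later step only the newly sampled token is fed while the attention cache (past_key_values) carries the history; the added print("Generating tokens: ") simply announces that this loop has started. The sketch below is a minimal, self-contained illustration of that pattern with a progress message like the one added here. StubLanguageModel, StubOutput, generate_tokens, and the sampling logic are hypothetical stand-ins, not the Janus Pro / MindSpore API.

# Minimal sketch of the autoregressive loop annotated by this commit.
# StubLanguageModel / StubOutput are hypothetical stand-ins for the real
# mmgpt.language_model.model call; the cache-reuse pattern is the point.
import random
from dataclasses import dataclass


@dataclass
class StubOutput:
    last_hidden_state: list   # stand-in for the model's hidden states
    past_key_values: list     # stand-in for the attention KV cache


class StubLanguageModel:
    def __call__(self, inputs_embeds, use_cache=True, past_key_values=None):
        # Append the new inputs to the "cache" and return a dummy output.
        cache = (past_key_values or []) + [inputs_embeds]
        return StubOutput(last_hidden_state=[len(cache)], past_key_values=cache)


def generate_tokens(model, prompt_embeds, num_tokens=8):
    generated = []
    outputs = None
    print("Generating tokens: ")  # progress message, as added in this commit
    for i in range(num_tokens):
        # First step: feed the prompt; later steps: feed only the new token
        # and reuse the cached keys/values from the previous step.
        inputs = prompt_embeds if i == 0 else [generated[-1]]
        outputs = model(
            inputs_embeds=inputs,
            use_cache=True,
            past_key_values=outputs.past_key_values if i != 0 else None,
        )
        token = random.randrange(16384)  # stand-in for sampling from logits
        generated.append(token)
        print(f"  token {i + 1}/{num_tokens}")
    return generated


if __name__ == "__main__":
    print(generate_tokens(StubLanguageModel(), prompt_embeds=["<prompt>"]))

The second hunk is a whitespace-only change on the closing parenthesis of the call at the end of the file; no behavior changes there.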
