Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 26 additions & 6 deletions maxperf.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import os
import sys
import PIL
from PyQt5 import QtWidgets, QtCore
Expand All @@ -19,7 +20,19 @@

mw = None
batchSize = 10
prompts = ['Evil space kitty', 'Cute dog in hat, H.R. Giger style', 'Horse wearing a tie', 'Cartoon pig', 'Donkey on Mars', 'Cute kitties baked in a cake', 'Boxing chickens on farm, Maxfield Parish style', 'Future spaceship', 'A city of the past', 'Jabba the Hut wearing jewelery']

# Prompt list: user-supplied prompts.txt (one prompt per line) overrides the
# built-in defaults.
custom_prompts_path = "prompts.txt"
if os.path.exists(custom_prompts_path):
    with open(custom_prompts_path, "r") as file:
        # Strip whitespace and drop blank lines so a trailing newline (or an
        # accidentally empty line) does not become an empty prompt.
        prompts = [line.strip() for line in file if line.strip()]
else:
    prompts = []

if not prompts:
    # Fall back to the defaults when prompts.txt is absent or contains no
    # usable lines: an empty list would make prompts_len == 0 and crash the
    # later `batchSize // prompts_len` with ZeroDivisionError.
    prompts = ['Evil space kitty', 'Cute dog in hat, H.R. Giger style', 'Horse wearing a tie', 'Cartoon pig', 'Donkey on Mars', 'Cute kitties baked in a cake', 'Boxing chickens on farm, Maxfield Parish style', 'Future spaceship', 'A city of the past', 'Jabba the Hut wearing jewelery']

# No placeholders, so a plain string (not an f-string) is used here.
print("Using the following prompts:", *prompts, sep='\n')

prompts_len = len(prompts)

def dwencode(pipe, prompts, batchSize: int, nTokens: int):
tokenizer = pipe.tokenizer
Expand Down Expand Up @@ -244,14 +257,21 @@ def genit(mode, prompts, batchSize, nSteps):
return images

if __name__ == '__main__':


if len(sys.argv) == 2:
batchSize = int(sys.argv[1])
if batchSize > 10:
print('Batchsize must not be greater than 10.')
prompts = prompts[:batchSize]

if batchSize > prompts_len:
prompts=prompts * (1 + batchSize // prompts_len)
print(prompts_len, prompts)


else:
batchSize = 10
prompts = ['Evil space kitty', 'Cute dog in hat, H.R. Giger style', 'Horse wearing a tie', 'Cartoon pig', 'Donkey on Mars', 'Cute kitties baked in a cake', 'Boxing chickens on farm, Maxfield Parish style', 'Future spaceship', 'A city of the past', 'Jabba the Hut wearing jewelery']
batchSize = prompts_len

prompts = prompts[:batchSize]

app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
Expand Down
1 change: 1 addition & 0 deletions prompts.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
a fluffy cat meme
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -21,4 +21,5 @@ regex
accelerate
omegaconf
piexif
olefile
pathvalidate
2 changes: 1 addition & 1 deletion src/stable_diffusion_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def setup_torch_compilation(self):
self.pipe.text_encoder = torch.compile(self.pipe.text_encoder, mode='max-autotune')
self.pipe.unet = torch.compile(self.pipe.unet, mode='max-autotune')
self.pipe.vae = torch.compile(self.pipe.vae, mode='max-autotune')
self.perform_warmup()
# self.perform_warmup()

def perform_warmup(self):
self._logger.info(
Expand Down