@@ -315,6 +315,31 @@ const snippetDockerModelRunner = (model: ModelData, filepath?: string): string =
 	return `docker model run hf.co/${model.id}${getQuantTag(filepath)}`;
 };
 
+const snippetLemonade = (model: ModelData, filepath?: string): LocalAppSnippet[] => {
+	const tagName = getQuantTag(filepath);
+	const modelName = model.id.split("/")[1];
+	return [
+		{
+			title: "Pull the model",
+			setup: "# Download Lemonade from https://lemonade-server.ai/",
+			content: [
+				`lemonade-server pull user.${modelName} \\
+	--checkpoint ${model.id}${tagName} \\
+	--recipe llamacpp`,
+				"# Note: If you installed from source, use the lemonade-server-dev command instead.",
+			].join("\n"),
+		},
+		{
+			title: "Run and chat with the model",
+			content: `lemonade-server run user.${modelName}`,
+		},
+		{
+			title: "List all available models",
+			content: "lemonade-server list",
+		},
+	];
+};
+
 /**
  * Add your new local app here.
  *
@@ -492,6 +517,13 @@ export const LOCAL_APPS = {
 		displayOnModelPage: isLlamaCppGgufModel,
 		snippet: snippetDockerModelRunner,
 	},
+	lemonade: {
+		prettyLabel: "Lemonade",
+		docsUrl: "https://lemonade-server.ai",
+		mainTask: "text-generation",
+		displayOnModelPage: isLlamaCppGgufModel,
+		snippet: snippetLemonade,
+	},
 } satisfies Record<string, LocalApp>;
 
 export type LocalAppKey = keyof typeof LOCAL_APPS;
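For reference, a rough sketch (not part of the diff) of what the new snippet resolves to. The repo id and quant file below are hypothetical, and it is assumed that getQuantTag maps the file path to a ":Q4_K_M"-style suffix, as it does for the other llama.cpp-based apps in this module.

// Illustrative only: hypothetical GGUF repo and quant file.
const snippets = snippetLemonade(
	{ id: "unsloth/Llama-3.2-1B-Instruct-GGUF" } as ModelData,
	"Llama-3.2-1B-Instruct-Q4_K_M.gguf"
);
// snippets[0].content would then render roughly as:
//   lemonade-server pull user.Llama-3.2-1B-Instruct-GGUF \
//     --checkpoint unsloth/Llama-3.2-1B-Instruct-GGUF:Q4_K_M \
//     --recipe llamacpp
// snippets[1].content: "lemonade-server run user.Llama-3.2-1B-Instruct-GGUF"
// snippets[2].content: "lemonade-server list"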