import type { ModelData } from "./model-data";
import type { PipelineType } from "./pipelines";

-interface Snippet {
+export interface LocalAppSnippet {
+  /**
+   * Title of the snippet
+   */
  title: string;
-  setup: string;
-  command: string;
+  /**
+   * Optional setup guide
+   */
+  setup?: string;
+  /**
+   * Content (or command) to be run
+   */
+  content: string;
}

/**
@@ -45,15 +54,15 @@ export type LocalApp = {
   * And if not (mostly llama.cpp), snippet to copy/paste in your terminal
   * Support the placeholder {{GGUF_FILE}} that will be replaced by the gguf file path or the list of available files.
   */
-  snippet: (model: ModelData, filepath?: string) => string | string[] | Snippet | Snippet[];
+  snippet: (model: ModelData, filepath?: string) => string | string[] | LocalAppSnippet | LocalAppSnippet[];
}
);

function isGgufModel(model: ModelData) {
  return model.tags.includes("gguf");
}

-const snippetLlamacpp = (model: ModelData, filepath?: string): Snippet[] => {
+const snippetLlamacpp = (model: ModelData, filepath?: string): LocalAppSnippet[] => {
  const command = (binary: string) =>
    [
      "# Load and run the model:",
@@ -67,7 +76,7 @@ const snippetLlamacpp = (model: ModelData, filepath?: string): Snippet[] => {
    {
      title: "Install from brew",
      setup: "brew install llama.cpp",
-      command: command("llama-cli"),
+      content: command("llama-cli"),
    },
    {
      title: "Use pre-built binary",
@@ -76,7 +85,7 @@ const snippetLlamacpp = (model: ModelData, filepath?: string): Snippet[] => {
        "# Download pre-built binary from:",
        "# https://github.com/ggerganov/llama.cpp/releases",
      ].join("\n"),
-      command: command("./llama-cli"),
+      content: command("./llama-cli"),
    },
    {
      title: "Build from source code",
@@ -85,7 +94,7 @@ const snippetLlamacpp = (model: ModelData, filepath?: string): Snippet[] => {
        "cd llama.cpp",
        "LLAMA_CURL=1 make llama-cli",
      ].join("\n"),
-      command: command("./llama-cli"),
+      content: command("./llama-cli"),
    },
  ];
};
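
For context, a minimal sketch (not part of this diff) of how a consumer might normalize the widened snippet return type into a flat LocalAppSnippet[] for rendering. The helper name normalizeSnippets, the "./local-apps" import path, and the fallback title are assumptions for illustration only.

import type { LocalAppSnippet } from "./local-apps"; // import path assumed for this sketch

// Hypothetical helper: flatten whatever a LocalApp snippet function returns
// (string, string[], LocalAppSnippet, or LocalAppSnippet[]) into LocalAppSnippet[].
function normalizeSnippets(result: string | string[] | LocalAppSnippet | LocalAppSnippet[]): LocalAppSnippet[] {
  const items: (string | LocalAppSnippet)[] = Array.isArray(result) ? result : [result];
  return items.map((item) =>
    typeof item === "string"
      ? { title: "Run the model", content: item } // placeholder title, not defined anywhere in this diff
      : item
  );
}

Bare string results get wrapped with a placeholder title, while LocalAppSnippet objects pass through unchanged, so UI code only has to handle one shape.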