# One-shot prompt with Ollama
# Install notebook dependencies: the Ollama HTTP client and Kino widgets.
# (Original had spaces between function names and parentheses, which Elixir
# warns about or rejects — reformatted to valid, conventional syntax.)
Mix.install([
  {:ollama, "~> 0.7.0"},
  {:kino, "~> 0.13.2"}
])

# Suppress this cell's return value in the Livebook output.
Kino.nothing()
# Create an Ollama API client with the default configuration
# (NOTE(review): presumably the local Ollama server — confirm endpoint).
client = Ollama.init()

# Fetch the locally available models and shape them into {value, label}
# tuples for the select input; the model name serves as both.
models =
  client
  |> Ollama.list_models()
  |> then(fn {:ok, %{"models" => models}} -> models end)
  |> Enum.map(fn model -> {Map.get(model, "name"), Map.get(model, "name")} end)
# Interactive inputs: which model to run, the system prompt, and the
# one-shot user instruction.
model_input = Kino.Input.select("Model", models)
system_prompt_input = Kino.Input.textarea("System prompt")
instruction_input = Kino.Input.textarea("Instruction")
# Render a form that submits all three inputs together on "Start".
form =
  Kino.Control.form(
    [model: model_input, system_prompt: system_prompt_input, instruction: instruction_input],
    submit: "Start"
  )
  |> Kino.render()

# Frame that will hold the streamed reply, rendered below the form.
reply_frame =
  Kino.Frame.new()
  |> Kino.render()
# On each form submission: build the chat messages, clear any previous
# reply, and stream the model's answer into the frame chunk by chunk.
Kino.listen(form, fn %{
                       data: %{
                         model: model,
                         system_prompt: system_prompt,
                         instruction: instruction
                       }
                     } ->
  messages = [
    %{role: "system", content: system_prompt},
    %{role: "user", content: instruction}
  ]

  # Drop the previous answer before starting a new one.
  Kino.Frame.clear(reply_frame)

  # stream: true makes Ollama.chat return a lazy enumerable of partial
  # responses instead of a single completed message.
  {:ok, streaming} =
    Ollama.chat(client,
      model: model,
      messages: messages,
      stream: true
    )

  streaming
  |> Stream.each(fn %{"message" => %{"content" => chunk}} ->
    # chunk: true appends to the previous markdown output rather than
    # replacing it, producing a typewriter effect.
    Kino.Frame.append(reply_frame, Kino.Markdown.new(chunk, chunk: true))
  end)
  |> Stream.run()
end)

# Suppress this cell's return value in the Livebook output.
Kino.nothing()