diff --git a/aie-gemma-finetuned.yaml b/aie-gemma-finetuned.yaml
index 8f315364d30ba1d42b2d2d58a367fcc66342f621..a1b7380e771f458598e280eddf16931fc67fedd1 100644
--- a/aie-gemma-finetuned.yaml
+++ b/aie-gemma-finetuned.yaml
@@ -12,15 +12,22 @@ config_file: |
   parameters:
     model: downloads/llama_backend/AIE_Gemma2_2B_IT_Q4_K_M.gguf
   roles:
-    assistant: 'Assistant:'
-    system: 'System:'
-    user: 'User:'
+    assistant: 'assistant:'
+    system: 'system:'
+    user: 'user:'
   template:
     chat_message: |-
-      <start_of_turn>{{if eq .RoleName "assistant" }}model{{else}}{{ .RoleName }}{{end}}
-      {{ if .Content -}}
-      {{.Content -}}
-      {{ end -}}<end_of_turn>
+      {{- range $i, $_ := .Messages }}
+      {{- $last := eq (len (slice $.Messages $i)) 1 }}
+      {{- if or (eq .Role "user") (eq .Role "system") }}<start_of_turn>user
+      {{ .Content }}<end_of_turn>
+      {{ if $last }}<start_of_turn>model
+      {{ end }}
+      {{- else if eq .Role "assistant" }}<start_of_turn>model
+      {{ .Content }}{{ if not $last }}<end_of_turn>
+      {{ end }}
+      {{- end }}
+      {{- end }}
     chat: |
       {{.Input }}
       <start_of_turn>model
@@ -30,7 +37,16 @@ config_file: |
   - '<|im_end|>'
   - '<end_of_turn>'
   - '<start_of_turn>'
-  context_size: 4096
+  context_size: 8192
+  system_prompt: |
+    You are a helpful and energy consumption aware assistant operating in a smart environment hosted using "Home Assistant".
+    As input, you will receive Home Assistant data, where each line represents a state change for an entity.
+    The data is limited to entities within a specific room and covers the past few minutes.
+    Analyze the provided data for patterns in entity state changes, where energy is used poorly.
+    Use the entity_ids from the provided historical data and create a Home Assistant automation that prevents the poor energy usage.
+    Do not provide any other information but the ```yaml formatted Home Assistant automation.
+    Use the "description" field inside your ```yaml response to explain your reasoning.
+    If there are no indicators for poor energy usage, answer with "NO".
 
 usage: |
 
@@ -44,3 +60,4 @@ files:
 - filename: "downloads/llama_backend/AIE_Gemma2_2B_IT_Q4_K_M.gguf"
   sha256: ""
   uri: "https://huggingface.co/Phabby/AIE-Gemma-2-2B-IT/resolve/main/AIE_Gemma2_2B_IT_Q4_K_M.gguf"
+
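
For context, a minimal sketch of the kind of ```yaml response the new `system_prompt` asks the model to produce, assuming a room where a light stays on after motion stops; the entity_ids below are hypothetical and would normally be taken from the Home Assistant history passed in the prompt:

```yaml
# Hypothetical example output only: entity_ids are placeholders, not from real history.
alias: Turn off living room light when no motion
description: >-
  The history shows light.living_room remained on for several minutes after
  binary_sensor.living_room_motion reported no motion, wasting energy. This
  automation turns the light off once the room has been empty for 5 minutes.
trigger:
  - platform: state
    entity_id: binary_sensor.living_room_motion
    to: "off"
    for:
      minutes: 5
condition: []
action:
  - service: light.turn_off
    target:
      entity_id: light.living_room
mode: single
```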