new file: .gitignore

	new file:   custom_models/3.1/Modelfile
	new file:   custom_models/3.1/reload llama3.1.cmd
	new file:   custom_models/3.2/Modelfile
	new file:   custom_models/3.2/reload llama3.2.cmd
	new file:   custom_models/mistral/Modelfile
	new file:   custom_models/mistral/reload mistral.cmd
	new file:   custom_models/qwen/Modelfile
	new file:   custom_models/qwen/reload qwen.cmd
	new file:   main.js
	new file:   package-lock.json
	new file:   package.json
	new file:   test.js
	new file:   tools.js
2025-01-17 20:11:05 -03:00
parent 77c1b6cfac
commit 05048838be
14 changed files with 1230 additions and 0 deletions

custom_models/3.1/Modelfile

@@ -0,0 +1,60 @@
FROM llama3.1:latest
SYSTEM """
Your name is Nyamma
Your personality is that of someone who loves cats and is so obsessed with them that they sometimes act and talk like a cat.
You should always give reasonably short answers.
"""
TEMPLATE """{{- if or .System .Tools }}<|start_header_id|>system<|end_header_id|>
{{- if .System }}
{{ .System }}
{{- end }}
{{- if .Tools }}
Cutting Knowledge Date: December 2023
When you receive a tool call response, use the output to format an answer to the original user question.
You are a helpful assistant with tool calling capabilities.
{{- end }}<|eot_id|>
{{- end }}
{{- range $i, $_ := .Messages }}
{{- $last := eq (len (slice $.Messages $i)) 1 }}
{{- if eq .Role "user" }}<|start_header_id|>user<|end_header_id|>
{{- if and $.Tools $last }}
Given the following functions, please respond with a JSON for a function call with its proper arguments that best answers the given prompt.
Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}. Do not use variables.
{{ range $.Tools }}
{{- . }}
{{ end }}
Question: {{ .Content }}<|eot_id|>
{{- else }}
{{ .Content }}<|eot_id|>
{{- end }}{{ if $last }}<|start_header_id|>assistant<|end_header_id|>
{{ end }}
{{- else if eq .Role "assistant" }}<|start_header_id|>assistant<|end_header_id|>
{{- if .ToolCalls }}
{{ range .ToolCalls }}
{"name": "{{ .Function.Name }}", "parameters": {{ .Function.Arguments }}}{{ end }}
{{- else }}
{{ .Content }}
{{- end }}{{ if not $last }}<|eot_id|>{{ end }}
{{- else if eq .Role "tool" }}<|start_header_id|>ipython<|end_header_id|>
{{ .Content }}<|eot_id|>{{ if $last }}<|start_header_id|>assistant<|end_header_id|>
{{ end }}
{{- end }}
{{- end }}"""
PARAMETER stop <|start_header_id|>
PARAMETER stop <|end_header_id|>
PARAMETER stop <|eot_id|>
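
The TEMPLATE block above wraps the custom persona in Llama 3.1's tool-calling chat format: tool definitions are injected into the last user turn, assistant tool calls are emitted as single-line {"name": ..., "parameters": ...} JSON objects, and tool output is fed back under the ipython header. As a rough sketch of how a client such as main.js might exercise this model, assuming Ollama's standard /api/chat endpoint and a made-up get_current_weather tool (neither is shown in this commit):

// Sketch only (not part of this commit): call the rebuilt model through
// Ollama's /api/chat endpoint and let the template above drive the
// tool-call format. The get_current_weather tool is a made-up example.
const tools = [{
  type: 'function',
  function: {
    name: 'get_current_weather',
    description: 'Get the current weather for a city',
    parameters: {
      type: 'object',
      properties: { city: { type: 'string', description: 'City name' } },
      required: ['city'],
    },
  },
}];

const response = await fetch('http://localhost:11434/api/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    model: 'llama3.1-tool', // the name created by "reload llama3.1.cmd"
    messages: [{ role: 'user', content: 'What is the weather in Tokyo?' }],
    tools,
    stream: false,
  }),
});

const { message } = await response.json();
// If the model chose to call a tool, Ollama parses the template's one-line
// JSON into message.tool_calls; otherwise message.content holds plain text.
console.log(message.tool_calls ?? message.content);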

custom_models/3.1/reload llama3.1.cmd

@@ -0,0 +1,2 @@
ollama rm llama3.1-tool
ollama create llama3.1-tool -f Modelfile
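
These two commands drop any stale llama3.1-tool model and recreate it from the Modelfile in the same directory, so changes to the SYSTEM prompt or TEMPLATE take effect the next time the model is called (for example via the chat request sketched above).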