
Commit a428b30

v0.3.0 (#221)
* Feat add tools (#201)
* fix: openai WithTools pointer receiver (#204)
* fix: use pointer receiver
* chore: add assistant execute
* chore: change the way assistant actually works
* chore: rename tools in tool
* docs: update docs
* Refactor ssistant observer (#206)
* refactor assistant observer
* fix: linting
* fix: assistant observer
* fix: linting
* fix: serpapi response parsing
* fix: tools
* fix: use json as default output and parse embeddings
* fix
* fix ollama embed error
* fix lint
1 parent 3930e2b commit a428b30

File tree: 43 files changed, +1502 −61 lines

Some content is hidden: large commits hide part of their content by default, so only a subset of the 43 changed files is shown below.

.gitignore

Lines changed: 2 additions & 1 deletion
@@ -19,4 +19,5 @@ bin/
 llama.cpp/
 whisper.cpp/
 
-*/.hugo_build.lock
+*/.hugo_build.lock
+docs/public/

assistant/assistant.go

Lines changed: 69 additions & 6 deletions
@@ -2,6 +2,7 @@ package assistant
 
 import (
     "context"
+    "fmt"
     "strings"
 
     obs "github.com/henomis/lingoose/observer"
@@ -22,11 +23,16 @@ type observer interface {
     SpanEnd(s *obs.Span) (*obs.Span, error)
 }
 
+const (
+    DefaultMaxIterations = 3
+)
+
 type Assistant struct {
-    llm        LLM
-    rag        RAG
-    thread     *thread.Thread
-    parameters Parameters
+    llm           LLM
+    rag           RAG
+    thread        *thread.Thread
+    parameters    Parameters
+    maxIterations uint
 }
 
 type LLM interface {
@@ -48,6 +54,7 @@ func New(llm LLM) *Assistant {
             CompanyName:        defaultCompanyName,
             CompanyDescription: defaultCompanyDescription,
         },
+        maxIterations: DefaultMaxIterations,
     }
 
     return assistant
@@ -83,14 +90,41 @@ func (a *Assistant) Run(ctx context.Context) error {
         if errGenerate != nil {
             return errGenerate
         }
+    } else {
+        a.injectSystemMessage()
+    }
+
+    for i := 0; i < int(a.maxIterations); i++ {
+        err = a.runIteration(ctx, i)
+        if err != nil {
+            return err
+        }
+
+        if a.thread.LastMessage().Role != thread.RoleTool {
+            break
+        }
+    }
+
+    err = a.stopObserveSpan(ctx, spanAssistant)
+    if err != nil {
+        return err
+    }
+
+    return nil
+}
+
+func (a *Assistant) runIteration(ctx context.Context, iteration int) error {
+    ctx, spanIteration, err := a.startObserveSpan(ctx, fmt.Sprintf("iteration-%d", iteration+1))
+    if err != nil {
+        return err
     }
 
     err = a.llm.Generate(ctx, a.thread)
     if err != nil {
         return err
     }
 
-    err = a.stopObserveSpan(ctx, spanAssistant)
+    err = a.stopObserveSpan(ctx, spanIteration)
     if err != nil {
         return err
     }
@@ -123,7 +157,7 @@ func (a *Assistant) generateRAGMessage(ctx context.Context) error {
 
     a.thread.AddMessage(thread.NewSystemMessage().AddContent(
         thread.NewTextContent(
-            systemRAGPrompt,
+            systemPrompt,
         ).Format(
             types.M{
                 "assistantName": a.parameters.AssistantName,
@@ -147,6 +181,11 @@ func (a *Assistant) generateRAGMessage(ctx context.Context) error {
     return nil
 }
 
+func (a *Assistant) WithMaxIterations(maxIterations uint) *Assistant {
+    a.maxIterations = maxIterations
+    return a
+}
+
 func (a *Assistant) startObserveSpan(ctx context.Context, name string) (context.Context, *obs.Span, error) {
     o, ok := obs.ContextValueObserverInstance(ctx).(observer)
     if o == nil || !ok {
@@ -183,3 +222,27 @@ func (a *Assistant) stopObserveSpan(ctx context.Context, span *obs.Span) error {
     _, err := o.SpanEnd(span)
     return err
 }
+
+func (a *Assistant) injectSystemMessage() {
+    for _, message := range a.thread.Messages {
+        if message.Role == thread.RoleSystem {
+            return
+        }
+    }
+
+    systemMessage := thread.NewSystemMessage().AddContent(
+        thread.NewTextContent(
+            systemPrompt,
+        ).Format(
+            types.M{
+                "assistantName":      a.parameters.AssistantName,
+                "assistantIdentity":  a.parameters.AssistantIdentity,
+                "assistantScope":     a.parameters.AssistantScope,
+                "companyName":        a.parameters.CompanyName,
+                "companyDescription": a.parameters.CompanyDescription,
+            },
+        ),
+    )
+
+    a.thread.Messages = append([]*thread.Message{systemMessage}, a.thread.Messages...)
+}
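
Taken together, Run now injects a system message (built from the assistant's Parameters) when no RAG is configured, then calls runIteration up to maxIterations times, stopping as soon as the last thread message is no longer a tool result. A rough usage sketch, modeled on the documentation examples added elsewhere in this commit; the prompt text and iteration count are illustrative only, not part of the change:

```go
// Illustrative sketch only; mirrors the docs examples added in this commit.
// WithMaxIterations caps how many generate/tool rounds Run performs
// (DefaultMaxIterations, i.e. 3, applies if it is never called).
myAgent := assistant.New(
	openai.New().WithTools(
		serpapitool.New(), // any registered tool works here
	),
).WithThread(
	thread.New().AddMessages(
		thread.NewUserMessage().AddContent(
			thread.NewTextContent("What is the population of Rome?"),
		),
	),
).WithMaxIterations(5)

// Run keeps iterating while the model keeps answering with tool calls and
// stops early once the last message is a plain assistant reply.
err := myAgent.Run(context.Background())
if err != nil {
	panic(err)
}
```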

assistant/prompt.go

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ const (
     //nolint:lll
     baseRAGPrompt = "Use the following pieces of retrieved context to answer the question.\n\nQuestion: {{.question}}\nContext:\n{{range .results}}{{.}}\n\n{{end}}"
     //nolint:lll
-    systemRAGPrompt = "You name is {{.assistantName}}, and you are {{.assistantIdentity}} {{if ne .companyName \"\" }}at {{.companyName}}{{end}}{{if ne .companyDescription \"\" }}, {{.companyDescription}}{{end}}. Your task is to assist humans {{.assistantScope}}."
+    systemPrompt = "{{if ne .assistantName \"\"}}You name is {{.assistantName}}, {{end}}{{if ne .assistantIdentity \"\"}}you are {{.assistantIdentity}}.{{end}} {{if ne .companyName \"\" }}at {{.companyName}}{{end}}{{if ne .companyDescription \"\" }}, {{.companyDescription}}.{{end}} Your task is to assist humans {{.assistantScope}}."
 
     defaultAssistantName     = "AI assistant"
     defaultAssistantIdentity = "a helpful and polite assistant"
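
The renamed systemPrompt guards each field with an {{if ne ... ""}} check, so an assistant configured without a name, identity, or company no longer produces dangling phrases. A standalone sketch of how the template renders, using Go's text/template directly (the same syntax LinGoose's Format helper appears to accept); the values below are the defaults from this file plus an assumed scope:

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	// Copied from the new systemPrompt constant above (backquoted here to
	// avoid escaping the inner double quotes).
	const systemPrompt = `{{if ne .assistantName ""}}You name is {{.assistantName}}, {{end}}{{if ne .assistantIdentity ""}}you are {{.assistantIdentity}}.{{end}} {{if ne .companyName "" }}at {{.companyName}}{{end}}{{if ne .companyDescription "" }}, {{.companyDescription}}.{{end}} Your task is to assist humans {{.assistantScope}}.`

	tmpl := template.Must(template.New("system").Parse(systemPrompt))

	// With the company fields empty, both company clauses are dropped.
	_ = tmpl.Execute(os.Stdout, map[string]any{
		"assistantName":      "AI assistant",                   // defaultAssistantName
		"assistantIdentity":  "a helpful and polite assistant", // defaultAssistantIdentity
		"assistantScope":     "with their questions",           // assumed value
		"companyName":        "",
		"companyDescription": "",
	})
	// Prints (roughly): "You name is AI assistant, you are a helpful and
	// polite assistant.  Your task is to assist humans with their questions."
}
```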

docs/content/reference/assistant.md

Lines changed: 34 additions & 1 deletion
@@ -31,4 +31,37 @@ if err != nil {
 fmt.Println(myAssistant.Thread())
 ```
 
-We can define the LinGoose `Assistant` as a `Thread` runner with an optional `RAG` component that will help to produce the response.
+We can define the LinGoose `Assistant` as a `Thread` runner with an optional `RAG` component that will help to produce the response.
+
+## Assistant as Agent
+
+The `Assistant` can be used as an agent in a conversation. It can be used to automate tasks, answer questions, and provide information.
+
+```go
+auto := "auto"
+myAgent := assistant.New(
+    openai.New().WithModel(openai.GPT4o).WithToolChoice(&auto).WithTools(
+        pythontool.New(),
+        serpapitool.New(),
+    ),
+).WithParameters(
+    assistant.Parameters{
+        AssistantName:      "AI Assistant",
+        AssistantIdentity:  "an helpful assistant",
+        AssistantScope:     "with their questions.",
+        CompanyName:        "",
+        CompanyDescription: "",
+    },
+).WithThread(
+    thread.New().AddMessages(
+        thread.NewUserMessage().AddContent(
+            thread.NewTextContent("calculate the average temperature in celsius degrees of New York, Rome, and Tokyo."),
+        ),
+    ),
+).WithMaxIterations(10)
+
+err := myAgent.Run(context.Background())
+if err != nil {
+    panic(err)
+}
+```

docs/content/reference/examples.md

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 title: "LinGoose Examples"
 description:
 linkTitle: "Examples"
-menu: { main: { parent: 'reference', weight: -88 } }
+menu: { main: { parent: 'reference', weight: -87 } }
 ---
 
 LinGoose provides a number of examples to help you get started with building your own AI app. You can use these examples as a reference to understand how to build your own assistant.

docs/content/reference/linglet.md

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 title: "LinGoose Linglets"
 description:
 linkTitle: "Linglets"
-menu: { main: { parent: 'reference', weight: -89 } }
+menu: { main: { parent: 'reference', weight: -88 } }
 ---
 
 Linglets are pre-built LinGoose Assistants with a specific purpose. They are designed to be used as a starting point for building your own AI app. You can use them as a reference to understand how to build your own assistant.

docs/content/reference/observer.md

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 ---
-title: "Observer"
+title: "Observe and Analyze LLM Applications"
 description:
 linkTitle: "Observer"
 menu: { main: { parent: 'reference', weight: -92 } }

docs/content/reference/tool.md

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
+---
+title: "Performing tasks with Tools"
+description:
+linkTitle: "Tool"
+menu: { main: { parent: 'reference', weight: -89 } }
+---
+
+Tools are components that can be used to perform specific tasks. They can be used to automate, answer questions, and provide information. LinGoose offers a variety of tools that can be used to perform different actions.
+
+## Available Tools
+
+- *Python*: It can be used to run Python code and get the output.
+- *SerpApi*: It can be used to get search results from Google and other search engines.
+- *Dall-e*: It can be used to generate images based on text descriptions.
+- *DuckDuckGo*: It can be used to get search results from DuckDuckGo.
+- *RAG*: It can be used to retrieve relevant documents based on a query.
+- *LLM*: It can be used to generate text based on a prompt.
+- *Shell*: It can be used to run shell commands and get the output.
+
+
+## Using Tools
+
+LinGoose tools can be used to perform specific tasks. Here is an example of using the `Python` and `serpapi` tools to get information and run Python code and get the output.
+
+```go
+auto := "auto"
+myAgent := assistant.New(
+    openai.New().WithModel(openai.GPT4o).WithToolChoice(&auto).WithTools(
+        pythontool.New(),
+        serpapitool.New(),
+    ),
+).WithParameters(
+    assistant.Parameters{
+        AssistantName:      "AI Assistant",
+        AssistantIdentity:  "an helpful assistant",
+        AssistantScope:     "with their questions.",
+        CompanyName:        "",
+        CompanyDescription: "",
+    },
+).WithThread(
+    thread.New().AddMessages(
+        thread.NewUserMessage().AddContent(
+            thread.NewTextContent("calculate the average temperature in celsius degrees of New York, Rome, and Tokyo."),
+        ),
+    ),
+).WithMaxIterations(10)
+
+err := myAgent.Run(context.Background())
+if err != nil {
+    panic(err)
+}
+```

embedder/llamacpp/llamacpp.go

Lines changed: 23 additions & 12 deletions
@@ -2,10 +2,10 @@ package llamacppembedder
 
 import (
     "context"
+    "encoding/json"
+    "errors"
     "os"
     "os/exec"
-    "strconv"
-    "strings"
 
     "github.com/henomis/lingoose/embedder"
 )
@@ -16,6 +16,16 @@ type LlamaCppEmbedder struct {
     modelPath string
 }
 
+type output struct {
+    Object string `json:"object"`
+    Data   []data `json:"data"`
+}
+type data struct {
+    Object    string    `json:"object"`
+    Index     int       `json:"index"`
+    Embedding []float64 `json:"embedding"`
+}
+
 func New() *LlamaCppEmbedder {
     return &LlamaCppEmbedder{
         llamacppPath: "./llama.cpp/embedding",
@@ -61,7 +71,7 @@ func (l *LlamaCppEmbedder) embed(ctx context.Context, text string) (embedder.Emb
         return nil, err
     }
 
-    llamacppArgs := []string{"-m", l.modelPath, "-p", text}
+    llamacppArgs := []string{"-m", l.modelPath, "--embd-output-format", "json", "-p", text}
     llamacppArgs = append(llamacppArgs, l.llamacppArgs...)
 
     //nolint:gosec
@@ -74,14 +84,15 @@ func (l *LlamaCppEmbedder) embed(ctx context.Context, text string) (embedder.Emb
 }
 
 func parseEmbeddings(str string) (embedder.Embedding, error) {
-    strSlice := strings.Split(strings.TrimSpace(str), " ")
-    floatSlice := make([]float64, len(strSlice))
-    for i, s := range strSlice {
-        f, err := strconv.ParseFloat(s, 64)
-        if err != nil {
-            return nil, err
-        }
-        floatSlice[i] = f
+    var out output
+    err := json.Unmarshal([]byte(str), &out)
+    if err != nil {
+        return nil, err
    }
-    return floatSlice, nil
+
+    if len(out.Data) != 1 {
+        return nil, errors.New("no embeddings found")
+    }
+
+    return out.Data[0].Embedding, nil
 }
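
The embedder now asks the llama.cpp embedding binary for JSON output (`--embd-output-format json`) and unmarshals it, instead of splitting whitespace-separated floats. A standalone sketch of the payload shape parseEmbeddings now expects; the struct definitions mirror the output/data types added above, and the sample values are invented:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Same shape as the output/data structs introduced in this commit.
type data struct {
	Object    string    `json:"object"`
	Index     int       `json:"index"`
	Embedding []float64 `json:"embedding"`
}

type output struct {
	Object string `json:"object"`
	Data   []data `json:"data"`
}

func main() {
	// Invented sample of what the embedding binary emits in JSON mode.
	sample := `{
		"object": "list",
		"data": [
			{"object": "embedding", "index": 0, "embedding": [0.12, -0.03, 0.88]}
		]
	}`

	var out output
	if err := json.Unmarshal([]byte(sample), &out); err != nil {
		panic(err)
	}

	// parseEmbeddings returns out.Data[0].Embedding when exactly one entry
	// is present and errors otherwise.
	fmt.Println(out.Data[0].Embedding) // [0.12 -0.03 0.88]
}
```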

embedder/ollama/api.go

Lines changed: 8 additions & 1 deletion
@@ -34,6 +34,7 @@ func (r *request) ContentType() string {
 type response struct {
     HTTPStatusCode    int       `json:"-"`
     acceptContentType string    `json:"-"`
+    RawBody           []byte    `json:"-"`
     Embedding         []float64 `json:"embedding"`
     CreatedAt         string    `json:"created_at"`
 }
@@ -46,7 +47,13 @@
     return json.NewDecoder(body).Decode(r)
 }
 
-func (r *response) SetBody(_ io.Reader) error {
+func (r *response) SetBody(body io.Reader) error {
+    rawBody, err := io.ReadAll(body)
+    if err != nil {
+        return err
+    }
+
+    r.RawBody = rawBody
     return nil
 }
