
Commit f1f8ec1

Improve chaining (#16)
* Improve chaining: use idiomatic Go functions to chain agents. Support chaining on AWS using Step Functions. Add a utility for chaining agents over the file system.
1 parent 654d5df commit f1f8ec1

20 files changed (+589, −280 lines)


README.md

Lines changed: 13 additions & 4 deletions
@@ -53,7 +53,7 @@ In this library, an agent is defined as a side-effect function `ƒ: A ⟼ B`, wh
 - [Encoder \& Decoder](#encoder--decoder)
 - [Commands \& Tools](#commands--tools)
 - [Agent profiles](#agent-profiles)
-- [Agent chains](#agent-chains)
+- [Agent composition (chaining)](#agent-composition-chaining)
 - [FAQ](#faq)
 - [How To Contribute](#how-to-contribute)
 - [commit message](#commit-message)
@@ -266,12 +266,13 @@ The application assembles agents from three elements: memory, reasoner and codec
 * `Worker` uses LLMs and external tools to solve the task.
 
 
-## Agent chains
+## Agent composition (chaining)
 
-The `thinker` library does not provide built-in mechanisms for chaining agents. Instead, it encourages the use of standard Go techniques either pure functional chaining or chaining of go routines (e.g. [golem/pipe](https://github.com/fogfish/golem)).
+The `thinker` library does not provide built-in mechanisms for chaining agents. Instead, it encourages the use of idiomatic Go and pure functional chaining.
 
-The [chain example](./examples/05_chain/chain.go) demostrates off-the-shelf techniques for agents chaining.
+The ["Chain" example](./examples/05_chain/chain.go) demonstrates off-the-shelf techniques for chaining agents.
 
+The ["Text Processor" example](./06_text_processor/processor.go) demonstrates chaining an agent with file system I/O.
 
 ## FAQ
 
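As a minimal sketch of the pure-functional chaining recommended above (an editorial illustration, not part of the library): the hypothetical `chain` helper below composes two agents, modelled as side-effect functions ƒ: A ⟼ B and g: B ⟼ C, into a single agent of type A ⟼ C. Automata methods such as `Prompt` and `PromptOnce` already match the expected function shape.

```go
package sketch

import (
	"context"

	"github.com/kshard/chatter"
)

// chain composes agent f: A ⟼ B with agent g: B ⟼ C into a single
// agent A ⟼ C, propagating the first error it encounters.
func chain[A, B, C any](
	f func(context.Context, A, ...chatter.Opt) (B, error),
	g func(context.Context, B, ...chatter.Opt) (C, error),
) func(context.Context, A, ...chatter.Opt) (C, error) {
	return func(ctx context.Context, in A, opt ...chatter.Opt) (C, error) {
		var nul C
		b, err := f(ctx, in, opt...)
		if err != nil {
			return nul, err
		}
		return g(ctx, b, opt...)
	}
}
```

For two string-typed agents `agtA` and `agtB`, something like `pipeline := chain(agtA.PromptOnce, agtB.PromptOnce)` would then yield one callable that runs both agents in sequence.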
@@ -305,6 +306,14 @@ func (*Agent) Deduct(thinker.State[string]) (thinker.Phase, chatter.Prompt, erro
 ```
 </details>
 
+<details>
+<summary>How to deploy agents to AWS?</summary>
+You might consider an AWS serverless solution for hosting agents.
+AWS Step Functions provides chaining of agents out of the box, which is the recommended approach.
+
+The [typestep](https://github.com/fogfish/typestep) library provides a simple, type-safe notation for defining AWS Step Functions in Go.
+</details>
+
 
 
 ## How To Contribute
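To make the FAQ entry above more concrete, here is one possible shape for hosting a single agent behind AWS Lambda so that Step Functions can chain several of them. This is an editorial sketch assuming the standard `aws-lambda-go` runtime; the agent construction (LLM client, memory, codec, reasoner) is deliberately omitted.

```go
package main

import (
	"context"

	"github.com/aws/aws-lambda-go/lambda"
	"github.com/kshard/thinker/agent"
)

// story is an agent ƒ: string ⟼ string; its assembly from an LLM client,
// memory, codec and reasoner is omitted here (see the README sections above).
var story *agent.Automata[string, string]

// handler wraps one agent invocation; AWS Step Functions can then wire
// several such Lambda-hosted agents into a workflow (state machine).
func handler(ctx context.Context, input string) (string, error) {
	return story.PromptOnce(ctx, input)
}

func main() {
	// story = agent.NewAutomata(llm, memory, encoder, decoder, reasoner)
	lambda.Start(handler)
}
```

Each agent becomes one task state in the state machine; the typestep library mentioned above can then describe that state machine in Go.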

agent/automata.go

Lines changed: 7 additions & 14 deletions
@@ -28,9 +28,9 @@ type Automata[A, B any] struct {
 func NewAutomata[A, B any](
 	llm chatter.Chatter,
 	memory thinker.Memory,
-	reasoner thinker.Reasoner[B],
 	encoder thinker.Encoder[A],
 	decoder thinker.Decoder[B],
+	reasoner thinker.Reasoner[B],
 ) *Automata[A, B] {
 	return &Automata[A, B]{
 		llm: llm,
@@ -41,25 +41,18 @@ func NewAutomata[A, B any](
 	}
 }
 
-// Prompting agent to perform a work and forgeting the state.
-// The operation is composable using Go-channels
-func (automata *Automata[A, B]) Echo(in A) (B, error) {
-	automata.Purge()
-	return automata.Prompt(context.Background(), in)
-}
-
-// Prompting agent to perform a work and prerve the state.
-// The operation is composable using Go-channels
-func (automata *Automata[A, B]) Seek(in A) (B, error) {
-	return automata.Prompt(context.Background(), in)
-}
-
 // Purge automata's memory
 func (automata *Automata[A, B]) Purge() {
 	automata.reasoner.Purge()
 	automata.memory.Purge()
 }
 
+// Forget the agent state and prompt within a new session
+func (automata *Automata[A, B]) PromptOnce(ctx context.Context, input A, opt ...chatter.Opt) (B, error) {
+	automata.Purge()
+	return automata.Prompt(ctx, input, opt...)
+}
+
 // Prompt agent
 func (automata *Automata[A, B]) Prompt(ctx context.Context, input A, opt ...chatter.Opt) (B, error) {
 	var nul B
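For reference, a small usage sketch of the entry points above (editorial, with illustrative prompts; the automata construction is omitted): `PromptOnce` purges memory and reasoner state before delegating to `Prompt`, so every call starts a fresh session, whereas `Prompt` continues the current one.

```go
package sketch

import (
	"context"
	"fmt"

	"github.com/kshard/thinker/agent"
)

// run exercises a string-to-string automata assembled elsewhere.
func run(ctx context.Context, agt *agent.Automata[string, string]) error {
	// Fresh session: memory and reasoner are purged before prompting.
	story, err := agt.PromptOnce(ctx, "Write a short story about a cat.")
	if err != nil {
		return err
	}

	// Same session: prior context is kept, subject to the configured memory.
	summary, err := agt.Prompt(ctx, "Summarize the story in one sentence.")
	if err != nil {
		return err
	}

	fmt.Println(story, summary)
	return nil
}
```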

agent/prompter.go

Lines changed: 5 additions & 5 deletions
@@ -30,18 +30,18 @@ func NewPrompter[A any](llm chatter.Chatter, f func(A) (chatter.Prompt, error))
 		// observations are retained.
 		memory.NewVoid(""),
 
-		// Configures the reasoner, which determines the agent's next actions and prompts.
-		// Here, we use a void reasoner, meaning no reasoning is performed—the agent
-		// simply returns the result.
-		reasoner.NewVoid[string](),
-
 		// Configures the encoder to transform input of type A into a `chatter.Prompt`.
 		// Here, we use an encoder that converts input into prompt.
 		codec.FromEncoder(f),
 
 		// Configure the decoder to transform output of LLM into type B.
 		// Here, we use the identity decoder that returns LLMs output as-is.
 		codec.DecoderID,
+
+		// Configures the reasoner, which determines the agent's next actions and prompts.
+		// Here, we use a void reasoner, meaning no reasoning is performed—the agent
+		// simply returns the result.
+		reasoner.NewVoid[string](),
 	)
 
 	return w
agent/worker.go

Lines changed: 5 additions & 5 deletions
@@ -44,18 +44,18 @@ func NewWorker[A any](
 			You are using and remember context from earlier chat history to execute the task.
 		`),
 
-		// Configures the reasoner, which determines the agent's next actions and prompts.
-		// Here, we use a sequence of command reasoner, it assumes that input prompt is
-		// the workflow based on command. LLM guided to execute entire workflow.
-		reasoner.NewEpoch(attempts, reasoner.NewCmdSeq()),
-
 		// Configures the encoder to transform input of type A into a `chatter.Prompt`.
 		// Here, it is defined by application
 		codec.FromEncoder(w.encode),
 
 		// Configure the decoder to transform output of LLM into type B.
 		// The registry knows how to interpret the LLM's reply and executed the command.
 		registry,
+
+		// Configures the reasoner, which determines the agent's next actions and prompts.
+		// Here, we use a sequence of command reasoner, it assumes that input prompt is
+		// the workflow based on command. LLM guided to execute entire workflow.
+		reasoner.NewEpoch(attempts, reasoner.NewCmdSeq()),
 	)
 
 	return w

command/xfs/xfs.go

Lines changed: 0 additions & 137 deletions
This file was deleted.

examples/02_rainbow/rainbow.go

Lines changed: 5 additions & 5 deletions
@@ -97,18 +97,18 @@ func main() {
 		// the agent's observations. Here, we use a stream memory that holds all observations.
 		memory.NewStream(memory.INFINITE, "You are agent who remembers and uses earlier chat history."),
 
-		// Configures the reasoner, which determines the agent's next actions and prompts.
-		// Here, we use custom (app specific) reasoner. The agent is restricted to execute
-		// 4 itterattions before it fails.
-		reasoner.NewEpoch(4, reasoner.From(deduct)),
-
 		// Configures the encoder to transform input of type A into a `chatter.Prompt`.
 		// Here, we use an encoder that builds prompt.
 		codec.FromEncoder(encode),
 
 		// Configure the decoder to transform output of LLM into type B.
 		// Here, we use custom (app specific) codec that parses LLM response into []string.
 		codec.FromDecoder(decode),
+
+		// Configures the reasoner, which determines the agent's next actions and prompts.
+		// Here, we use custom (app specific) reasoner. The agent is restricted to execute
+		// 4 itterattions before it fails.
+		reasoner.NewEpoch(4, reasoner.From(deduct)),
 	)
 
 	// We ask agent about the rainbow colors.

examples/03_script/script.go

Lines changed: 5 additions & 5 deletions
@@ -98,18 +98,18 @@ func main() {
 			You are using and remember context from earlier chat history to execute the task.
 		`),
 
-		// Configures the reasoner, which determines the agent's next actions and prompts.
-		// Here, we use custom (app specific) reasoner. The agent is restricted to execute
-		// 4 itterattions before it fails.
-		reasoner.NewEpoch(4, reasoner.From(deduct)),
-
 		// Configures the encoder to transform input of type A into a `chatter.Prompt`.
 		// Here, we use an encoder that builds prompt.
 		codec.FromEncoder(encode),
 
 		// Configure the decoder to transform output of LLM into type B.
 		// Here, we use the tool registry to "decode" output into command call.
 		registry,
+
+		// Configures the reasoner, which determines the agent's next actions and prompts.
+		// Here, we use custom (app specific) reasoner. The agent is restricted to execute
+		// 4 itterattions before it fails.
+		reasoner.NewEpoch(4, reasoner.From(deduct)),
 	)
 
 	// Execute agent

examples/05_chain/chain.go

Lines changed: 23 additions & 35 deletions
@@ -13,8 +13,6 @@ import (
 	"fmt"
 	"os"
 
-	"github.com/fogfish/golem/pipe/v2"
-	"github.com/fogfish/golem/pure/monoid"
 	"github.com/kshard/chatter"
 	"github.com/kshard/chatter/llm/autoconfig"
 	"github.com/kshard/thinker"
@@ -81,48 +79,38 @@ func main() {
 	agtB := NewAgentB(llm)
 
 	//
-	// chaining agents using Go channels and fogfish/golem/pipe
-
-	// create context to manage the chain
-	ctx, close := context.WithCancel(context.Background())
-
-	// Input to the chain
-	who := pipe.Seq("Cat", "Dog", "Cow", "Pig")
-
-	// Use agent to transform input into story
-	story := pipe.StdErr(pipe.Map(ctx, who, pipe.Lift(agtA.Seek)))
-
-	// Write stories into file system
-	file := pipe.StdErr(pipe.Map(ctx, story, pipe.Lift(txt2file)))
-
-	// Wait until all files are written
-	syn := pipe.Fold(ctx, file, mString)
+	// chaining agents using pure Go
+	for _, who := range []string{"Cat", "Dog", "Cow", "Pig"} {
+		// Use agent to transform animal input into story
+		story, err := agtA.PromptOnce(context.Background(), who)
+		if err != nil {
+			panic(err)
+		}
+
+		// Write stories into file system
+		err = txt2file(story)
+		if err != nil {
+			panic(err)
+		}
+	}
 
 	// Use agent to conduct analysis of local files
-	act := pipe.StdErr(pipe.Map(ctx, syn, pipe.Lift(agtB.Echo)))
-
-	// Output the result of the pipeline
-	<-pipe.ForEach(ctx, act, pipe.Pure(stdout))
+	reply, err := agtB.PromptOnce(context.Background(), "")
+	if err != nil {
+		panic(err)
+	}
 
-	close()
+	fmt.Printf("==> %s\n", reply.Output)
 }
 
-func txt2file(x string) (string, error) {
+func txt2file(x string) error {
 	fd, err := os.CreateTemp("/tmp/script", "*.txt")
 	if err != nil {
-		return "", err
+		return err
 	}
 	defer fd.Close()
 	if _, err := fd.WriteString(x); err != nil {
-		return "", err
+		return err
 	}
-	return fd.Name(), nil
-}
-
-// naive string monoid
-var mString = monoid.FromOp("", func(a string, b string) string { return a + " " + b })
-
-func stdout(x thinker.CmdOut) thinker.CmdOut {
-	fmt.Printf("==> %s\n", x.Output)
-	return x
+	return nil
 }