
Commit a47a8ed

clean redundant file system I/O

1 parent f1f8ec1

File tree: 7 files changed, +132 −162 lines


agent/jsonify.go

Lines changed: 85 additions & 0 deletions
@@ -0,0 +1,85 @@
+package agent
+
+import (
+	"github.com/kshard/chatter"
+	"github.com/kshard/thinker"
+	"github.com/kshard/thinker/codec"
+	"github.com/kshard/thinker/memory"
+	"github.com/kshard/thinker/prompt/jsonify"
+	"github.com/kshard/thinker/reasoner"
+)
+
+// Jsonify implements request/response to LLMs, forcing the response to be a JSON array.
+type Jsonify[A any] struct {
+	*Automata[A, []string]
+	encoder   thinker.Encoder[A]
+	validator func([]string) error
+}
+
+func NewJsonify[A any](
+	llm chatter.Chatter,
+	attempts int,
+	encoder thinker.Encoder[A],
+	validator func([]string) error,
+) *Jsonify[A] {
+	w := &Jsonify[A]{encoder: encoder, validator: validator}
+	w.Automata = NewAutomata(llm,
+
+		// Configures memory for the agent. Typically, memory retains all of
+		// the agent's observations. Here, we use an infinite stream memory,
+		// recalling all observations.
+		memory.NewStream(memory.INFINITE, `
+			You are an autonomous agent who performs required tasks, providing results in JSON.
+		`),
+
+		// Configures the encoder to transform input of type A into a `chatter.Prompt`.
+		// Here, it delegates to the application-defined encoder and hardens the
+		// prompt to require a JSON array in the response.
+		codec.FromEncoder(w.encode),
+
+		// Configures the decoder to transform the LLM's output into type B.
+		// Here, it parses the reply into []string and scores it with the validator.
+		codec.FromDecoder(w.decode),
+
+		// Configures the reasoner, which determines the agent's next actions and prompts.
+		// Here, an epoch-limited reasoner wraps `deduct`: the agent may retry at most
+		// `attempts` times to obtain valid JSON before it fails.
+		reasoner.NewEpoch(attempts, reasoner.From(w.deduct)),
+	)
+
+	return w
+}
+
+func (w *Jsonify[A]) encode(in A) (prompt chatter.Prompt, err error) {
+	prompt, err = w.encoder.Encode(in)
+	if err == nil {
+		jsonify.Strings.Harden(&prompt)
+	}
+
+	return
+}
+
+func (w *Jsonify[A]) decode(reply chatter.Reply) (float64, []string, error) {
+	var seq []string
+	if err := jsonify.Strings.Decode(reply, &seq); err != nil {
+		return 0.0, nil, err
+	}
+
+	if err := w.validator(seq); err != nil {
+		return 0.1, nil, err
+	}
+
+	return 1.0, seq, nil
+}
+
+func (w *Jsonify[A]) deduct(state thinker.State[[]string]) (thinker.Phase, chatter.Prompt, error) {
+	// Provide feedback to the LLM while confidence in the results is low.
+	if state.Feedback != nil && state.Confidence < 1.0 {
+		var prompt chatter.Prompt
+		prompt.WithTask("Refine the previous request using the feedback below.")
+		prompt.With(state.Feedback)
+		return thinker.AGENT_REFINE, prompt, nil
+	}
+
+	// We have sufficient confidence, return results.
+	return thinker.AGENT_RETURN, chatter.Prompt{}, nil
+}
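For orientation, here is a minimal construction sketch of the new agent; it mirrors the refactored examples/02_rainbow/rainbow.go later in this commit, with `llm`, `encode`, and `validate` standing in for the application-supplied pieces:

// Sketch only, following examples/02_rainbow/rainbow.go below.
agt := agent.NewJsonify(
	llm,                       // any chatter.Chatter, e.g. wrapped with aio.NewLogger
	4,                         // up to 4 attempts to obtain valid JSON
	codec.FromEncoder(encode), // application-defined prompt builder
	validate,                  // rejects replies that fail domain checks
)

Compared with assembling NewAutomata by hand, the JSON decoding and the refine-on-feedback loop now come from Jsonify itself; the rainbow example below sheds exactly that boilerplate.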

agent/prompter.go

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ func NewPrompter[A any](llm chatter.Chatter, f func(A) (chatter.Prompt, error))
 		// Configures memory for the agent. Typically, memory retains all of
 		// the agent's observations. Here, we use a void memory, meaning no
 		// observations are retained.
-		memory.NewVoid(""),
+		memory.NewVoid(`You are an autonomous agent who performs tasks defined in the prompt.`),
 
 		// Configures the encoder to transform input of type A into a `chatter.Prompt`.
 		// Here, we use an encoder that converts input into prompt.
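For context, a minimal usage sketch of NewPrompter, modeled on the call sites in examples/06_text_processor/processor.go below. The `processor` encoder is a hypothetical stand-in, and PromptOnce is assumed to return the reply as a string, as its use in that example suggests:

package main

import (
	"context"
	"fmt"

	"github.com/kshard/chatter"
	"github.com/kshard/thinker/agent"
)

// hypothetical application-defined encoder: builds the prompt from input text
func processor(text string) (prompt chatter.Prompt, err error) {
	prompt.WithTask("Extract keywords from the following text: " + text)
	return
}

func run(llm chatter.Chatter) error {
	wrk := agent.NewPrompter(llm, processor)

	// one-shot prompt: void memory means nothing carries over between calls
	kwd, err := wrk.PromptOnce(context.Background(), "The quick brown fox jumps over the lazy dog.")
	if err != nil {
		return err
	}

	fmt.Println(kwd)
	return nil
}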

examples/02_rainbow/rainbow.go

Lines changed: 31 additions & 41 deletions
@@ -20,9 +20,7 @@ import (
 	"github.com/kshard/thinker"
 	"github.com/kshard/thinker/agent"
 	"github.com/kshard/thinker/codec"
-	"github.com/kshard/thinker/memory"
 	"github.com/kshard/thinker/prompt/jsonify"
-	"github.com/kshard/thinker/reasoner"
 )
 
 // Ask LLMs about colors of rainbow
@@ -38,20 +36,6 @@ func encode(any) (prompt chatter.Prompt, err error) {
 	return
 }
 
-// Parse LLMs response into sequence of colors
-func decode(reply chatter.Reply) (float64, []string, error) {
-	var seq []string
-	if err := jsonify.Strings.Decode(reply, &seq); err != nil {
-		return 0.0, nil, err
-	}
-
-	if err := validate(seq); err != nil {
-		return 0.1, nil, err
-	}
-
-	return 1.0, seq, nil
-}
-
 // Validate sequence of colors, expecting invisible spectrum.
 func validate(seq []string) error {
 	for _, x := range seq {
@@ -67,19 +51,33 @@ func validate(seq []string) error {
 	)
 }
 
-// deduct new goal for the agent to pursue.
-func deduct(state thinker.State[[]string]) (thinker.Phase, chatter.Prompt, error) {
-	// Provide feedback to LLM if there are no confidence about the results
-	if state.Feedback != nil && state.Confidence < 1.0 {
-		var prompt chatter.Prompt
-		prompt.WithTask("Refine the previous request using the feedback below.")
-		prompt.With(state.Feedback)
-		return thinker.AGENT_REFINE, prompt, nil
-	}
+// Parse LLMs response into sequence of colors
+// func decode(reply chatter.Reply) (float64, []string, error) {
+// 	var seq []string
+// 	if err := jsonify.Strings.Decode(reply, &seq); err != nil {
+// 		return 0.0, nil, err
+// 	}
 
-	// We have sufficient confidence, return results
-	return thinker.AGENT_RETURN, chatter.Prompt{}, nil
-}
+// 	if err := validate(seq); err != nil {
+// 		return 0.1, nil, err
+// 	}
+
+// 	return 1.0, seq, nil
+// }
+
+// deduct new goal for the agent to pursue.
+// func deduct(state thinker.State[[]string]) (thinker.Phase, chatter.Prompt, error) {
+// 	// Provide feedback to LLM if there are no confidence about the results
+// 	if state.Feedback != nil && state.Confidence < 1.0 {
+// 		var prompt chatter.Prompt
+// 		prompt.WithTask("Refine the previous request using the feedback below.")
+// 		prompt.With(state.Feedback)
+// 		return thinker.AGENT_REFINE, prompt, nil
+// 	}
+
+// 	// We have sufficient confidence, return results
+// 	return thinker.AGENT_RETURN, chatter.Prompt{}, nil
+// }
 
 func main() {
 	// create instance of LLM API, see doc/HOWTO.md for details
@@ -88,27 +86,19 @@
 		panic(err)
 	}
 
-	// We create an agent that takes string (sentence) and returns string (anagram).
-	agt := agent.NewAutomata(
+	agt := agent.NewJsonify(
 		// enable debug output for LLMs dialog
 		aio.NewLogger(os.Stdout, llm),
 
-		// Configures memory for the agent. Typically, memory retains all of
-		// the agent's observations. Here, we use a stream memory that holds all observations.
-		memory.NewStream(memory.INFINITE, "You are agent who remembers and uses earlier chat history."),
+		// attempts to request JSON
+		4,
 
 		// Configures the encoder to transform input of type A into a `chatter.Prompt`.
 		// Here, we use an encoder that builds prompt.
		codec.FromEncoder(encode),
 
-		// Configure the decoder to transform output of LLM into type B.
-		// Here, we use custom (app specific) codec that parses LLM response into []string.
-		codec.FromDecoder(decode),
-
-		// Configures the reasoner, which determines the agent's next actions and prompts.
-		// Here, we use custom (app specific) reasoner. The agent is restricted to execute
-		// 4 itterattions before it fails.
-		reasoner.NewEpoch(4, reasoner.From(deduct)),
+		// Validator function, checks correctness of the response
+		validate,
 	)
 
 	// We ask agent about the rainbow colors.

examples/06_text_processor/processor.go

Lines changed: 12 additions & 29 deletions
@@ -13,12 +13,12 @@ import (
 	"fmt"
 
 	"github.com/fogfish/stream/lfs"
+	"github.com/fogfish/stream/spool"
 	"github.com/kshard/chatter"
 	"github.com/kshard/chatter/llm/autoconfig"
 	"github.com/kshard/thinker/agent"
 	"github.com/kshard/thinker/codec"
 	"github.com/kshard/thinker/command"
-	"github.com/kshard/thinker/x/xfs"
 )
 
 func bootstrap(n int) (prompt chatter.Prompt, err error) {
@@ -55,6 +55,7 @@ func main() {
 	if err != nil {
 		panic(err)
 	}
+	q := spool.New(r, w, spool.Mutable)
 
 	// We need 10 files, let's use agents to get it
 	fmt.Printf("==> creating files ...\n")
@@ -69,32 +70,14 @@
 	wrk := agent.NewPrompter(llm, processor)
 
 	fmt.Printf("==> processing files ...\n")
-	xfs.NewWorker(r, w).
-		Walk(context.Background(), "/",
-			func(ctx context.Context, w *xfs.Worker, path string) error {
-				fmt.Printf("==> %s\n", path)
-
-				txt, err := w.ReadFile(path)
-				if err != nil {
-					return err
-				}
-
-				kwd, err := wrk.PromptOnce(ctx, string(txt))
-				if err != nil {
-					return err
-				}
-
-				err = w.WriteFile(path, []byte(kwd))
-				if err != nil {
-					return err
-				}
-
-				err = w.Reader.Remove(path)
-				if err != nil {
-					return err
-				}
-
-				return nil
-			},
-		)
+	q.ForEachFile(context.Background(), "/",
+		func(ctx context.Context, path string, txt []byte) ([]byte, error) {
+			fmt.Printf("==> %v ...\n", path)
+			kwd, err := wrk.PromptOnce(ctx, string(txt))
+			if err != nil {
+				return nil, err
+			}
+			return []byte(kwd), nil
+		},
+	)
 }
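The deleted xfs.NewWorker walk performed the read, prompt, write, and remove steps by hand for every file; spool collapses that into a single read-transform-write traversal, which is the redundant file system I/O the commit title refers to. A minimal sketch of the pattern, assuming `r` and `w` are the lfs file systems created earlier in this file and `transform` is any `func([]byte) ([]byte, error)`:

// Sketch only: spool pairs reader and writer file systems and applies the
// callback to every file under the path, writing the result back.
q := spool.New(r, w, spool.Mutable)
q.ForEachFile(context.Background(), "/",
	func(ctx context.Context, path string, txt []byte) ([]byte, error) {
		return transform(txt)
	},
)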

examples/go.mod

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@ require (
 	github.com/aws/aws-lambda-go v1.47.0
 	github.com/aws/jsii-runtime-go v1.109.0
 	github.com/fogfish/scud v0.10.5
-	github.com/fogfish/stream v1.2.2
+	github.com/fogfish/stream v1.2.3
 	github.com/fogfish/typestep v0.0.1
 	github.com/kshard/chatter v0.5.1
 	github.com/kshard/chatter/llm/autoconfig v0.0.1

examples/go.sum

Lines changed: 2 additions & 2 deletions
@@ -80,8 +80,8 @@ github.com/fogfish/opts v0.0.5 h1:Bh3Nucr1kx7G1F0Tq3DxO14/qYgmR6C2GjWr2k6O+Oc=
 github.com/fogfish/opts v0.0.5/go.mod h1:+HM1YrMsTzfouZRoHfPOsGT9VZw+0ZBKZ36PMqoNFqM=
 github.com/fogfish/scud v0.10.5 h1:B4nSxNTUmUWeFvGwNp7SkKLKKpuX18a2suSeXML6x3E=
 github.com/fogfish/scud v0.10.5/go.mod h1:QMWoWSpEDZT1n9ky1qBylBHcm+kXIWBK5ha/S2kIPO4=
-github.com/fogfish/stream v1.2.2 h1:aV/kMoBupz2p7JDf/ZNM+wpOaDcEj8lFdgxgOu2nzCM=
-github.com/fogfish/stream v1.2.2/go.mod h1:zJGIcKlB0e+VxHpf/GnHPnYYEGRM6Mq8cIGA7O05e9Q=
+github.com/fogfish/stream v1.2.3 h1:prL04GKDcIpDm1oe6RDIfbPnFr0YL27ejPclSZHyONE=
+github.com/fogfish/stream v1.2.3/go.mod h1:zJGIcKlB0e+VxHpf/GnHPnYYEGRM6Mq8cIGA7O05e9Q=
 github.com/fogfish/typestep v0.0.1 h1:1gkJl7niFKYjiK5x+gcbJ3RNWfT+Ugwgmyw4Neim/4A=
 github.com/fogfish/typestep v0.0.1/go.mod h1:j9naUk91Owmag5b2WHcMvpDGt3iWG0yMH2fRztaLhSk=
 github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=

x/xfs/xfs.go

Lines changed: 0 additions & 88 deletions
This file was deleted.
