Skip to content

Commit 37ff127

Browse files
committed
Working on a code specialist agent
1 parent efe36a6 commit 37ff127

File tree

4 files changed

+90
-2
lines changed

4 files changed

+90
-2
lines changed
Binary file not shown.

dev_tools/adr_agent.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -72,10 +72,10 @@ def generate_high_level_steps(self, feature_request: str) -> List[str]:
7272
return response["result"]
7373

7474

75-
# Main execution
75+
# Example execution
7676
if __name__ == "__main__":
7777
adr_agent = ADRAgent("./docs/adr", retrieval_model_name="codellama:34b", embedding_model_name="mistral") # Replace with your ADR directory path and preferred model
78-
feature_request1 = "Implement put options pricing using Black-Scholes analytical formulas"
78+
feature_request1 = "Implement put options pricing using Black-Scholes analytical formulas. I am only interested in the price."
7979
feature_request2 = "Implement options pricing using Cox Ross Rubinstein Binomial Tree method."
8080
feature_request3 = "Implement options pricing using Heston Model."
8181
feature_request4 = "Implement interest rate forward pricing using discount curves."

dev_tools/code_specialist_agent.py

Lines changed: 88 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
import os
2+
from typing import List, Dict
3+
from langchain_community.vectorstores import Chroma
4+
from langchain_ollama import OllamaEmbeddings, ChatOllama
5+
from langchain.chains import RetrievalQA
6+
from langchain.schema import Document
7+
import julia_chunks
8+
9+
class CodingExpertAgent:
    """
    Retrieval-augmented agent for answering questions about a Julia codebase.

    On construction it chunks the Julia sources in *julia_directory* (via
    ``julia_chunks.chunk_by_docstring``), embeds the chunks into a Chroma
    vector store, and wires up a RetrievalQA chain backed by an Ollama chat
    model so that natural-language questions about the code can be answered
    with grounded snippets.
    """

    def __init__(self, julia_directory: str, retrieval_model_name: str = "codellama:34b", embedding_model_name: str = "mistral"):
        """
        Initialize the Coding Expert Agent by loading and indexing Julia code.

        Args:
            julia_directory: Path to the directory containing Julia source files.
            retrieval_model_name: Ollama chat model used to answer questions.
            embedding_model_name: Ollama model used to embed code chunks.
        """
        self.julia_directory = julia_directory
        self.retrieval_model_name = retrieval_model_name
        self.embedding_model_name = embedding_model_name
        # Build the vector index first; the QA chain retrieves from it.
        self.code_vectorstore = self.index_julia_code()
        self.llm_retrieval = ChatOllama(model=self.retrieval_model_name)  # AI model for answering code questions
        self.code_qa = RetrievalQA.from_chain_type(self.llm_retrieval, retriever=self.code_vectorstore.as_retriever())

    def load_julia_code(self) -> List[Document]:
        """
        Load all Julia functions and structs with docstrings from the directory,
        parse them, and convert them into LangChain Documents for indexing.

        Returns:
            One Document per code chunk, with source-location metadata attached.
        """
        chunks = julia_chunks.chunk_by_docstring(self.julia_directory)
        documents = []

        for chunk in chunks:
            # Combine type, name, docstring and definition into one searchable text.
            content = f"""
            Type: {chunk['type']}
            Name: {chunk['name']}

            Docstring:
            {chunk['docstring']}

            Definition:
            {chunk['definition_code']}
            """
            # Keep provenance so answers can point back to the source file/lines.
            metadata = {
                "filename": chunk["metadata"]["filename"],
                "start_line": chunk["metadata"]["start_line"],
                "end_line": chunk["metadata"]["end_line"],
                "name": chunk["name"],
            }
            documents.append(Document(page_content=content, metadata=metadata))

        return documents

    def index_julia_code(self):
        """
        Index Julia code chunks in ChromaDB for semantic search.

        Returns:
            An in-memory Chroma vector store built from the loaded documents.
        """
        julia_documents = self.load_julia_code()
        embedding_model = OllamaEmbeddings(model=self.embedding_model_name)
        return Chroma.from_documents(julia_documents, embedding=embedding_model)

    def retrieve_relevant_code(self, query: str, k: int = 3) -> List[Document]:
        """
        Fetch the most relevant code snippets using vector similarity search.

        Args:
            query: Natural-language search query.
            k: Number of snippets to return (default 3).

        Returns:
            The top-k matching Documents.
        """
        retriever = self.code_vectorstore.as_retriever(search_kwargs={"k": k})
        # BUG FIX: BaseRetriever.invoke() returns a List[Document] directly,
        # not a dict — the previous `results["documents"]` raised
        # `TypeError: list indices must be integers or slices, not str`.
        return retriever.invoke(query)

    def answer_code_question(self, question: str) -> str:
        """
        Use AI-powered RetrievalQA to answer a code-related question.

        Args:
            question: The code-related question to answer.

        Returns:
            The model's answer text.
        """
        prompt = f"""
        You are an AI agent assisting with Hedgehog2.jl, a Julia derivatives pricing library.
        Use relevant code snippets and docstrings to provide accurate and well-structured answers with code from the library.

        Question: {question}
        """
        response = self.code_qa.invoke(prompt)
        return response["result"]
78+
79+
80+
# Example execution
if __name__ == "__main__":
    # Point the agent at the library's Julia sources.
    agent = CodingExpertAgent("./src", retrieval_model_name="codellama:34b", embedding_model_name="mistral")

    sample_questions = [
        "How can I price a call option with spot price 7, strike 7, rate 0.4, volatility 0.4 using Hedgehog2?",
        "What are the fields of the DiscountCurve struct?",
        "How is Monte Carlo simulation implemented in Hedgehog2.jl?",
    ]

    # Demo: answer only the first sample question.
    print(agent.answer_code_question(sample_questions[0]))
File renamed without changes.

0 commit comments

Comments
 (0)