
[WIP] Langchain demo #374

Merged: 11 commits, Feb 9, 2025
4 changes: 4 additions & 0 deletions pyproject.toml
@@ -62,6 +62,9 @@ dependencies = [
"tomlkit>=0.13.2",
"python-semantic-release",
"uvicorn[standard]>=0.30.0",
"langchain[openai]",
"langchain_core",
"langchain_openai",
]

license = { text = "Apache-2.0" }
@@ -146,6 +149,7 @@ dev-dependencies = [
"pytest-asyncio<1.0.0,>=0.21.1",
"loguru>=0.7.3",
"httpx<0.28.2,>=0.28.1",
"jupyterlab>=4.3.5",
]


Empty file.
54 changes: 54 additions & 0 deletions src/codegen/extensions/langchain/__init__.py
@@ -0,0 +1,54 @@
"""Langchain tools for workspace operations."""

from langchain.tools import BaseTool

from codegen import Codebase

from .tools import (
    CommitTool,
    CreateFileTool,
    DeleteFileTool,
    EditFileTool,
    ListDirectoryTool,
    RevealSymbolTool,
    SearchTool,
    SemanticEditTool,
    ViewFileTool,
)

__all__ = [
    # Tool classes
    "CommitTool",
    "CreateFileTool",
    "DeleteFileTool",
    "EditFileTool",
    "ListDirectoryTool",
    "RevealSymbolTool",
    "SearchTool",
    "SemanticEditTool",
    "ViewFileTool",
    # Helper functions
    "get_workspace_tools",
]


def get_workspace_tools(codebase: Codebase) -> list[BaseTool]:
    """Get all workspace tools initialized with a codebase.

    Args:
        codebase: The codebase to operate on

    Returns:
        List of initialized Langchain tools
    """
    return [
        ViewFileTool(codebase),
        ListDirectoryTool(codebase),
        SearchTool(codebase),
        EditFileTool(codebase),
        CreateFileTool(codebase),
        DeleteFileTool(codebase),
        CommitTool(codebase),
        RevealSymbolTool(codebase),
        SemanticEditTool(codebase),
    ]
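
For reference, a minimal usage sketch of the new get_workspace_tools helper (not part of this PR; the repository is illustrative and the Codebase.from_repo call mirrors the demo in agent.py below):

from codegen import Codebase
from codegen.extensions.langchain import get_workspace_tools
from codegen.sdk.enums import ProgrammingLanguage

# Load a codebase to operate on (repo name is illustrative)
codebase = Codebase.from_repo("fastapi/fastapi", programming_language=ProgrammingLanguage.PYTHON)

# Each entry is a Langchain BaseTool bound to this codebase
tools = get_workspace_tools(codebase)
for tool in tools:
    print(tool.name)
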
109 changes: 109 additions & 0 deletions src/codegen/extensions/langchain/agent.py
@@ -0,0 +1,109 @@
"""Demo implementation of an agent with Codegen tools."""

from langchain import hub
from langchain.agents import AgentExecutor
from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
from langchain_core.chat_history import ChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_openai import ChatOpenAI

from codegen import Codebase
from codegen.sdk.enums import ProgrammingLanguage

from .tools import (
    CommitTool,
    CreateFileTool,
    DeleteFileTool,
    EditFileTool,
    ListDirectoryTool,
    MoveSymbolTool,
    RenameFileTool,
    RevealSymbolTool,
    SearchTool,
    SemanticEditTool,
    ViewFileTool,
)


def create_codebase_agent(
    codebase: Codebase,
    model_name: str = "gpt-4",
    temperature: float = 0,
    verbose: bool = True,
) -> RunnableWithMessageHistory:
"""Create an agent with all codebase tools.

Args:
codebase: The codebase to operate on
model_name: Name of the model to use (default: gpt-4)
temperature: Model temperature (default: 0)
verbose: Whether to print agent's thought process (default: True)

Returns:
Initialized agent with message history
"""
# Initialize language model
llm = ChatOpenAI(
model_name=model_name,
temperature=temperature,
)

# Get all codebase tools
tools = [
ViewFileTool(codebase),
ListDirectoryTool(codebase),
SearchTool(codebase),
EditFileTool(codebase),
CreateFileTool(codebase),
DeleteFileTool(codebase),
RenameFileTool(codebase),
MoveSymbolTool(codebase),
RevealSymbolTool(codebase),
SemanticEditTool(codebase),
CommitTool(codebase),
]

# Get the prompt to use
prompt = hub.pull("hwchase17/openai-functions-agent")

# Create the agent
agent = OpenAIFunctionsAgent(
llm=llm,
tools=tools,
prompt=prompt,
)

# Create the agent executor
agent_executor = AgentExecutor(
agent=agent,
tools=tools,
verbose=verbose,
)

# Create message history handler
message_history = ChatMessageHistory()

# Wrap with message history
return RunnableWithMessageHistory(
agent_executor,
lambda session_id: message_history,
input_messages_key="input",
history_messages_key="chat_history",
)


if __name__ == "__main__":
    # Initialize codebase
    print("Initializing codebase...")
    codebase = Codebase.from_repo("fastapi/fastapi", programming_language=ProgrammingLanguage.PYTHON)

    # Create agent with history
    print("Creating agent...")
    agent = create_codebase_agent(codebase)

    print("\nAsking agent to analyze symbol relationships...")
    result = agent.invoke(
        {"input": "What are the dependencies of the reveal_symbol function?"},
        config={"configurable": {"session_id": "demo"}},
    )
    # AgentExecutor returns its final answer under the "output" key
    print("Agent output:", result["output"])