Sign up for my FREE upcoming seminar at SoftUni:
LangChain in Action: How to Build Intelligent AI Applications Easily and Efficiently?

LangSmith

The example below wires LangSmith tracing into a small command-line agent: a ChatOpenAI model, three custom tools (calculator, timer, and weather), and an AgentExecutor whose runs are sent to LangSmith through the LangChainTracer callback.

import { ChatOpenAI } from "@langchain/openai";
import { AgentExecutor, createOpenAIFunctionsAgent } from "langchain/agents";
import { Tool } from "langchain/tools";
import { ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
import { HumanMessage, AIMessage } from "@langchain/core/messages";
import { LangChainTracer } from "langchain/callbacks";
import dotenv from "dotenv";
import readline from "readline";

dotenv.config();

// === LLM Model ===
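// gpt-3.5-turbo supports OpenAI function calling, which the functions agent below
// relies on; temperature 0 keeps the model's tool selection as predictable as possible.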
const model = new ChatOpenAI({
  temperature: 0,
  modelName: "gpt-3.5-turbo",
  apiKey: process.env.OPENAI_API_KEY,
});

// === Tools ===
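// Each tool extends LangChain's Tool base class and implements _call(input).
// The name and description are what the model sees when choosing a tool.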
class CalculatorTool extends Tool {
  name = "calculator";
  description = "Useful for when you need to answer questions about math";

  async _call(input: string) {
    try {
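      // NOTE: eval() is convenient for a local demo, but never evaluate untrusted
      // input in production; swap in a proper math-expression parser instead.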
      return eval(input).toString();
    } catch (error) {
      console.error(error);
      return "Error: " + error;
    }
  }
}

class TimerTool extends Tool {
  name = "timer";
  description = "Returns the current local time. Useful when the user asks what time it is.";

  async _call(_input: string) {
    return new Date().toLocaleTimeString();
  }
}

class WeatherTool extends Tool {
  name = "weather";
  description = "Fetches the current weather for a given city. Provide the city name as input.";

  async _call(city: string) {
    if (!city) return "Error: Please provide a city name.";

    try {
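      // Assumes an OpenWeatherMap-style current-weather endpoint, which matches the
      // response fields read below (cod, name, weather[0].description, main.temp).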
      const response = await fetch(`${process.env.WEATHER_API_URL}?q=${city}&appid=${process.env.WEATHER_API_KEY}&units=metric`);
      const data = await response.json();

      if (data.cod !== 200) return `Error: ${data.message}`;

      return `The weather in ${data.name} is ${data.weather[0].description} with a temperature of ${data.main.temp}°C.`;
    } catch (error) {
      return "Error fetching weather data.";
    }
  }
}

// === Main Run Function ===
async function run() {
  const tools = [new CalculatorTool(), new TimerTool(), new WeatherTool()];
  // Keep the history as LangChain message objects so that
  // MessagesPlaceholder("chat_history") receives the type it expects.
  let chat_history: (HumanMessage | AIMessage)[] = [];

  const prompt = ChatPromptTemplate.fromMessages([
    ["system", "You are a helpful AI assistant with access to tools. Follow these steps:\n" +
      "1. Think about the user's question\n" +
      "2. If a tool is needed, decide which one to use\n" +
      "3. Call the tool and observe its result\n" +
      "4. Respond to the user in a structured format\n" +
      "Do not respond until you have observed the tool's result.\n"
    ],
    new MessagesPlaceholder("chat_history"),
    ["human", "{input}"],
    new MessagesPlaceholder("agent_scratchpad"),
  ]);

  const agent = await createOpenAIFunctionsAgent({
    llm: model,
    tools,
    prompt,
  });

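  // LangChainTracer sends each run to LangSmith; it expects a LangSmith API key
  // in the environment (LANGCHAIN_API_KEY) and, optionally, LANGCHAIN_PROJECT.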
  const executor = new AgentExecutor({
    agent,
    tools,
    callbacks: [new LangChainTracer()],
  });

  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });

  const askQuestion = async () => {
    rl.question("You: ", async (input) => {
      // Let the user leave the loop cleanly.
      if (input.trim().toLowerCase() === "exit") {
        rl.close();
        return;
      }

      // agent_scratchpad is managed by the AgentExecutor itself, so only the
      // input and the chat history need to be supplied here.
      const result = await executor.invoke(
        { input, chat_history },
        {
          runName: "CLI Agent Run",
          tags: ["cli", "langsmith", "tools"],
        }
      );

      console.log("🤖 Agent:", result.output);

      chat_history.push(new HumanMessage(input));
      chat_history.push(new AIMessage(result.output));

      askQuestion();
    });
  };

  askQuestion();
}

run().catch(console.error);
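
The script expects its secrets in a .env file loaded by dotenv. A minimal sketch follows: OPENAI_API_KEY, WEATHER_API_URL, and WEATHER_API_KEY are the variables the code above actually reads, while the LANGCHAIN_* variables are the standard LangSmith settings the tracer picks up. The weather URL shown is an assumption, the OpenWeatherMap current-weather endpoint, since that is the response shape the WeatherTool parses.

# .env (example values, not real keys)
OPENAI_API_KEY=<your OpenAI key>
WEATHER_API_URL=https://api.openweathermap.org/data/2.5/weather
WEATHER_API_KEY=<your OpenWeatherMap key>
# LangSmith tracing
LANGCHAIN_API_KEY=<your LangSmith key>
LANGCHAIN_PROJECT=cli-agent-demo
# LANGCHAIN_TRACING_V2=true is only needed for global tracing; here the
# LangChainTracer callback is attached explicitly.

Start the CLI with ts-node or tsx (for example, npx tsx agent.ts, assuming the file is saved as agent.ts, a hypothetical name), ask something like "What is the weather in Sofia?", and type exit to quit. Each run then appears as a trace under the configured project in LangSmith.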