Get Started
Function Calling
Build and deploy AI Tools (Function Tools & MCP Tools) globally in minutes.
Vivgrid supports Tools to extend LLM capabilities. Tools can be implemented as Function Tools (via function calling APIs) or MCP Tools (via the Model Context Protocol). This guide walks you through building and deploying your first Tool.
Set Up Your Development Environment
Start by writing your first serverless Tool.
Prepare
Write Your First Tool
package main
import (
"fmt"
"io"
"log/slog"
"net/http"
"os"
"github.com/yomorun/yomo/serverless"
)
// Description outlines the functionality for the Tool feature.
// It provides a detailed description of the function's purpose, essential for
// integration with Tools. The presence of this function and its
// return value make the function discoverable and callable by LLMs.
// For more information on Function Calling, see the OpenAI documentation:
// https://platform.openai.com/docs/guides/function-calling
func Description() string {
return `Get current weather for a given city. If no city is provided, you
should ask to clarify the city. If the city name is given, you should
convert the city name to Latitude and Longitude geo coordinates, keeping
Latitude and Longitude in decimal format.`
}
// InputSchema defines the argument structure for the Tool. It
// utilizes jsonschema tags to detail the definition. For jsonschema in Go,
// see https://github.com/invopop/jsonschema.
func InputSchema() any {
return &LLMArguments{}
}
// LLMArguments defines the arguments for the Tool. These
// arguments are combined to form a prompt automatically.
type LLMArguments struct {
City string `json:"city" jsonschema:"description=The city name to get the weather for"`
Latitude float64 `json:"latitude" jsonschema:"description=The latitude of the city, in decimal format, range should be in (-90, 90)"`
Longitude float64 `json:"longitude" jsonschema:"description=The longitude of the city, in decimal format, range should be in (-180, 180)"`
}
// Handler orchestrates the core processing logic of this function.
// - ctx.ReadLLMArguments() parses Tool arguments (skip if none).
// - ctx.WriteLLMResult() sends the result back to the LLM.
func Handler(ctx serverless.Context) {
var p LLMArguments
// deserilize the arguments from llm tool_call response
ctx.ReadLLMArguments(&p)
// invoke the openweathermap api and return the result back to LLM
result := requestOpenWeatherMapAPI(p.Latitude, p.Longitude)
ctx.WriteLLMResult(result)
slog.Info("get-weather", "city", p.City, "result", result)
}
func requestOpenWeatherMapAPI(lat, lon float64) string {
const apiURL = "https://api.openweathermap.org/data/2.5/weather?lat=%f&lon=%f&appid=%s&units=metric"
apiKey := os.Getenv("OPENWEATHERMAP_API_KEY")
url := fmt.Sprintf(apiURL, lat, lon, apiKey)
resp, err := http.Get(url)
if err != nil {
fmt.Println(err)
return "can not get the weather information at the moment"
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
fmt.Println(err)
return "can not get the weather information at the moment"
}
return string(body)
}Run Locally or on a Self‑Managed Server
export VIVGRID_TOKEN=<YOUR_VIVGRID_APP_KEY>.<YOUR_VIVGRID_APP_SECRET>
export OPENWEATHERMAP_API_KEY=<YOUR_OPENWEATHERMAP_API_KEY>
yomo run app.go -z 'zipper.vivgrid.com:9000' -n 'cc-dev' -d 'app-key-secret:<VIVGRID_TOKEN>'Deploy to Vivgrid as Geo‑Distributed Serverless
Test Your Tool
curl -v -i https://api.vivgrid.com/v1/chat/completions \
-H "Content-Type: application/json" \
-H "Authorization: Bearer <VIVGRID_TOKEN>" \
-d '{
"messages": [{
"role": "user",
"content": "how is the weather today in Paris and Tokyo?"
}],
"stream": true
}'