Inference router. Part of the MIST stack.
go get github.com/greynewell/infermux

type Provider interface {
Name() string
Models() []string
Infer(ctx context.Context, req protocol.InferRequest) (protocol.InferResponse, error)
}

reg := infermux.NewRegistry()
reg.Register(myOpenAIProvider)
reg.Register(myAnthropicProvider)
reporter := tokentrace.NewReporter("infermux", "http://localhost:8700")
router := infermux.NewRouter(reg, reporter)
resp, err := router.Infer(ctx, protocol.InferRequest{
Model: "claude-sonnet-4-5-20250929",
Messages: []protocol.ChatMessage{{Role: "user", Content: "Hello"}},
})

Tracks tokens and cost per request. Reports spans to TokenTrace.
handler := infermux.NewHandler(router, reg)
http.HandleFunc("POST /mist", handler.Ingest)
http.HandleFunc("POST /infer", handler.InferDirect)
http.HandleFunc("GET /providers", handler.Providers)

infermux serve --addr :8600 --tokentrace http://localhost:8700
infermux infer --model echo-v1 --prompt "Hello world"