Completions
The Completions endpoint creates text given a prompt. Use it for classic instruction following and text generation when you do not need multi-turn message structures.
Create completion
Section titled “Create completion”

POST https://api.aifoundryhub.com/v1/completions
Creates a completion for the provided prompt and parameters.
Example request
Section titled “Example request”

curl:

curl -X POST "https://api.aifoundryhub.com/v1/completions" \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $AI_FOUNDRY_HUB_API_KEY" \
  -d '{
    "model": "babbage-002",
    "prompt": "Write a haiku about dawn",
    "max_tokens": 64,
    "temperature": 0.7
  }'

JavaScript:

import OpenAI from "openai";

const client = new OpenAI({
  apiKey: process.env.AI_FOUNDRY_HUB_API_KEY,
  baseURL: "https://api.aifoundryhub.com/v1",
});

const resp = await client.completions.create({
  model: "babbage-002",
  prompt: "Write a haiku about dawn",
  max_tokens: 64,
  temperature: 0.7,
});

console.log(resp.choices[0].text);

Go:

package main

import (
	"context"
	"fmt"
	"os"

	openai "github.com/openai/openai-go"
	"github.com/openai/openai-go/option"
)

func main() {
	client := openai.NewClient(
		option.WithAPIKey(os.Getenv("AI_FOUNDRY_HUB_API_KEY")),
		option.WithBaseURL("https://api.aifoundryhub.com/v1"),
	)

	params := openai.CompletionNewParams{
		Model: "babbage-002",
		Prompt: openai.CompletionNewParamsPromptUnion{
			OfString: openai.String("Write a haiku about dawn"),
		},
		MaxTokens:   openai.Int(64),
		Temperature: openai.Float(0.7),
	}

	ctx := context.Background()
	resp, err := client.Completions.New(ctx, params)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.Choices[0].Text)
}

Python:

import os
from openai import OpenAI

client = OpenAI(
    api_key=os.getenv("AI_FOUNDRY_HUB_API_KEY"),
    base_url="https://api.aifoundryhub.com/v1",
)

resp = client.completions.create(
    model="babbage-002",
    prompt="Write a haiku about dawn",
    max_tokens=64,
    temperature=0.7,
)

print(resp.choices[0].text)

Returns
Section titled “Returns”

- When `stream` is false: a text completion object.
- When `stream` is true: a Server-Sent Events stream of incremental completion chunks.
SSE snippet
data: {"id":"cmpl_...","object":"text_completion.chunk","choices":[{"index":0,"delta":{"text":"The"},"finish_reason":null}]}
data: {"id":"cmpl_...","object":"text_completion.chunk","choices":[{"index":0,"delta":{"text":" dawn"},"finish_reason":null}]}
data: [DONE]

Example response (non-streaming)
Section titled “Example response (non-streaming)”

{
  "id": "cmpl-abc123",
  "object": "text_completion",
  "created": 1714569952,
  "model": "babbage-002",
  "choices": [
    {
      "index": 0,
      "text": "Pale light spills over\nWaking earth in gentle hush\nDay breaks soft and new",
      "logprobs": null,
      "finish_reason": "stop"
    }
  ],
  "usage": {
    "prompt_tokens": 6,
    "completion_tokens": 24,
    "total_tokens": 30
  }
}

The completion object

Section titled “The completion object”

Streaming chunk (delta) object
Section titled “Streaming chunk (delta) object”

Note: The stream ends with a terminal `data: [DONE]` line. Some providers also include a final non-chunk summary event with `usage`.
Streaming examples
Section titled “Streaming examples”

curl:

curl -N -X POST "https://api.aifoundryhub.com/v1/completions" \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $AI_FOUNDRY_HUB_API_KEY" \
  -d '{
    "model": "babbage-002",
    "prompt": "Write a haiku about dawn",
    "stream": true,
    "max_tokens": 64
  }'

JavaScript:

const stream = await client.completions.create({
  model: "babbage-002",
  prompt: "Write a haiku about dawn",
  stream: true,
  max_tokens: 64,
});

for await (const part of stream) {
  const delta = part?.choices?.[0]?.delta?.text;
  const legacy = part?.choices?.[0]?.text;
  process.stdout.write(delta ?? legacy ?? "");
}

Go:

st := client.Completions.NewStreaming(ctx, params)
defer st.Close()
for st.Next() {
	chunk := st.Current()
	for _, ch := range chunk.Choices {
		if ch.Delta != nil {
			fmt.Print(ch.Delta.Text)
		} else {
			fmt.Print(ch.Text)
		}
	}
}
if err := st.Err(); err != nil {
	panic(err)
}

Python:

stream = client.completions.create(
    model="babbage-002",
    prompt="Write a haiku about dawn",
    stream=True,
    max_tokens=64,
)

for event in stream:
    # Prefer delta.text if present, else fall back to text
    delta_text = getattr(getattr(event.choices[0], "delta", None), "text", None)
    text = delta_text or getattr(event.choices[0], "text", "")
    print(text, end="")