import PromptFoundry from '@prompt-foundry/typescript-sdk';
const client = new PromptFoundry({
apiKey: process.env['PROMPT_FOUNDRY_API_KEY'], // This is the default and can be omitted
});
async function main() {
const promptConfigurations = await client.prompts.list();
console.log(promptConfigurations);
}
main();[
{
"id": "<string>",
"name": "<string>",
"parameters": {
"provider": "ANTHROPIC",
"name": "<string>",
"responseFormat": "JSON",
"temperature": 123,
"topP": 123,
"topK": 2,
"frequencyPenalty": 123,
"presencePenalty": 123,
"maxTokens": 123,
"seed": 123,
"toolChoice": "<string>",
"stream": true,
"parallelToolCalls": true
},
"messages": [
{
"content": [
{
"type": "TEXT",
"text": "<string>"
}
],
"role": "assistant"
}
],
"tools": [
{
"id": "<string>",
"name": "<string>",
"description": "<string>",
"parameters": {}
}
]
}
]

Retrieve all prompts
import PromptFoundry from '@prompt-foundry/typescript-sdk';
const client = new PromptFoundry({
apiKey: process.env['PROMPT_FOUNDRY_API_KEY'], // This is the default and can be omitted
});
async function main() {
const promptConfigurations = await client.prompts.list();
console.log(promptConfigurations);
}
main();[
{
"id": "<string>",
"name": "<string>",
"parameters": {
"provider": "ANTHROPIC",
"name": "<string>",
"responseFormat": "JSON",
"temperature": 123,
"topP": 123,
"topK": 2,
"frequencyPenalty": 123,
"presencePenalty": 123,
"maxTokens": 123,
"seed": 123,
"toolChoice": "<string>",
"stream": true,
"parallelToolCalls": true
},
"messages": [
{
"content": [
{
"type": "TEXT",
"text": "<string>"
}
],
"role": "assistant"
}
],
"tools": [
{
"id": "<string>",
"name": "<string>",
"description": "<string>",
"parameters": {}
}
]
}
]

Successful operation
Example: "PROMPT_1"
Example: "Check the weather"
Show child attributes
The LLM model provider.
Available options: ANTHROPIC, OPENAI

The name of the model for the provider.
Example: PromptResponseFormat.TEXT
Available options: JSON, TEXT

Example: 1
Example: 1
Example: 50
Required range: x >= 1

Example: 0
Example: 0
Example: 100
Example: 97946543
The configured messages WITHOUT variables replaced.
Show child attributes
Available options: assistant, system, tool, user

Show child attributes
The initial messages to be included with your call to the LLM API.
The name of the tool to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.
A description of what the tool does, used by the model to choose when and how to call the tool.
Was this page helpful?