POST /sdk/v1/prompts/{id}
import PromptFoundry from '@prompt-foundry/typescript-sdk';

const client = new PromptFoundry({
  apiKey: process.env['PROMPT_FOUNDRY_API_KEY'], // This is the default and can be omitted
});

async function main() {
  // Retrieve the configured Anthropic model parameters for prompt '1212121'.
  const response = await client.prompts.getParameters('1212121');

  console.log(response);
}

main();
{
  "provider": "anthropic",
  "name": "<string>",
  "parameters": {
    "max_tokens": 123,
    "messages": [
      {
        "content": "<string>",
        "role": "user"
      }
    ],
    "model": "<string>",
    "metadata": {
      "user_id": "<string>"
    },
    "stop_sequences": [
      "<string>"
    ],
    "system": "<string>",
    "temperature": 123,
    "tool_choice": {
      "type": "auto"
    },
    "tools": [
      {
        "input_schema": {
          "type": "object",
          "properties": "<any>"
        },
        "name": "<string>",
        "description": "<string>"
      }
    ],
    "top_k": 123,
    "top_p": 123,
    "stream": true
  }
}

Path Parameters

id (string, required)
Example: "1212121"

Body (application/json)

An example request using these fields follows the field list.

variables (object)
The template variables added to the prompt when executing the prompt.

overrideMessages (object[])
Replaces the configured prompt messages when running the prompt.

appendMessages (object[])
Appended to the end of the configured prompt messages before running the prompt.

user (string)
A unique identifier representing your end-user, which can help monitor and detect abuse.
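
For example, template variables and an end-user identifier can be sent alongside the prompt ID. A minimal sketch, assuming the request body is passed as the second argument to getParameters; the "customerName" variable and the user ID are illustrative:

import PromptFoundry from '@prompt-foundry/typescript-sdk';

const client = new PromptFoundry({
  apiKey: process.env['PROMPT_FOUNDRY_API_KEY'],
});

async function main() {
  // Illustrative values: "customerName" and "user-42" are placeholders, and
  // the second argument is assumed to carry the JSON request body.
  const response = await client.prompts.getParameters('1212121', {
    variables: { customerName: 'Ada' },
    user: 'user-42',
  });

  console.log(response.parameters);
}

main();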

Response

200 application/json
Successfully retrieved the prompt configuration. An example of forwarding these parameters to the Anthropic SDK follows the field list.

provider (enum<string>, required)
Available options: anthropic

name (string, required)

parameters (object, required)
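
Because the parameters are returned in the provider's native format, they can typically be forwarded to the Anthropic SDK without reshaping. A minimal sketch, assuming the returned parameters object matches the shape Anthropic's messages.create expects (the cast reflects that assumption rather than a guaranteed type match):

import PromptFoundry from '@prompt-foundry/typescript-sdk';
import Anthropic from '@anthropic-ai/sdk';

const promptFoundry = new PromptFoundry({
  apiKey: process.env['PROMPT_FOUNDRY_API_KEY'],
});

const anthropic = new Anthropic({
  apiKey: process.env['ANTHROPIC_API_KEY'], // This is the default and can be omitted
});

async function main() {
  const { provider, parameters } = await promptFoundry.prompts.getParameters('1212121');

  if (provider === 'anthropic') {
    // Assumption: the parameters use the same field names as Anthropic's
    // Messages API (model, messages, max_tokens, ...), so they are forwarded
    // as-is; the cast may need adjusting depending on how each SDK types them.
    const message = await anthropic.messages.create(parameters as Anthropic.MessageCreateParams);
    console.log(message);
  }
}

main();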