Interface: PromptRunSettings

Defined in: src/types/experiment.types.ts:13

Extends

  • PromptRunSettingsInput
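
For orientation, the sketch below combines several of the properties documented on this page into one settings object. It is a minimal, hedged example: every field used appears below, but the import path is an assumption based on the "Defined in" location and may not match the package's public export.

// Minimal sketch of a PromptRunSettings object using only fields documented
// on this page. The import path is assumed from "Defined in" above and may
// differ in your installation.
import type { PromptRunSettings } from "./src/types/experiment.types";

const settings: PromptRunSettings = {
  // Sampling controls (all optional; defaults are listed per property below)
  temperature: 0.2,
  top_p: 1,
  max_tokens: 512,
  n: 1,
  frequency_penalty: 0,
  presence_penalty: 0,
  // Stop generation at the first matching sequence (illustrative values)
  stop_sequences: ["\n\n", "END"],
  // Model selection; deployment_name is assumed to matter only for
  // deployment-based providers such as Azure
  model_alias: "GPT-4o mini",
  deployment_name: null,
  // Log-probability reporting
  logprobs: true,
  top_logprobs: 5,
};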

Properties

deployment_name?

optional deployment_name: null | string;

Defined in: src/types/api.types.ts:8602

Deployment Name

Inherited from

PromptRunSettingsInput.deployment_name;

echo?

optional echo: boolean;

Defined in: src/types/api.types.ts:8595

Echo

Default

false;

Inherited from

PromptRunSettingsInput.echo;

frequency_penalty?

optional frequency_penalty: number;

Defined in: src/types/api.types.ts:8634

Frequency Penalty

Default

0;

Inherited from

PromptRunSettingsInput.frequency_penalty;

known_models?

optional known_models: object[];

Defined in: src/types/api.types.ts:8653

Known Models

alias

alias: string;

Alias

alternative_names?

optional alternative_names: string[];

Alternative Names

Description

Alternative names for the model, used to match against current, versioned, or legacy model names.

api_version?

optional api_version: null | string;

Api Version

assistant_role?

optional assistant_role: null | string;

Assistant Role

cost_by?

optional cost_by: "tokens" | "characters";
Default
tokens;

formatting_tokens?

optional formatting_tokens: number;

Formatting Tokens

Default
0;

input_map?

optional input_map:
  | null
  | {
  prefix?: string;
  prompt: string;
  suffix?: string;
};
Type declaration

null

{
  prefix?: string;
  prompt: string;
  suffix?: string;
}

input_price?

optional input_price: number;

Input Price

Default
0;

input_token_limit?

optional input_token_limit: null | number;

Input Token Limit

integration?

optional integration:
  | "anthropic"
  | "aws_bedrock"
  | "aws_sagemaker"
  | "azure"
  | "databricks"
  | "mistral"
  | "nvidia"
  | "openai"
  | "vegas_gateway"
  | "vertex_ai"
  | "writer";
Default
openai;

is_chat?

optional is_chat: boolean;

Is Chat

Default
false;

name

name: string;

Name

output_map?

optional output_map:
  | null
  | {
  completion_reason?: null | string;
  input_token_count?: null | string;
  output_token_count?: null | string;
  response: string;
  token_count?: null | string;
};
Type declaration

null

{
  completion_reason?: null | string;
  input_token_count?: null | string;
  output_token_count?: null | string;
  response: string;
  token_count?: null | string;
}

output_price?

optional output_price: number;

Output Price

Default
0;

output_token_limit?

optional output_token_limit: null | number;

Output Token Limit

params_map?

optional params_map: object;
params_map.api_version?
optional api_version: null | string;

Api Version

params_map.deployment_name?
optional deployment_name: null | string;

Deployment Name

params_map.echo?
optional echo: null | string;

Echo

params_map.frequency_penalty?
optional frequency_penalty: null | string;

Frequency Penalty

params_map.logprobs?
optional logprobs: null | string;

Logprobs

params_map.max_tokens?
optional max_tokens: null | string;

Max Tokens

params_map.model?
optional model: null | string;

Model

params_map.n?
optional n: null | string;

N

params_map.presence_penalty?
optional presence_penalty: null | string;

Presence Penalty

params_map.response_format?
optional response_format: null | string;

Response Format

params_map.stop_sequences?
optional stop_sequences: null | string;

Stop Sequences

params_map.temperature?
optional temperature: null | string;

Temperature

params_map.tool_choice?
optional tool_choice: null | string;

Tool Choice

params_map.tools?
optional tools: null | string;

Tools

params_map.top_k?
optional top_k: null | string;

Top K

params_map.top_logprobs?
optional top_logprobs: null | string;

Top Logprobs

params_map.top_p?
optional top_p: null | string;

Top P

provides_log_probs?

optional provides_log_probs: boolean;

Provides Log Probs

Default
false;

response_prefix_tokens?

optional response_prefix_tokens: number;

Response Prefix Tokens

Default
0;

system_supported?

optional system_supported: boolean;

System Supported

Default
false;

token_limit?

optional token_limit: null | number;

Token Limit

user_role?

optional user_role: null | string;

User Role

Inherited from

PromptRunSettingsInput.known_models;
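
The sketch below shows the shape of a single known_models element. All values are illustrative placeholders rather than shipped defaults, and the meaning of the params_map and output_map string values (written here as provider parameter names and response field paths) is an assumption not specified on this page.

// Hedged sketch of one known_models entry. Values are placeholders; the
// semantics of the params_map / output_map strings are assumed, not documented here.
import type { PromptRunSettings } from "./src/types/experiment.types";

const knownModel = {
  alias: "my-gpt-4o-mini",                        // required: Alias
  name: "gpt-4o-mini",                            // required: Name
  alternative_names: ["gpt-4o-mini-2024-07-18"],  // matched against versioned/legacy names
  integration: "openai" as const,                 // one of the documented integrations
  is_chat: true,
  system_supported: true,
  provides_log_probs: true,
  cost_by: "tokens" as const,
  input_price: 0,                                 // documented default; set to your pricing
  output_price: 0,
  input_token_limit: 128000,                      // illustrative limits
  output_token_limit: 16384,
  // Assumed: maps canonical parameter names to this provider's request fields
  params_map: {
    max_tokens: "max_completion_tokens",
    temperature: "temperature",
    top_p: "top_p",
  },
  // Assumed: maps the provider's response fields back to canonical names
  output_map: {
    response: "choices.0.message.content",
    completion_reason: "choices.0.finish_reason",
    input_token_count: "usage.prompt_tokens",
    output_token_count: "usage.completion_tokens",
  },
};

const settings: PromptRunSettings = {
  model_alias: knownModel.alias,
  known_models: [knownModel],
};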

logprobs?

optional logprobs: boolean;

Defined in: src/types/api.types.ts:8585

Logprobs

Default

true;

Inherited from

PromptRunSettingsInput.logprobs;

max_tokens?

optional max_tokens: number;

Defined in: src/types/api.types.ts:8617

Max Tokens

Default

1024;

Inherited from

PromptRunSettingsInput.max_tokens;

model_alias?

optional model_alias: string;

Defined in: src/types/api.types.ts:8607

Model Alias

Default

GPT-4o mini

Inherited from

PromptRunSettingsInput.model_alias;

n?

optional n: number;

Defined in: src/types/api.types.ts:8600

N

Default

1;

Inherited from

PromptRunSettingsInput.n;

presence_penalty?

optional presence_penalty: number;

Defined in: src/types/api.types.ts:8639

Presence Penalty

Default

0;

Inherited from

PromptRunSettingsInput.presence_penalty;

response_format?

optional response_format:
  | null
  | {
  [key: string]: string;
};

Defined in: src/types/api.types.ts:8649

Response Format

Inherited from

PromptRunSettingsInput.response_format;
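
Because the declaration above only constrains response_format to a string-keyed map of strings, the OpenAI-style "json_object" value in the sketch below is an assumption, not a requirement of this interface.

// response_format is a free-form string map; the "json_object" value follows
// a common OpenAI convention and is an assumption, not part of this contract.
import type { PromptRunSettings } from "./src/types/experiment.types";

const settings: PromptRunSettings = {
  response_format: { type: "json_object" },
};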

stop_sequences?

optional stop_sequences: null | string[];

Defined in: src/types/api.types.ts:8619

Stop Sequences

Inherited from

PromptRunSettingsInput.stop_sequences;

temperature?

optional temperature: number;

Defined in: src/types/api.types.ts:8612

Temperature

Default

1;

Inherited from

PromptRunSettingsInput.temperature;

tool_choice?

optional tool_choice:
  | null
  | string
  | {
  function: {
     name: string;
  };
  type?: string;
};

Defined in: src/types/api.types.ts:8647

Tool Choice

Type declaration

null

string

{
  function: {
     name: string;
  };
  type?: string;
}

function

function: object;
function.name
name: string;

Name

type?

optional type: string;

Type

Default
function

Inherited from

PromptRunSettingsInput.tool_choice;
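
The three accepted shapes can be written as in the sketch below; "auto" and the function name are illustrative values, not defined by this page.

// tool_choice accepts null, a bare string, or an object naming a single function.
import type { PromptRunSettings } from "./src/types/experiment.types";

const letTheModelDecide: PromptRunSettings["tool_choice"] = "auto";

const forceOneTool: PromptRunSettings["tool_choice"] = {
  type: "function",                   // Default: function
  function: { name: "get_weather" },  // hypothetical tool name
};

const disableTools: PromptRunSettings["tool_choice"] = null;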

tools?

optional tools: null | object[];

Defined in: src/types/api.types.ts:8641

Tools

Inherited from

PromptRunSettingsInput.tools;
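
Since the element type is only documented as object, the OpenAI-style function-tool layout in the sketch below is an assumption about what the backend expects, shown here together with tool_choice.

// The tools element type is only documented as `object`, so this
// function-tool shape is an assumption, not defined by this interface.
import type { PromptRunSettings } from "./src/types/experiment.types";

const settings: PromptRunSettings = {
  tools: [
    {
      type: "function",
      function: {
        name: "get_weather",  // hypothetical tool
        description: "Look up the current weather for a city",
        parameters: {
          type: "object",
          properties: { city: { type: "string" } },
          required: ["city"],
        },
      },
    },
  ],
  tool_choice: "auto",
};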

top_k?

optional top_k: number;

Defined in: src/types/api.types.ts:8629

Top K

Default

40;

Inherited from

PromptRunSettingsInput.top_k;

top_logprobs?

optional top_logprobs: number;

Defined in: src/types/api.types.ts:8590

Top Logprobs

Default

5;

Inherited from

PromptRunSettingsInput.top_logprobs;

top_p?

optional top_p: number;

Defined in: src/types/api.types.ts:8624

Top P

Default

1;

Inherited from

PromptRunSettingsInput.top_p;