export type Inputs = Record<string, string | number | object>

export type PromptVariable = {
  key: string
  name: string
  type: string // "string" | "number" | "select"
  default?: string | number
  required: boolean
  options?: string[]
  max_length?: number
}

export type CompletionParams = {
  max_tokens: number
  temperature: number
  top_p: number
  presence_penalty: number
  frequency_penalty: number
}

export type ModelId = 'gpt-3.5-turbo' | 'text-davinci-003'

export type PromptConfig = {
  prompt_template: string
  prompt_variables: PromptVariable[]
}

export type MoreLikeThisConfig = {
  enabled: boolean
}

export type SuggestedQuestionsAfterAnswerConfig = MoreLikeThisConfig

// Frontend use; not the same shape as the backend's config
export type ModelConfig = {
  provider: string // LLM provider, e.g. "OPENAI"
  model_id: string
  configs: PromptConfig
  opening_statement: string | null
  more_like_this: MoreLikeThisConfig | null
  suggested_questions_after_answer: SuggestedQuestionsAfterAnswerConfig | null
  dataSets: any[]
}

export type DebugRequestBody = {
  inputs: Inputs
  query: string
  completion_params: CompletionParams
  model_config: ModelConfig
}

export type DebugResponse = {
  id: string
  answer: string
  created_at: string
}

export type DebugResponseStream = {
  id: string
  data: string
  created_at: string
}

export type FeedBackRequestBody = {
  message_id: string
  rating: 'like' | 'dislike'
  content?: string
  from_source: 'api' | 'log'
}

export type FeedBackResponse = {
  message_id: string
  rating: 'like' | 'dislike'
}

// Log session list
export type LogSessionListQuery = {
  keyword?: string
  start?: string // datetime, format: YYYY-mm-dd HH:mm
  end?: string // datetime, format: YYYY-mm-dd HH:mm
  page: number
  limit: number // default 20; range 1-100
}

export type LogSessionListResponse = {
  data: {
    id: string
    conversation_id: string
    query: string // the user's query
    message: string // prompt sent to the LLM
    answer: string
    created_at: string
  }[]
  total: number
  page: number
}

// Log session detail and debug
export type LogSessionDetailResponse = {
  id: string
  conversation_id: string
  model_provider: string
  query: string
  inputs: Record<string, string | number | object>[]
  message: string
  message_tokens: number // number of tokens in message
  answer: string
  answer_tokens: number // number of tokens in answer
  provider_response_latency: number // response latency in ms
  from_source: 'api' | 'log'
}

export type SavedMessage = {
  id: string
  answer: string
}
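
// Illustrative sketch only: the example below shows how these types might fit
// together when assembling a debug completion request on the frontend.
// All sample values (provider, template, variable keys, parameter numbers)
// are assumptions for illustration and are not defined by this module.
const exampleModelConfig: ModelConfig = {
  provider: 'OPENAI',
  model_id: 'gpt-3.5-turbo',
  configs: {
    prompt_template: 'Translate the following text to {{language}}: {{text}}',
    prompt_variables: [
      { key: 'language', name: 'Language', type: 'select', required: true, options: ['English', 'French'] },
      { key: 'text', name: 'Text', type: 'string', required: true, max_length: 200 },
    ],
  },
  opening_statement: null,
  more_like_this: { enabled: false },
  suggested_questions_after_answer: { enabled: false },
  dataSets: [],
}

const exampleRequest: DebugRequestBody = {
  // inputs must satisfy the variables declared in prompt_variables above
  inputs: { language: 'French', text: 'Hello, world' },
  query: 'Hello, world',
  completion_params: {
    max_tokens: 512,
    temperature: 0.7,
    top_p: 1,
    presence_penalty: 0,
    frequency_penalty: 0,
  },
  model_config: exampleModelConfig,
}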