
refactor: remove model cli

Yeuoly, 11 months ago · commit dc72adc2dc

+ 0 - 43
cmd/commandline/model.go

@@ -1,43 +0,0 @@
-package main
-
-import (
-	"github.com/langgenius/dify-plugin-daemon/cmd/commandline/model"
-	"github.com/spf13/cobra"
-)
-
-var (
-	modelTemplatesCommand = &cobra.Command{
-		Use:   "templates [-t provider|model] [-m model_type] [name]",
-		Short: "Templates",
-		Long:  "List all model templates, you can use it to create new model",
-		Run: func(cmd *cobra.Command, args []string) {
-			// get provider or model
-			typ, _ := cmd.Flags().GetString("type")
-			// get model_type
-			model_type, _ := cmd.Flags().GetString("model_type")
-			name := ""
-			if len(args) > 0 {
-				name = args[0]
-			}
-			model.ListTemplates(typ, model_type, name)
-		},
-	}
-
-	newProviderCommand = &cobra.Command{
-		Use:   "provider [template] name",
-		Short: "Provider",
-		Long:  "Using template to create new provider, one plugin only support one provider",
-	}
-
-	newModelCommand = &cobra.Command{
-		Use:   "new [template] name",
-		Short: "Model",
-		Long:  "Using template to create new model, you need to create a provider first",
-	}
-)
-
-func init() {
-	pluginModelCommand.AddCommand(modelTemplatesCommand)
-	pluginModelCommand.AddCommand(newProviderCommand)
-	pluginModelCommand.AddCommand(newModelCommand)
-}

+ 0 - 113
cmd/commandline/model/template.go

@@ -1,113 +0,0 @@
-package model
-
-import (
-	"embed"
-	"fmt"
-	"strings"
-
-	"github.com/langgenius/dify-plugin-daemon/internal/utils/log"
-)
-
-//go:embed templates
-var templates embed.FS
-
-// provider_templates is a map of provider type to the template name
-var provider_templates map[string]string
-
-// model_templates is a map of model type to a map of template name to the template content
-var model_templates map[string]map[string]string
-
-func init() {
-	provider_templates = make(map[string]string)
-	model_templates = make(map[string]map[string]string)
-
-	files, err := templates.ReadDir("templates")
-	if err != nil {
-		log.Error("Failed to read templates: %v", err)
-		return
-	}
-
-	for _, file := range files {
-		if file.IsDir() {
-			continue
-		}
-
-		// get the file name
-		filename := file.Name()
-		// read the file content
-		file_content, err := templates.ReadFile("templates/" + filename)
-		if err != nil {
-			log.Error("Failed to read template: %v", err)
-			continue
-		}
-		filenames := strings.Split(filename, "_")
-		// check the first element is a provider
-		if filenames[0] == "provider" {
-			if len(filenames) != 2 {
-				log.Error("Invalid provider template: %s", filename)
-				continue
-			}
-			provider_templates[filenames[1]] = string(file_content)
-		} else if filenames[0] == "model" {
-			if len(filenames) != 3 {
-				log.Error("Invalid model template: %s", filename)
-				continue
-			}
-			if _, ok := model_templates[filenames[1]]; !ok {
-				model_templates[filenames[1]] = make(map[string]string)
-			}
-
-			model_templates[filenames[1]][filenames[2]] = string(file_content)
-		}
-	}
-}
-
-func ListTemplates(typ string, model_type string, name string) {
-	color_reset := "\033[0m"
-	color_cyan := "\033[36m"
-	color_yellow := "\033[33m"
-	color_green := "\033[32m"
-
-	if typ == "provider" || typ == "" {
-		fmt.Printf("%sProvider Templates:%s\n", color_cyan, color_reset)
-		for template := range provider_templates {
-			if name == "" || strings.Contains(template, name) {
-				fmt.Printf("  %s%s%s\n", color_yellow, template, color_reset)
-			}
-		}
-		fmt.Println()
-	}
-
-	if typ == "model" || typ == "" {
-		fmt.Printf("%sModel Templates:%s\n", color_cyan, color_reset)
-		if model_type == "" {
-			for model_type, templates := range model_templates {
-				fmt.Printf("%s%s:%s\n", color_yellow, model_type, color_reset)
-				for template := range templates {
-					if name == "" || strings.Contains(template, name) {
-						fmt.Printf("  %s%s%s\n", color_green, template, color_reset)
-					}
-				}
-				fmt.Println()
-			}
-		} else {
-			if templates, ok := model_templates[model_type]; ok {
-				fmt.Printf("%s%s:%s\n", color_yellow, model_type, color_reset)
-				for template := range templates {
-					if name == "" || strings.Contains(template, name) {
-						fmt.Printf("  %s%s%s\n", color_green, template, color_reset)
-					}
-				}
-				fmt.Println()
-			}
-		}
-	}
-}
-
-func GetTemplate(typ string, name string) {
-
-}
-
-func CreateFromTemplate(root string, typ string, name string) {
-
-}
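For context, the removed loader shipped its YAML templates inside the binary via go:embed and derived template keys from the file names (provider_<name>.yaml, model_<type>_<name>.yaml). A stripped-down sketch of that pattern, with hypothetical file names and errors collapsed to panics for brevity:

package main

import (
	"embed"
	"fmt"
	"strings"
)

// Embed every file under templates/ at compile time; the directory must
// sit next to this source file, and the file names below are hypothetical.
//
//go:embed templates
var templates embed.FS

func main() {
	entries, err := templates.ReadDir("templates")
	if err != nil {
		panic(err)
	}
	for _, entry := range entries {
		if entry.IsDir() {
			continue
		}
		// "model_llm_default.yaml" splits into ["model", "llm", "default.yaml"],
		// which is how the removed code distinguished provider and model templates.
		parts := strings.Split(entry.Name(), "_")
		body, err := templates.ReadFile("templates/" + entry.Name())
		if err != nil {
			panic(err)
		}
		fmt.Println(parts[0], entry.Name(), len(body), "bytes")
	}
}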

+ 0 - 18
cmd/commandline/model/templates/model_llm_default.yaml

@@ -1,18 +0,0 @@
-model: gpt-4
-label:
-  zh_Hans: gpt-4
-  en_US: gpt-4
-model_type: llm
-model_properties:
-  mode: chat
-  context_size: 8192
-parameter_rules:
-  - name: temperature
-    use_template: temperature
-  - name: top_p
-    use_template: top_p
-pricing:
-  input: '0.001'
-  output: '0.002'
-  unit: '0.001'
-  currency: USD

+ 0 - 43
cmd/commandline/model/templates/model_llm_gpt35.yaml

@@ -1,43 +0,0 @@
-model: gpt-3.5-turbo
-label:
-  zh_Hans: gpt-3.5-turbo
-  en_US: gpt-3.5-turbo
-model_type: llm
-features:
-  - multi-tool-call
-  - agent-thought
-  - stream-tool-call
-model_properties:
-  mode: chat
-  context_size: 16385
-parameter_rules:
-  - name: temperature
-    use_template: temperature
-  - name: top_p
-    use_template: top_p
-  - name: presence_penalty
-    use_template: presence_penalty
-  - name: frequency_penalty
-    use_template: frequency_penalty
-  - name: max_tokens
-    use_template: max_tokens
-    default: 512
-    min: 1
-    max: 4096
-  - name: response_format
-    label:
-      zh_Hans: 回复格式
-      en_US: response_format
-    type: string
-    help:
-      zh_Hans: 指定模型必须输出的格式
-      en_US: specifying the format that the model must output
-    required: false
-    options:
-      - text
-      - json_object
-pricing:
-  input: '0.001'
-  output: '0.002'
-  unit: '0.001'
-  currency: USD

+ 0 - 56
cmd/commandline/model/templates/model_llm_gpt4.yaml

@@ -1,56 +0,0 @@
-model: gpt-4
-label:
-  zh_Hans: gpt-4
-  en_US: gpt-4
-model_type: llm
-features:
-  - multi-tool-call
-  - agent-thought
-  - stream-tool-call
-model_properties:
-  mode: chat
-  context_size: 8192
-parameter_rules:
-  - name: temperature
-    use_template: temperature
-  - name: top_p
-    use_template: top_p
-  - name: presence_penalty
-    use_template: presence_penalty
-  - name: frequency_penalty
-    use_template: frequency_penalty
-  - name: max_tokens
-    use_template: max_tokens
-    default: 512
-    min: 1
-    max: 8192
-  - name: seed
-    label:
-      zh_Hans: 种子
-      en_US: Seed
-    type: int
-    help:
-      zh_Hans: 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint
-        响应参数来监视变化。
-      en_US: If specified, model will make a best effort to sample deterministically,
-        such that repeated requests with the same seed and parameters should return
-        the same result. Determinism is not guaranteed, and you should refer to the
-        system_fingerprint response parameter to monitor changes in the backend.
-    required: false
-  - name: response_format
-    label:
-      zh_Hans: 回复格式
-      en_US: response_format
-    type: string
-    help:
-      zh_Hans: 指定模型必须输出的格式
-      en_US: specifying the format that the model must output
-    required: false
-    options:
-      - text
-      - json_object
-pricing:
-  input: '0.03'
-  output: '0.06'
-  unit: '0.001'
-  currency: USD

+ 0 - 5
cmd/commandline/model/templates/model_moderation_default.yaml

@@ -1,5 +0,0 @@
-model: default
-model_type: moderation
-model_properties:
-  max_chunks: 32
-  max_characters_per_chunk: 2000

+ 0 - 4
cmd/commandline/model/templates/model_rerank_default.yaml

@@ -1,4 +0,0 @@
-model: default
-model_type: rerank
-model_properties:
-  context_size: 8192

+ 0 - 5
cmd/commandline/model/templates/model_speech2text_default.yaml

@@ -1,5 +0,0 @@
-model: default
-model_type: speech2text
-model_properties:
-  file_upload_limit: 25
-  supported_file_extensions: flac,mp3,mp4,mpeg,mpga,m4a,ogg,wav,webm

+ 0 - 9
cmd/commandline/model/templates/model_textembedding_default.yaml

@@ -1,9 +0,0 @@
-model: default
-model_type: text-embedding
-model_properties:
-  context_size: 8191
-  max_chunks: 32
-pricing:
-  input: '0.00002'
-  unit: '0.001'
-  currency: USD

+ 0 - 31
cmd/commandline/model/templates/model_tts_default.yaml

@@ -1,31 +0,0 @@
-model: default
-model_type: tts
-model_properties:
-  default_voice: 'alloy'
-  voices:
-    - mode: 'alloy'
-      name: 'Alloy'
-      language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID']
-    - mode: 'echo'
-      name: 'Echo'
-      language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID']
-    - mode: 'fable'
-      name: 'Fable'
-      language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID']
-    - mode: 'onyx'
-      name: 'Onyx'
-      language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID']
-    - mode: 'nova'
-      name: 'Nova'
-      language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID']
-    - mode: 'shimmer'
-      name: 'Shimmer'
-      language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID']
-  word_limit: 3500
-  audio_type: 'mp3'
-  max_workers: 5
-pricing:
-  input: '0.015'
-  output: '0'
-  unit: '0.001'
-  currency: USD

+ 0 - 91
cmd/commandline/model/templates/provider_openai.yaml

@@ -1,91 +0,0 @@
-type: model
-provider:
-  provider: openai
-  label:
-    en_US: OpenAI
-  description:
-    en_US: Models provided by OpenAI, such as GPT-3.5-Turbo and GPT-4.
-    zh_Hans: OpenAI 提供的模型,例如 GPT-3.5-Turbo 和 GPT-4。
-  icon_small:
-    en_US: icon_s_en.svg
-  icon_large:
-    en_US: icon_l_en.svg
-  background: "#E5E7EB"
-  help:
-    title:
-      en_US: Get your API Key from OpenAI
-      zh_Hans: 从 OpenAI 获取 API Key
-    url:
-      en_US: https://platform.openai.com/account/api-keys
-  supported_model_types:
-    - llm
-    - text-embedding
-    - speech2text
-    - moderation
-    - tts
-  configurate_methods:
-    - predefined-model
-    - customizable-model
-  model_credential_schema:
-    model:
-      label:
-        en_US: Model Name
-        zh_Hans: 模型名称
-      placeholder:
-        en_US: Enter your model name
-        zh_Hans: 输入模型名称
-    credential_form_schemas:
-      - variable: openai_api_key
-        label:
-          en_US: API Key
-        type: secret-input
-        required: true
-        placeholder:
-          zh_Hans: 在此输入您的 API Key
-          en_US: Enter your API Key
-      - variable: openai_organization
-        label:
-          zh_Hans: 组织 ID
-          en_US: Organization
-        type: text-input
-        required: false
-        placeholder:
-          zh_Hans: 在此输入您的组织 ID
-          en_US: Enter your Organization ID
-      - variable: openai_api_base
-        label:
-          zh_Hans: API Base
-          en_US: API Base
-        type: text-input
-        required: false
-        placeholder:
-          zh_Hans: 在此输入您的 API Base
-          en_US: Enter your API Base
-  provider_credential_schema:
-    credential_form_schemas:
-      - variable: openai_api_key
-        label:
-          en_US: API Key
-        type: secret-input
-        required: true
-        placeholder:
-          zh_Hans: 在此输入您的 API Key
-          en_US: Enter your API Key
-      - variable: openai_organization
-        label:
-          zh_Hans: 组织 ID
-          en_US: Organization
-        type: text-input
-        required: false
-        placeholder:
-          zh_Hans: 在此输入您的组织 ID
-          en_US: Enter your Organization ID
-      - variable: openai_api_base
-        label:
-          zh_Hans: API Base
-          en_US: API Base
-        type: text-input
-        required: false
-        placeholder:
-          zh_Hans: 在此输入您的 API Base, 如:https://api.openai.com
-          en_US: Enter your API Base, e.g. https://api.openai.com

+ 0 - 21
cmd/commandline/plugin.go

@@ -15,24 +15,6 @@ var (
 		},
 	}
 
-	pluginModelCommand = &cobra.Command{
-		Use:   "model",
-		Short: "Model",
-		Long:  "Model management for plugin",
-	}
-
-	pluginToolCommand = &cobra.Command{
-		Use:   "tool",
-		Short: "Tool",
-		Long:  "Tool management for plugin",
-	}
-
-	pluginEndpointCommand = &cobra.Command{
-		Use:   "endpoint",
-		Short: "Endpoint",
-		Long:  "Endpoint management for plugin",
-	}
-
 	pluginPackageCommand = &cobra.Command{
 		Use:   "package",
 		Short: "Package",
@@ -71,9 +53,6 @@ endpoint				- allow plugin to register endpoint`,
 
 func init() {
 	pluginCommand.AddCommand(pluginInitCommand)
-	pluginCommand.AddCommand(pluginModelCommand)
-	pluginCommand.AddCommand(pluginToolCommand)
-	pluginCommand.AddCommand(pluginEndpointCommand)
 	pluginCommand.AddCommand(pluginPackageCommand)
 	pluginCommand.AddCommand(pluginPermissionCommand)
 	pluginPermissionCommand.AddCommand(pluginPermissionAddCommand)

+ 13 - 13
internal/types/entities/plugin_entities/model_configuration.go

@@ -198,19 +198,19 @@ type ModelProviderHelpEntity struct {
 }
 
 type ModelProviderConfiguration struct {
-	Provider                 string                           `json:"provider" validate:"required,lt=256"`
-	Label                    I18nObject                       `json:"label" validate:"required"`
-	Description              *I18nObject                      `json:"description" validate:"omitempty"`
-	IconSmall                *I18nObject                      `json:"icon_small" validate:"omitempty"`
-	IconLarge                *I18nObject                      `json:"icon_large" validate:"omitempty"`
-	Background               *string                          `json:"background" validate:"omitempty"`
-	Help                     *ModelProviderHelpEntity         `json:"help" validate:"omitempty"`
-	SupportedModelTypes      []ModelType                      `json:"supported_model_types" validate:"required,lte=16,dive,model_type"`
-	ConfigurateMethods       []ModelProviderConfigurateMethod `json:"configurate_methods" validate:"required,lte=16,dive,model_provider_configurate_method"`
-	Models                   []string                         `json:"models" validate:"required,lte=1024"`
-	ProviderCredentialSchema *ModelProviderCredentialSchema   `json:"provider_credential_schema" validate:"omitempty"`
-	ModelCredentialSchema    *ModelCredentialSchema           `json:"model_credential_schema" validate:"omitempty"`
-	ModelDeclarations        []ModelConfiguration             `json:"model_declarations"`
+	Provider                 string                           `json:"provider" yaml:"provider" validate:"required,lt=256"`
+	Label                    I18nObject                       `json:"label" yaml:"label" validate:"required"`
+	Description              *I18nObject                      `json:"description" yaml:"description,omitempty" validate:"omitempty"`
+	IconSmall                *I18nObject                      `json:"icon_small" yaml:"icon_small,omitempty" validate:"omitempty"`
+	IconLarge                *I18nObject                      `json:"icon_large" yaml:"icon_large,omitempty" validate:"omitempty"`
+	Background               *string                          `json:"background" yaml:"background,omitempty" validate:"omitempty"`
+	Help                     *ModelProviderHelpEntity         `json:"help" yaml:"help,omitempty" validate:"omitempty"`
+	SupportedModelTypes      []ModelType                      `json:"supported_model_types" yaml:"supported_model_types" validate:"required,lte=16,dive,model_type"`
+	ConfigurateMethods       []ModelProviderConfigurateMethod `json:"configurate_methods" yaml:"configurate_methods" validate:"required,lte=16,dive,model_provider_configurate_method"`
+	Models                   []string                         `json:"models" yaml:"models" validate:"required,lte=1024"`
+	ProviderCredentialSchema *ModelProviderCredentialSchema   `json:"provider_credential_schema" yaml:"provider_credential_schema,omitempty" validate:"omitempty"`
+	ModelCredentialSchema    *ModelCredentialSchema           `json:"model_credential_schema" yaml:"model_credential_schema,omitempty" validate:"omitempty"`
+	ModelDeclarations        []ModelConfiguration             `json:"model_declarations" yaml:"model_declarations,omitempty"`
 }
 
 func init() {
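The new yaml tags matter because Go YAML decoders only match a struct field to a snake_case key when a tag spells it out; by default the lowercased field name is used, so SupportedModelTypes would be matched against "supportedmodeltypes" and a manifest key like supported_model_types would be dropped. A minimal sketch of parsing a provider manifest, assuming gopkg.in/yaml.v3 (the decoder actually used by the daemon is not shown in this diff) and mirroring only a few fields of ModelProviderConfiguration:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// A minimal stand-in for a few ModelProviderConfiguration fields; the real
// struct lives in internal/types/entities/plugin_entities.
type providerManifest struct {
	Provider            string   `yaml:"provider"`
	SupportedModelTypes []string `yaml:"supported_model_types"`
	ConfigurateMethods  []string `yaml:"configurate_methods"`
}

func main() {
	manifest := []byte(`
provider: openai
supported_model_types:
  - llm
  - text-embedding
configurate_methods:
  - predefined-model
`)
	var m providerManifest
	if err := yaml.Unmarshal(manifest, &m); err != nil {
		panic(err)
	}
	// Without the yaml tags, the snake_case keys above would not be matched.
	fmt.Printf("%+v\n", m)
}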

+ 18 - 1
internal/utils/log/log.go

@@ -26,6 +26,14 @@ const (
 	LOG_LEVEL_ERROR = 3
 )
 
+const (
+	LOG_LEVEL_DEBUG_COLOR = "\033[34m"
+	LOG_LEVEL_INFO_COLOR  = "\033[32m"
+	LOG_LEVEL_WARN_COLOR  = "\033[33m"
+	LOG_LEVEL_ERROR_COLOR = "\033[31m"
+	LOG_LEVEL_COLOR_END   = "\033[0m"
+)
+
 func (l *Log) Debug(format string, stdout bool, v ...interface{}) {
 	if l.Level <= LOG_LEVEL_DEBUG {
 		l.writeLog("DEBUG", format, stdout, v...)
@@ -69,11 +77,20 @@ func (l *Log) writeLog(level string, format string, stdout bool, v ...interface{
 			panic(err)
 		}
 	}
+
 	//write log
 	format = fmt.Sprintf("["+level+"]"+format, v...)
 
 	if show_log && stdout {
-		logger.Output(4, format)
+		if level == "DEBUG" {
+			logger.Output(4, LOG_LEVEL_DEBUG_COLOR+format+LOG_LEVEL_COLOR_END)
+		} else if level == "INFO" {
+			logger.Output(4, LOG_LEVEL_INFO_COLOR+format+LOG_LEVEL_COLOR_END)
+		} else if level == "WARN" {
+			logger.Output(4, LOG_LEVEL_WARN_COLOR+format+LOG_LEVEL_COLOR_END)
+		} else if level == "ERROR" {
+			logger.Output(4, LOG_LEVEL_ERROR_COLOR+format+LOG_LEVEL_COLOR_END)
+		}
 	}
 
 	_, err := l.File.Write([]byte(format + "\n"))
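The added constants are standard ANSI SGR escape sequences (blue, green, yellow, red, plus a reset). A standalone illustration of the wrapping applied above, with made-up log messages:

package main

import "fmt"

// The same ANSI escape sequences introduced in log.go: blue for DEBUG,
// green for INFO, yellow for WARN, red for ERROR, and a reset suffix.
// The example messages are illustrative only.
const (
	colorDebug = "\033[34m"
	colorInfo  = "\033[32m"
	colorWarn  = "\033[33m"
	colorError = "\033[31m"
	colorReset = "\033[0m"
)

func main() {
	fmt.Println(colorDebug + "[DEBUG]loading plugin manifest" + colorReset)
	fmt.Println(colorInfo + "[INFO]plugin daemon started" + colorReset)
	fmt.Println(colorWarn + "[WARN]missing optional field" + colorReset)
	fmt.Println(colorError + "[ERROR]failed to read manifest" + colorReset)
}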