# app_dsl_service.py
import logging
import uuid
from enum import StrEnum
from typing import Optional, cast
from uuid import uuid4

import yaml  # type: ignore
from packaging import version
from pydantic import BaseModel, Field
from sqlalchemy import select
from sqlalchemy.orm import Session

from core.helper import ssrf_proxy
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.entities.plugin import PluginDependency
from core.workflow.nodes.enums import NodeType
from core.workflow.nodes.knowledge_retrieval.entities import KnowledgeRetrievalNodeData
from core.workflow.nodes.llm.entities import LLMNodeData
from core.workflow.nodes.parameter_extractor.entities import ParameterExtractorNodeData
from core.workflow.nodes.question_classifier.entities import QuestionClassifierNodeData
from core.workflow.nodes.tool.entities import ToolNodeData
from events.app_event import app_model_config_was_updated, app_was_created
from extensions.ext_redis import redis_client
from factories import variable_factory
from models import Account, App, AppMode
from models.model import AppModelConfig
from models.workflow import Workflow
from services.plugin.dependencies_analysis import DependenciesAnalysisService
from services.workflow_service import WorkflowService

logger = logging.getLogger(__name__)

# Redis key prefixes under which transient import / dependency-check state is stored.
IMPORT_INFO_REDIS_KEY_PREFIX = "app_import_info:"
CHECK_DEPENDENCIES_REDIS_KEY_PREFIX = "app_check_dependencies:"
IMPORT_INFO_REDIS_EXPIRY = 10 * 60  # 10 minutes
DSL_MAX_SIZE = 10 * 1024 * 1024  # 10MB
CURRENT_DSL_VERSION = "0.1.5"
class ImportMode(StrEnum):
    """Supported sources for importing an app DSL."""

    YAML_CONTENT = "yaml-content"  # raw YAML text supplied in the request
    YAML_URL = "yaml-url"  # YAML fetched from a remote URL
class ImportStatus(StrEnum):
    """Outcome of an import attempt (see `_check_version_compatibility` for version-driven states)."""

    COMPLETED = "completed"
    COMPLETED_WITH_WARNINGS = "completed-with-warnings"  # patch-version mismatch
    PENDING = "pending"  # major/minor mismatch; awaits user confirmation
    FAILED = "failed"
class Import(BaseModel):
    """Result returned for an app DSL import request."""

    id: str  # unique id of this import attempt
    status: ImportStatus
    app_id: Optional[str] = None  # set once an app was created or updated
    current_dsl_version: str = CURRENT_DSL_VERSION
    imported_dsl_version: str = ""  # version declared in the imported YAML
    error: str = ""  # populated when status is FAILED
class CheckDependenciesResult(BaseModel):
    """Plugin dependencies required by an imported app but missing from the workspace."""

    leaked_dependencies: list[PluginDependency] = Field(default_factory=list)
  51. def _check_version_compatibility(imported_version: str) -> ImportStatus:
  52. """Determine import status based on version comparison"""
  53. try:
  54. current_ver = version.parse(CURRENT_DSL_VERSION)
  55. imported_ver = version.parse(imported_version)
  56. except version.InvalidVersion:
  57. return ImportStatus.FAILED
  58. # Compare major version and minor version
  59. if current_ver.major != imported_ver.major or current_ver.minor != imported_ver.minor:
  60. return ImportStatus.PENDING
  61. if current_ver.micro != imported_ver.micro:
  62. return ImportStatus.COMPLETED_WITH_WARNINGS
  63. return ImportStatus.COMPLETED
class PendingData(BaseModel):
    """Import request payload stashed in Redis while waiting for user confirmation."""

    import_mode: str
    yaml_content: str
    name: str | None
    description: str | None
    icon_type: str | None
    icon: str | None
    icon_background: str | None
    app_id: str | None
class CheckDependenciesPendingData(BaseModel):
    """Dependencies captured at import time, cached in Redis for a later leak check."""

    dependencies: list[PluginDependency]
    app_id: str | None
class AppDslService:
    """Service for importing and exporting app DSL (YAML) definitions."""

    def __init__(self, session: Session):
        # SQLAlchemy session used for all database reads/writes in this service.
        self._session = session
    def import_app(
        self,
        *,
        account: Account,
        import_mode: str,
        yaml_content: Optional[str] = None,
        yaml_url: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        icon_type: Optional[str] = None,
        icon: Optional[str] = None,
        icon_background: Optional[str] = None,
        app_id: Optional[str] = None,
    ) -> Import:
        """Import an app from YAML content or URL.

        :param account: acting account; its current tenant scopes all lookups
        :param import_mode: one of the `ImportMode` values
        :param yaml_content: DSL text (required for yaml-content mode)
        :param yaml_url: DSL location (required for yaml-url mode)
        :param name/description/icon_*: optional overrides for values in the DSL
        :param app_id: when given, the existing app to overwrite
        :return: an `Import` describing the outcome; errors are reported via
            status=FAILED rather than raised
        :raises ValueError: only for an unknown import_mode
        """
        import_id = str(uuid.uuid4())

        # Validate import mode
        try:
            mode = ImportMode(import_mode)
        except ValueError:
            raise ValueError(f"Invalid import_mode: {import_mode}")

        # Get YAML content
        content: bytes | str = b""
        if mode == ImportMode.YAML_URL:
            if not yaml_url:
                return Import(
                    id=import_id,
                    status=ImportStatus.FAILED,
                    error="yaml_url is required when import_mode is yaml-url",
                )
            try:
                # tricky way to handle url from github to github raw url
                if yaml_url.startswith("https://github.com") and yaml_url.endswith((".yml", ".yaml")):
                    yaml_url = yaml_url.replace("https://github.com", "https://raw.githubusercontent.com")
                    yaml_url = yaml_url.replace("/blob/", "/")
                # ssrf_proxy guards against fetching internal addresses.
                response = ssrf_proxy.get(yaml_url.strip(), follow_redirects=True, timeout=(10, 10))
                response.raise_for_status()
                content = response.content
                if len(content) > DSL_MAX_SIZE:
                    return Import(
                        id=import_id,
                        status=ImportStatus.FAILED,
                        error="File size exceeds the limit of 10MB",
                    )
                if not content:
                    return Import(
                        id=import_id,
                        status=ImportStatus.FAILED,
                        error="Empty content from url",
                    )
                try:
                    content = cast(bytes, content).decode("utf-8")
                except UnicodeDecodeError as e:
                    return Import(
                        id=import_id,
                        status=ImportStatus.FAILED,
                        error=f"Error decoding content: {e}",
                    )
            except Exception as e:
                return Import(
                    id=import_id,
                    status=ImportStatus.FAILED,
                    error=f"Error fetching YAML from URL: {str(e)}",
                )
        elif mode == ImportMode.YAML_CONTENT:
            if not yaml_content:
                return Import(
                    id=import_id,
                    status=ImportStatus.FAILED,
                    error="yaml_content is required when import_mode is yaml-content",
                )
            content = yaml_content

        # Process YAML content
        try:
            # Parse YAML to validate format (safe_load: untrusted input)
            data = yaml.safe_load(content)
            if not isinstance(data, dict):
                return Import(
                    id=import_id,
                    status=ImportStatus.FAILED,
                    error="Invalid YAML format: content must be a mapping",
                )

            # Validate and fix DSL version
            if not data.get("version"):
                data["version"] = "0.1.0"
            if not data.get("kind") or data.get("kind") != "app":
                data["kind"] = "app"

            imported_version = data.get("version", "0.1.0")
            # check if imported_version is a float-like string (YAML may parse 0.1 as float)
            if not isinstance(imported_version, str):
                raise ValueError(f"Invalid version type, expected str, got {type(imported_version)}")
            status = _check_version_compatibility(imported_version)

            # Extract app data
            app_data = data.get("app")
            if not app_data:
                return Import(
                    id=import_id,
                    status=ImportStatus.FAILED,
                    error="Missing app data in YAML content",
                )

            # If app_id is provided, check if it exists (scoped to the caller's tenant)
            app = None
            if app_id:
                stmt = select(App).where(App.id == app_id, App.tenant_id == account.current_tenant_id)
                app = self._session.scalar(stmt)
                if not app:
                    return Import(
                        id=import_id,
                        status=ImportStatus.FAILED,
                        error="App not found",
                    )
                if app.mode not in [AppMode.WORKFLOW.value, AppMode.ADVANCED_CHAT.value]:
                    return Import(
                        id=import_id,
                        status=ImportStatus.FAILED,
                        error="Only workflow or advanced chat apps can be overwritten",
                    )

            # If major/minor version mismatch, stash the request in Redis and wait
            # for the user to call confirm_import().
            if status == ImportStatus.PENDING:
                pending_data = PendingData(
                    import_mode=import_mode,
                    yaml_content=content,
                    name=name,
                    description=description,
                    icon_type=icon_type,
                    icon=icon,
                    icon_background=icon_background,
                    app_id=app_id,
                )
                redis_client.setex(
                    f"{IMPORT_INFO_REDIS_KEY_PREFIX}{import_id}",
                    IMPORT_INFO_REDIS_EXPIRY,
                    pending_data.model_dump_json(),
                )
                return Import(
                    id=import_id,
                    status=status,
                    app_id=app_id,
                    imported_dsl_version=imported_version,
                )

            # Extract dependencies declared by the DSL (checked later via check_dependencies)
            dependencies = data.get("dependencies", [])
            check_dependencies_pending_data = None
            if dependencies:
                check_dependencies_pending_data = [PluginDependency.model_validate(d) for d in dependencies]

            # Create or update app
            app = self._create_or_update_app(
                app=app,
                data=data,
                account=account,
                name=name,
                description=description,
                icon_type=icon_type,
                icon=icon,
                icon_background=icon_background,
                dependencies=check_dependencies_pending_data,
            )

            return Import(
                id=import_id,
                status=status,
                app_id=app.id,
                imported_dsl_version=imported_version,
            )
        except yaml.YAMLError as e:
            return Import(
                id=import_id,
                status=ImportStatus.FAILED,
                error=f"Invalid YAML format: {str(e)}",
            )
        except Exception as e:
            # Catch-all boundary: log with traceback, report failure to caller.
            logger.exception("Failed to import app")
            return Import(
                id=import_id,
                status=ImportStatus.FAILED,
                error=str(e),
            )
    def confirm_import(self, *, import_id: str, account: Account) -> Import:
        """
        Confirm an import that requires confirmation.

        Retrieves the pending import payload stored in Redis by import_app()
        (status PENDING), applies it, and deletes the Redis entry on success.

        :param import_id: id returned by the original import_app() call
        :param account: acting account; its current tenant scopes the app lookup
        :return: an `Import` with status COMPLETED, or FAILED on any error
        """
        redis_key = f"{IMPORT_INFO_REDIS_KEY_PREFIX}{import_id}"
        pending_data = redis_client.get(redis_key)
        if not pending_data:
            # Entry expired (IMPORT_INFO_REDIS_EXPIRY) or never existed.
            return Import(
                id=import_id,
                status=ImportStatus.FAILED,
                error="Import information expired or does not exist",
            )
        try:
            if not isinstance(pending_data, str | bytes):
                return Import(
                    id=import_id,
                    status=ImportStatus.FAILED,
                    error="Invalid import information",
                )
            pending_data = PendingData.model_validate_json(pending_data)
            data = yaml.safe_load(pending_data.yaml_content)
            app = None
            if pending_data.app_id:
                stmt = select(App).where(App.id == pending_data.app_id, App.tenant_id == account.current_tenant_id)
                app = self._session.scalar(stmt)
            # Create or update app
            app = self._create_or_update_app(
                app=app,
                data=data,
                account=account,
                name=pending_data.name,
                description=pending_data.description,
                icon_type=pending_data.icon_type,
                icon=pending_data.icon,
                icon_background=pending_data.icon_background,
            )
            # Delete import info from Redis
            redis_client.delete(redis_key)
            return Import(
                id=import_id,
                status=ImportStatus.COMPLETED,
                app_id=app.id,
                current_dsl_version=CURRENT_DSL_VERSION,
                imported_dsl_version=data.get("version", "0.1.0"),
            )
        except Exception as e:
            logger.exception("Error confirming import")
            return Import(
                id=import_id,
                status=ImportStatus.FAILED,
                error=str(e),
            )
  307. def check_dependencies(
  308. self,
  309. *,
  310. app_model: App,
  311. ) -> CheckDependenciesResult:
  312. """Check dependencies"""
  313. # Get dependencies from Redis
  314. redis_key = f"{CHECK_DEPENDENCIES_REDIS_KEY_PREFIX}{app_model.id}"
  315. dependencies = redis_client.get(redis_key)
  316. if not dependencies:
  317. return CheckDependenciesResult()
  318. # Extract dependencies
  319. dependencies = CheckDependenciesPendingData.model_validate_json(dependencies)
  320. # Get leaked dependencies
  321. leaked_dependencies = DependenciesAnalysisService.get_leaked_dependencies(
  322. tenant_id=app_model.tenant_id, dependencies=dependencies.dependencies
  323. )
  324. return CheckDependenciesResult(
  325. leaked_dependencies=leaked_dependencies,
  326. )
    def _create_or_update_app(
        self,
        *,
        app: Optional[App],
        data: dict,
        account: Account,
        name: Optional[str] = None,
        description: Optional[str] = None,
        icon_type: Optional[str] = None,
        icon: Optional[str] = None,
        icon_background: Optional[str] = None,
        dependencies: Optional[list[PluginDependency]] = None,
    ) -> App:
        """Create a new app or update an existing one.

        :param app: existing app to overwrite, or None to create a new one
        :param data: parsed DSL mapping (must contain "app", and "workflow" or
            "model_config" depending on the app mode)
        :param account: acting account (used for created_by/updated_by/tenant)
        :param name/description/icon_*: explicit overrides; fall back to the DSL values
        :param dependencies: plugin dependencies to cache in Redis for a later check
        :raises ValueError: on missing mode/tenant/workflow/model_config
        """
        app_data = data.get("app", {})
        app_mode = app_data.get("mode")
        if not app_mode:
            raise ValueError("loss app mode")
        app_mode = AppMode(app_mode)

        # Set icon type (anything other than the two known types falls back to emoji)
        icon_type_value = icon_type or app_data.get("icon_type")
        if icon_type_value in ["emoji", "link"]:
            icon_type = icon_type_value
        else:
            icon_type = "emoji"
        icon = icon or str(app_data.get("icon", ""))

        if app:
            # Update existing app
            app.name = name or app_data.get("name", app.name)
            app.description = description or app_data.get("description", app.description)
            app.icon_type = icon_type
            app.icon = icon
            app.icon_background = icon_background or app_data.get("icon_background", app.icon_background)
            app.updated_by = account.id
        else:
            if account.current_tenant_id is None:
                raise ValueError("Current tenant is not set")

            # Create new app
            app = App()
            app.id = str(uuid4())
            app.tenant_id = account.current_tenant_id
            app.mode = app_mode.value
            app.name = name or app_data.get("name", "")
            app.description = description or app_data.get("description", "")
            app.icon_type = icon_type
            app.icon = icon
            app.icon_background = icon_background or app_data.get("icon_background", "#FFFFFF")
            app.enable_site = True
            app.enable_api = True
            app.use_icon_as_answer_icon = app_data.get("use_icon_as_answer_icon", False)
            app.created_by = account.id
            app.updated_by = account.id
            self._session.add(app)
            self._session.commit()
            app_was_created.send(app, account=account)

        # save dependencies for a later check_dependencies() call
        if dependencies:
            redis_client.setex(
                f"{CHECK_DEPENDENCIES_REDIS_KEY_PREFIX}{app.id}",
                IMPORT_INFO_REDIS_EXPIRY,
                CheckDependenciesPendingData(app_id=app.id, dependencies=dependencies).model_dump_json(),
            )

        # Initialize app based on mode
        if app_mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
            workflow_data = data.get("workflow")
            if not workflow_data or not isinstance(workflow_data, dict):
                raise ValueError("Missing workflow data for workflow/advanced chat app")

            environment_variables_list = workflow_data.get("environment_variables", [])
            environment_variables = [
                variable_factory.build_environment_variable_from_mapping(obj) for obj in environment_variables_list
            ]
            conversation_variables_list = workflow_data.get("conversation_variables", [])
            conversation_variables = [
                variable_factory.build_conversation_variable_from_mapping(obj) for obj in conversation_variables_list
            ]

            workflow_service = WorkflowService()
            # Reuse the current draft's unique_hash so the sync replaces it in place.
            current_draft_workflow = workflow_service.get_draft_workflow(app_model=app)
            if current_draft_workflow:
                unique_hash = current_draft_workflow.unique_hash
            else:
                unique_hash = None
            workflow_service.sync_draft_workflow(
                app_model=app,
                graph=workflow_data.get("graph", {}),
                features=workflow_data.get("features", {}),
                unique_hash=unique_hash,
                account=account,
                environment_variables=environment_variables,
                conversation_variables=conversation_variables,
            )
        elif app_mode in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.COMPLETION}:
            # Initialize model config
            model_config = data.get("model_config")
            if not model_config or not isinstance(model_config, dict):
                raise ValueError("Missing model_config for chat/agent-chat/completion app")
            # Initialize or update model config
            # NOTE(review): only runs when the app has no existing config; since
            # overwriting is restricted to workflow/advanced-chat apps upstream,
            # chat-family apps should always be newly created here — confirm.
            if not app.app_model_config:
                app_model_config = AppModelConfig().from_model_config_dict(model_config)
                app_model_config.id = str(uuid4())
                app_model_config.app_id = app.id
                app_model_config.created_by = account.id
                app_model_config.updated_by = account.id
                app.app_model_config_id = app_model_config.id
                self._session.add(app_model_config)
                app_model_config_was_updated.send(app, app_model_config=app_model_config)
        else:
            raise ValueError("Invalid app mode")
        return app
  435. @classmethod
  436. def export_dsl(cls, app_model: App, include_secret: bool = False) -> str:
  437. """
  438. Export app
  439. :param app_model: App instance
  440. :return:
  441. """
  442. app_mode = AppMode.value_of(app_model.mode)
  443. export_data = {
  444. "version": CURRENT_DSL_VERSION,
  445. "kind": "app",
  446. "app": {
  447. "name": app_model.name,
  448. "mode": app_model.mode,
  449. "icon": "🤖" if app_model.icon_type == "image" else app_model.icon,
  450. "icon_background": "#FFEAD5" if app_model.icon_type == "image" else app_model.icon_background,
  451. "description": app_model.description,
  452. "use_icon_as_answer_icon": app_model.use_icon_as_answer_icon,
  453. },
  454. }
  455. if app_mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
  456. cls._append_workflow_export_data(
  457. export_data=export_data, app_model=app_model, include_secret=include_secret
  458. )
  459. else:
  460. cls._append_model_config_export_data(export_data, app_model)
  461. return yaml.dump(export_data, allow_unicode=True) # type: ignore
  462. @classmethod
  463. def _append_workflow_export_data(cls, *, export_data: dict, app_model: App, include_secret: bool) -> None:
  464. """
  465. Append workflow export data
  466. :param export_data: export data
  467. :param app_model: App instance
  468. """
  469. workflow_service = WorkflowService()
  470. workflow = workflow_service.get_draft_workflow(app_model)
  471. if not workflow:
  472. raise ValueError("Missing draft workflow configuration, please check.")
  473. export_data["workflow"] = workflow.to_dict(include_secret=include_secret)
  474. dependencies = cls._extract_dependencies_from_workflow(workflow)
  475. export_data["dependencies"] = [
  476. jsonable_encoder(d.model_dump())
  477. for d in DependenciesAnalysisService.generate_dependencies(
  478. tenant_id=app_model.tenant_id, dependencies=dependencies
  479. )
  480. ]
  481. @classmethod
  482. def _append_model_config_export_data(cls, export_data: dict, app_model: App) -> None:
  483. """
  484. Append model config export data
  485. :param export_data: export data
  486. :param app_model: App instance
  487. """
  488. app_model_config = app_model.app_model_config
  489. if not app_model_config:
  490. raise ValueError("Missing app configuration, please check.")
  491. export_data["model_config"] = app_model_config.to_dict()
  492. dependencies = cls._extract_dependencies_from_model_config(app_model_config)
  493. export_data["dependencies"] = [
  494. jsonable_encoder(d.model_dump())
  495. for d in DependenciesAnalysisService.generate_dependencies(
  496. tenant_id=app_model.tenant_id, dependencies=dependencies
  497. )
  498. ]
    @classmethod
    def _extract_dependencies_from_workflow(cls, workflow: Workflow) -> list[str]:
        """
        Extract dependencies from workflow

        Walks the workflow graph nodes and collects tool/model-provider
        dependencies per node type. Per-node errors are logged and skipped
        so one malformed node does not abort the export.

        :param workflow: Workflow instance
        :return: dependencies list format like ["langgenius/google"]
        """
        graph = workflow.graph_dict
        dependencies = []
        for node in graph.get("nodes", []):
            try:
                typ = node.get("data", {}).get("type")
                match typ:
                    case NodeType.TOOL.value:
                        tool_entity = ToolNodeData(**node["data"])
                        dependencies.append(
                            DependenciesAnalysisService.analyze_tool_dependency(tool_entity.provider_id),
                        )
                    case NodeType.LLM.value:
                        llm_entity = LLMNodeData(**node["data"])
                        dependencies.append(
                            DependenciesAnalysisService.analyze_model_provider_dependency(llm_entity.model.provider),
                        )
                    case NodeType.QUESTION_CLASSIFIER.value:
                        question_classifier_entity = QuestionClassifierNodeData(**node["data"])
                        dependencies.append(
                            DependenciesAnalysisService.analyze_model_provider_dependency(
                                question_classifier_entity.model.provider
                            ),
                        )
                    case NodeType.PARAMETER_EXTRACTOR.value:
                        parameter_extractor_entity = ParameterExtractorNodeData(**node["data"])
                        dependencies.append(
                            DependenciesAnalysisService.analyze_model_provider_dependency(
                                parameter_extractor_entity.model.provider
                            ),
                        )
                    case NodeType.KNOWLEDGE_RETRIEVAL.value:
                        # Knowledge retrieval may depend on a reranking model,
                        # an embedding provider (weighted score), or a single
                        # retrieval model, depending on its configuration.
                        knowledge_retrieval_entity = KnowledgeRetrievalNodeData(**node["data"])
                        if knowledge_retrieval_entity.retrieval_mode == "multiple":
                            if knowledge_retrieval_entity.multiple_retrieval_config:
                                if (
                                    knowledge_retrieval_entity.multiple_retrieval_config.reranking_mode
                                    == "reranking_model"
                                ):
                                    if knowledge_retrieval_entity.multiple_retrieval_config.reranking_model:
                                        dependencies.append(
                                            DependenciesAnalysisService.analyze_model_provider_dependency(
                                                knowledge_retrieval_entity.multiple_retrieval_config.reranking_model.provider
                                            ),
                                        )
                                elif (
                                    knowledge_retrieval_entity.multiple_retrieval_config.reranking_mode
                                    == "weighted_score"
                                ):
                                    if knowledge_retrieval_entity.multiple_retrieval_config.weights:
                                        vector_setting = (
                                            knowledge_retrieval_entity.multiple_retrieval_config.weights.vector_setting
                                        )
                                        dependencies.append(
                                            DependenciesAnalysisService.analyze_model_provider_dependency(
                                                vector_setting.embedding_provider_name
                                            ),
                                        )
                        elif knowledge_retrieval_entity.retrieval_mode == "single":
                            model_config = knowledge_retrieval_entity.single_retrieval_config
                            if model_config:
                                dependencies.append(
                                    DependenciesAnalysisService.analyze_model_provider_dependency(
                                        model_config.model.provider
                                    ),
                                )
                    case _:
                        # TODO: Handle default case or unknown node types
                        pass
            except Exception as e:
                logger.exception("Error extracting node dependency", exc_info=e)

        return dependencies
    @classmethod
    def _extract_dependencies_from_model_config(cls, model_config: AppModelConfig) -> list[str]:
        """
        Extract dependencies from model config

        Collects provider dependencies from the completion model, dataset
        reranking models, and agent tools. Any extraction error is logged
        and the partial result is returned.

        :param model_config: AppModelConfig instance
        :return: dependencies list format like ["langgenius/google:1.0.0@abcdef1234567890"]
        """
        dependencies = []
        try:
            # completion model
            model_dict = model_config.model_dict
            if model_dict:
                dependencies.append(
                    DependenciesAnalysisService.analyze_model_provider_dependency(model_dict.get("provider", ""))
                )

            # reranking model
            dataset_configs = model_config.dataset_configs_dict
            if dataset_configs:
                # NOTE(review): the nested "datasets" -> "datasets" lookup mirrors the
                # stored config shape — confirm against the producer of this dict.
                for dataset_config in dataset_configs.get("datasets", {}).get("datasets", []):
                    if dataset_config.get("reranking_model"):
                        # NOTE(review): if "reranking_provider_name" is a plain string,
                        # the trailing .get("provider") raises and is swallowed by the
                        # broad except below — confirm the intended shape.
                        dependencies.append(
                            DependenciesAnalysisService.analyze_model_provider_dependency(
                                dataset_config.get("reranking_model", {})
                                .get("reranking_provider_name", {})
                                .get("provider")
                            )
                        )

            # tools
            agent_configs = model_config.agent_mode_dict
            if agent_configs:
                for agent_config in agent_configs.get("tools", []):
                    dependencies.append(
                        DependenciesAnalysisService.analyze_tool_dependency(agent_config.get("provider_id"))
                    )
        except Exception as e:
            logger.exception("Error extracting model config dependency", exc_info=e)

        return dependencies
  614. @classmethod
  615. def get_leaked_dependencies(cls, tenant_id: str, dsl_dependencies: list[dict]) -> list[PluginDependency]:
  616. """
  617. Returns the leaked dependencies in current workspace
  618. """
  619. dependencies = [PluginDependency(**dep) for dep in dsl_dependencies]
  620. if not dependencies:
  621. return []
  622. return DependenciesAnalysisService.get_leaked_dependencies(tenant_id=tenant_id, dependencies=dependencies)