
Add new integration with Opik Tracking tool (#11501)

Boris Feld, 4 months ago
parent
commit
69d58fbb50
23 files changed, 1380 insertions(+), 26 deletions(-)
  1. +32 -0   api/core/ops/entities/config_entity.py
  2. +0 -0    api/core/ops/opik_trace/__init__.py
  3. +469 -0  api/core/ops/opik_trace/opik_trace.py
  4. +8 -0    api/core/ops/ops_trace_manager.py
  5. +166 -2  api/poetry.lock
  6. +1 -0    api/pyproject.toml
  7. +10 -1   api/services/ops_service.py
  8. +66 -10  web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx
  9. +1 -0    web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts
  10. +22 -5  web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx
  11. +51 -5  web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx
  12. +2 -1   web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-panel.tsx
  13. +8 -0   web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts
  14. +87 -0  web/app/components/base/icons/assets/public/tracing/opik-icon-big.svg
  15. +88 -0  web/app/components/base/icons/assets/public/tracing/opik-icon.svg
  16. +163 -0 web/app/components/base/icons/src/public/tracing/OpikIcon.json
  17. +16 -0  web/app/components/base/icons/src/public/tracing/OpikIcon.tsx
  18. +162 -0 web/app/components/base/icons/src/public/tracing/OpikIconBig.json
  19. +16 -0  web/app/components/base/icons/src/public/tracing/OpikIconBig.tsx
  20. +2 -0   web/app/components/base/icons/src/public/tracing/index.ts
  21. +4 -0   web/i18n/en-US/app.ts
  22. +4 -0   web/i18n/zh-Hans/app.ts
  23. +2 -2   web/models/app.ts

+ 32 - 0
api/core/ops/entities/config_entity.py

@@ -6,6 +6,7 @@ from pydantic import BaseModel, ValidationInfo, field_validator
 class TracingProviderEnum(Enum):
     LANGFUSE = "langfuse"
     LANGSMITH = "langsmith"
+    OPIK = "opik"
 
 
 class BaseTracingConfig(BaseModel):
@@ -56,5 +57,36 @@ class LangSmithConfig(BaseTracingConfig):
         return v
 
 
+class OpikConfig(BaseTracingConfig):
+    """
+    Model class for Opik tracing config.
+    """
+
+    api_key: str | None = None
+    project: str | None = None
+    workspace: str | None = None
+    url: str = "https://www.comet.com/opik/api/"
+
+    @field_validator("project")
+    @classmethod
+    def project_validator(cls, v, info: ValidationInfo):
+        if v is None or v == "":
+            v = "Default Project"
+
+        return v
+
+    @field_validator("url")
+    @classmethod
+    def url_validator(cls, v, info: ValidationInfo):
+        if v is None or v == "":
+            v = "https://www.comet.com/opik/api/"
+        if not v.startswith(("https://", "http://")):
+            raise ValueError("url must start with https:// or http://")
+        if not v.endswith("/api/"):
+            raise ValueError("url should ends with /api/")
+
+        return v
+
+
 OPS_FILE_PATH = "ops_trace/"
 OPS_TRACE_FAILED_KEY = "FAILED_OPS_TRACE"
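
A quick sketch of how the new OpikConfig validators behave (a usage illustration, not part of the diff; assumes the Dify api package and its dependencies are importable, and uses placeholder values):

    from core.ops.entities.config_entity import OpikConfig

    # Empty project/url fall back to the defaults set by the validators above.
    cfg = OpikConfig(api_key="placeholder-key", project="", workspace="my-workspace", url="")
    print(cfg.project)  # "Default Project"
    print(cfg.url)      # "https://www.comet.com/opik/api/"

    # A url without the trailing /api/ is rejected; pydantic v2's ValidationError
    # is a subclass of ValueError, so this catch works.
    try:
        OpikConfig(url="https://example.com")
    except ValueError as exc:
        print(exc)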

+ 0 - 0
api/core/ops/opik_trace/__init__.py


+ 469 - 0
api/core/ops/opik_trace/opik_trace.py

@@ -0,0 +1,469 @@
+import json
+import logging
+import os
+import uuid
+from datetime import datetime, timedelta
+from typing import Optional, cast
+
+from opik import Opik, Trace
+from opik.id_helpers import uuid4_to_uuid7
+
+from core.ops.base_trace_instance import BaseTraceInstance
+from core.ops.entities.config_entity import OpikConfig
+from core.ops.entities.trace_entity import (
+    BaseTraceInfo,
+    DatasetRetrievalTraceInfo,
+    GenerateNameTraceInfo,
+    MessageTraceInfo,
+    ModerationTraceInfo,
+    SuggestedQuestionTraceInfo,
+    ToolTraceInfo,
+    TraceTaskName,
+    WorkflowTraceInfo,
+)
+from extensions.ext_database import db
+from models.model import EndUser, MessageFile
+from models.workflow import WorkflowNodeExecution
+
+logger = logging.getLogger(__name__)
+
+
+def wrap_dict(key_name, data):
+    """Make sure that the input data is a dict"""
+    if not isinstance(data, dict):
+        return {key_name: data}
+
+    return data
+
+
+def wrap_metadata(metadata, **kwargs):
+    """Add common metatada to all Traces and Spans"""
+    metadata["created_from"] = "dify"
+
+    metadata.update(kwargs)
+
+    return metadata
+
+
+def prepare_opik_uuid(user_datetime: Optional[datetime], user_uuid: Optional[str]):
+    """Opik needs UUIDv7 while Dify uses UUIDv4 for identifier of most
+    messages and objects. The type-hints of BaseTraceInfo indicates that
+    objects start_time and message_id could be null which means we cannot map
+    it to a UUIDv7. Given that we have no way to identify that object
+    uniquely, generate a new random one UUIDv7 in that case.
+    """
+
+    if user_datetime is None:
+        user_datetime = datetime.now()
+
+    if user_uuid is None:
+        user_uuid = str(uuid.uuid4())
+
+    return uuid4_to_uuid7(user_datetime, user_uuid)
+
+
+class OpikDataTrace(BaseTraceInstance):
+    def __init__(
+        self,
+        opik_config: OpikConfig,
+    ):
+        super().__init__(opik_config)
+        self.opik_client = Opik(
+            project_name=opik_config.project,
+            workspace=opik_config.workspace,
+            host=opik_config.url,
+            api_key=opik_config.api_key,
+        )
+        self.project = opik_config.project
+        self.file_base_url = os.getenv("FILES_URL", "http://127.0.0.1:5001")
+
+    def trace(self, trace_info: BaseTraceInfo):
+        if isinstance(trace_info, WorkflowTraceInfo):
+            self.workflow_trace(trace_info)
+        if isinstance(trace_info, MessageTraceInfo):
+            self.message_trace(trace_info)
+        if isinstance(trace_info, ModerationTraceInfo):
+            self.moderation_trace(trace_info)
+        if isinstance(trace_info, SuggestedQuestionTraceInfo):
+            self.suggested_question_trace(trace_info)
+        if isinstance(trace_info, DatasetRetrievalTraceInfo):
+            self.dataset_retrieval_trace(trace_info)
+        if isinstance(trace_info, ToolTraceInfo):
+            self.tool_trace(trace_info)
+        if isinstance(trace_info, GenerateNameTraceInfo):
+            self.generate_name_trace(trace_info)
+
+    def workflow_trace(self, trace_info: WorkflowTraceInfo):
+        dify_trace_id = trace_info.workflow_run_id
+        opik_trace_id = prepare_opik_uuid(trace_info.start_time, dify_trace_id)
+        workflow_metadata = wrap_metadata(
+            trace_info.metadata, message_id=trace_info.message_id, workflow_app_log_id=trace_info.workflow_app_log_id
+        )
+        root_span_id = None
+
+        if trace_info.message_id:
+            dify_trace_id = trace_info.message_id
+            opik_trace_id = prepare_opik_uuid(trace_info.start_time, dify_trace_id)
+
+            trace_data = {
+                "id": opik_trace_id,
+                "name": TraceTaskName.MESSAGE_TRACE.value,
+                "start_time": trace_info.start_time,
+                "end_time": trace_info.end_time,
+                "metadata": workflow_metadata,
+                "input": wrap_dict("input", trace_info.workflow_run_inputs),
+                "output": wrap_dict("output", trace_info.workflow_run_outputs),
+                "tags": ["message", "workflow"],
+                "project_name": self.project,
+            }
+            self.add_trace(trace_data)
+
+            root_span_id = prepare_opik_uuid(trace_info.start_time, trace_info.workflow_run_id)
+            span_data = {
+                "id": root_span_id,
+                "parent_span_id": None,
+                "trace_id": opik_trace_id,
+                "name": TraceTaskName.WORKFLOW_TRACE.value,
+                "input": wrap_dict("input", trace_info.workflow_run_inputs),
+                "output": wrap_dict("output", trace_info.workflow_run_outputs),
+                "start_time": trace_info.start_time,
+                "end_time": trace_info.end_time,
+                "metadata": workflow_metadata,
+                "tags": ["workflow"],
+                "project_name": self.project,
+            }
+            self.add_span(span_data)
+        else:
+            trace_data = {
+                "id": opik_trace_id,
+                "name": TraceTaskName.MESSAGE_TRACE.value,
+                "start_time": trace_info.start_time,
+                "end_time": trace_info.end_time,
+                "metadata": workflow_metadata,
+                "input": wrap_dict("input", trace_info.workflow_run_inputs),
+                "output": wrap_dict("output", trace_info.workflow_run_outputs),
+                "tags": ["workflow"],
+                "project_name": self.project,
+            }
+            self.add_trace(trace_data)
+
+        # through workflow_run_id get all_nodes_execution
+        workflow_nodes_execution_id_records = (
+            db.session.query(WorkflowNodeExecution.id)
+            .filter(WorkflowNodeExecution.workflow_run_id == trace_info.workflow_run_id)
+            .all()
+        )
+
+        for node_execution_id_record in workflow_nodes_execution_id_records:
+            node_execution = (
+                db.session.query(
+                    WorkflowNodeExecution.id,
+                    WorkflowNodeExecution.tenant_id,
+                    WorkflowNodeExecution.app_id,
+                    WorkflowNodeExecution.title,
+                    WorkflowNodeExecution.node_type,
+                    WorkflowNodeExecution.status,
+                    WorkflowNodeExecution.inputs,
+                    WorkflowNodeExecution.outputs,
+                    WorkflowNodeExecution.created_at,
+                    WorkflowNodeExecution.elapsed_time,
+                    WorkflowNodeExecution.process_data,
+                    WorkflowNodeExecution.execution_metadata,
+                )
+                .filter(WorkflowNodeExecution.id == node_execution_id_record.id)
+                .first()
+            )
+
+            if not node_execution:
+                continue
+
+            node_execution_id = node_execution.id
+            tenant_id = node_execution.tenant_id
+            app_id = node_execution.app_id
+            node_name = node_execution.title
+            node_type = node_execution.node_type
+            status = node_execution.status
+            if node_type == "llm":
+                inputs = (
+                    json.loads(node_execution.process_data).get("prompts", {}) if node_execution.process_data else {}
+                )
+            else:
+                inputs = json.loads(node_execution.inputs) if node_execution.inputs else {}
+            outputs = json.loads(node_execution.outputs) if node_execution.outputs else {}
+            created_at = node_execution.created_at or datetime.now()
+            elapsed_time = node_execution.elapsed_time
+            finished_at = created_at + timedelta(seconds=elapsed_time)
+
+            execution_metadata = (
+                json.loads(node_execution.execution_metadata) if node_execution.execution_metadata else {}
+            )
+            metadata = execution_metadata.copy()
+            metadata.update(
+                {
+                    "workflow_run_id": trace_info.workflow_run_id,
+                    "node_execution_id": node_execution_id,
+                    "tenant_id": tenant_id,
+                    "app_id": app_id,
+                    "app_name": node_name,
+                    "node_type": node_type,
+                    "status": status,
+                }
+            )
+
+            process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
+
+            provider = None
+            model = None
+            total_tokens = 0
+            completion_tokens = 0
+            prompt_tokens = 0
+
+            if process_data and process_data.get("model_mode") == "chat":
+                run_type = "llm"
+                provider = process_data.get("model_provider", None)
+                model = process_data.get("model_name", "")
+                metadata.update(
+                    {
+                        "ls_provider": provider,
+                        "ls_model_name": model,
+                    }
+                )
+
+                try:
+                    if outputs.get("usage"):
+                        total_tokens = outputs["usage"].get("total_tokens", 0)
+                        prompt_tokens = outputs["usage"].get("prompt_tokens", 0)
+                        completion_tokens = outputs["usage"].get("completion_tokens", 0)
+                except Exception:
+                    logger.error("Failed to extract usage", exc_info=True)
+
+            else:
+                run_type = "tool"
+
+            parent_span_id = trace_info.workflow_app_log_id or trace_info.workflow_run_id
+
+            if not total_tokens:
+                total_tokens = execution_metadata.get("total_tokens", 0)
+
+            span_data = {
+                "trace_id": opik_trace_id,
+                "id": prepare_opik_uuid(created_at, node_execution_id),
+                "parent_span_id": prepare_opik_uuid(trace_info.start_time, parent_span_id),
+                "name": node_type,
+                "type": run_type,
+                "start_time": created_at,
+                "end_time": finished_at,
+                "metadata": wrap_metadata(metadata),
+                "input": wrap_dict("input", inputs),
+                "output": wrap_dict("output", outputs),
+                "tags": ["node_execution"],
+                "project_name": self.project,
+                "usage": {
+                    "total_tokens": total_tokens,
+                    "completion_tokens": completion_tokens,
+                    "prompt_tokens": prompt_tokens,
+                },
+                "model": model,
+                "provider": provider,
+            }
+
+            self.add_span(span_data)
+
+    def message_trace(self, trace_info: MessageTraceInfo):
+        # get message file data
+        file_list = cast(list[str], trace_info.file_list) or []
+        message_file_data: Optional[MessageFile] = trace_info.message_file_data
+
+        if message_file_data is not None:
+            file_url = f"{self.file_base_url}/{message_file_data.url}" if message_file_data else ""
+            file_list.append(file_url)
+
+        message_data = trace_info.message_data
+        if message_data is None:
+            return
+
+        metadata = trace_info.metadata
+        message_id = trace_info.message_id
+
+        user_id = message_data.from_account_id
+        metadata["user_id"] = user_id
+        metadata["file_list"] = file_list
+
+        if message_data.from_end_user_id:
+            end_user_data: Optional[EndUser] = (
+                db.session.query(EndUser).filter(EndUser.id == message_data.from_end_user_id).first()
+            )
+            if end_user_data is not None:
+                end_user_id = end_user_data.session_id
+                metadata["end_user_id"] = end_user_id
+
+        trace_data = {
+            "id": prepare_opik_uuid(trace_info.start_time, message_id),
+            "name": TraceTaskName.MESSAGE_TRACE.value,
+            "start_time": trace_info.start_time,
+            "end_time": trace_info.end_time,
+            "metadata": wrap_metadata(metadata),
+            "input": trace_info.inputs,
+            "output": message_data.answer,
+            "tags": ["message", str(trace_info.conversation_mode)],
+            "project_name": self.project,
+        }
+        trace = self.add_trace(trace_data)
+
+        span_data = {
+            "trace_id": trace.id,
+            "name": "llm",
+            "type": "llm",
+            "start_time": trace_info.start_time,
+            "end_time": trace_info.end_time,
+            "metadata": wrap_metadata(metadata),
+            "input": {"input": trace_info.inputs},
+            "output": {"output": message_data.answer},
+            "tags": ["llm", str(trace_info.conversation_mode)],
+            "usage": {
+                "completion_tokens": trace_info.answer_tokens,
+                "prompt_tokens": trace_info.message_tokens,
+                "total_tokens": trace_info.total_tokens,
+            },
+            "project_name": self.project,
+        }
+        self.add_span(span_data)
+
+    def moderation_trace(self, trace_info: ModerationTraceInfo):
+        if trace_info.message_data is None:
+            return
+
+        start_time = trace_info.start_time or trace_info.message_data.created_at
+
+        span_data = {
+            "trace_id": prepare_opik_uuid(start_time, trace_info.message_id),
+            "name": TraceTaskName.MODERATION_TRACE.value,
+            "type": "tool",
+            "start_time": start_time,
+            "end_time": trace_info.end_time or trace_info.message_data.updated_at,
+            "metadata": wrap_metadata(trace_info.metadata),
+            "input": wrap_dict("input", trace_info.inputs),
+            "output": {
+                "action": trace_info.action,
+                "flagged": trace_info.flagged,
+                "preset_response": trace_info.preset_response,
+                "inputs": trace_info.inputs,
+            },
+            "tags": ["moderation"],
+        }
+
+        self.add_span(span_data)
+
+    def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo):
+        message_data = trace_info.message_data
+        if message_data is None:
+            return
+
+        start_time = trace_info.start_time or message_data.created_at
+
+        span_data = {
+            "trace_id": prepare_opik_uuid(start_time, trace_info.message_id),
+            "name": TraceTaskName.SUGGESTED_QUESTION_TRACE.value,
+            "type": "tool",
+            "start_time": start_time,
+            "end_time": trace_info.end_time or message_data.updated_at,
+            "metadata": wrap_metadata(trace_info.metadata),
+            "input": wrap_dict("input", trace_info.inputs),
+            "output": wrap_dict("output", trace_info.suggested_question),
+            "tags": ["suggested_question"],
+        }
+
+        self.add_span(span_data)
+
+    def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo):
+        if trace_info.message_data is None:
+            return
+
+        start_time = trace_info.start_time or trace_info.message_data.created_at
+
+        span_data = {
+            "trace_id": prepare_opik_uuid(start_time, trace_info.message_id),
+            "name": TraceTaskName.DATASET_RETRIEVAL_TRACE.value,
+            "type": "tool",
+            "start_time": start_time,
+            "end_time": trace_info.end_time or trace_info.message_data.updated_at,
+            "metadata": wrap_metadata(trace_info.metadata),
+            "input": wrap_dict("input", trace_info.inputs),
+            "output": {"documents": trace_info.documents},
+            "tags": ["dataset_retrieval"],
+        }
+
+        self.add_span(span_data)
+
+    def tool_trace(self, trace_info: ToolTraceInfo):
+        span_data = {
+            "trace_id": prepare_opik_uuid(trace_info.start_time, trace_info.message_id),
+            "name": trace_info.tool_name,
+            "type": "tool",
+            "start_time": trace_info.start_time,
+            "end_time": trace_info.end_time,
+            "metadata": wrap_metadata(trace_info.metadata),
+            "input": wrap_dict("input", trace_info.tool_inputs),
+            "output": wrap_dict("output", trace_info.tool_outputs),
+            "tags": ["tool", trace_info.tool_name],
+        }
+
+        self.add_span(span_data)
+
+    def generate_name_trace(self, trace_info: GenerateNameTraceInfo):
+        trace_data = {
+            "id": prepare_opik_uuid(trace_info.start_time, trace_info.message_id),
+            "name": TraceTaskName.GENERATE_NAME_TRACE.value,
+            "start_time": trace_info.start_time,
+            "end_time": trace_info.end_time,
+            "metadata": wrap_metadata(trace_info.metadata),
+            "input": trace_info.inputs,
+            "output": trace_info.outputs,
+            "tags": ["generate_name"],
+            "project_name": self.project,
+        }
+
+        trace = self.add_trace(trace_data)
+
+        span_data = {
+            "trace_id": trace.id,
+            "name": TraceTaskName.GENERATE_NAME_TRACE.value,
+            "start_time": trace_info.start_time,
+            "end_time": trace_info.end_time,
+            "metadata": wrap_metadata(trace_info.metadata),
+            "input": wrap_dict("input", trace_info.inputs),
+            "output": wrap_dict("output", trace_info.outputs),
+            "tags": ["generate_name"],
+        }
+
+        self.add_span(span_data)
+
+    def add_trace(self, opik_trace_data: dict) -> Trace:
+        try:
+            trace = self.opik_client.trace(**opik_trace_data)
+            logger.debug("Opik Trace created successfully")
+            return trace
+        except Exception as e:
+            raise ValueError(f"Opik Failed to create trace: {str(e)}")
+
+    def add_span(self, opik_span_data: dict):
+        try:
+            self.opik_client.span(**opik_span_data)
+            logger.debug("Opik Span created successfully")
+        except Exception as e:
+            raise ValueError(f"Opik Failed to create span: {str(e)}")
+
+    def api_check(self):
+        try:
+            self.opik_client.auth_check()
+            return True
+        except Exception as e:
+            logger.info(f"Opik API check failed: {str(e)}", exc_info=True)
+            raise ValueError(f"Opik API check failed: {str(e)}")
+
+    def get_project_url(self):
+        try:
+            return self.opik_client.get_project_url(project_name=self.project)
+        except Exception as e:
+            logger.info(f"Opik get run url failed: {str(e)}", exc_info=True)
+            raise ValueError(f"Opik get run url failed: {str(e)}")

+ 8 - 0
api/core/ops/ops_trace_manager.py

@@ -17,6 +17,7 @@ from core.ops.entities.config_entity import (
     OPS_FILE_PATH,
     LangfuseConfig,
     LangSmithConfig,
+    OpikConfig,
     TracingProviderEnum,
 )
 from core.ops.entities.trace_entity import (
@@ -32,6 +33,7 @@ from core.ops.entities.trace_entity import (
 )
 from core.ops.langfuse_trace.langfuse_trace import LangFuseDataTrace
 from core.ops.langsmith_trace.langsmith_trace import LangSmithDataTrace
+from core.ops.opik_trace.opik_trace import OpikDataTrace
 from core.ops.utils import get_message_data
 from extensions.ext_database import db
 from extensions.ext_storage import storage
@@ -52,6 +54,12 @@ provider_config_map: dict[str, dict[str, Any]] = {
         "other_keys": ["project", "endpoint"],
         "trace_instance": LangSmithDataTrace,
     },
+    TracingProviderEnum.OPIK.value: {
+        "config_class": OpikConfig,
+        "secret_keys": ["api_key"],
+        "other_keys": ["project", "url", "workspace"],
+        "trace_instance": OpikDataTrace,
+    },
 }
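
How this registry entry is consumed, sketched from the keys shown here (variable names are placeholders for illustration):

    from core.ops.entities.config_entity import TracingProviderEnum
    from core.ops.ops_trace_manager import provider_config_map

    entry = provider_config_map[TracingProviderEnum.OPIK.value]
    config_cls = entry["config_class"]    # OpikConfig; "api_key" appears under secret_keys above
    tracer_cls = entry["trace_instance"]  # OpikDataTrace

    config = config_cls(api_key="placeholder", workspace="my-workspace", project="Dify Traces")
    tracer = tracer_cls(config)           # the instance whose trace() receives the *TraceInfo objects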
 
 

+ 166 - 2
api/poetry.lock

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
 
 [[package]]
 name = "aiofiles"
@@ -4694,6 +4694,134 @@ requests-toolbelt = ">=1.0.0,<2.0.0"
 langsmith-pyo3 = ["langsmith-pyo3 (>=0.1.0rc2,<0.2.0)"]
 
 [[package]]
+name = "levenshtein"
+version = "0.26.1"
+description = "Python extension for computing string edit distances and similarities."
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "levenshtein-0.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8dc4a4aecad538d944a1264c12769c99e3c0bf8e741fc5e454cc954913befb2e"},
+    {file = "levenshtein-0.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec108f368c12b25787c8b1a4537a1452bc53861c3ee4abc810cc74098278edcd"},
+    {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69229d651c97ed5b55b7ce92481ed00635cdbb80fbfb282a22636e6945dc52d5"},
+    {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79dcd157046d62482a7719b08ba9e3ce9ed3fc5b015af8ea989c734c702aedd4"},
+    {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f53f9173ae21b650b4ed8aef1d0ad0c37821f367c221a982f4d2922b3044e0d"},
+    {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3956f3c5c229257dbeabe0b6aacd2c083ebcc1e335842a6ff2217fe6cc03b6b"},
+    {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1e83af732726987d2c4cd736f415dae8b966ba17b7a2239c8b7ffe70bfb5543"},
+    {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4f052c55046c2a9c9b5f742f39e02fa6e8db8039048b8c1c9e9fdd27c8a240a1"},
+    {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9895b3a98f6709e293615fde0dcd1bb0982364278fa2072361a1a31b3e388b7a"},
+    {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a3777de1d8bfca054465229beed23994f926311ce666f5a392c8859bb2722f16"},
+    {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:81c57e1135c38c5e6e3675b5e2077d8a8d3be32bf0a46c57276c092b1dffc697"},
+    {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:91d5e7d984891df3eff7ea9fec8cf06fdfacc03cd074fd1a410435706f73b079"},
+    {file = "levenshtein-0.26.1-cp310-cp310-win32.whl", hash = "sha256:f48abff54054b4142ad03b323e80aa89b1d15cabc48ff49eb7a6ff7621829a56"},
+    {file = "levenshtein-0.26.1-cp310-cp310-win_amd64.whl", hash = "sha256:79dd6ad799784ea7b23edd56e3bf94b3ca866c4c6dee845658ee75bb4aefdabf"},
+    {file = "levenshtein-0.26.1-cp310-cp310-win_arm64.whl", hash = "sha256:3351ddb105ef010cc2ce474894c5d213c83dddb7abb96400beaa4926b0b745bd"},
+    {file = "levenshtein-0.26.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:44c51f5d33b3cfb9db518b36f1288437a509edd82da94c4400f6a681758e0cb6"},
+    {file = "levenshtein-0.26.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56b93203e725f9df660e2afe3d26ba07d71871b6d6e05b8b767e688e23dfb076"},
+    {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:270d36c5da04a0d89990660aea8542227cbd8f5bc34e9fdfadd34916ff904520"},
+    {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:480674c05077eeb0b0f748546d4fcbb386d7c737f9fff0010400da3e8b552942"},
+    {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13946e37323728695ba7a22f3345c2e907d23f4600bc700bf9b4352fb0c72a48"},
+    {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ceb673f572d1d0dc9b1cd75792bb8bad2ae8eb78a7c6721e23a3867d318cb6f2"},
+    {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42d6fa242e3b310ce6bfd5af0c83e65ef10b608b885b3bb69863c01fb2fcff98"},
+    {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b8b68295808893a81e0a1dbc2274c30dd90880f14d23078e8eb4325ee615fc68"},
+    {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b01061d377d1944eb67bc40bef5d4d2f762c6ab01598efd9297ce5d0047eb1b5"},
+    {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9d12c8390f156745e533d01b30773b9753e41d8bbf8bf9dac4b97628cdf16314"},
+    {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:48825c9f967f922061329d1481b70e9fee937fc68322d6979bc623f69f75bc91"},
+    {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d8ec137170b95736842f99c0e7a9fd8f5641d0c1b63b08ce027198545d983e2b"},
+    {file = "levenshtein-0.26.1-cp311-cp311-win32.whl", hash = "sha256:798f2b525a2e90562f1ba9da21010dde0d73730e277acaa5c52d2a6364fd3e2a"},
+    {file = "levenshtein-0.26.1-cp311-cp311-win_amd64.whl", hash = "sha256:55b1024516c59df55f1cf1a8651659a568f2c5929d863d3da1ce8893753153bd"},
+    {file = "levenshtein-0.26.1-cp311-cp311-win_arm64.whl", hash = "sha256:e52575cbc6b9764ea138a6f82d73d3b1bc685fe62e207ff46a963d4c773799f6"},
+    {file = "levenshtein-0.26.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cc741ca406d3704dc331a69c04b061fc952509a069b79cab8287413f434684bd"},
+    {file = "levenshtein-0.26.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:821ace3b4e1c2e02b43cf5dc61aac2ea43bdb39837ac890919c225a2c3f2fea4"},
+    {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92694c9396f55d4c91087efacf81297bef152893806fc54c289fc0254b45384"},
+    {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51ba374de7a1797d04a14a4f0ad3602d2d71fef4206bb20a6baaa6b6a502da58"},
+    {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7aa5c3327dda4ef952769bacec09c09ff5bf426e07fdc94478c37955681885b"},
+    {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e2517e8d3c221de2d1183f400aed64211fcfc77077b291ed9f3bb64f141cdc"},
+    {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9092b622765c7649dd1d8af0f43354723dd6f4e570ac079ffd90b41033957438"},
+    {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fc16796c85d7d8b259881d59cc8b5e22e940901928c2ff6924b2c967924e8a0b"},
+    {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4370733967f5994ceeed8dc211089bedd45832ee688cecea17bfd35a9eb22b9"},
+    {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3535ecfd88c9b283976b5bc61265855f59bba361881e92ed2b5367b6990c93fe"},
+    {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:90236e93d98bdfd708883a6767826fafd976dac8af8fc4a0fb423d4fa08e1bf0"},
+    {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:04b7cabb82edf566b1579b3ed60aac0eec116655af75a3c551fee8754ffce2ea"},
+    {file = "levenshtein-0.26.1-cp312-cp312-win32.whl", hash = "sha256:ae382af8c76f6d2a040c0d9ca978baf461702ceb3f79a0a3f6da8d596a484c5b"},
+    {file = "levenshtein-0.26.1-cp312-cp312-win_amd64.whl", hash = "sha256:fd091209798cfdce53746f5769987b4108fe941c54fb2e058c016ffc47872918"},
+    {file = "levenshtein-0.26.1-cp312-cp312-win_arm64.whl", hash = "sha256:7e82f2ea44a81ad6b30d92a110e04cd3c8c7c6034b629aca30a3067fa174ae89"},
+    {file = "levenshtein-0.26.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:790374a9f5d2cbdb30ee780403a62e59bef51453ac020668c1564d1e43438f0e"},
+    {file = "levenshtein-0.26.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7b05c0415c386d00efda83d48db9db68edd02878d6dbc6df01194f12062be1bb"},
+    {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3114586032361722ddededf28401ce5baf1cf617f9f49fb86b8766a45a423ff"},
+    {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2532f8a13b68bf09f152d906f118a88da2063da22f44c90e904b142b0a53d534"},
+    {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:219c30be6aa734bf927188d1208b7d78d202a3eb017b1c5f01ab2034d2d4ccca"},
+    {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397e245e77f87836308bd56305bba630010cd8298c34c4c44bd94990cdb3b7b1"},
+    {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeff6ea3576f72e26901544c6c55c72a7b79b9983b6f913cba0e9edbf2f87a97"},
+    {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a19862e3539a697df722a08793994e334cd12791e8144851e8a1dee95a17ff63"},
+    {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:dc3b5a64f57c3c078d58b1e447f7d68cad7ae1b23abe689215d03fc434f8f176"},
+    {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bb6c7347424a91317c5e1b68041677e4c8ed3e7823b5bbaedb95bffb3c3497ea"},
+    {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b817376de4195a207cc0e4ca37754c0e1e1078c2a2d35a6ae502afde87212f9e"},
+    {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7b50c3620ff47c9887debbb4c154aaaac3e46be7fc2e5789ee8dbe128bce6a17"},
+    {file = "levenshtein-0.26.1-cp313-cp313-win32.whl", hash = "sha256:9fb859da90262eb474c190b3ca1e61dee83add022c676520f5c05fdd60df902a"},
+    {file = "levenshtein-0.26.1-cp313-cp313-win_amd64.whl", hash = "sha256:8adcc90e3a5bfb0a463581d85e599d950fe3c2938ac6247b29388b64997f6e2d"},
+    {file = "levenshtein-0.26.1-cp313-cp313-win_arm64.whl", hash = "sha256:c2599407e029865dc66d210b8804c7768cbdbf60f061d993bb488d5242b0b73e"},
+    {file = "levenshtein-0.26.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc54ced948fc3feafce8ad4ba4239d8ffc733a0d70e40c0363ac2a7ab2b7251e"},
+    {file = "levenshtein-0.26.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6516f69213ae393a220e904332f1a6bfc299ba22cf27a6520a1663a08eba0fb"},
+    {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4cfea4eada1746d0c75a864bc7e9e63d4a6e987c852d6cec8d9cb0c83afe25b"},
+    {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a323161dfeeac6800eb13cfe76a8194aec589cd948bcf1cdc03f66cc3ec26b72"},
+    {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c23e749b68ebc9a20b9047317b5cd2053b5856315bc8636037a8adcbb98bed1"},
+    {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f80dd7432d4b6cf493d012d22148db7af769017deb31273e43406b1fb7f091c"},
+    {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ae7cd6e4312c6ef34b2e273836d18f9fff518d84d823feff5ad7c49668256e0"},
+    {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dcdad740e841d791b805421c2b20e859b4ed556396d3063b3aa64cd055be648c"},
+    {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e07afb1613d6f5fd99abd4e53ad3b446b4efaa0f0d8e9dfb1d6d1b9f3f884d32"},
+    {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f1add8f1d83099a98ae4ac472d896b7e36db48c39d3db25adf12b373823cdeff"},
+    {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1010814b1d7a60833a951f2756dfc5c10b61d09976ce96a0edae8fecdfb0ea7c"},
+    {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:33fa329d1bb65ce85e83ceda281aea31cee9f2f6e167092cea54f922080bcc66"},
+    {file = "levenshtein-0.26.1-cp39-cp39-win32.whl", hash = "sha256:488a945312f2f16460ab61df5b4beb1ea2254c521668fd142ce6298006296c98"},
+    {file = "levenshtein-0.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:9f942104adfddd4b336c3997050121328c39479f69de702d7d144abb69ea7ab9"},
+    {file = "levenshtein-0.26.1-cp39-cp39-win_arm64.whl", hash = "sha256:c1d8f85b2672939f85086ed75effcf768f6077516a3e299c2ba1f91bc4644c22"},
+    {file = "levenshtein-0.26.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6cf8f1efaf90ca585640c5d418c30b7d66d9ac215cee114593957161f63acde0"},
+    {file = "levenshtein-0.26.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d5b2953978b8c158dd5cd93af8216a5cfddbf9de66cf5481c2955f44bb20767a"},
+    {file = "levenshtein-0.26.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b952b3732c4631c49917d4b15d78cb4a2aa006c1d5c12e2a23ba8e18a307a055"},
+    {file = "levenshtein-0.26.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07227281e12071168e6ae59238918a56d2a0682e529f747b5431664f302c0b42"},
+    {file = "levenshtein-0.26.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8191241cd8934feaf4d05d0cc0e5e72877cbb17c53bbf8c92af9f1aedaa247e9"},
+    {file = "levenshtein-0.26.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9e70d7ee157a9b698c73014f6e2b160830e7d2d64d2e342fefc3079af3c356fc"},
+    {file = "levenshtein-0.26.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0eb3059f826f6cb0a5bca4a85928070f01e8202e7ccafcba94453470f83e49d4"},
+    {file = "levenshtein-0.26.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:6c389e44da12d6fb1d7ba0a709a32a96c9391e9be4160ccb9269f37e040599ee"},
+    {file = "levenshtein-0.26.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e9de292f2c51a7d34a0ae23bec05391b8f61f35781cd3e4c6d0533e06250c55"},
+    {file = "levenshtein-0.26.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d87215113259efdca8716e53b6d59ab6d6009e119d95d45eccc083148855f33"},
+    {file = "levenshtein-0.26.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f00a3eebf68a82fb651d8d0e810c10bfaa60c555d21dde3ff81350c74fb4c2"},
+    {file = "levenshtein-0.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b3554c1b59de63d05075577380340c185ff41b028e541c0888fddab3c259a2b4"},
+    {file = "levenshtein-0.26.1.tar.gz", hash = "sha256:0d19ba22330d50609b2349021ec3cf7d905c6fe21195a2d0d876a146e7ed2575"},
+]
+
+[package.dependencies]
+rapidfuzz = ">=3.9.0,<4.0.0"
+
+[[package]]
+name = "litellm"
+version = "1.51.3"
+description = "Library to easily interface with LLM API providers"
+optional = false
+python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
+files = [
+    {file = "litellm-1.51.3-py3-none-any.whl", hash = "sha256:440d3c7cc5ab8eeb12cee8f4d806bff05b7db834ebc11117d7fa070a1142ced5"},
+    {file = "litellm-1.51.3.tar.gz", hash = "sha256:31eff9fcbf7b058bac0fd7432c4ea0487e8555f12446a1f30e5862e33716f44d"},
+]
+
+[package.dependencies]
+aiohttp = "*"
+click = "*"
+importlib-metadata = ">=6.8.0"
+jinja2 = ">=3.1.2,<4.0.0"
+jsonschema = ">=4.22.0,<5.0.0"
+openai = ">=1.52.0"
+pydantic = ">=2.0.0,<3.0.0"
+python-dotenv = ">=0.2.0"
+requests = ">=2.31.0,<3.0.0"
+tiktoken = ">=0.7.0"
+tokenizers = "*"
+
+[package.extras]
+extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "resend (>=0.8.0,<0.9.0)"]
+proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "cryptography (>=42.0.5,<43.0.0)", "fastapi (>=0.111.0,<0.112.0)", "fastapi-sso (>=0.10.0,<0.11.0)", "gunicorn (>=22.0.0,<23.0.0)", "orjson (>=3.9.7,<4.0.0)", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.9,<0.0.10)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"]
+
+[[package]]
 name = "llvmlite"
 version = "0.43.0"
 description = "lightweight wrapper around basic LLVM functionality"
@@ -6266,6 +6394,31 @@ files = [
 ]
 
 [[package]]
+name = "opik"
+version = "1.3.4"
+description = "Comet tool for logging and evaluating LLM traces"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opik-1.3.4-py3-none-any.whl", hash = "sha256:c5e10a9f1fb18188471cce2ae8b841e8b187d04ee3b1aed01c643102bae588fb"},
+    {file = "opik-1.3.4.tar.gz", hash = "sha256:6013d3af4aea61f38b9e7121aa5d8cf4305a5ed3807b3f43d9ab91602b2a5785"},
+]
+
+[package.dependencies]
+click = "*"
+httpx = "<0.28.0"
+levenshtein = "<1.0.0"
+litellm = "*"
+openai = "<2.0.0"
+pydantic = ">=2.0.0,<3.0.0"
+pydantic-settings = ">=2.0.0,<3.0.0"
+pytest = "*"
+rich = "*"
+tenacity = "*"
+tqdm = "*"
+uuid6 = "*"
+
+[[package]]
 name = "oracledb"
 version = "2.2.1"
 description = "Python interface to Oracle Database"
@@ -10231,6 +10384,17 @@ socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]
 
 [[package]]
+name = "uuid6"
+version = "2024.7.10"
+description = "New time-based UUID formats which are suited for use as a database key"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "uuid6-2024.7.10-py3-none-any.whl", hash = "sha256:93432c00ba403751f722829ad21759ff9db051dea140bf81493271e8e4dd18b7"},
+    {file = "uuid6-2024.7.10.tar.gz", hash = "sha256:2d29d7f63f593caaeea0e0d0dd0ad8129c9c663b29e19bdf882e864bedf18fb0"},
+]
+
+[[package]]
 name = "uvicorn"
 version = "0.34.0"
 description = "The lightning-fast ASGI server."
@@ -11220,4 +11384,4 @@ cffi = ["cffi (>=1.11)"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.11,<3.13"
-content-hash = "907718f7ca775ad226c1f668f4bb6c6dbfa6cacc556fce43a8ad0b6f3c35095a"
+content-hash = "3bb0ce64c87712cf105c75105a0ca75c0523d6b27001ff6a623bb2a0d1343003"

+ 1 - 0
api/pyproject.toml

@@ -59,6 +59,7 @@ numpy = "~1.26.4"
 oci = "~2.135.1"
 openai = "~1.52.0"
 openpyxl = "~3.1.5"
+opik = "~1.3.4"
 pandas = { version = "~2.2.2", extras = ["performance", "excel"] }
 pandas-stubs = "~2.2.3.241009"
 psycogreen = "~1.0.2"

+ 10 - 1
api/services/ops_service.py

@@ -59,6 +59,15 @@ class OpsService:
             except Exception:
                 new_decrypt_tracing_config.update({"project_url": "https://smith.langchain.com/"})
 
+        if tracing_provider == "opik" and (
+            "project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url")
+        ):
+            try:
+                project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider)
+                new_decrypt_tracing_config.update({"project_url": project_url})
+            except Exception:
+                new_decrypt_tracing_config.update({"project_url": "https://www.comet.com/opik/"})
+
         trace_config_data.tracing_config = new_decrypt_tracing_config
         return trace_config_data.to_dict()
 
@@ -92,7 +101,7 @@ class OpsService:
         if tracing_provider == "langfuse":
             project_key = OpsTraceManager.get_trace_config_project_key(tracing_config, tracing_provider)
             project_url = "{host}/project/{key}".format(host=tracing_config.get("host"), key=project_key)
-        elif tracing_provider == "langsmith":
+        elif tracing_provider in ("langsmith", "opik"):
             project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider)
         else:
             project_url = None

+ 66 - 10
web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx

@@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next'
 import { useBoolean } from 'ahooks'
 import TracingIcon from './tracing-icon'
 import ProviderPanel from './provider-panel'
-import type { LangFuseConfig, LangSmithConfig } from './type'
+import type { LangFuseConfig, LangSmithConfig, OpikConfig } from './type'
 import { TracingProvider } from './type'
 import ProviderConfigModal from './provider-config-modal'
 import Indicator from '@/app/components/header/indicator'
@@ -23,7 +23,8 @@ export type PopupProps = {
   onChooseProvider: (provider: TracingProvider) => void
   langSmithConfig: LangSmithConfig | null
   langFuseConfig: LangFuseConfig | null
-  onConfigUpdated: (provider: TracingProvider, payload: LangSmithConfig | LangFuseConfig) => void
+  opikConfig: OpikConfig | null
+  onConfigUpdated: (provider: TracingProvider, payload: LangSmithConfig | LangFuseConfig | OpikConfig) => void
   onConfigRemoved: (provider: TracingProvider) => void
 }
 
@@ -36,6 +37,7 @@ const ConfigPopup: FC<PopupProps> = ({
   onChooseProvider,
   langSmithConfig,
   langFuseConfig,
+  opikConfig,
   onConfigUpdated,
   onConfigRemoved,
 }) => {
@@ -59,7 +61,7 @@ const ConfigPopup: FC<PopupProps> = ({
     }
   }, [onChooseProvider])
 
-  const handleConfigUpdated = useCallback((payload: LangSmithConfig | LangFuseConfig) => {
+  const handleConfigUpdated = useCallback((payload: LangSmithConfig | LangFuseConfig | OpikConfig) => {
     onConfigUpdated(currentProvider!, payload)
     hideConfigModal()
   }, [currentProvider, hideConfigModal, onConfigUpdated])
@@ -69,8 +71,8 @@ const ConfigPopup: FC<PopupProps> = ({
     hideConfigModal()
   }, [currentProvider, hideConfigModal, onConfigRemoved])
 
-  const providerAllConfigured = langSmithConfig && langFuseConfig
-  const providerAllNotConfigured = !langSmithConfig && !langFuseConfig
+  const providerAllConfigured = langSmithConfig && langFuseConfig && opikConfig
+  const providerAllNotConfigured = !langSmithConfig && !langFuseConfig && !opikConfig
 
   const switchContent = (
     <Switch
@@ -90,6 +92,7 @@ const ConfigPopup: FC<PopupProps> = ({
       onConfig={handleOnConfig(TracingProvider.langSmith)}
       isChosen={chosenProvider === TracingProvider.langSmith}
       onChoose={handleOnChoose(TracingProvider.langSmith)}
+      key="langSmith-provider-panel"
     />
   )
 
@@ -102,9 +105,61 @@ const ConfigPopup: FC<PopupProps> = ({
       onConfig={handleOnConfig(TracingProvider.langfuse)}
       isChosen={chosenProvider === TracingProvider.langfuse}
       onChoose={handleOnChoose(TracingProvider.langfuse)}
+      key="langfuse-provider-panel"
     />
   )
 
+  const opikPanel = (
+    <ProviderPanel
+      type={TracingProvider.opik}
+      readOnly={readOnly}
+      config={opikConfig}
+      hasConfigured={!!opikConfig}
+      onConfig={handleOnConfig(TracingProvider.opik)}
+      isChosen={chosenProvider === TracingProvider.opik}
+      onChoose={handleOnChoose(TracingProvider.opik)}
+      key="opik-provider-panel"
+    />
+  )
+
+  const configuredProviderPanel = () => {
+    const configuredPanels: JSX.Element[] = []
+
+    if (langSmithConfig)
+      configuredPanels.push(langSmithPanel)
+
+    if (langFuseConfig)
+      configuredPanels.push(langfusePanel)
+
+    if (opikConfig)
+      configuredPanels.push(opikPanel)
+
+    return configuredPanels
+  }
+
+  const moreProviderPanel = () => {
+    const notConfiguredPanels: JSX.Element[] = []
+
+    if (!langSmithConfig)
+      notConfiguredPanels.push(langSmithPanel)
+
+    if (!langFuseConfig)
+      notConfiguredPanels.push(langfusePanel)
+
+    if (!opikConfig)
+      notConfiguredPanels.push(opikPanel)
+
+    return notConfiguredPanels
+  }
+
+  const configuredProviderConfig = () => {
+    if (currentProvider === TracingProvider.langSmith)
+      return langSmithConfig
+    if (currentProvider === TracingProvider.langfuse)
+      return langFuseConfig
+    return opikConfig
+  }
+
   return (
     <div className='w-[420px] p-4 rounded-2xl bg-white border-[0.5px] border-black/5 shadow-lg'>
       <div className='flex justify-between items-center'>
@@ -146,18 +201,19 @@ const ConfigPopup: FC<PopupProps> = ({
               <div className='mt-2 space-y-2'>
                 {langSmithPanel}
                 {langfusePanel}
+                {opikPanel}
               </div>
             </>
           )
           : (
             <>
               <div className='leading-4 text-xs font-medium text-gray-500 uppercase'>{t(`${I18N_PREFIX}.configProviderTitle.configured`)}</div>
-              <div className='mt-2'>
-                {langSmithConfig ? langSmithPanel : langfusePanel}
+              <div className='mt-2 space-y-2'>
+                {configuredProviderPanel()}
               </div>
               <div className='mt-3 leading-4 text-xs font-medium text-gray-500 uppercase'>{t(`${I18N_PREFIX}.configProviderTitle.moreProvider`)}</div>
-              <div className='mt-2'>
-                {!langSmithConfig ? langSmithPanel : langfusePanel}
+              <div className='mt-2 space-y-2'>
+                {moreProviderPanel()}
               </div>
             </>
           )}
@@ -167,7 +223,7 @@ const ConfigPopup: FC<PopupProps> = ({
         <ProviderConfigModal
           appId={appId}
           type={currentProvider!}
-          payload={currentProvider === TracingProvider.langSmith ? langSmithConfig : langFuseConfig}
+          payload={configuredProviderConfig()}
           onCancel={hideConfigModal}
           onSaved={handleConfigUpdated}
           onChosen={onChooseProvider}

+ 1 - 0
web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts

@@ -3,4 +3,5 @@ import { TracingProvider } from './type'
 export const docURL = {
   [TracingProvider.langSmith]: 'https://docs.smith.langchain.com/',
   [TracingProvider.langfuse]: 'https://docs.langfuse.com',
+  [TracingProvider.opik]: 'https://www.comet.com/docs/opik/tracing/integrations/dify#setup-instructions',
 }

+ 22 - 5
web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx

@@ -9,7 +9,7 @@ import { TracingProvider } from './type'
 import TracingIcon from './tracing-icon'
 import ConfigButton from './config-button'
 import cn from '@/utils/classnames'
-import { LangfuseIcon, LangsmithIcon } from '@/app/components/base/icons/src/public/tracing'
+import { LangfuseIcon, LangsmithIcon, OpikIcon } from '@/app/components/base/icons/src/public/tracing'
 import Indicator from '@/app/components/header/indicator'
 import { fetchTracingConfig as doFetchTracingConfig, fetchTracingStatus, updateTracingStatus } from '@/service/apps'
 import type { TracingStatus } from '@/models/app'
@@ -70,11 +70,20 @@ const Panel: FC = () => {
     })
   }
   const inUseTracingProvider: TracingProvider | null = tracingStatus?.tracing_provider || null
-  const InUseProviderIcon = inUseTracingProvider === TracingProvider.langSmith ? LangsmithIcon : LangfuseIcon
+
+  const InUseProviderIcon
+    = inUseTracingProvider === TracingProvider.langSmith
+      ? LangsmithIcon
+      : inUseTracingProvider === TracingProvider.langfuse
+        ? LangfuseIcon
+        : inUseTracingProvider === TracingProvider.opik
+          ? OpikIcon
+          : null
 
   const [langSmithConfig, setLangSmithConfig] = useState<LangSmithConfig | null>(null)
   const [langFuseConfig, setLangFuseConfig] = useState<LangFuseConfig | null>(null)
-  const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig)
+  const [opikConfig, setOpikConfig] = useState<OpikConfig | null>(null)
+  const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig)
 
   const fetchTracingConfig = async () => {
     const { tracing_config: langSmithConfig, has_not_configured: langSmithHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.langSmith })
@@ -83,6 +92,9 @@ const Panel: FC = () => {
     const { tracing_config: langFuseConfig, has_not_configured: langFuseHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.langfuse })
     if (!langFuseHasNotConfig)
       setLangFuseConfig(langFuseConfig as LangFuseConfig)
+    const { tracing_config: opikConfig, has_not_configured: OpikHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.opik })
+    if (!OpikHasNotConfig)
+      setOpikConfig(opikConfig as OpikConfig)
   }
 
   const handleTracingConfigUpdated = async (provider: TracingProvider) => {
@@ -90,15 +102,19 @@ const Panel: FC = () => {
     const { tracing_config } = await doFetchTracingConfig({ appId, provider })
     if (provider === TracingProvider.langSmith)
       setLangSmithConfig(tracing_config as LangSmithConfig)
-    else
+    else if (provider === TracingProvider.langfuse)
       setLangFuseConfig(tracing_config as LangFuseConfig)
+    else if (provider === TracingProvider.opik)
+      setOpikConfig(tracing_config as OpikConfig)
   }
 
   const handleTracingConfigRemoved = (provider: TracingProvider) => {
     if (provider === TracingProvider.langSmith)
       setLangSmithConfig(null)
-    else
+    else if (provider === TracingProvider.langfuse)
       setLangFuseConfig(null)
+    else if (provider === TracingProvider.opik)
+      setOpikConfig(null)
     if (provider === inUseTracingProvider) {
       handleTracingStatusChange({
         enabled: false,
@@ -167,6 +183,7 @@ const Panel: FC = () => {
             onChooseProvider={handleChooseProvider}
             langSmithConfig={langSmithConfig}
             langFuseConfig={langFuseConfig}
+            opikConfig={opikConfig}
             onConfigUpdated={handleTracingConfigUpdated}
             onConfigRemoved={handleTracingConfigRemoved}
             controlShowPopup={controlShowPopup}

+ 51 - 5
web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx

@@ -4,7 +4,7 @@ import React, { useCallback, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import { useBoolean } from 'ahooks'
 import Field from './field'
-import type { LangFuseConfig, LangSmithConfig } from './type'
+import type { LangFuseConfig, LangSmithConfig, OpikConfig } from './type'
 import { TracingProvider } from './type'
 import { docURL } from './config'
 import {
@@ -21,10 +21,10 @@ import Toast from '@/app/components/base/toast'
 type Props = {
   appId: string
   type: TracingProvider
-  payload?: LangSmithConfig | LangFuseConfig | null
+  payload?: LangSmithConfig | LangFuseConfig | OpikConfig | null
   onRemoved: () => void
   onCancel: () => void
-  onSaved: (payload: LangSmithConfig | LangFuseConfig) => void
+  onSaved: (payload: LangSmithConfig | LangFuseConfig | OpikConfig) => void
   onChosen: (provider: TracingProvider) => void
 }
 
@@ -42,6 +42,13 @@ const langFuseConfigTemplate = {
   host: '',
 }
 
+const opikConfigTemplate = {
+  api_key: '',
+  project: '',
+  url: '',
+  workspace: '',
+}
+
 const ProviderConfigModal: FC<Props> = ({
   appId,
   type,
@@ -55,14 +62,17 @@ const ProviderConfigModal: FC<Props> = ({
   const isEdit = !!payload
   const isAdd = !isEdit
   const [isSaving, setIsSaving] = useState(false)
-  const [config, setConfig] = useState<LangSmithConfig | LangFuseConfig>((() => {
+  const [config, setConfig] = useState<LangSmithConfig | LangFuseConfig | OpikConfig>((() => {
     if (isEdit)
       return payload
 
     if (type === TracingProvider.langSmith)
       return langSmithConfigTemplate
 
-    return langFuseConfigTemplate
+    else if (type === TracingProvider.langfuse)
+      return langFuseConfigTemplate
+
+    return opikConfigTemplate
   })())
   const [isShowRemoveConfirm, {
     setTrue: showRemoveConfirm,
@@ -111,6 +121,10 @@ const ProviderConfigModal: FC<Props> = ({
         errorMessage = t('common.errorMsg.fieldRequired', { field: 'Host' })
     }
 
+    if (type === TracingProvider.opik) {
+      // All Opik fields are optional in the backend OpikConfig (they fall back to
+      // defaults), so there is no required-field check here.
+    }
+
     return errorMessage
   }, [config, t, type])
   const handleSave = useCallback(async () => {
@@ -215,6 +229,38 @@ const ProviderConfigModal: FC<Props> = ({
                           />
                         </>
                       )}
+                      {type === TracingProvider.opik && (
+                        <>
+                          <Field
+                            label='API Key'
+                            labelClassName='!text-sm'
+                            value={(config as OpikConfig).api_key}
+                            onChange={handleConfigChange('api_key')}
+                            placeholder={t(`${I18N_PREFIX}.placeholder`, { key: 'API Key' })!}
+                          />
+                          <Field
+                            label={t(`${I18N_PREFIX}.project`)!}
+                            labelClassName='!text-sm'
+                            value={(config as OpikConfig).project}
+                            onChange={handleConfigChange('project')}
+                            placeholder={t(`${I18N_PREFIX}.placeholder`, { key: t(`${I18N_PREFIX}.project`) })!}
+                          />
+                          <Field
+                            label='Workspace'
+                            labelClassName='!text-sm'
+                            value={(config as OpikConfig).workspace}
+                            onChange={handleConfigChange('workspace')}
+                            placeholder={'default'}
+                          />
+                          <Field
+                            label='Url'
+                            labelClassName='!text-sm'
+                            value={(config as OpikConfig).url}
+                            onChange={handleConfigChange('url')}
+                            placeholder={'https://www.comet.com/opik/api/'}
+                          />
+                        </>
+                      )}
 
                     </div>
                     <div className='my-8 flex justify-between items-center h-8'>

+ 2 - 1
web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-panel.tsx

@@ -4,7 +4,7 @@ import React, { useCallback } from 'react'
 import { useTranslation } from 'react-i18next'
 import { TracingProvider } from './type'
 import cn from '@/utils/classnames'
-import { LangfuseIconBig, LangsmithIconBig } from '@/app/components/base/icons/src/public/tracing'
+import { LangfuseIconBig, LangsmithIconBig, OpikIconBig } from '@/app/components/base/icons/src/public/tracing'
 import { Settings04 } from '@/app/components/base/icons/src/vender/line/general'
 import { Eye as View } from '@/app/components/base/icons/src/vender/solid/general'
 
@@ -24,6 +24,7 @@ const getIcon = (type: TracingProvider) => {
   return ({
     [TracingProvider.langSmith]: LangsmithIconBig,
     [TracingProvider.langfuse]: LangfuseIconBig,
+    [TracingProvider.opik]: OpikIconBig,
   })[type]
 }
 

+ 8 - 0
web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts

@@ -1,6 +1,7 @@
 export enum TracingProvider {
   langSmith = 'langsmith',
   langfuse = 'langfuse',
+  opik = 'opik',
 }
 
 export type LangSmithConfig = {
@@ -14,3 +15,10 @@ export type LangFuseConfig = {
   secret_key: string
   host: string
 }
+
+export type OpikConfig = {
+  api_key: string
+  project: string
+  workspace: string
+  url: string
+}

File diff is too large to display
+ 87 - 0
web/app/components/base/icons/assets/public/tracing/opik-icon-big.svg


File diff is too large to display
+ 88 - 0
web/app/components/base/icons/assets/public/tracing/opik-icon.svg


File diff is too large to display
+ 163 - 0
web/app/components/base/icons/src/public/tracing/OpikIcon.json


+ 16 - 0
web/app/components/base/icons/src/public/tracing/OpikIcon.tsx

@@ -0,0 +1,16 @@
+// GENERATED BY script
+// DO NOT EDIT IT MANUALLY
+
+import * as React from 'react'
+import data from './OpikIcon.json'
+import IconBase from '@/app/components/base/icons/IconBase'
+import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'
+
+const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
+  props,
+  ref,
+) => <IconBase {...props} ref={ref} data={data as IconData} />)
+
+Icon.displayName = 'OpikIcon'
+
+export default Icon

File diff is too large to display
+ 162 - 0
web/app/components/base/icons/src/public/tracing/OpikIconBig.json


+ 16 - 0
web/app/components/base/icons/src/public/tracing/OpikIconBig.tsx

@@ -0,0 +1,16 @@
+// GENERATED BY script
+// DO NOT EDIT IT MANUALLY
+
+import * as React from 'react'
+import data from './OpikIconBig.json'
+import IconBase from '@/app/components/base/icons/IconBase'
+import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'
+
+const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
+  props,
+  ref,
+) => <IconBase {...props} ref={ref} data={data as IconData} />)
+
+Icon.displayName = 'OpikIconBig'
+
+export default Icon

+ 2 - 0
web/app/components/base/icons/src/public/tracing/index.ts

@@ -2,4 +2,6 @@ export { default as LangfuseIconBig } from './LangfuseIconBig'
 export { default as LangfuseIcon } from './LangfuseIcon'
 export { default as LangsmithIconBig } from './LangsmithIconBig'
 export { default as LangsmithIcon } from './LangsmithIcon'
+export { default as OpikIconBig } from './OpikIconBig'
+export { default as OpikIcon } from './OpikIcon'
 export { default as TracingIcon } from './TracingIcon'

+ 4 - 0
web/i18n/en-US/app.ts

@@ -157,6 +157,10 @@ const translation = {
       title: 'Langfuse',
       description: 'Traces, evals, prompt management and metrics to debug and improve your LLM application.',
     },
+    opik: {
+      title: 'Opik',
+      description: 'Opik is an open-source platform for evaluating, testing, and monitoring LLM applications.',
+    },
     inUse: 'In use',
     configProvider: {
       title: 'Config ',

+ 4 - 0
web/i18n/zh-Hans/app.ts

@@ -157,6 +157,10 @@ const translation = {
       title: 'Langfuse',
       description: '跟踪、评估、提示管理和指标,以调试和改进您的 LLM 应用程序。',
     },
+    opik: {
+      title: 'Opik',
+      description: '一个全方位的开发者平台,适用于 LLM 驱动应用程序生命周期的每个步骤。',
+    },
     inUse: '使用中',
     configProvider: {
       title: '配置 ',

+ 2 - 2
web/models/app.ts

@@ -1,4 +1,4 @@
-import type { LangFuseConfig, LangSmithConfig, TracingProvider } from '@/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type'
+import type { LangFuseConfig, LangSmithConfig, OpikConfig, TracingProvider } from '@/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type'
 import type { App, AppSSO, AppTemplate, SiteConfig } from '@/types/app'
 
 /* export type App = {
@@ -165,5 +165,5 @@ export type TracingStatus = {
 
 export type TracingConfig = {
   tracing_provider: TracingProvider
-  tracing_config: LangSmithConfig | LangFuseConfig
+  tracing_config: LangSmithConfig | LangFuseConfig | OpikConfig
 }