
fix: wrong type issue (#12165)

Signed-off-by: yihong0618 <zouzou0208@gmail.com>
yihong committed 3 months ago (commit a3293b154e)

+ 4 - 4
api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py

@@ -15,11 +15,11 @@ def handle(sender, **kwargs):
 
     app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all()
 
-    removed_dataset_ids: set[int] = set()
+    removed_dataset_ids: set[str] = set()
     if not app_dataset_joins:
         added_dataset_ids = dataset_ids
     else:
-        old_dataset_ids: set[int] = set()
+        old_dataset_ids: set[str] = set()
         old_dataset_ids.update(app_dataset_join.dataset_id for app_dataset_join in app_dataset_joins)
 
         added_dataset_ids = dataset_ids - old_dataset_ids
@@ -39,8 +39,8 @@ def handle(sender, **kwargs):
     db.session.commit()
 
 
-def get_dataset_ids_from_model_config(app_model_config: AppModelConfig) -> set[int]:
-    dataset_ids: set[int] = set()
+def get_dataset_ids_from_model_config(app_model_config: AppModelConfig) -> set[str]:
+    dataset_ids: set[str] = set()
     if not app_model_config:
         return dataset_ids
 

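Note on the change above: the IDs stored on AppDatasetJoin.dataset_id and returned by get_dataset_ids_from_model_config appear to be UUID strings rather than integers, which is exactly what the switch from set[int] to set[str] reflects; the set arithmetic the handler relies on is unchanged. A minimal sketch with made-up UUID values, not taken from the handler:

    old_dataset_ids: set[str] = {
        "11111111-1111-1111-1111-111111111111",  # hypothetical existing join
        "22222222-2222-2222-2222-222222222222",
    }
    dataset_ids: set[str] = {
        "22222222-2222-2222-2222-222222222222",  # still referenced
        "33333333-3333-3333-3333-333333333333",  # newly referenced
    }

    added_dataset_ids = dataset_ids - old_dataset_ids    # joins to insert
    removed_dataset_ids = old_dataset_ids - dataset_ids  # joins to delete
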
+ 5 - 5
api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py

@@ -17,11 +17,11 @@ def handle(sender, **kwargs):
     dataset_ids = get_dataset_ids_from_workflow(published_workflow)
     app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all()
 
-    removed_dataset_ids: set[int] = set()
+    removed_dataset_ids: set[str] = set()
     if not app_dataset_joins:
         added_dataset_ids = dataset_ids
     else:
-        old_dataset_ids: set[int] = set()
+        old_dataset_ids: set[str] = set()
         old_dataset_ids.update(app_dataset_join.dataset_id for app_dataset_join in app_dataset_joins)
 
         added_dataset_ids = dataset_ids - old_dataset_ids
@@ -41,8 +41,8 @@ def handle(sender, **kwargs):
     db.session.commit()
 
 
-def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[int]:
-    dataset_ids: set[int] = set()
+def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[str]:
+    dataset_ids: set[str] = set()
     graph = published_workflow.graph_dict
     if not graph:
         return dataset_ids
@@ -60,7 +60,7 @@ def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[int]:
     for node in knowledge_retrieval_nodes:
         try:
             node_data = KnowledgeRetrievalNodeData(**node.get("data", {}))
-            dataset_ids.update(int(dataset_id) for dataset_id in node_data.dataset_ids)
+            dataset_ids.update(dataset_id for dataset_id in node_data.dataset_ids)
         except Exception as e:
             continue
 

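Beyond the annotations, this file contains the one behavioural fix: the int(dataset_id) cast is gone. If the dataset IDs carried by a knowledge-retrieval node are UUID strings, int() raises ValueError and the surrounding `except Exception: continue` swallows it, so the node's datasets were silently dropped and AppDatasetJoin rows were never written. A small illustration with a hypothetical UUID, not taken from the handler:

    dataset_id = "3f9a2c1e-5b4d-4e8f-9a6b-7c0d1e2f3a4b"  # hypothetical UUID

    old_ids: set[int] = set()
    try:
        old_ids.add(int(dataset_id))  # old code path: ValueError for a UUID string
    except Exception:
        pass                          # mirrors `except Exception: continue`; ID lost

    new_ids: set[str] = {dataset_id}  # new code path keeps the string ID as-is

    print(old_ids, new_ids)           # empty set vs. the populated set
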
+ 1 - 1
api/services/audio_service.py

@@ -139,7 +139,7 @@ class AudioService:
                     return Response(stream_with_context(response), content_type="audio/mpeg")
                 return response
         else:
-            if not text:
+            if text is None:
                 raise ValueError("Text is required")
             response = invoke_tts(text, app_model, voice)
             if isinstance(response, Generator):
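The guard change here is not purely stylistic: `if not text` also rejected an empty string, while `if text is None` only rejects a missing value and lets a type checker narrow Optional[str] to str before invoke_tts is called. A self-contained comparison using a hypothetical helper, not the service code:

    from typing import Optional

    def require_text(text: Optional[str]) -> str:
        # Mirrors the new guard: only None raises; "" passes through,
        # and the type is narrowed to str after the check.
        if text is None:
            raise ValueError("Text is required")
        return text

    print(repr(require_text("")))       # '' is now accepted by the guard
    print(repr(require_text("hello")))  # 'hello'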

+ 2 - 2
api/services/dataset_service.py

@@ -452,7 +452,7 @@ class DatasetService:
 
 
 class DocumentService:
-    DEFAULT_RULES = {
+    DEFAULT_RULES: dict[str, Any] = {
         "mode": "custom",
         "rules": {
             "pre_processing_rules": [
@@ -466,7 +466,7 @@ class DocumentService:
         },
     }
 
-    DOCUMENT_METADATA_SCHEMA = {
+    DOCUMENT_METADATA_SCHEMA: dict[str, Any] = {
         "book": {
             "title": str,
             "language": str,