import base64
import datetime
import json
import math
import random
import secrets
import string
import time
import uuid

import click
from flask import current_app
from tqdm import tqdm
from werkzeug.exceptions import NotFound
from langchain.embeddings import OpenAIEmbeddings

from core.embedding.cached_embedding import CacheEmbedding
from core.index.index import IndexBuilder
from core.model_providers.model_factory import ModelFactory
from core.model_providers.models.embedding.openai_embedding import OpenAIEmbedding
from core.model_providers.models.entity.model_params import ModelType
from core.model_providers.providers.hosted import hosted_model_providers
from core.model_providers.providers.openai_provider import OpenAIProvider
from extensions.ext_database import db
from libs.helper import email as email_validate
from libs.password import password_pattern, valid_password, hash_password
from libs.rsa import generate_key_pair
from models.account import InvitationCode, Tenant, TenantAccountJoin
from models.dataset import Dataset, DatasetQuery, Document, DatasetCollectionBinding
from models.model import Account, AppModelConfig, App
from models.provider import Provider, ProviderType, ProviderQuotaType, ProviderModel


@click.command('reset-password', help='Reset the account password.')
@click.option('--email', prompt=True, help='The email address of the account whose password you need to reset.')
@click.option('--new-password', prompt=True, help='The new password.')
@click.option('--password-confirm', prompt=True, help='Confirm the new password.')
def reset_password(email, new_password, password_confirm):
    if str(new_password).strip() != str(password_confirm).strip():
        click.echo(click.style('Sorry, the two passwords do not match.', fg='red'))
        return

    account = db.session.query(Account). \
        filter(Account.email == email). \
        one_or_none()

    if not account:
        click.echo(click.style('Sorry, the account [{}] does not exist.'.format(email), fg='red'))
        return

    try:
        valid_password(new_password)
    except Exception:
        click.echo(
            click.style('Sorry, the password must match the pattern {}.'.format(password_pattern), fg='red'))
        return

    # generate a random password salt
    salt = secrets.token_bytes(16)
    base64_salt = base64.b64encode(salt).decode()

    # hash the new password with the salt
    password_hashed = hash_password(new_password, salt)
    base64_password_hashed = base64.b64encode(password_hashed).decode()

    account.password = base64_password_hashed
    account.password_salt = base64_salt
    db.session.commit()
    click.echo(click.style('Congratulations! The password has been reset.', fg='green'))
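
# Example invocation (a minimal sketch, assuming the module is wired up via register_commands()
# below and exposed through the standard Flask CLI entry point; the email and password values are
# hypothetical placeholders):
#
#   flask reset-password --email user@example.com --new-password 'NewPass123' --password-confirm 'NewPass123'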


@click.command('reset-email', help='Reset the account email.')
@click.option('--email', prompt=True, help='The old email address of the account whose email you need to reset.')
@click.option('--new-email', prompt=True, help='The new email.')
@click.option('--email-confirm', prompt=True, help='Confirm the new email.')
def reset_email(email, new_email, email_confirm):
    if str(new_email).strip() != str(email_confirm).strip():
        click.echo(click.style('Sorry, the new email and the confirmation email do not match.', fg='red'))
        return

    account = db.session.query(Account). \
        filter(Account.email == email). \
        one_or_none()

    if not account:
        click.echo(click.style('Sorry, the account [{}] does not exist.'.format(email), fg='red'))
        return

    try:
        email_validate(new_email)
    except Exception:
        click.echo(
            click.style('Sorry, {} is not a valid email.'.format(new_email), fg='red'))
        return

    account.email = new_email
    db.session.commit()
    click.echo(click.style('Congratulations! The email has been reset.', fg='green'))


@click.command('reset-encrypt-key-pair', help='Reset the asymmetric key pair of the workspace used to encrypt LLM credentials. '
                                              'After the reset, all LLM credentials will become invalid, '
                                              'requiring re-entry. '
                                              'Only supported in SELF_HOSTED mode.')
@click.confirmation_option(prompt=click.style('Are you sure you want to reset the encrypt key pair? '
                                              'This operation cannot be rolled back!', fg='red'))
def reset_encrypt_key_pair():
    if current_app.config['EDITION'] != 'SELF_HOSTED':
        click.echo(click.style('Sorry, this command only supports SELF_HOSTED mode.', fg='red'))
        return

    tenant = db.session.query(Tenant).first()
    if not tenant:
        click.echo(click.style('Sorry, no workspace found. Please enter /install to initialize.', fg='red'))
        return

    tenant.encrypt_public_key = generate_key_pair(tenant.id)

    db.session.query(Provider).filter(Provider.provider_type == 'custom').delete()
    db.session.query(ProviderModel).delete()
    db.session.commit()

    click.echo(click.style('Congratulations! '
                           'The asymmetric key pair of workspace {} has been reset.'.format(tenant.id), fg='green'))
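
# Example invocation (a minimal sketch, assuming the standard Flask CLI entry point; the --yes flag
# added by click.confirmation_option skips the interactive confirmation prompt):
#
#   flask reset-encrypt-key-pair --yes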


@click.command('generate-invitation-codes', help='Generate invitation codes.')
@click.option('--batch', help='The batch of invitation codes.')
@click.option('--count', prompt=True, help='Invitation codes count.')
def generate_invitation_codes(batch, count):
    if not batch:
        now = datetime.datetime.now()
        batch = now.strftime('%Y%m%d%H%M%S')

    if not count or int(count) <= 0:
        click.echo(click.style('Sorry, the count must be greater than 0.', fg='red'))
        return

    count = int(count)

    click.echo('Start generating {} invitation codes for batch {}.'.format(count, batch))

    codes = ''
    for i in range(count):
        code = generate_invitation_code()
        invitation_code = InvitationCode(
            code=code,
            batch=batch
        )
        db.session.add(invitation_code)
        click.echo(code)

        codes += code + "\n"

    db.session.commit()

    filename = 'storage/invitation-codes-{}.txt'.format(batch)

    with open(filename, 'w') as f:
        f.write(codes)

    click.echo(click.style(
        'Congratulations! Generated {} invitation codes for batch {} and saved to the file \'{}\''.format(count, batch,
                                                                                                          filename),
        fg='green'))
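
# Example invocation (a minimal sketch, assuming the standard Flask CLI entry point; the batch name
# is a hypothetical placeholder -- when --batch is omitted, a timestamp-based batch is generated):
#
#   flask generate-invitation-codes --batch beta-testers --count 10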


def generate_invitation_code():
    # regenerate until the code does not collide with an existing one
    code = generate_upper_string()
    while db.session.query(InvitationCode).filter(InvitationCode.code == code).count() > 0:
        code = generate_upper_string()

    return code


def generate_upper_string():
    letters_digits = string.ascii_uppercase + string.digits
    result = ""
    for i in range(8):
        result += random.choice(letters_digits)

    return result
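
# Design note: random.choice() is not a cryptographically secure source. If the invitation codes
# needed to be unguessable, an equivalent sketch using the secrets module (already imported above)
# would be:
#
#   ''.join(secrets.choice(string.ascii_uppercase + string.digits) for _ in range(8))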


@click.command('recreate-all-dataset-indexes', help='Recreate all dataset indexes.')
def recreate_all_dataset_indexes():
    click.echo(click.style('Start recreating all dataset indexes.', fg='green'))
    recreate_count = 0

    page = 1
    while True:
        try:
            datasets = db.session.query(Dataset).filter(Dataset.indexing_technique == 'high_quality') \
                .order_by(Dataset.created_at.desc()).paginate(page=page, per_page=50)
        except NotFound:
            break

        page += 1
        for dataset in datasets:
            try:
                click.echo('Recreating dataset index: {}'.format(dataset.id))
                index = IndexBuilder.get_index(dataset, 'high_quality')
                if index and index._is_origin():
                    index.recreate_dataset(dataset)
                    recreate_count += 1
                else:
                    click.echo('passed.')
            except Exception as e:
                click.echo(
                    click.style('Recreate dataset index error: {} {}'.format(e.__class__.__name__, str(e)), fg='red'))
                continue

    click.echo(click.style('Congratulations! Recreated {} dataset indexes.'.format(recreate_count), fg='green'))


@click.command('clean-unused-dataset-indexes', help='Clean unused dataset indexes.')
def clean_unused_dataset_indexes():
    click.echo(click.style('Start cleaning unused dataset indexes.', fg='green'))
    clean_days = int(current_app.config.get('CLEAN_DAY_SETTING'))
    start_at = time.perf_counter()
    thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=clean_days)
    page = 1
    while True:
        try:
            datasets = db.session.query(Dataset).filter(Dataset.created_at < thirty_days_ago) \
                .order_by(Dataset.created_at.desc()).paginate(page=page, per_page=50)
        except NotFound:
            break

        page += 1
        for dataset in datasets:
            dataset_query = db.session.query(DatasetQuery).filter(
                DatasetQuery.created_at > thirty_days_ago,
                DatasetQuery.dataset_id == dataset.id
            ).all()

            if not dataset_query:
                documents = db.session.query(Document).filter(
                    Document.dataset_id == dataset.id,
                    Document.indexing_status == 'completed',
                    Document.enabled == True,
                    Document.archived == False,
                    Document.updated_at > thirty_days_ago
                ).all()

                if not documents:
                    try:
                        # remove the indexes
                        vector_index = IndexBuilder.get_index(dataset, 'high_quality')
                        kw_index = IndexBuilder.get_index(dataset, 'economy')

                        # delete from vector index
                        if vector_index:
                            if dataset.collection_binding_id:
                                vector_index.delete_by_group_id(dataset.id)
                            else:
                                vector_index.delete()

                        # delete from keyword index
                        kw_index.delete()

                        # disable the dataset's documents
                        update_params = {
                            Document.enabled: False
                        }

                        Document.query.filter_by(dataset_id=dataset.id).update(update_params)
                        db.session.commit()
                        click.echo(click.style('Cleaned unused dataset {} from db successfully!'.format(dataset.id),
                                               fg='green'))
                    except Exception as e:
                        click.echo(
                            click.style('Clean dataset index error: {} {}'.format(e.__class__.__name__, str(e)),
                                        fg='red'))

    end_at = time.perf_counter()
    click.echo(click.style('Cleaned unused datasets from db. Latency: {}'.format(end_at - start_at), fg='green'))
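
# Example invocation (a minimal sketch, assuming the standard Flask CLI entry point and that the
# CLEAN_DAY_SETTING config value holds the retention window in days used above):
#
#   flask clean-unused-dataset-indexes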


@click.command('sync-anthropic-hosted-providers', help='Sync anthropic hosted providers.')
def sync_anthropic_hosted_providers():
    if not hosted_model_providers.anthropic:
        click.echo(click.style('Anthropic hosted provider is not configured.', fg='red'))
        return

    click.echo(click.style('Start syncing anthropic hosted providers.', fg='green'))
    count = 0

    new_quota_limit = hosted_model_providers.anthropic.quota_limit

    page = 1
    while True:
        try:
            providers = db.session.query(Provider).filter(
                Provider.provider_name == 'anthropic',
                Provider.provider_type == ProviderType.SYSTEM.value,
                Provider.quota_type == ProviderQuotaType.TRIAL.value,
                Provider.quota_limit != new_quota_limit
            ).order_by(Provider.created_at.desc()).paginate(page=page, per_page=100)
        except NotFound:
            break

        page += 1
        for provider in providers:
            try:
                click.echo('Syncing tenant anthropic hosted provider: {}, origin: limit {}, used {}'
                           .format(provider.tenant_id, provider.quota_limit, provider.quota_used))
                original_quota_limit = provider.quota_limit

                division = math.ceil(new_quota_limit / 1000)

                provider.quota_limit = new_quota_limit if original_quota_limit == 1000 \
                    else original_quota_limit * division
                provider.quota_used = division * provider.quota_used
                db.session.commit()

                count += 1
            except Exception as e:
                click.echo(click.style(
                    'Sync tenant anthropic hosted provider error: {} {}'.format(e.__class__.__name__, str(e)),
                    fg='red'))
                continue

    click.echo(click.style('Congratulations! Synced {} anthropic hosted providers.'.format(count), fg='green'))
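
# Worked example of the rescaling above (illustrative numbers only): with new_quota_limit = 600000
# and an original quota_limit of 1000, division = math.ceil(600000 / 1000) = 600, so quota_limit
# becomes 600000 and quota_used is multiplied by 600, which keeps the used/limit ratio unchanged.
# A provider whose limit was already something other than 1000 instead gets its limit multiplied by
# the same factor of 600.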


@click.command('create-qdrant-indexes', help='Create qdrant indexes.')
def create_qdrant_indexes():
    click.echo(click.style('Start creating qdrant indexes.', fg='green'))
    create_count = 0

    page = 1
    while True:
        try:
            datasets = db.session.query(Dataset).filter(Dataset.indexing_technique == 'high_quality') \
                .order_by(Dataset.created_at.desc()).paginate(page=page, per_page=50)
        except NotFound:
            break

        page += 1
        for dataset in datasets:
            if dataset.index_struct_dict:
                if dataset.index_struct_dict['type'] != 'qdrant':
                    try:
                        click.echo('Creating dataset qdrant index: {}'.format(dataset.id))
                        try:
                            embedding_model = ModelFactory.get_embedding_model(
                                tenant_id=dataset.tenant_id,
                                model_provider_name=dataset.embedding_model_provider,
                                model_name=dataset.embedding_model
                            )
                        except Exception:
                            try:
                                # fall back to the tenant's default embedding model
                                embedding_model = ModelFactory.get_embedding_model(
                                    tenant_id=dataset.tenant_id
                                )
                                dataset.embedding_model = embedding_model.name
                                dataset.embedding_model_provider = embedding_model.model_provider.provider_name
                            except Exception:
                                # last resort: a placeholder OpenAI provider
                                provider = Provider(
                                    id='provider_id',
                                    tenant_id=dataset.tenant_id,
                                    provider_name='openai',
                                    provider_type=ProviderType.SYSTEM.value,
                                    encrypted_config=json.dumps({'openai_api_key': 'TEST'}),
                                    is_valid=True,
                                )
                                model_provider = OpenAIProvider(provider=provider)
                                embedding_model = OpenAIEmbedding(name="text-embedding-ada-002",
                                                                  model_provider=model_provider)
                        embeddings = CacheEmbedding(embedding_model)

                        from core.index.vector_index.qdrant_vector_index import QdrantVectorIndex, QdrantConfig

                        index = QdrantVectorIndex(
                            dataset=dataset,
                            config=QdrantConfig(
                                endpoint=current_app.config.get('QDRANT_URL'),
                                api_key=current_app.config.get('QDRANT_API_KEY'),
                                root_path=current_app.root_path
                            ),
                            embeddings=embeddings
                        )
                        if index:
                            index.create_qdrant_dataset(dataset)
                            index_struct = {
                                "type": 'qdrant',
                                "vector_store": {
                                    "class_prefix": dataset.index_struct_dict['vector_store']['class_prefix']}
                            }
                            dataset.index_struct = json.dumps(index_struct)
                            db.session.commit()
                            create_count += 1
                        else:
                            click.echo('passed.')
                    except Exception as e:
                        click.echo(
                            click.style('Create dataset index error: {} {}'.format(e.__class__.__name__, str(e)),
                                        fg='red'))
                        continue

    click.echo(click.style('Congratulations! Created {} dataset indexes.'.format(create_count), fg='green'))


@click.command('update-qdrant-indexes', help='Update qdrant indexes.')
def update_qdrant_indexes():
    click.echo(click.style('Start updating qdrant indexes.', fg='green'))
    update_count = 0

    page = 1
    while True:
        try:
            datasets = db.session.query(Dataset).filter(Dataset.indexing_technique == 'high_quality') \
                .order_by(Dataset.created_at.desc()).paginate(page=page, per_page=50)
        except NotFound:
            break

        page += 1
        for dataset in datasets:
            if dataset.index_struct_dict:
                if dataset.index_struct_dict['type'] != 'qdrant':
                    try:
                        click.echo('Updating dataset qdrant index: {}'.format(dataset.id))
                        try:
                            embedding_model = ModelFactory.get_embedding_model(
                                tenant_id=dataset.tenant_id,
                                model_provider_name=dataset.embedding_model_provider,
                                model_name=dataset.embedding_model
                            )
                        except Exception:
                            # fall back to a placeholder OpenAI provider
                            provider = Provider(
                                id='provider_id',
                                tenant_id=dataset.tenant_id,
                                provider_name='openai',
                                provider_type=ProviderType.CUSTOM.value,
                                encrypted_config=json.dumps({'openai_api_key': 'TEST'}),
                                is_valid=True,
                            )
                            model_provider = OpenAIProvider(provider=provider)
                            embedding_model = OpenAIEmbedding(name="text-embedding-ada-002",
                                                              model_provider=model_provider)
                        embeddings = CacheEmbedding(embedding_model)

                        from core.index.vector_index.qdrant_vector_index import QdrantVectorIndex, QdrantConfig

                        index = QdrantVectorIndex(
                            dataset=dataset,
                            config=QdrantConfig(
                                endpoint=current_app.config.get('QDRANT_URL'),
                                api_key=current_app.config.get('QDRANT_API_KEY'),
                                root_path=current_app.root_path
                            ),
                            embeddings=embeddings
                        )
                        if index:
                            index.update_qdrant_dataset(dataset)
                            update_count += 1
                        else:
                            click.echo('passed.')
                    except Exception as e:
                        click.echo(
                            click.style('Update dataset index error: {} {}'.format(e.__class__.__name__, str(e)),
                                        fg='red'))
                        continue

    click.echo(click.style('Congratulations! Updated {} dataset indexes.'.format(update_count), fg='green'))


@click.command('normalization-collections', help='Restore all collections into one.')
def normalization_collections():
    click.echo(click.style('Start normalizing collections.', fg='green'))
    normalization_count = 0

    page = 1
    while True:
        try:
            datasets = db.session.query(Dataset).filter(Dataset.indexing_technique == 'high_quality') \
                .order_by(Dataset.created_at.desc()).paginate(page=page, per_page=50)
        except NotFound:
            break

        page += 1
        for dataset in datasets:
            if not dataset.collection_binding_id:
                try:
                    click.echo('Restoring dataset index: {}'.format(dataset.id))
                    try:
                        embedding_model = ModelFactory.get_embedding_model(
                            tenant_id=dataset.tenant_id,
                            model_provider_name=dataset.embedding_model_provider,
                            model_name=dataset.embedding_model
                        )
                    except Exception:
                        # fall back to a placeholder OpenAI provider
                        provider = Provider(
                            id='provider_id',
                            tenant_id=dataset.tenant_id,
                            provider_name='openai',
                            provider_type=ProviderType.CUSTOM.value,
                            encrypted_config=json.dumps({'openai_api_key': 'TEST'}),
                            is_valid=True,
                        )
                        model_provider = OpenAIProvider(provider=provider)
                        embedding_model = OpenAIEmbedding(name="text-embedding-ada-002",
                                                          model_provider=model_provider)
                    embeddings = CacheEmbedding(embedding_model)

                    # find or create the collection binding for this embedding model
                    dataset_collection_binding = db.session.query(DatasetCollectionBinding). \
                        filter(DatasetCollectionBinding.provider_name == embedding_model.model_provider.provider_name,
                               DatasetCollectionBinding.model_name == embedding_model.name). \
                        order_by(DatasetCollectionBinding.created_at). \
                        first()

                    if not dataset_collection_binding:
                        dataset_collection_binding = DatasetCollectionBinding(
                            provider_name=embedding_model.model_provider.provider_name,
                            model_name=embedding_model.name,
                            collection_name="Vector_index_" + str(uuid.uuid4()).replace("-", "_") + '_Node'
                        )
                        db.session.add(dataset_collection_binding)
                        db.session.commit()

                    from core.index.vector_index.qdrant_vector_index import QdrantVectorIndex, QdrantConfig

                    index = QdrantVectorIndex(
                        dataset=dataset,
                        config=QdrantConfig(
                            endpoint=current_app.config.get('QDRANT_URL'),
                            api_key=current_app.config.get('QDRANT_API_KEY'),
                            root_path=current_app.root_path
                        ),
                        embeddings=embeddings
                    )
                    if index:
                        index.restore_dataset_in_one(dataset, dataset_collection_binding)
                    else:
                        click.echo('passed.')

                    original_index = QdrantVectorIndex(
                        dataset=dataset,
                        config=QdrantConfig(
                            endpoint=current_app.config.get('QDRANT_URL'),
                            api_key=current_app.config.get('QDRANT_API_KEY'),
                            root_path=current_app.root_path
                        ),
                        embeddings=embeddings
                    )
                    if original_index:
                        original_index.delete_original_collection(dataset, dataset_collection_binding)
                        normalization_count += 1
                    else:
                        click.echo('passed.')
                except Exception as e:
                    click.echo(
                        click.style('Restore dataset index error: {} {}'.format(e.__class__.__name__, str(e)),
                                    fg='red'))
                    continue

    click.echo(click.style('Congratulations! Restored {} dataset indexes.'.format(normalization_count), fg='green'))


@click.command('update_app_model_configs', help='Migrate data to support paragraph variable.')
@click.option("--batch-size", default=500, help="Number of records to migrate in each batch.")
def update_app_model_configs(batch_size):
    pre_prompt_template = '{{default_input}}'
    user_input_form_template = {
        "en-US": [
            {
                "paragraph": {
                    "label": "Query",
                    "variable": "default_input",
                    "required": False,
                    "default": ""
                }
            }
        ],
        "zh-Hans": [
            {
                "paragraph": {
                    "label": "查询内容",
                    "variable": "default_input",
                    "required": False,
                    "default": ""
                }
            }
        ]
    }

    click.secho("Start migrating old data so that the text generator can support the paragraph variable.", fg='green')

    total_records = db.session.query(AppModelConfig) \
        .join(App, App.app_model_config_id == AppModelConfig.id) \
        .filter(App.mode == 'completion') \
        .count()

    if total_records == 0:
        click.secho("No data to migrate.", fg='green')
        return

    num_batches = (total_records + batch_size - 1) // batch_size

    with tqdm(total=total_records, desc="Migrating Data") as pbar:
        for i in range(num_batches):
            offset = i * batch_size
            limit = min(batch_size, total_records - offset)

            click.secho(f"Fetching batch {i + 1}/{num_batches} from source database...", fg='green')

            data_batch = db.session.query(AppModelConfig) \
                .join(App, App.app_model_config_id == AppModelConfig.id) \
                .filter(App.mode == 'completion') \
                .order_by(App.created_at) \
                .offset(offset).limit(limit).all()

            if not data_batch:
                click.secho("No more data to migrate.", fg='green')
                break

            try:
                click.secho(f"Migrating {len(data_batch)} records...", fg='green')
                for data in data_batch:
                    # click.secho(f"Migrating data {data.id}, pre_prompt: {data.pre_prompt}, user_input_form: {data.user_input_form}", fg='green')

                    if data.pre_prompt is None:
                        data.pre_prompt = pre_prompt_template
                    else:
                        if pre_prompt_template in data.pre_prompt:
                            continue
                        data.pre_prompt += pre_prompt_template

                    app_data = db.session.query(App) \
                        .filter(App.id == data.app_id) \
                        .one()

                    account_data = db.session.query(Account) \
                        .join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id) \
                        .filter(TenantAccountJoin.role == 'owner') \
                        .filter(TenantAccountJoin.tenant_id == app_data.tenant_id) \
                        .one_or_none()

                    if not account_data:
                        continue

                    if data.user_input_form is None or data.user_input_form == 'null':
                        data.user_input_form = json.dumps(user_input_form_template[account_data.interface_language])
                    else:
                        raw_json_data = json.loads(data.user_input_form)
                        raw_json_data.append(user_input_form_template[account_data.interface_language][0])
                        data.user_input_form = json.dumps(raw_json_data)

                    # click.secho(f"Updated data {data.id}, pre_prompt: {data.pre_prompt}, user_input_form: {data.user_input_form}", fg='green')

                db.session.commit()
            except Exception as e:
                click.secho(f"Error while migrating data: {e}, app_id: {data.app_id}, app_model_config_id: {data.id}",
                            fg='red')
                continue

            click.secho(f"Successfully migrated batch {i + 1}/{num_batches}.", fg='green')

            pbar.update(len(data_batch))
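
# Example invocation (a minimal sketch, assuming the standard Flask CLI entry point; note that this
# command name keeps its underscores, unlike the hyphenated commands above):
#
#   flask update_app_model_configs --batch-size 1000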


def register_commands(app):
    app.cli.add_command(reset_password)
    app.cli.add_command(reset_email)
    app.cli.add_command(generate_invitation_codes)
    app.cli.add_command(reset_encrypt_key_pair)
    app.cli.add_command(recreate_all_dataset_indexes)
    app.cli.add_command(sync_anthropic_hosted_providers)
    app.cli.add_command(clean_unused_dataset_indexes)
    app.cli.add_command(create_qdrant_indexes)
    app.cli.add_command(update_qdrant_indexes)
    app.cli.add_command(update_app_model_configs)
    app.cli.add_command(normalization_collections)
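
# Usage sketch (the application factory below is hypothetical; it only illustrates how
# register_commands() is typically wired up so the commands above show under `flask --help`):
#
#   from flask import Flask
#   from commands import register_commands
#
#   app = Flask(__name__)
#   register_commands(app)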