# test_llm.py — integration tests for the Wenxin (ERNIE Bot) large-language-model provider.
import os
from time import sleep
from typing import Generator

import pytest

from core.model_runtime.entities.llm_entities import (
    LLMResult,
    LLMResultChunk,
    LLMResultChunkDelta,
)
from core.model_runtime.entities.message_entities import (
    AssistantPromptMessage,
    SystemPromptMessage,
    UserPromptMessage,
)
from core.model_runtime.entities.model_entities import AIModelEntity
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.wenxin.llm.llm import ErnieBotLarguageModel
  11. def test_predefined_models():
  12. model = ErnieBotLarguageModel()
  13. model_schemas = model.predefined_models()
  14. assert len(model_schemas) >= 1
  15. assert isinstance(model_schemas[0], AIModelEntity)
  16. def test_validate_credentials_for_chat_model():
  17. sleep(3)
  18. model = ErnieBotLarguageModel()
  19. with pytest.raises(CredentialsValidateFailedError):
  20. model.validate_credentials(
  21. model='ernie-bot',
  22. credentials={
  23. 'api_key': 'invalid_key',
  24. 'secret_key': 'invalid_key'
  25. }
  26. )
  27. model.validate_credentials(
  28. model='ernie-bot',
  29. credentials={
  30. 'api_key': os.environ.get('WENXIN_API_KEY'),
  31. 'secret_key': os.environ.get('WENXIN_SECRET_KEY')
  32. }
  33. )
  34. def test_invoke_model_ernie_bot():
  35. sleep(3)
  36. model = ErnieBotLarguageModel()
  37. response = model.invoke(
  38. model='ernie-bot',
  39. credentials={
  40. 'api_key': os.environ.get('WENXIN_API_KEY'),
  41. 'secret_key': os.environ.get('WENXIN_SECRET_KEY')
  42. },
  43. prompt_messages=[
  44. UserPromptMessage(
  45. content='Hello World!'
  46. )
  47. ],
  48. model_parameters={
  49. 'temperature': 0.7,
  50. 'top_p': 1.0,
  51. },
  52. stop=['you'],
  53. user="abc-123",
  54. stream=False
  55. )
  56. assert isinstance(response, LLMResult)
  57. assert len(response.message.content) > 0
  58. assert response.usage.total_tokens > 0
  59. def test_invoke_model_ernie_bot_turbo():
  60. sleep(3)
  61. model = ErnieBotLarguageModel()
  62. response = model.invoke(
  63. model='ernie-bot-turbo',
  64. credentials={
  65. 'api_key': os.environ.get('WENXIN_API_KEY'),
  66. 'secret_key': os.environ.get('WENXIN_SECRET_KEY')
  67. },
  68. prompt_messages=[
  69. UserPromptMessage(
  70. content='Hello World!'
  71. )
  72. ],
  73. model_parameters={
  74. 'temperature': 0.7,
  75. 'top_p': 1.0,
  76. },
  77. stop=['you'],
  78. user="abc-123",
  79. stream=False
  80. )
  81. assert isinstance(response, LLMResult)
  82. assert len(response.message.content) > 0
  83. assert response.usage.total_tokens > 0
  84. def test_invoke_model_ernie_8k():
  85. sleep(3)
  86. model = ErnieBotLarguageModel()
  87. response = model.invoke(
  88. model='ernie-bot-8k',
  89. credentials={
  90. 'api_key': os.environ.get('WENXIN_API_KEY'),
  91. 'secret_key': os.environ.get('WENXIN_SECRET_KEY')
  92. },
  93. prompt_messages=[
  94. UserPromptMessage(
  95. content='Hello World!'
  96. )
  97. ],
  98. model_parameters={
  99. 'temperature': 0.7,
  100. 'top_p': 1.0,
  101. },
  102. stop=['you'],
  103. user="abc-123",
  104. stream=False
  105. )
  106. assert isinstance(response, LLMResult)
  107. assert len(response.message.content) > 0
  108. assert response.usage.total_tokens > 0
  109. def test_invoke_model_ernie_bot_4():
  110. sleep(3)
  111. model = ErnieBotLarguageModel()
  112. response = model.invoke(
  113. model='ernie-bot-4',
  114. credentials={
  115. 'api_key': os.environ.get('WENXIN_API_KEY'),
  116. 'secret_key': os.environ.get('WENXIN_SECRET_KEY')
  117. },
  118. prompt_messages=[
  119. UserPromptMessage(
  120. content='Hello World!'
  121. )
  122. ],
  123. model_parameters={
  124. 'temperature': 0.7,
  125. 'top_p': 1.0,
  126. },
  127. stop=['you'],
  128. user="abc-123",
  129. stream=False
  130. )
  131. assert isinstance(response, LLMResult)
  132. assert len(response.message.content) > 0
  133. assert response.usage.total_tokens > 0
  134. def test_invoke_stream_model():
  135. sleep(3)
  136. model = ErnieBotLarguageModel()
  137. response = model.invoke(
  138. model='ernie-bot',
  139. credentials={
  140. 'api_key': os.environ.get('WENXIN_API_KEY'),
  141. 'secret_key': os.environ.get('WENXIN_SECRET_KEY')
  142. },
  143. prompt_messages=[
  144. UserPromptMessage(
  145. content='Hello World!'
  146. )
  147. ],
  148. model_parameters={
  149. 'temperature': 0.7,
  150. 'top_p': 1.0,
  151. },
  152. stop=['you'],
  153. stream=True,
  154. user="abc-123"
  155. )
  156. assert isinstance(response, Generator)
  157. for chunk in response:
  158. assert isinstance(chunk, LLMResultChunk)
  159. assert isinstance(chunk.delta, LLMResultChunkDelta)
  160. assert isinstance(chunk.delta.message, AssistantPromptMessage)
  161. assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
  162. def test_invoke_model_with_system():
  163. sleep(3)
  164. model = ErnieBotLarguageModel()
  165. response = model.invoke(
  166. model='ernie-bot',
  167. credentials={
  168. 'api_key': os.environ.get('WENXIN_API_KEY'),
  169. 'secret_key': os.environ.get('WENXIN_SECRET_KEY')
  170. },
  171. prompt_messages=[
  172. SystemPromptMessage(
  173. content='你是Kasumi'
  174. ),
  175. UserPromptMessage(
  176. content='你是谁?'
  177. )
  178. ],
  179. model_parameters={
  180. 'temperature': 0.7,
  181. 'top_p': 1.0,
  182. },
  183. stop=['you'],
  184. stream=False,
  185. user="abc-123"
  186. )
  187. assert isinstance(response, LLMResult)
  188. assert 'kasumi' in response.message.content.lower()
  189. def test_invoke_with_search():
  190. sleep(3)
  191. model = ErnieBotLarguageModel()
  192. response = model.invoke(
  193. model='ernie-bot',
  194. credentials={
  195. 'api_key': os.environ.get('WENXIN_API_KEY'),
  196. 'secret_key': os.environ.get('WENXIN_SECRET_KEY')
  197. },
  198. prompt_messages=[
  199. UserPromptMessage(
  200. content='北京今天的天气怎么样'
  201. )
  202. ],
  203. model_parameters={
  204. 'temperature': 0.7,
  205. 'top_p': 1.0,
  206. 'disable_search': True,
  207. },
  208. stop=[],
  209. stream=True,
  210. user="abc-123"
  211. )
  212. assert isinstance(response, Generator)
  213. total_message = ''
  214. for chunk in response:
  215. assert isinstance(chunk, LLMResultChunk)
  216. assert isinstance(chunk.delta, LLMResultChunkDelta)
  217. assert isinstance(chunk.delta.message, AssistantPromptMessage)
  218. total_message += chunk.delta.message.content
  219. print(chunk.delta.message.content)
  220. assert len(chunk.delta.message.content) > 0 if not chunk.delta.finish_reason else True
  221. # there should be 对不起、我不能、不支持……
  222. assert ('不' in total_message or '抱歉' in total_message or '无法' in total_message)
  223. def test_get_num_tokens():
  224. sleep(3)
  225. model = ErnieBotLarguageModel()
  226. response = model.get_num_tokens(
  227. model='ernie-bot',
  228. credentials={
  229. 'api_key': os.environ.get('WENXIN_API_KEY'),
  230. 'secret_key': os.environ.get('WENXIN_SECRET_KEY')
  231. },
  232. prompt_messages=[
  233. UserPromptMessage(
  234. content='Hello World!'
  235. )
  236. ],
  237. tools=[]
  238. )
  239. assert isinstance(response, int)
  240. assert response == 10