
Lindorm Chat AI

This notebook covers how to get started with Lindorm AI chat models in LangChain.
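
The dependencies below are assumed from the imports in this notebook: environs and langchain-community are on PyPI, while the lindormai SDK is assumed to come from Lindorm's own distribution.

%pip install --upgrade --quiet environs langchain-community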

import environs
from lindormai.model_manager import ModelManager

env = environs.Env()
env.read_env(".env")


class Config:
    AI_CHAT_LLM_ENDPOINT = env.str("AI_CHAT_LLM_ENDPOINT", "<CHAT_ENDPOINT>")
    AI_CHAT_USERNAME = env.str("AI_CHAT_USERNAME", "root")
    AI_CHAT_PWD = env.str("AI_CHAT_PWD", "<PASSWORD>")
    AI_DEFAULT_CHAT_MODEL = "qa_model_qwen_72b_chat"


LDAI_CHAT_LLM_ENDPOINT = Config.AI_CHAT_LLM_ENDPOINT
LDAI_CHAT_USERNAME = Config.AI_CHAT_USERNAME
LDAI_CHAT_PWD = Config.AI_CHAT_PWD
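
If you keep credentials in a .env file, it only needs the variables read above; the values here are placeholders:

AI_CHAT_LLM_ENDPOINT=<CHAT_ENDPOINT>
AI_CHAT_USERNAME=root
AI_CHAT_PWD=<PASSWORD>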

Define helper functions

def check_model_exist(model_mgr, model_name):
    """Return True if the named model exists and its status is READY."""
    model_list = model_mgr.list()
    for model in model_list:
        if model_name == model["name"] and "READY" == model["status"]:
            return True
    return False


def create_llm_model(model_mgr, model_name, path, algo):
    """Create (deploy) a question-answering LLM model from the given path."""
    task = "QUESTION_ANSWERING"
    result = model_mgr.create(name=model_name, task=task, path=path, algo=algo)
    return result

Create & Deploy LLM Model

ldai_model_mgr = ModelManager(LDAI_CHAT_LLM_ENDPOINT, LDAI_CHAT_USERNAME, LDAI_CHAT_PWD)

llm_model_name = "qa_model_qwen_72b_chat"
llm_model_path = "modelscope://qwen/qwen-72b-chat-int4"
llm_model_algo = "QWEN_72B_CHAT_INT4"


if not check_model_exist(ldai_model_mgr, llm_model_name):
    create_llm_model(ldai_model_mgr, llm_model_name, llm_model_path, llm_model_algo)
else:
    print(f"model {llm_model_name} already exists!")

Init ChatLindormAI

from langchain_community.chat_models.lindormai import ChatLindormAI

ldai_chat = ChatLindormAI(
    endpoint=LDAI_CHAT_LLM_ENDPOINT,
    username=LDAI_CHAT_USERNAME,
    password=LDAI_CHAT_PWD,
    model_name=llm_model_name,
)
question = "hello? who are you?"

print(ldai_chat.invoke(question))
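
Like other LangChain chat models, ChatLindormAI should also accept a list of messages, which lets you set a system prompt. A sketch assuming the standard chat-model interface:

from langchain_core.messages import HumanMessage, SystemMessage

# invoke() on a LangChain chat model accepts a list of messages
# and returns an AIMessage whose text is in .content.
messages = [
    SystemMessage(content="You are a helpful assistant."),
    HumanMessage(content="What is Lindorm?"),
]
response = ldai_chat.invoke(messages)
print(response.content)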
