I am currently trying to log my LangChain chain using MLflow, but the code below throws the following error during mlflow.langchain.log_model,
and I am not sure what the issue is.
MlflowException: Failed to save runnable sequence: {'2': "ChatDatabricks -- No module named 'langchain_databricks'"}.
Some background information:
- I am not using
langchain_databricks
package since it has been deprecated. Hence I am using databricks_langchain
. - If I install
langchain_databricks
package, the model will be logged, but I will not be able to load it afterwards. (In any case, I don't want to use this package because it has been deprecated.) - Versions:
mlflow==2.20.2, langchain_core==0.3.35, databricks_langchain==0.3.0
from databricks_langchain import DatabricksVectorSearch, ChatDatabricks
from langchain.prompts import PromptTemplate
from langchain.schema.runnable import RunnableMap, RunnableLambda
from langchain.schema.output_parser import StrOutputParser
from operator import itemgetter
import mlflow
# Databricks Vector Search connection settings used by retriever_loader().
# Placeholder values — replace with the real endpoint and index names.
vs_endpoint = "your_vector_search_endpoint"
my_index_name = "your_index_name"
def retriever_loader():
    """Build a retriever backed by the configured Databricks Vector Search index.

    Connects to the module-level ``vs_endpoint`` / ``my_index_name`` index,
    exposing the ``ID`` and ``TEXT`` columns, and returns a retriever that
    performs hybrid search and yields the top 3 matches per query.
    """
    search_options = {"k": 3, "query_type": "HYBRID"}
    index = DatabricksVectorSearch(
        endpoint=vs_endpoint,
        index_name=my_index_name,
        columns=["ID", "TEXT"],
    )
    return index.as_retriever(search_kwargs=search_options)
# Instantiate the retriever once at import time for use in the chain below.
my_retriever = retriever_loader()
# Prompt with two input variables: {query} (the user question) and
# {context} (the formatted retrieval results).
prompt = PromptTemplate.from_template(
    template="""Some template: {query} and {context} """
)
def format_context(text):
    # NOTE(review): `modified` is not defined or imported anywhere in this
    # snippet — invoking the chain will raise NameError here. Presumably it
    # turns the retrieved documents into a prompt-ready string; define or
    # import it before running.
    return modified(text)
# Chat model served by a Databricks Foundation Model endpoint.
llm_endpoint = ChatDatabricks(endpoint="databricks-meta-llama-3-3-70b-instruct")
# LCEL pipeline: fan the input out into {query, context} (context comes from
# retrieval over the "messages" field, then formatting), fill the prompt,
# call the LLM, and parse the reply to a plain string.
chain = (
    RunnableMap({
        "query": RunnableLambda(itemgetter("messages")),
        "context": RunnableLambda(itemgetter("messages")) | my_retriever | RunnableLambda(format_context),
    })
    | prompt
    | llm_endpoint
    | StrOutputParser()
)
model_name = "some_model_name"
# Example input; also recorded by mlflow as the model's input example.
input_example = {"messages": "Your example query here"}
# Smoke-test the chain before logging it.
resp = chain.invoke(input_example)
# Log the chain to MLflow and register it in the model registry.
# NOTE(review): the reported MlflowException ("ChatDatabricks -- No module
# named 'langchain_databricks'") arises because object-based
# `mlflow.langchain.log_model` serializes ChatDatabricks via its legacy
# import path (`langchain_databricks`), which is not installed when only
# `databricks_langchain` is used. MLflow's documented workaround is
# "models from code": move the chain definition into its own .py file that
# ends with `mlflow.models.set_model(chain)`, and pass that file's path as
# the first argument to `log_model` instead of the chain object — confirm
# against the MLflow models-from-code docs for your MLflow version.
with mlflow.start_run(run_name="run_name") as run:
    model_info = mlflow.langchain.log_model(
        chain,
        loader_fn=retriever_loader,
        artifact_path="path_to_artifact",
        registered_model_name=model_name,
        input_example=input_example
    )