Skip to content

Commit f6faf47

Browse files
vertex-sdk-bot authored and copybara-github committed
chore: migrate legacy langchain imports to maintain compatibility
PiperOrigin-RevId: 904941775
1 parent 6332d33 commit f6faf47

2 files changed

Lines changed: 136 additions & 64 deletions

File tree

vertexai/agent_engines/templates/langchain.py

Lines changed: 68 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -43,11 +43,12 @@
4343
RunnableSerializable = Any
4444

4545
try:
46-
from langchain_google_vertexai.functions_utils import _ToolsType
47-
48-
_ToolsType = _ToolsType
46+
from langchain_google_genai.functions_utils import _ToolsType
4947
except ImportError:
50-
_ToolsType = Any
48+
try:
49+
from langchain_google_vertexai.functions_utils import _ToolsType
50+
except ImportError:
51+
_ToolsType = Any
5152

5253
try:
5354
from opentelemetry.sdk import trace
@@ -81,13 +82,15 @@ def _default_runnable_kwargs(has_history: bool) -> Mapping[str, Any]:
8182

8283
def _default_output_parser():
8384
try:
84-
from langchain.agents.output_parsers.tools import ToolsAgentOutputParser
85+
from langchain_classic.agents.output_parsers.tools import ToolsAgentOutputParser
8586
except (ModuleNotFoundError, ImportError):
86-
# Fallback to an older version if needed.
87-
from langchain.agents.output_parsers.openai_tools import (
88-
OpenAIToolsAgentOutputParser as ToolsAgentOutputParser,
89-
)
90-
87+
try:
88+
from langchain.agents.output_parsers.tools import ToolsAgentOutputParser
89+
except (ModuleNotFoundError, ImportError):
90+
# Fallback to an older version if needed.
91+
from langchain.agents.output_parsers.openai_tools import (
92+
OpenAIToolsAgentOutputParser as ToolsAgentOutputParser,
93+
)
9194
return ToolsAgentOutputParser()
9295

9396

@@ -98,17 +101,29 @@ def _default_model_builder(
98101
location: str,
99102
model_kwargs: Optional[Mapping[str, Any]] = None,
100103
) -> "BaseLanguageModel":
101-
import vertexai
102-
from google.cloud.aiplatform import initializer
103-
from langchain_google_vertexai import ChatVertexAI
104-
105104
model_kwargs = model_kwargs or {}
106-
current_project = initializer.global_config.project
107-
current_location = initializer.global_config.location
108-
vertexai.init(project=project, location=location)
109-
model = ChatVertexAI(model_name=model_name, **model_kwargs)
110-
vertexai.init(project=current_project, location=current_location)
111-
return model
105+
try:
106+
from langchain_google_genai import ChatGoogleGenerativeAI
107+
108+
model = ChatGoogleGenerativeAI(
109+
model=model_name,
110+
project=project,
111+
location=location,
112+
vertexai=True,
113+
**model_kwargs,
114+
)
115+
return model
116+
except ImportError:
117+
import vertexai
118+
from google.cloud.aiplatform import initializer
119+
from langchain_google_vertexai import ChatVertexAI
120+
121+
current_project = initializer.global_config.project
122+
current_location = initializer.global_config.location
123+
vertexai.init(project=project, location=location)
124+
model = ChatVertexAI(model_name=model_name, **model_kwargs)
125+
vertexai.init(project=current_project, location=current_location)
126+
return model
112127

113128

114129
def _default_runnable_builder(
@@ -124,8 +139,16 @@ def _default_runnable_builder(
124139
runnable_kwargs: Optional[Mapping[str, Any]] = None,
125140
) -> "RunnableSerializable":
126141
from langchain_core import tools as lc_tools
127-
from langchain.agents import AgentExecutor
128-
from langchain.tools.base import StructuredTool
142+
143+
try:
144+
from langchain_classic.agents import AgentExecutor
145+
except ImportError:
146+
from langchain.agents import AgentExecutor
147+
148+
try:
149+
from langchain_core.tools import StructuredTool
150+
except ImportError:
151+
from langchain.tools.base import StructuredTool
129152

130153
# The prompt template and runnable_kwargs needs to be customized depending
131154
# on whether the user intends for the agent to have history. The way the
@@ -261,12 +284,16 @@ def _default_prompt(
261284
from langchain_core import prompts
262285

263286
try:
264-
from langchain.agents.format_scratchpad.tools import format_to_tool_messages
265-
except (ModuleNotFoundError, ImportError):
266-
# Fallback to an older version if needed.
267-
from langchain.agents.format_scratchpad.openai_tools import (
268-
format_to_openai_tool_messages as format_to_tool_messages,
287+
from langchain_classic.agents.format_scratchpad.tools import (
288+
format_to_tool_messages,
269289
)
290+
except (ModuleNotFoundError, ImportError):
291+
try:
292+
from langchain.agents.format_scratchpad.tools import format_to_tool_messages
293+
except (ModuleNotFoundError, ImportError):
294+
from langchain.agents.format_scratchpad.openai_tools import (
295+
format_to_openai_tool_messages as format_to_tool_messages,
296+
)
270297

271298
system_instructions = []
272299
if system_instruction:
@@ -629,13 +656,18 @@ def query(
629656
Returns:
630657
The output of querying the Agent with the given input and config.
631658
"""
632-
from langchain.load import dump as langchain_load_dump
659+
try:
660+
from langchain_core.load import dumpd
661+
except ImportError:
662+
from langchain.load import dump as langchain_load_dump
663+
664+
dumpd = langchain_load_dump.dumpd
633665

634666
if isinstance(input, str):
635667
input = {"input": input}
636668
if not self._tmpl_attrs.get("runnable"):
637669
self.set_up()
638-
return langchain_load_dump.dumpd(
670+
return dumpd(
639671
self._tmpl_attrs.get("runnable").invoke(
640672
input=input, config=config, **kwargs
641673
)
@@ -662,7 +694,12 @@ def stream_query(
662694
Yields:
663695
The output of querying the Agent with the given input and config.
664696
"""
665-
from langchain.load import dump as langchain_load_dump
697+
try:
698+
from langchain_core.load import dumpd
699+
except ImportError:
700+
from langchain.load import dump as langchain_load_dump
701+
702+
dumpd = langchain_load_dump.dumpd
666703

667704
if isinstance(input, str):
668705
input = {"input": input}
@@ -673,4 +710,4 @@ def stream_query(
673710
config=config,
674711
**kwargs,
675712
):
676-
yield langchain_load_dump.dumpd(chunk)
713+
yield dumpd(chunk)

vertexai/preview/reasoning_engines/templates/langchain.py

Lines changed: 68 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -43,11 +43,12 @@
4343
RunnableSerializable = Any
4444

4545
try:
46-
from langchain_google_vertexai.functions_utils import _ToolsType
47-
48-
_ToolsType = _ToolsType
46+
from langchain_google_genai.functions_utils import _ToolsType
4947
except ImportError:
50-
_ToolsType = Any
48+
try:
49+
from langchain_google_vertexai.functions_utils import _ToolsType
50+
except ImportError:
51+
_ToolsType = Any
5152

5253
try:
5354
from opentelemetry.sdk import trace
@@ -81,13 +82,15 @@ def _default_runnable_kwargs(has_history: bool) -> Mapping[str, Any]:
8182

8283
def _default_output_parser():
8384
try:
84-
from langchain.agents.output_parsers.tools import ToolsAgentOutputParser
85+
from langchain_classic.agents.output_parsers.tools import ToolsAgentOutputParser
8586
except (ModuleNotFoundError, ImportError):
86-
# Fallback to an older version if needed.
87-
from langchain.agents.output_parsers.openai_tools import (
88-
OpenAIToolsAgentOutputParser as ToolsAgentOutputParser,
89-
)
90-
87+
try:
88+
from langchain.agents.output_parsers.tools import ToolsAgentOutputParser
89+
except (ModuleNotFoundError, ImportError):
90+
# Fallback to an older version if needed.
91+
from langchain.agents.output_parsers.openai_tools import (
92+
OpenAIToolsAgentOutputParser as ToolsAgentOutputParser,
93+
)
9194
return ToolsAgentOutputParser()
9295

9396

@@ -98,17 +101,29 @@ def _default_model_builder(
98101
location: str,
99102
model_kwargs: Optional[Mapping[str, Any]] = None,
100103
) -> "BaseLanguageModel":
101-
import vertexai
102-
from google.cloud.aiplatform import initializer
103-
from langchain_google_vertexai import ChatVertexAI
104-
105104
model_kwargs = model_kwargs or {}
106-
current_project = initializer.global_config.project
107-
current_location = initializer.global_config.location
108-
vertexai.init(project=project, location=location)
109-
model = ChatVertexAI(model_name=model_name, **model_kwargs)
110-
vertexai.init(project=current_project, location=current_location)
111-
return model
105+
try:
106+
from langchain_google_genai import ChatGoogleGenerativeAI
107+
108+
model = ChatGoogleGenerativeAI(
109+
model=model_name,
110+
project=project,
111+
location=location,
112+
vertexai=True,
113+
**model_kwargs,
114+
)
115+
return model
116+
except ImportError:
117+
import vertexai
118+
from google.cloud.aiplatform import initializer
119+
from langchain_google_vertexai import ChatVertexAI
120+
121+
current_project = initializer.global_config.project
122+
current_location = initializer.global_config.location
123+
vertexai.init(project=project, location=location)
124+
model = ChatVertexAI(model_name=model_name, **model_kwargs)
125+
vertexai.init(project=current_project, location=current_location)
126+
return model
112127

113128

114129
def _default_runnable_builder(
@@ -124,8 +139,16 @@ def _default_runnable_builder(
124139
runnable_kwargs: Optional[Mapping[str, Any]] = None,
125140
) -> "RunnableSerializable":
126141
from langchain_core import tools as lc_tools
127-
from langchain.agents import AgentExecutor
128-
from langchain.tools.base import StructuredTool
142+
143+
try:
144+
from langchain_classic.agents import AgentExecutor
145+
except ImportError:
146+
from langchain.agents import AgentExecutor
147+
148+
try:
149+
from langchain_core.tools import StructuredTool
150+
except ImportError:
151+
from langchain.tools.base import StructuredTool
129152

130153
# The prompt template and runnable_kwargs needs to be customized depending
131154
# on whether the user intends for the agent to have history. The way the
@@ -175,12 +198,16 @@ def _default_prompt(
175198
from langchain_core import prompts
176199

177200
try:
178-
from langchain.agents.format_scratchpad.tools import format_to_tool_messages
179-
except (ModuleNotFoundError, ImportError):
180-
# Fallback to an older version if needed.
181-
from langchain.agents.format_scratchpad.openai_tools import (
182-
format_to_openai_tool_messages as format_to_tool_messages,
201+
from langchain_classic.agents.format_scratchpad.tools import (
202+
format_to_tool_messages,
183203
)
204+
except (ModuleNotFoundError, ImportError):
205+
try:
206+
from langchain.agents.format_scratchpad.tools import format_to_tool_messages
207+
except (ModuleNotFoundError, ImportError):
208+
from langchain.agents.format_scratchpad.openai_tools import (
209+
format_to_openai_tool_messages as format_to_tool_messages,
210+
)
184211

185212
system_instructions = []
186213
if system_instruction:
@@ -605,15 +632,18 @@ def query(
605632
Returns:
606633
The output of querying the Agent with the given input and config.
607634
"""
608-
from langchain.load import dump as langchain_load_dump
635+
try:
636+
from langchain_core.load import dumpd
637+
except ImportError:
638+
from langchain.load import dump as langchain_load_dump
639+
640+
dumpd = langchain_load_dump.dumpd
609641

610642
if isinstance(input, str):
611643
input = {"input": input}
612644
if not self._runnable:
613645
self.set_up()
614-
return langchain_load_dump.dumpd(
615-
self._runnable.invoke(input=input, config=config, **kwargs)
616-
)
646+
return dumpd(self._runnable.invoke(input=input, config=config, **kwargs))
617647

618648
def stream_query(
619649
self,
@@ -636,11 +666,16 @@ def stream_query(
636666
Yields:
637667
The output of querying the Agent with the given input and config.
638668
"""
639-
from langchain.load import dump as langchain_load_dump
669+
try:
670+
from langchain_core.load import dumpd
671+
except ImportError:
672+
from langchain.load import dump as langchain_load_dump
673+
674+
dumpd = langchain_load_dump.dumpd
640675

641676
if isinstance(input, str):
642677
input = {"input": input}
643678
if not self._runnable:
644679
self.set_up()
645680
for chunk in self._runnable.stream(input=input, config=config, **kwargs):
646-
yield langchain_load_dump.dumpd(chunk)
681+
yield dumpd(chunk)

0 commit comments

Comments (0)