
Commit f896bcd

chore(langchain): add mypy pydantic plugin (#32610)
1 parent 73a7de6 commit f896bcd

22 files changed: 101 additions, 100 deletions
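
This commit enables mypy's pydantic plugin for the langchain package (presumably plugins = ["pydantic.mypy"] in the package's mypy configuration; that config file is among the 22 changed files but is not reproduced in this excerpt). With the plugin enabled, mypy derives each pydantic model's __init__ signature from its declared fields, so the per-call-site # type: ignore[call-arg] and # type: ignore[arg-type] workarounds in the diffs below are no longer needed. A minimal, hypothetical sketch of the pattern involved (ToyChain is illustrative only, not a class from this repository):

# Hypothetical example, not from the langchain codebase: a pydantic model
# constructed by keyword, the same pattern as the call sites in this commit.
# With the pydantic.mypy plugin enabled, mypy understands the __init__ that
# pydantic generates from the field declarations below, which is why the
# diffs in this commit can drop their # type: ignore[call-arg] / [arg-type]
# comments.
from typing import Optional

from pydantic import BaseModel


class ToyChain(BaseModel):
    verbose: Optional[bool] = None
    output_key: str = "text"


chain = ToyChain(verbose=True, output_key="answer")
print(chain)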

libs/langchain/langchain/agents/openai_functions_agent/base.py

Lines changed: 1 addition & 1 deletion
@@ -277,7 +277,7 @@ def from_llm_and_tools(
             extra_prompt_messages=extra_prompt_messages,
             system_message=system_message_,
         )
-        return cls(  # type: ignore[call-arg]
+        return cls(
             llm=llm,
             prompt=prompt,
             tools=tools,

libs/langchain/langchain/agents/openai_functions_multi_agent/base.py

Lines changed: 1 addition & 1 deletion
@@ -328,7 +328,7 @@ def from_llm_and_tools(
             extra_prompt_messages=extra_prompt_messages,
             system_message=system_message_,
         )
-        return cls(  # type: ignore[call-arg]
+        return cls(
             llm=llm,
             prompt=prompt,
             tools=tools,

libs/langchain/langchain/chains/llm_checker/base.py

Lines changed: 1 addition & 1 deletion
@@ -56,7 +56,7 @@ def _load_question_to_checked_assertions_chain(
         revised_answer_chain,
     ]
     return SequentialChain(
-        chains=chains,  # type: ignore[arg-type]
+        chains=chains,
         input_variables=["question"],
         output_variables=["revised_statement"],
         verbose=True,

libs/langchain/langchain/chains/loading.py

Lines changed: 16 additions & 16 deletions
@@ -169,7 +169,7 @@ def _load_map_reduce_documents_chain(
 
     return MapReduceDocumentsChain(
         llm_chain=llm_chain,
-        reduce_documents_chain=reduce_documents_chain,  # type: ignore[arg-type]
+        reduce_documents_chain=reduce_documents_chain,
         **config,
     )
 
@@ -293,10 +293,10 @@ def _load_llm_checker_chain(config: dict, **kwargs: Any) -> LLMCheckerChain:
     revised_answer_prompt = load_prompt(config.pop("revised_answer_prompt_path"))
     return LLMCheckerChain(
         llm=llm,
-        create_draft_answer_prompt=create_draft_answer_prompt,  # type: ignore[arg-type]
-        list_assertions_prompt=list_assertions_prompt,  # type: ignore[arg-type]
-        check_assertions_prompt=check_assertions_prompt,  # type: ignore[arg-type]
-        revised_answer_prompt=revised_answer_prompt,  # type: ignore[arg-type]
+        create_draft_answer_prompt=create_draft_answer_prompt,
+        list_assertions_prompt=list_assertions_prompt,
+        check_assertions_prompt=check_assertions_prompt,
+        revised_answer_prompt=revised_answer_prompt,
         **config,
     )
 
@@ -325,7 +325,7 @@ def _load_llm_math_chain(config: dict, **kwargs: Any) -> LLMMathChain:
     elif "prompt_path" in config:
         prompt = load_prompt(config.pop("prompt_path"))
     if llm_chain:
-        return LLMMathChain(llm_chain=llm_chain, prompt=prompt, **config)  # type: ignore[arg-type]
+        return LLMMathChain(llm_chain=llm_chain, prompt=prompt, **config)
     return LLMMathChain(llm=llm, prompt=prompt, **config)
 
 
@@ -341,7 +341,7 @@ def _load_map_rerank_documents_chain(
     else:
         msg = "One of `llm_chain` or `llm_chain_path` must be present."
         raise ValueError(msg)
-    return MapRerankDocumentsChain(llm_chain=llm_chain, **config)  # type: ignore[arg-type]
+    return MapRerankDocumentsChain(llm_chain=llm_chain, **config)
 
 
 def _load_pal_chain(config: dict, **kwargs: Any) -> Any:
@@ -377,8 +377,8 @@ def _load_refine_documents_chain(config: dict, **kwargs: Any) -> RefineDocuments
     elif "document_prompt_path" in config:
         document_prompt = load_prompt(config.pop("document_prompt_path"))
     return RefineDocumentsChain(
-        initial_llm_chain=initial_llm_chain,  # type: ignore[arg-type]
-        refine_llm_chain=refine_llm_chain,  # type: ignore[arg-type]
+        initial_llm_chain=initial_llm_chain,
+        refine_llm_chain=refine_llm_chain,
         document_prompt=document_prompt,
         **config,
     )
@@ -402,7 +402,7 @@ def _load_qa_with_sources_chain(config: dict, **kwargs: Any) -> QAWithSourcesCha
             "`combine_documents_chain_path` must be present."
         )
         raise ValueError(msg)
-    return QAWithSourcesChain(combine_documents_chain=combine_documents_chain, **config)  # type: ignore[arg-type]
+    return QAWithSourcesChain(combine_documents_chain=combine_documents_chain, **config)
 
 
 def _load_sql_database_chain(config: dict, **kwargs: Any) -> Any:
@@ -445,7 +445,7 @@ def _load_vector_db_qa_with_sources_chain(
         )
         raise ValueError(msg)
     return VectorDBQAWithSourcesChain(
-        combine_documents_chain=combine_documents_chain,  # type: ignore[arg-type]
+        combine_documents_chain=combine_documents_chain,
         vectorstore=vectorstore,
         **config,
     )
@@ -475,7 +475,7 @@ def _load_retrieval_qa(config: dict, **kwargs: Any) -> RetrievalQA:
         )
         raise ValueError(msg)
     return RetrievalQA(
-        combine_documents_chain=combine_documents_chain,  # type: ignore[arg-type]
+        combine_documents_chain=combine_documents_chain,
         retriever=retriever,
         **config,
     )
@@ -508,7 +508,7 @@ def _load_retrieval_qa_with_sources_chain(
         )
         raise ValueError(msg)
     return RetrievalQAWithSourcesChain(
-        combine_documents_chain=combine_documents_chain,  # type: ignore[arg-type]
+        combine_documents_chain=combine_documents_chain,
         retriever=retriever,
         **config,
     )
@@ -538,7 +538,7 @@ def _load_vector_db_qa(config: dict, **kwargs: Any) -> VectorDBQA:
        )
         raise ValueError(msg)
     return VectorDBQA(
-        combine_documents_chain=combine_documents_chain,  # type: ignore[arg-type]
+        combine_documents_chain=combine_documents_chain,
         vectorstore=vectorstore,
         **config,
     )
@@ -606,8 +606,8 @@ def _load_api_chain(config: dict, **kwargs: Any) -> APIChain:
         msg = "`requests_wrapper` must be present."
         raise ValueError(msg)
     return APIChain(
-        api_request_chain=api_request_chain,  # type: ignore[arg-type]
-        api_answer_chain=api_answer_chain,  # type: ignore[arg-type]
+        api_request_chain=api_request_chain,
+        api_answer_chain=api_answer_chain,
         requests_wrapper=requests_wrapper,
         **config,
     )

libs/langchain/langchain/chains/qa_with_sources/loading.py

Lines changed: 11 additions & 11 deletions
@@ -66,12 +66,12 @@ def _load_stuff_chain(
     verbose: Optional[bool] = None,
     **kwargs: Any,
 ) -> StuffDocumentsChain:
-    llm_chain = LLMChain(llm=llm, prompt=prompt, verbose=verbose)  # type: ignore[arg-type]
+    llm_chain = LLMChain(llm=llm, prompt=prompt, verbose=verbose)
     return StuffDocumentsChain(
         llm_chain=llm_chain,
         document_variable_name=document_variable_name,
         document_prompt=document_prompt,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         **kwargs,
     )
 
@@ -91,14 +91,14 @@ def _load_map_reduce_chain(
     token_max: int = 3000,
     **kwargs: Any,
 ) -> MapReduceDocumentsChain:
-    map_chain = LLMChain(llm=llm, prompt=question_prompt, verbose=verbose)  # type: ignore[arg-type]
+    map_chain = LLMChain(llm=llm, prompt=question_prompt, verbose=verbose)
     _reduce_llm = reduce_llm or llm
-    reduce_chain = LLMChain(llm=_reduce_llm, prompt=combine_prompt, verbose=verbose)  # type: ignore[arg-type]
+    reduce_chain = LLMChain(llm=_reduce_llm, prompt=combine_prompt, verbose=verbose)
     combine_documents_chain = StuffDocumentsChain(
         llm_chain=reduce_chain,
         document_variable_name=combine_document_variable_name,
         document_prompt=document_prompt,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
     )
     if collapse_prompt is None:
         collapse_chain = None
@@ -114,7 +114,7 @@ def _load_map_reduce_chain(
             llm_chain=LLMChain(
                 llm=_collapse_llm,
                 prompt=collapse_prompt,
-                verbose=verbose,  # type: ignore[arg-type]
+                verbose=verbose,
             ),
             document_variable_name=combine_document_variable_name,
             document_prompt=document_prompt,
@@ -123,13 +123,13 @@ def _load_map_reduce_chain(
         combine_documents_chain=combine_documents_chain,
         collapse_documents_chain=collapse_chain,
         token_max=token_max,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
     )
     return MapReduceDocumentsChain(
         llm_chain=map_chain,
         reduce_documents_chain=reduce_documents_chain,
         document_variable_name=map_reduce_document_variable_name,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         **kwargs,
     )
 
@@ -146,16 +146,16 @@ def _load_refine_chain(
     verbose: Optional[bool] = None,
     **kwargs: Any,
 ) -> RefineDocumentsChain:
-    initial_chain = LLMChain(llm=llm, prompt=question_prompt, verbose=verbose)  # type: ignore[arg-type]
+    initial_chain = LLMChain(llm=llm, prompt=question_prompt, verbose=verbose)
     _refine_llm = refine_llm or llm
-    refine_chain = LLMChain(llm=_refine_llm, prompt=refine_prompt, verbose=verbose)  # type: ignore[arg-type]
+    refine_chain = LLMChain(llm=_refine_llm, prompt=refine_prompt, verbose=verbose)
     return RefineDocumentsChain(
         initial_llm_chain=initial_chain,
         refine_llm_chain=refine_chain,
         document_variable_name=document_variable_name,
         initial_response_name=initial_response_name,
         document_prompt=document_prompt,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         **kwargs,
     )
 

libs/langchain/langchain/chains/question_answering/chain.py

Lines changed: 11 additions & 11 deletions
@@ -80,15 +80,15 @@ def _load_stuff_chain(
     llm_chain = LLMChain(
         llm=llm,
         prompt=_prompt,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callback_manager=callback_manager,
         callbacks=callbacks,
     )
     # TODO: document prompt
     return StuffDocumentsChain(
         llm_chain=llm_chain,
         document_variable_name=document_variable_name,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callback_manager=callback_manager,
         callbacks=callbacks,
         **kwargs,
@@ -120,23 +120,23 @@ def _load_map_reduce_chain(
     map_chain = LLMChain(
         llm=llm,
         prompt=_question_prompt,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callback_manager=callback_manager,
         callbacks=callbacks,
     )
     _reduce_llm = reduce_llm or llm
     reduce_chain = LLMChain(
         llm=_reduce_llm,
         prompt=_combine_prompt,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callback_manager=callback_manager,
         callbacks=callbacks,
     )
     # TODO: document prompt
     combine_documents_chain = StuffDocumentsChain(
         llm_chain=reduce_chain,
         document_variable_name=combine_document_variable_name,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callback_manager=callback_manager,
         callbacks=callbacks,
     )
@@ -154,12 +154,12 @@ def _load_map_reduce_chain(
             llm_chain=LLMChain(
                 llm=_collapse_llm,
                 prompt=collapse_prompt,
-                verbose=verbose,  # type: ignore[arg-type]
+                verbose=verbose,
                 callback_manager=callback_manager,
                 callbacks=callbacks,
             ),
             document_variable_name=combine_document_variable_name,
-            verbose=verbose,  # type: ignore[arg-type]
+            verbose=verbose,
             callback_manager=callback_manager,
         )
     reduce_documents_chain = ReduceDocumentsChain(
@@ -172,7 +172,7 @@ def _load_map_reduce_chain(
         llm_chain=map_chain,
         document_variable_name=map_reduce_document_variable_name,
         reduce_documents_chain=reduce_documents_chain,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callback_manager=callback_manager,
         callbacks=callbacks,
         **kwargs,
@@ -201,15 +201,15 @@ def _load_refine_chain(
     initial_chain = LLMChain(
         llm=llm,
         prompt=_question_prompt,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callback_manager=callback_manager,
         callbacks=callbacks,
     )
     _refine_llm = refine_llm or llm
     refine_chain = LLMChain(
         llm=_refine_llm,
         prompt=_refine_prompt,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callback_manager=callback_manager,
         callbacks=callbacks,
     )
@@ -218,7 +218,7 @@ def _load_refine_chain(
         refine_llm_chain=refine_chain,
         document_variable_name=document_variable_name,
         initial_response_name=initial_response_name,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callback_manager=callback_manager,
         callbacks=callbacks,
         **kwargs,

libs/langchain/langchain/chains/summarize/chain.py

Lines changed: 11 additions & 11 deletions
@@ -35,12 +35,12 @@ def _load_stuff_chain(
     verbose: Optional[bool] = None,
     **kwargs: Any,
 ) -> StuffDocumentsChain:
-    llm_chain = LLMChain(llm=llm, prompt=prompt, verbose=verbose)  # type: ignore[arg-type]
+    llm_chain = LLMChain(llm=llm, prompt=prompt, verbose=verbose)
     # TODO: document prompt
     return StuffDocumentsChain(
         llm_chain=llm_chain,
         document_variable_name=document_variable_name,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         **kwargs,
     )
 
@@ -64,21 +64,21 @@ def _load_map_reduce_chain(
     map_chain = LLMChain(
         llm=llm,
         prompt=map_prompt,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callbacks=callbacks,
     )
     _reduce_llm = reduce_llm or llm
     reduce_chain = LLMChain(
         llm=_reduce_llm,
         prompt=combine_prompt,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callbacks=callbacks,
     )
     # TODO: document prompt
     combine_documents_chain = StuffDocumentsChain(
         llm_chain=reduce_chain,
         document_variable_name=combine_document_variable_name,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callbacks=callbacks,
     )
     if collapse_prompt is None:
@@ -95,7 +95,7 @@ def _load_map_reduce_chain(
             llm_chain=LLMChain(
                 llm=_collapse_llm,
                 prompt=collapse_prompt,
-                verbose=verbose,  # type: ignore[arg-type]
+                verbose=verbose,
                 callbacks=callbacks,
             ),
             document_variable_name=combine_document_variable_name,
@@ -104,15 +104,15 @@ def _load_map_reduce_chain(
         combine_documents_chain=combine_documents_chain,
         collapse_documents_chain=collapse_chain,
         token_max=token_max,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callbacks=callbacks,
         collapse_max_retries=collapse_max_retries,
     )
     return MapReduceDocumentsChain(
         llm_chain=map_chain,
         reduce_documents_chain=reduce_documents_chain,
         document_variable_name=map_reduce_document_variable_name,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         callbacks=callbacks,
         **kwargs,
     )
@@ -129,15 +129,15 @@ def _load_refine_chain(
     verbose: Optional[bool] = None,
     **kwargs: Any,
 ) -> RefineDocumentsChain:
-    initial_chain = LLMChain(llm=llm, prompt=question_prompt, verbose=verbose)  # type: ignore[arg-type]
+    initial_chain = LLMChain(llm=llm, prompt=question_prompt, verbose=verbose)
     _refine_llm = refine_llm or llm
-    refine_chain = LLMChain(llm=_refine_llm, prompt=refine_prompt, verbose=verbose)  # type: ignore[arg-type]
+    refine_chain = LLMChain(llm=_refine_llm, prompt=refine_prompt, verbose=verbose)
     return RefineDocumentsChain(
         initial_llm_chain=initial_chain,
         refine_llm_chain=refine_chain,
         document_variable_name=document_variable_name,
         initial_response_name=initial_response_name,
-        verbose=verbose,  # type: ignore[arg-type]
+        verbose=verbose,
         **kwargs,
     )
