o
    i                     @  s  d Z ddlmZ ddlZddlZddlmZmZ ddl	m
Z
mZmZ ddlmZmZ ddlmZmZmZ ddlmZ dd	lmZmZ dd
lmZmZmZmZmZ ddlZddlm Z m!Z! ddl"m#Z# ddl$m%Z% ddl&m'Z'm(Z( ddl)m*Z* ddl+m,Z, ddl-m.Z. ddl/m0Z0 ddl1m2Z2m3Z3m4Z4m5Z5m6Z6m7Z7m8Z8m9Z9m:Z:m;Z;m<Z<m=Z=m>Z>m?Z?m@Z@mAZAmBZB ddlCmDZD ddlEmFZFmGZGmHZH ddlImJZJmKZK ddlLmMZMmNZN ddlOmPZP ddlQmRZR e ded ZS	 edddG d d! d!ZTG d"d# d#eZUeG d$d% d%eZVdaW	 did(d)ZXedjd-d.ZYeKfdkd3d4ZZdd5d6d7dld>d?Z[edmdld@dAZ\edBe]e^Z_G dCdD dDe!ee_ Z`e	EdndodMdNZae	EdndpdQdNZa	R	EdqdrdUdNZaedsdWdXZbdtd]d^ZcdudadbZddvdgdhZedS )wzLogic related to making requests to an LLM.

The aim here is to make a common interface for different LLMs, so that the rest of the code can be agnostic to the
specific LLM being used.
    )annotationsN)ABCabstractmethod)AsyncIteratorCallableIterator)asynccontextmanagercontextmanager)	dataclassfieldreplace)datetime)cachecached_property)AnyGenericLiteralTypeVaroverload)TypeAliasType	TypedDict   )_utils)JsonSchemaTransformer)OutputObjectDefinitionPromptedOutputSchema)ModelResponsePartsManager)
RunContext)AbstractBuiltinTool)	UserError)BaseToolCallPartBinaryImageFilePartFileUrlFinalResultEventFinishReasonModelMessageModelRequestModelResponseModelResponsePartModelResponseStreamEventPartEndEventPartStartEventTextPartThinkingPartToolCallPartVideoUrl)
OutputMode)DEFAULT_PROFILEModelProfileModelProfileSpec)Providerinfer_provider)ModelSettingsmerge_model_settings)ToolDefinition)RequestUsageKnownModelName)z#anthropic:claude-3-5-haiku-20241022z!anthropic:claude-3-5-haiku-latestz$anthropic:claude-3-5-sonnet-20240620z$anthropic:claude-3-5-sonnet-20241022z"anthropic:claude-3-5-sonnet-latestz$anthropic:claude-3-7-sonnet-20250219z"anthropic:claude-3-7-sonnet-latestz!anthropic:claude-3-haiku-20240307z anthropic:claude-3-opus-20240229zanthropic:claude-3-opus-latestz anthropic:claude-4-opus-20250514z"anthropic:claude-4-sonnet-20250514zanthropic:claude-haiku-4-5z#anthropic:claude-haiku-4-5-20251001zanthropic:claude-opus-4-0z"anthropic:claude-opus-4-1-20250805z anthropic:claude-opus-4-20250514zanthropic:claude-sonnet-4-0z"anthropic:claude-sonnet-4-20250514zanthropic:claude-sonnet-4-5z$anthropic:claude-sonnet-4-5-20250929z$bedrock:amazon.titan-text-express-v1z!bedrock:amazon.titan-text-lite-v1zbedrock:amazon.titan-tg1-largez0bedrock:anthropic.claude-3-5-haiku-20241022-v1:0z1bedrock:anthropic.claude-3-5-sonnet-20240620-v1:0z1bedrock:anthropic.claude-3-5-sonnet-20241022-v2:0z1bedrock:anthropic.claude-3-7-sonnet-20250219-v1:0z.bedrock:anthropic.claude-3-haiku-20240307-v1:0z-bedrock:anthropic.claude-3-opus-20240229-v1:0z/bedrock:anthropic.claude-3-sonnet-20240229-v1:0z0bedrock:anthropic.claude-haiku-4-5-20251001-v1:0z#bedrock:anthropic.claude-instant-v1z-bedrock:anthropic.claude-opus-4-20250514-v1:0z/bedrock:anthropic.claude-sonnet-4-20250514-v1:0z1bedrock:anthropic.claude-sonnet-4-5-20250929-v1:0zbedrock:anthropic.claude-v2zbedrock:anthropic.claude-v2:1z%bedrock:cohere.command-light-text-v14z"bedrock:cohere.command-r-plus-v1:0zbedrock:cohere.command-r-v1:0zbedrock:cohere.command-text-v14z3bedrock:eu.anthropic.claude-haiku-4-5-20251001-v1:0z2bedrock:eu.anthropic.claude-sonnet-4-20250514-v1:0z4bedrock:eu.anthropic.claude-sonnet-4-5-20250929-v1:0z(bedrock:meta.llama3-1-405b-instruct-v1:0z'bedrock:meta.llama3-1-70b-instruct-v1:0z&bedrock:meta.llama3-1-8b-instruct-v1:0z%bedrock:meta.llama3-70b-instruct-v1:0z$bedrock:meta.llama3-8b-instruct-v1:0z(bedrock:mistral.mistral-7b-instruct-v0:2z'bedrock:mistral.mistral-large-2402-v1:0z'bedrock:mistral.mistral-large-2407-v1:0z*bedrock:mistral.mixtral-8x7b-instruct-v0:1z bedrock:us.amazon.nova-lite-v1:0z!bedrock:us.amazon.nova-micro-v1:0zbedrock:us.amazon.nova-pro-v1:0z3bedrock:us.anthropic.claude-3-5-haiku-20241022-v1:0z4bedrock:us.anthropic.claude-3-5-sonnet-20240620-v1:0z4bedrock:us.anthropic.claude-3-5-sonnet-20241022-v2:0z4bedrock:us.anthropic.claude-3-7-sonnet-20250219-v1:0z1bedrock:us.anthropic.claude-3-haiku-20240307-v1:0z0bedrock:us.anthropic.claude-3-opus-20240229-v1:0z2bedrock:us.anthropic.claude-3-sonnet-20240229-v1:0z3bedrock:us.anthropic.claude-haiku-4-5-20251001-v1:0z0bedrock:us.anthropic.claude-opus-4-20250514-v1:0z2bedrock:us.anthropic.claude-sonnet-4-20250514-v1:0z4bedrock:us.anthropic.claude-sonnet-4-5-20250929-v1:0z*bedrock:us.meta.llama3-1-70b-instruct-v1:0z)bedrock:us.meta.llama3-1-8b-instruct-v1:0z*bedrock:us.meta.llama3-2-11b-instruct-v1:0z)bedrock:us.meta.llama3-2-1b-instruct-v1:0z)bedrock:us.meta.llama3-2-3b-instruct-v1:0z*bedrock:us.meta.llama3-2-90b-instruct-v1:0z*bedrock:us.meta.llama3-3-70b-instruct-v1:0zcerebras:gpt-oss-120bzcerebras:llama-3.3-70bzcerebras:llama3.1-8bz'cerebras:qwen-3-235b-a22b-instruct-2507z'cerebras:qwen-3-235b-a22b-thinking-2507zcerebras:qwen-3-32bzcerebras:zai-glm-4.6zcohere:c4ai-aya-expanse-32bzcohere:c4ai-aya-expanse-8bzcohere:command-nightlyzcohere:command-r-08-2024zcohere:command-r-plus-08-2024zcohere:command-r7b-12-2024zdeepseek:deepseek-chatzdeepseek:deepseek-reasonerzgoogle-gla:gemini-2.0-flashz google-gla:gemini-2.0-flash-litezgoogle-gla:gemini-2.5-flashz google-gla:gemini-2.5-flash-litez0google-gla:gemini-2.5-flash-lite-preview-09-2025z+google-gla:gemini-2.5-flash-preview-09-2025zgoogle-gla:gemini-2.5-prozgoogle-gla:gemini-flash-latestz#google-gla:gemini-flash-lite-latestzgoogle-vertex:gemini-2.0-flashz#google-vertex:gemini-2.0-flash-litezgoogle-vertex:gemini-2.5-flashz#google-vertex:gemini-2.5-flash-litez3google-vertex:gemini-2.5-flash-lite-preview-09-2025z.google-vertex:gemini-2.5-flash-preview-09-2025zgoogle-vertex:gemini-2.5-proz!google-vertex:gemini-flash-latestz&google-vertex:gemini-flash-lite-latestzgrok:grok-2-image-1212zgrok:grok-2-vision-1212zgrok:grok-3zgrok:grok-3-fastzgrok:grok-3-minizgrok:grok-3-mini-fastzgrok:grok-4zgrok:grok-4-0709z"groq:deepseek-r1-distill-llama-70bz!groq:deepseek-r1-distill-qwen-32bzgroq:distil-whisper-large-v3-enzgroq:gemma2-9b-itzgroq:llama-3.1-8b-instantz!groq:llama-3.2-11b-vision-previewzgroq:llama-3.2-1b-previewzgroq:llama-3.2-3b-previewz!groq:llama-3.2-90b-vision-previewzgroq:llama-3.3-70b-specdeczgroq:llama-3.3-70b-versatilezgroq:llama-guard-3-8bzgroq:llama3-70b-8192zgroq:llama3-8b-8192zgroq:mistral-saba-24bz groq:moonshotai/kimi-k2-instructzgroq:playai-ttszgroq:playai-tts-arabiczgroq:qwen-2.5-32bzgroq:qwen-2.5-coder-32bzgroq:qwen-qwq-32bzgroq:whisper-large-v3zgroq:whisper-large-v3-turbozheroku:amazon-rerank-1-0zheroku:claude-3-5-haikuzheroku:claude-3-5-sonnet-latestzheroku:claude-3-7-sonnetzheroku:claude-3-haikuzheroku:claude-4-5-haikuzheroku:claude-4-5-sonnetzheroku:claude-4-sonnetzheroku:cohere-rerank-3-5zheroku:gpt-oss-120bzheroku:nova-litezheroku:nova-prozhuggingface:Qwen/QwQ-32Bz%huggingface:Qwen/Qwen2.5-72B-Instructz huggingface:Qwen/Qwen3-235B-A22Bzhuggingface:Qwen/Qwen3-32Bz#huggingface:deepseek-ai/DeepSeek-R1z-huggingface:meta-llama/Llama-3.3-70B-Instructz9huggingface:meta-llama/Llama-4-Maverick-17B-128E-Instructz5huggingface:meta-llama/Llama-4-Scout-17B-16E-Instructzmistral:codestral-latestzmistral:mistral-large-latestz!mistral:mistral-moderation-latestzmistral:mistral-small-latestzmoonshotai:kimi-k2-0711-previewzmoonshotai:kimi-latestz moonshotai:kimi-thinking-previewzmoonshotai:moonshot-v1-128kz*moonshotai:moonshot-v1-128k-vision-previewzmoonshotai:moonshot-v1-32kz)moonshotai:moonshot-v1-32k-vision-previewzmoonshotai:moonshot-v1-8kz(moonshotai:moonshot-v1-8k-vision-previewzopenai:chatgpt-4o-latestzopenai:codex-mini-latestzopenai:computer-use-previewz&openai:computer-use-preview-2025-03-11zopenai:gpt-3.5-turbozopenai:gpt-3.5-turbo-0125zopenai:gpt-3.5-turbo-0301zopenai:gpt-3.5-turbo-0613zopenai:gpt-3.5-turbo-1106zopenai:gpt-3.5-turbo-16kzopenai:gpt-3.5-turbo-16k-0613zopenai:gpt-4zopenai:gpt-4-0125-previewzopenai:gpt-4-0314zopenai:gpt-4-0613zopenai:gpt-4-1106-previewzopenai:gpt-4-32kzopenai:gpt-4-32k-0314zopenai:gpt-4-32k-0613zopenai:gpt-4-turbozopenai:gpt-4-turbo-2024-04-09zopenai:gpt-4-turbo-previewzopenai:gpt-4-vision-previewzopenai:gpt-4.1zopenai:gpt-4.1-2025-04-14zopenai:gpt-4.1-minizopenai:gpt-4.1-mini-2025-04-14zopenai:gpt-4.1-nanozopenai:gpt-4.1-nano-2025-04-14zopenai:gpt-4ozopenai:gpt-4o-2024-05-13zopenai:gpt-4o-2024-08-06zopenai:gpt-4o-2024-11-20zopenai:gpt-4o-audio-previewz&openai:gpt-4o-audio-preview-2024-10-01z&openai:gpt-4o-audio-preview-2024-12-17z&openai:gpt-4o-audio-preview-2025-06-03zopenai:gpt-4o-minizopenai:gpt-4o-mini-2024-07-18z openai:gpt-4o-mini-audio-previewz+openai:gpt-4o-mini-audio-preview-2024-12-17z!openai:gpt-4o-mini-search-previewz,openai:gpt-4o-mini-search-preview-2025-03-11zopenai:gpt-4o-search-previewz'openai:gpt-4o-search-preview-2025-03-11zopenai:gpt-5zopenai:gpt-5-2025-08-07zopenai:gpt-5-chat-latestzopenai:gpt-5-codexzopenai:gpt-5-minizopenai:gpt-5-mini-2025-08-07zopenai:gpt-5-nanozopenai:gpt-5-nano-2025-08-07zopenai:gpt-5-prozopenai:gpt-5-pro-2025-10-06zopenai:gpt-5.1zopenai:gpt-5.1-2025-11-13zopenai:gpt-5.1-chat-latestzopenai:gpt-5.1-codexzopenai:gpt-5.1-miniz	openai:o1zopenai:o1-2024-12-17zopenai:o1-minizopenai:o1-mini-2024-09-12zopenai:o1-previewzopenai:o1-preview-2024-09-12zopenai:o1-prozopenai:o1-pro-2025-03-19z	openai:o3zopenai:o3-2025-04-16zopenai:o3-deep-researchz"openai:o3-deep-research-2025-06-26zopenai:o3-minizopenai:o3-mini-2025-01-31zopenai:o3-prozopenai:o3-pro-2025-06-10zopenai:o4-minizopenai:o4-mini-2025-04-16zopenai:o4-mini-deep-researchz'openai:o4-mini-deep-research-2025-06-26testFT)reprkw_onlyc                   @  s   e Zd ZU dZeedZded< eedZded< dZ	ded	< d
Z
ded< eedZded< d
Zded< dZded< dZded< edddZedddZejZd
S )ModelRequestParameterszcConfiguration for an agent's request to a model, specifically related to tools and output handling.)default_factoryzlist[ToolDefinition]function_toolszlist[AbstractBuiltinTool]builtin_toolstextr1   output_modeNzOutputObjectDefinition | Noneoutput_objectoutput_tools
str | Noneprompted_output_templateTboolallow_text_outputFallow_image_outputreturndict[str, ToolDefinition]c                 C  s   dd g | j | jD S )Nc                 S     i | ]}|j |qS  )name).0tool_defrO   rO   a/var/www/html/karishye-ai-python/venv/lib/python3.10/site-packages/pydantic_ai/models/__init__.py
<dictcomp>O      z4ModelRequestParameters.tool_defs.<locals>.<dictcomp>rA   rF   selfrO   rO   rS   	tool_defsM  s   z ModelRequestParameters.tool_defsc                 C  s*   | j dkr| jr| jrt| j| jS d S )Nprompted)rD   rH   rE   r   build_instructionsrW   rO   rO   rS   prompted_output_instructionsQ  s   z3ModelRequestParameters.prompted_output_instructions)rL   rM   rL   rG   )__name__
__module____qualname____doc__r   listrA   __annotations__rB   rD   rE   rF   rH   rJ   rK   r   rY   r\   r   dataclasses_no_defaults_repr__repr__rO   rO   rO   rS   r?   ?  s   
 
r?   c                   @  s   e Zd ZU dZdZded< dZded< dddd3ddZed4ddZ	e
d5ddZd6ddZe	d7d8ddZd9d d!Zd:d#d$Zee
d;d&d'Zed<d)d*Zee
d;d+d,Zed=d.d/Ze	d7d>d1d2ZdS )?ModelzAbstract class for a model.NModelProfileSpec | None_profileModelSettings | None	_settings)settingsprofilerk   rl   rL   Nonec                C  s   || _ || _dS )zInitialize the model with optional settings and profile.

        Args:
            settings: Model-specific settings that will be used as defaults for this model.
            profile: The model profile to use.
        N)rj   rh   )rX   rk   rl   rO   rO   rS   __init__`  s   
zModel.__init__c                 C     | j S )zGet the model settings.)rj   rW   rO   rO   rS   rk   o  s   zModel.settingsmessageslist[ModelMessage]model_settingsmodel_request_parametersr?   r(   c                   s   t  )zMake a request to the model.NotImplementedErrorrX   rp   rr   rs   rO   rO   rS   requestt  s   zModel.requestr:   c                   s   t d| jj )z0Make a request to the model for counting tokens.z8Token counting ahead of the request is not supported by ru   	__class__r^   rv   rO   rO   rS   count_tokens~  s   zModel.count_tokensrun_contextRunContext[Any] | NoneAsyncIterator[StreamedResponse]c                 C s   t d| jj )z<Make a request to the model and return a streaming response.z(Streamed requests not supported by this rx   )rX   rp   rr   rs   r{   rO   rO   rS   request_stream  s   
zModel.request_streamc                   sZ   | j j  r+t| fdd|jD  fdd|jD d}|j }r+t|t |d}|S )aP  Customize the request parameters for the model.

        This method can be overridden by subclasses to modify the request parameters before sending them to the model.
        In particular, this method can be used to make modifications to the generated tool JSON schemas if necessary
        for vendor/model-specific reasons.
        c                      g | ]}t  |qS rO   _customize_tool_defrQ   ttransformerrO   rS   
<listcomp>      z6Model.customize_request_parameters.<locals>.<listcomp>c                   r   rO   r   r   r   rO   rS   r     r   rV   rE   )rl   json_schema_transformerr   rA   rF   rE   _customize_output_object)rX   rs   rE   rO   r   rS   customize_request_parameters  s   
z"Model.customize_request_parameters3tuple[ModelSettings | None, ModelRequestParameters]c                 C  s(  t | j|}| |}|j }rt|tdd |D  d}|jdkr1| jj	}t|||dv d}|j
r?|jdkr?t|g d}|jrM|jdvrMt|d	d
}|jr[|jdkr[t|d	d}|jdkrk|jskt|| jjd}|jdkrx| jjsxtd|jdkr| jjstd|jr| jjstd||fS )a)  Prepare request inputs before they are passed to the provider.

        This merges the given `model_settings` with the model's own `settings` attribute and ensures
        `customize_request_parameters` is applied to the resolved
        [`ModelRequestParameters`][pydantic_ai.models.ModelRequestParameters]. Subclasses can override this method if
        they need to customize the preparation flow further, but most implementations should simply call
        `self.prepare_request(...)` at the start of their `request` (and related) methods.
        c                 S  rN   rO   )	unique_id)rQ   toolrO   rO   rS   rT     rU   z)Model.prepare_request.<locals>.<dictcomp>)rB   auto)nativerZ   )rD   rJ   r   )rF   Nr   rZ   )rH   r   z8Native structured output is not supported by this model.z+Tool output is not supported by this model.z,Image output is not supported by this model.)r8   rk   r   rB   r   rb   valuesrD   rl   default_structured_output_moderF   rE   rH   supports_json_schema_outputr   supports_toolsrK   supports_image_output)rX   rr   rs   paramsrB   rD   rO   rO   rS   prepare_request  s:   


zModel.prepare_requeststrc                 C     t  )zThe model name.rt   rW   rO   rO   rS   
model_name     zModel.model_namer3   c                 C  s(   | j }t|r|| j}|du rtS |S )zThe model profile.N)rh   callabler   r2   )rX   rh   rO   rO   rS   rl     s   
zModel.profilec                 C  r   )a+  The model provider, ex: openai.

        Use to populate the `gen_ai.system` OpenTelemetry semantic convention attribute,
        so should use well-known values listed in
        https://opentelemetry.io/docs/specs/semconv/attributes-registry/gen-ai/#gen-ai-system
        when applicable.
        rt   rW   rO   rO   rS   system  s   
zModel.systemrG   c                 C  s   dS )z0The base URL for the provider API, if available.NrO   rW   rO   rO   rS   base_url   s   zModel.base_urlModelRequestParameters | Nonec                 C  s   d}g }t | D ]}t|tr&|| t|dkr n|jdur&|j} nq|du rFt|dkrF|d }|d }tdd |jD rF|j}|rZ|j }rZ|rXd	||g}|S |}|S )a  Get instructions from the first ModelRequest found when iterating messages in reverse.

        In the case that a "mock" request was generated to include a tool-return part for a result tool,
        we want to use the instructions from the second-to-most-recent request (which should correspond to the
        original request that generated the response that resulted in the tool-return part).
        Nr   r      c                 s  s$    | ]}|j d kp|j dkV  qdS )ztool-returnzretry-promptN)	part_kind)rQ   prO   rO   rS   	<genexpr>.  s   " z*Model._get_instructions.<locals>.<genexpr>z

)
reversed
isinstancer'   appendleninstructionsallpartsr\   join)rp   rs   r   last_two_requestsmessagemost_recent_requestsecond_most_recent_requestoutput_instructionsrO   rO   rS   _get_instructions  s,   



zModel._get_instructions)rk   ri   rl   rg   rL   rm   )rL   ri   )rp   rq   rr   ri   rs   r?   rL   r(   )rp   rq   rr   ri   rs   r?   rL   r:   N)
rp   rq   rr   ri   rs   r?   r{   r|   rL   r}   )rs   r?   rL   r?   )rr   ri   rs   r?   rL   r   rL   r   )rL   r3   r]   )rp   rq   rs   r   rL   rG   )r^   r_   r`   ra   rh   rc   rj   rn   propertyrk   r   rw   rz   r   r~   r   r   r   r   rl   r   r   staticmethodr   rO   rO   rO   rS   rf   Z  s<   
 
	


6
rf   c                   @  s  e Zd ZU dZded< edddZded< edddZd	ed
< edddZded< edddZ	ded< ee
ddZded< edddZded< eeddZded< d)ddZed)ddZd*ddZd+dd Zeed,d"d#Zeed-d$d%Zeed.d'd(ZdS )/StreamedResponsez2Streamed response from an LLM when calling a tool.r?   rs   NF)defaultinitFinalResultEvent | Nonefinal_result_eventrG   provider_response_idzdict[str, Any] | Noneprovider_detailszFinishReason | Nonefinish_reason)r@   r   r   _parts_managerz.AsyncIterator[ModelResponseStreamEvent] | None_event_iteratorr:   _usagerL   'AsyncIterator[ModelResponseStreamEvent]c                   s>    j du rd	 fdd}d	 fdd}||   _  j S )
ax  Stream the response as an async iterable of [`ModelResponseStreamEvent`][pydantic_ai.messages.ModelResponseStreamEvent]s.

        This proxies the `_event_iterator()` and emits all events, while also checking for matches
        on the result schema and emitting a [`FinalResultEvent`][pydantic_ai.messages.FinalResultEvent] if/when the
        first match is found.
        Niteratorr   rL   c                  s^   | 2 z3 d H W }|V  t | j }d ur| _|V   nq6 | 2 z	3 d H W }|V  q"6 d S r   )_get_final_result_eventrs   r   )r   eventr   rW   rO   rS   iterator_with_final_eventS  s   z=StreamedResponse.__aiter__.<locals>.iterator_with_final_eventc                  sz   d  dd fdd}| 2 z!3 d H W }t |tr, r*||j}|r%|V   jj|_| |V  q6 | }|r;|V  d S d S )	N	next_partModelResponsePart | NonerL   PartEndEvent | Nonec                   sN    sd S  j }j | }t|ttB tB sd S t||| r#| jdS d dS )N)indexpartnext_part_kind)	r   r   	get_partsr   r-   r.   r    r+   r   )r   r   r   )last_start_eventrX   rO   rS   part_end_eventi  s   zRStreamedResponse.__aiter__.<locals>.iterator_with_part_end.<locals>.part_end_eventr   )r   r   rL   r   )r   r,   r   r   previous_part_kind)r   r   r   	end_eventrW   )r   rS   iterator_with_part_endd  s"   



z:StreamedResponse.__aiter__.<locals>.iterator_with_part_end)r   r   rL   r   )r   _get_event_iterator)rX   r   r   rO   rW   rS   	__aiter__J  s
   
%zStreamedResponse.__aiter__c                 C s   t  )ay  Return an async iterator of [`ModelResponseStreamEvent`][pydantic_ai.messages.ModelResponseStreamEvent]s.

        This method should be implemented by subclasses to translate the vendor-specific stream of events into
        pydantic_ai-format events.

        It should use the `_parts_manager` to handle deltas, and should update the `_usage` attributes as it goes.
        rt   rW   rO   rO   rS   r     s   	z$StreamedResponse._get_event_iteratorr(   c              
   C  s.   t | j | j| j|  | j| j| j| j	dS )zlBuild a [`ModelResponse`][pydantic_ai.messages.ModelResponse] from the data received from the stream so far.)r   r   	timestampusageprovider_namer   r   r   )
r(   r   r   r   r   r   r   r   r   r   rW   rO   rO   rS   get  s   zStreamedResponse.getc                 C  ro   )zeGet the usage of the response so far. This will not be the final usage until the stream is exhausted.)r   rW   rO   rO   rS   r     s   zStreamedResponse.usager   c                 C  r   )z#Get the model name of the response.rt   rW   rO   rO   rS   r     r   zStreamedResponse.model_namec                 C  r   )zGet the provider name.rt   rW   rO   rO   rS   r     r   zStreamedResponse.provider_namer   c                 C  r   )z"Get the timestamp of the response.rt   rW   rO   rO   rS   r     r   zStreamedResponse.timestamp)rL   r   )rL   r(   )rL   r:   r   r]   )rL   r   )r^   r_   r`   ra   rc   r   r   r   r   r   r   r   r   r:   r   r   r   r   r   r   r   r   r   r   rO   rO   rO   rS   r   :  s0   
 
B

r   rL   rm   c                   C  s   t stddS )ad  Check if model requests are allowed.

    If you're defining your own models that have costs or latency associated with their use, you should call this in
    [`Model.request`][pydantic_ai.models.Model.request] and [`Model.request_stream`][pydantic_ai.models.Model.request_stream].

    Raises:
        RuntimeError: If model requests are not allowed.
    zCModel requests are not allowed, since ALLOW_MODEL_REQUESTS is FalseN)ALLOW_MODEL_REQUESTSRuntimeErrorrO   rO   rO   rS   check_allow_model_requests  s   	r   allow_model_requestsrI   Iterator[None]c                 c  s"    t }| a zdV  W |a dS |a w )zContext manager to temporarily override [`ALLOW_MODEL_REQUESTS`][pydantic_ai.models.ALLOW_MODEL_REQUESTS].

    Args:
        allow_model_requests: Whether to allow model requests within the context.
    N)r   )r   	old_valuerO   rO   rS   override_allow_model_requests  s   r   modelModel | KnownModelName | strprovider_factoryCallable[[str], Provider[Any]]c                 C  sZ  t | tr| S | dkrddlm} | S z| jddd\}}W n@ ty_   d}| }|dr2d}n|d	r:d
}n|drAd}|durVtd|d| d| dt	 nt
d|  Y nw |dkrltdt	 d}||}|}|drddlm} |d}||}|dv rd}n|dv rd}|dkrddlm} |||dS |dkrddlm}	 |	||dS |dkrddlm}
 |
||dS |d krdd!lm} |||dS |d"krdd#lm} |||dS |d$krdd%lm} |||dS |d
krdd&lm} |||dS |d'krdd(lm} |||dS |d)kr&dd*lm} |||dS t
d|  )+ak  Infer the model from the name.

    Args:
        model:
            Model name to instantiate, in the format of `provider:model`. Use the string "test" to instantiate TestModel.
        provider_factory:
            Function that instantiates a provider object. The provider name is passed into the function parameter. Defaults to `provider.infer_provider`.
    r<   r   )	TestModel:)maxsplitN)gpto1o3openaiclaude	anthropicgemini
google-glazLSpecifying a model name without a provider prefix is deprecated. Instead of z, use 'z'.zUnknown model: vertexaizHThe 'vertexai' provider name is deprecated. Use 'google-vertex' instead.google-vertexzgateway/r   )normalize_gateway_provider)r   azuredeepseekcerebras	fireworksgithubgrokheroku
moonshotaiollama
openroutertogethervercellitellmnebiusovhcloudzopenai-chat)r   r   google)OpenAIChatModel)providerzopenai-responses)OpenAIResponsesModel)GoogleModelgroq)	GroqModelcohere)CohereModelmistral)MistralModel)AnthropicModelbedrock)BedrockConverseModelhuggingface)HuggingFaceModel)r   rf   r<   r   split
ValueError
startswithwarningswarnDeprecationWarningr   providers.gatewayr   removeprefixr   r  r  r   r  r  r  r  r  r	  r
  r   r  r  r  r  r  )r   r   r   r   r   r  
model_kindr   r  r  r  r  r  r
  r  r  r  rO   rO   rS   infer_model  s   








r  X     r  timeoutconnectr  rG   r  intr  httpx.AsyncClientc                 C  s.   t | ||d}|jrt   t | ||d}|S )a`  Cached HTTPX async client that creates a separate client for each provider.

    The client is cached based on the provider parameter. If provider is None, it's used for non-provider specific
    requests (like downloading images). Multiple agents and calls can share the same client when they use the same provider.

    Each client will get its own transport with its own connection pool. The default pool size is defined by `httpx.DEFAULT_LIMITS`.

    There are good reasons why in production you should use a `httpx.AsyncClient` as an async context manager as
    described in [encode/httpx#2026](https://github.com/encode/httpx/pull/2026), but when experimenting or showing
    examples, it's very useful not to.

    The default timeouts match those of OpenAI,
    see <https://github.com/openai/openai-python/blob/v1.54.4/src/openai/_constants.py#L9>.
    r  )_cached_async_http_client	is_closedcache_clear)r  r  r  clientrO   rO   rS   cached_async_http_clientY  s
   r%  c                 C  s   t jt j||ddt idS )N)r  r  z
User-Agent)r  headers)httpxAsyncClientTimeoutget_user_agentr  rO   rO   rS   r!  r  s   r!  DataTc                   @  s$   e Zd ZU dZded< 	 ded< dS )DownloadedItemz!The downloaded data and its type.r+  datar   	data_typeN)r^   r_   r`   ra   rc   rO   rO   rO   rS   r,  }  s   
 r,  mimeitemr#   data_formatLiteral['bytes']type_formatLiteral['mime', 'extension']DownloadedItem[bytes]c                      d S r   rO   r0  r1  r3  rO   rO   rS   download_item     r8  'Literal['base64', 'base64_uri', 'text']DownloadedItem[str]c                   r6  r   rO   r7  rO   rO   rS   r8    r9  bytes0Literal['bytes', 'base64', 'base64_uri', 'text']+DownloadedItem[str] | DownloadedItem[bytes]c           	        s  | j drtdt| tr| jrtdt }|j| j ddI dH }|  |j	d }r>|
dd	 }|d
kr>d}|pB| j}|}|dkrL| j}|j}|dv rot|d}|dkrgd| d| }tt ||dS |dkr~tt |d|dS tt ||dS )a  Download an item by URL and return the content as a bytes object or a (base64-encoded) string.

    Args:
        item: The item to download.
        data_format: The format to return the content in:
            - `bytes`: The raw bytes of the content.
            - `base64`: The base64-encoded content.
            - `base64_uri`: The base64-encoded content as a data URI.
            - `text`: The content as a string.
        type_format: The format to return the media type in:
            - `mime`: The media type as a MIME type.
            - `extension`: The media type as an extension.

    Raises:
        UserError: If the URL points to a YouTube video or its protocol is gs://.
    zgs://z3Downloading from protocol "gs://" is not supported.z,Downloading YouTube videos is not supported.T)follow_redirectsNzcontent-type;r   zapplication/octet-stream	extension)base64
base64_urizutf-8rC  zdata:z;base64,)r-  r.  rC   )urlr  r   r   r0   
is_youtuber%  r   raise_for_statusr&  r  
media_typeformatcontentrB  	b64encodedecoder,  r   r<  )	r0  r1  r3  r$  responsecontent_typerG  r.  r-  rO   rO   rS   r8    s2   
r   c                  C  s   ddl m}  d|  S )z.Get the user agent string for the HTTP client.r   __version__zpydantic-ai/) rO  rN  rO   rO   rS   r*    s   
r*  r   type[JsonSchemaTransformer]r   r9   c                 C  <   | |j |jd}| }t|||jd u r|jdS |jdS )Nstrict)parameters_json_schemarT  )rU  rT  walkr   is_strict_compatible)r   r   schema_transformerrU  rO   rO   rS   r        r   or   c                 C  rR  )NrS  )json_schemarT  )r[  rT  rV  r   rW  )r   rZ  rX  r[  rO   rO   rS   r     rY  r   er*   r   r   c                 C  s   t | trH| j}t |tr|jst |tr$|jr$t |jtr$t	dddS t |t
rJ|j|j }rL|jdkr?t	|j|jdS |jrNt	dddS dS dS dS dS )zeReturn an appropriate FinalResultEvent if `e` corresponds to a part that will produce a final result.N)	tool_nametool_call_idoutput)r   r,   r   r-   rJ   r"   rK   rI  r!   r$   r/   rY   r   r]  kindr^  defer)r\  r   new_partrR   rO   rO   rS   r     s$   


r   )rL   rm   )r   rI   rL   r   )r   r   r   r   rL   rf   )r  rG   r  r  r  r  rL   r   )r  r  )r/  )r0  r#   r1  r2  r3  r4  rL   r5  )r0  r#   r1  r:  r3  r4  rL   r;  )r<  r/  )r0  r#   r1  r=  r3  r4  rL   r>  r   )r   rQ  r   r9   )r   rQ  rZ  r   )r\  r*   r   r?   rL   r   )fra   
__future__r   _annotationsrB  r  abcr   r   collections.abcr   r   r   
contextlibr   r	   dataclassesr
   r   r   r   	functoolsr   r   typingr   r   r   r   r   r'  typing_extensionsr   r   rP  r   _json_schemar   _outputr   r   r   r   _run_contextr   rB   r   
exceptionsr   rp   r    r!   r"   r#   r$   r%   r&   r'   r(   r)   r*   r+   r,   r-   r.   r/   r0   r_  r1   profilesr2   r3   r4   	providersr5   r6   rk   r7   r8   toolsr9   r   r:   r;   r?   rf   r   r   r   r   r  r%  r!  r   r<  r+  r,  r8  r*  r   r   r   rO   rO   rO   rS   <module>   s    L  
 a 

s	5



