
from __future__ import annotations

import dataclasses
import sys
import textwrap
from typing import Iterable, List, Optional, Union

import google.ai.generativelanguage as glm

from google.generativeai.client import get_default_discuss_client
from google.generativeai.client import get_default_discuss_async_client
from google.generativeai import string_utils
from google.generativeai.types import discuss_types
from google.generativeai.types import model_types
from google.generativeai.types import safety_types


def _make_message(content: discuss_types.MessageOptions) -> glm.Message:
    """Creates a `glm.Message` object from the provided content."""
    if isinstance(content, glm.Message):
        return content
    if isinstance(content, str):
        return glm.Message(content=content)
    else:
        return glm.Message(content)
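

# Illustrative note (an addition, not from the upstream source): the helpers in this
# module accept chat content as plain strings, dicts, or `glm.Message` protos
# interchangeably, e.g.
#
#     _make_message("hello")                       # -> glm.Message(content="hello")
#     _make_message({"content": "hello"})          # -> glm.Message(content="hello")
#     _make_message(glm.Message(content="hello"))  # returned unchanged
#
# `_make_messages` below additionally fills in alternating `author` fields ("0" for
# even positions, typically the user turns, and "1" for odd positions) whenever the
# caller does not set them explicitly.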


def _make_messages(
    messages: discuss_types.MessagesOptions,
) -> List[glm.Message]:
    """
    Creates a list of `glm.Message` objects from the provided messages.

    This function takes a variety of message content inputs, such as strings, dictionaries,
    or `glm.Message` objects, and creates a list of `glm.Message` objects. It ensures that
    the authors of the messages alternate appropriately. If authors are not provided,
    default authors are assigned based on their position in the list.

    Args:
        messages: The messages to convert.

    Returns:
        A list of `glm.Message` objects with alternating authors.
    """
    if isinstance(messages, (str, dict, glm.Message)):
        messages = [_make_message(messages)]
    else:
        messages = [_make_message(message) for message in messages]

    even_authors = set(msg.author for msg in messages[::2] if msg.author)
    if not even_authors:
        even_author = "0"
    elif len(even_authors) == 1:
        even_author = even_authors.pop()
    else:
        raise discuss_types.AuthorError("Authors are not strictly alternating")

    odd_authors = set(msg.author for msg in messages[1::2] if msg.author)
    if not odd_authors:
        odd_author = "1"
    elif len(odd_authors) == 1:
        odd_author = odd_authors.pop()
    else:
        raise discuss_types.AuthorError("Authors are not strictly alternating")

    if all(msg.author for msg in messages):
        return messages

    # Fill in any missing authors so the conversation strictly alternates.
    authors = [even_author, odd_author]
    for i, msg in enumerate(messages):
        msg.author = authors[i % 2]

    return messages


def _make_example(item: discuss_types.ExampleOptions) -> glm.Example:
    """Creates a `glm.Example` object from the provided item."""
    if isinstance(item, glm.Example):
        return item

    if isinstance(item, dict):
        item = item.copy()
        item["input"] = _make_message(item["input"])
        item["output"] = _make_message(item["output"])
        return glm.Example(item)

    if isinstance(item, Iterable):
        input, output = list(item)
        return glm.Example(input=_make_message(input), output=_make_message(output))

    # Fall through: let the proto constructor try to handle whatever was passed.
    return glm.Example(item)


def _make_examples_from_flat(
    examples: List[discuss_types.MessageOptions],
) -> List[glm.Example]:
    """
    Creates a list of `glm.Example` objects from a list of message options.

    This function takes a list of `discuss_types.MessageOptions` and pairs them into
    `glm.Example` objects. The input examples must be in pairs to create valid examples.

    Args:
        examples: The list of `discuss_types.MessageOptions`.

    Returns:
        A list of `glm.Example` objects created by pairing up the provided messages.

    Raises:
        ValueError: If the provided list of examples is not of even length.
    """
    if len(examples) % 2 != 0:
        raise ValueError(
            textwrap.dedent(
                f"""\
                You must pass `Primer` objects, pairs of messages, or an *even* number of messages, got:
                  {len(examples)} messages"""
            )
        )
    result = []
    pair = []
    for n, item in enumerate(examples):
        msg = _make_message(item)
        pair.append(msg)
        if n % 2 == 0:
            continue
        primer = glm.Example(
            input=pair[0],
            output=pair[1],
        )
        result.append(primer)
        pair = []
    return result


def _make_examples(examples: discuss_types.ExamplesOptions) -> List[glm.Example]:
    """
    Creates a list of `glm.Example` objects from the provided examples.

    This function takes various types of example content inputs and creates a list
    of `glm.Example` objects. It handles the conversion of different input types and ensures
    the appropriate structure for creating valid examples.

    Args:
        examples: The examples to convert.

    Returns:
        A list of `glm.Example` objects created from the provided examples.
    """
    if isinstance(examples, glm.Example):
        return [examples]

    if isinstance(examples, dict):
        return [_make_example(examples)]

    examples = list(examples)

    if not examples:
        return examples

    first = examples[0]

    if isinstance(first, dict):
        if "content" in first:
            # These are `Message` dicts: pair them up into examples.
            return _make_examples_from_flat(examples)
        else:
            if not ("input" in first and "output" in first):
                raise TypeError(
                    "To create an `Example` from a dict you must supply both `input` and an `output` keys"
                )
    else:
        if isinstance(first, discuss_types.MESSAGE_OPTIONS):
            # A flat list of message contents: pair them up into examples.
            return _make_examples_from_flat(examples)

    result = []
    for item in examples:
        result.append(_make_example(item))
    return result


def _make_message_prompt_dict(
    prompt: discuss_types.MessagePromptOptions = None,
    *,
    context: str | None = None,
    examples: discuss_types.ExamplesOptions | None = None,
    messages: discuss_types.MessagesOptions | None = None,
) -> glm.MessagePrompt:
    """
    Creates a `glm.MessagePrompt` object from the provided prompt components.

    This function constructs a `glm.MessagePrompt` object using the provided `context`, `examples`,
    or `messages`. It ensures the proper structure and handling of the input components.

    Either pass a `prompt` or its components `context`, `examples`, `messages`.

    Args:
        prompt: The complete prompt components.
        context: The context for the prompt.
        examples: The examples for the prompt.
        messages: The messages for the prompt.

    Returns:
        A `glm.MessagePrompt` object created from the provided prompt components.
    """
    if prompt is None:
        prompt = dict(
            context=context,
            examples=examples,
            messages=messages,
        )
    else:
        flat_prompt = (context is not None) or (examples is not None) or (messages is not None)
        if flat_prompt:
            raise ValueError(
                "You can't set `prompt`, and its fields `(context, examples, messages)` at the same time"
            )
        if isinstance(prompt, glm.MessagePrompt):
            return prompt
        elif not isinstance(prompt, dict):
            prompt = {"messages": prompt}

    keys = set(prompt.keys())
    if not keys.issubset(discuss_types.MESSAGE_PROMPT_KEYS):
        raise KeyError(
            f"Found extra entries in the prompt dictionary: {keys - discuss_types.MESSAGE_PROMPT_KEYS}"
        )

    examples = prompt.get("examples", None)
    if examples is not None:
        prompt["examples"] = _make_examples(examples)
    messages = prompt.get("messages", None)
    if messages is not None:
        prompt["messages"] = _make_messages(messages)

    # Drop empty fields so the prompt only carries what was actually provided.
    prompt = {k: v for k, v in prompt.items() if v}
    return prompt


def _make_message_prompt(
    prompt: discuss_types.MessagePromptOptions = None,
    *,
    context: str | None = None,
    examples: discuss_types.ExamplesOptions | None = None,
    messages: discuss_types.MessagesOptions | None = None,
) -> glm.MessagePrompt:
    """Creates a `glm.MessagePrompt` object from the provided prompt components."""
    prompt = _make_message_prompt_dict(
        prompt=prompt, context=context, examples=examples, messages=messages
    )
    return glm.MessagePrompt(prompt)


def _make_generate_message_request(
    *,
    model: model_types.AnyModelNameOptions | None,
    context: str | None = None,
    examples: discuss_types.ExamplesOptions | None = None,
    messages: discuss_types.MessagesOptions | None = None,
    temperature: float | None = None,
    candidate_count: int | None = None,
    top_p: float | None = None,
    top_k: float | None = None,
    prompt: discuss_types.MessagePromptOptions | None = None,
) -> glm.GenerateMessageRequest:
    """Creates a `glm.GenerateMessageRequest` object for generating messages."""
    model = model_types.make_model_name(model)
    prompt = _make_message_prompt(prompt, context=context, examples=examples, messages=messages)

    return glm.GenerateMessageRequest(
        model=model,
        prompt=prompt,
        temperature=temperature,
        top_p=top_p,
        top_k=top_k,
        candidate_count=candidate_count,
    )


DEFAULT_DISCUSS_MODEL = "models/chat-bison-001"


def chat(
    *,
    model: model_types.AnyModelNameOptions | None = "models/chat-bison-001",
    context: str | None = None,
    examples: discuss_types.ExamplesOptions | None = None,
    messages: discuss_types.MessagesOptions | None = None,
    temperature: float | None = None,
    candidate_count: int | None = None,
    top_p: float | None = None,
    top_k: float | None = None,
    prompt: discuss_types.MessagePromptOptions | None = None,
    client: glm.DiscussServiceClient | None = None,
) -> discuss_types.ChatResponse:
    """Calls the API and returns a `types.ChatResponse` containing the response.

    Args:
        model: Which model to call, as a string or a `types.Model`.
        context: Text that should be provided to the model first, to ground the response.

            If not empty, this `context` will be given to the model first before the
            `examples` and `messages`.

            This field can be a description of your prompt to the model to help provide
            context and guide the responses.

            Examples:

            * "Translate the phrase from English to French."
            * "Given a statement, classify the sentiment as happy, sad or neutral."

            Anything included in this field will take precedence over history in `messages`
            if the total input size exceeds the model's `Model.input_token_limit`.
        examples: Examples of what the model should generate.

            This includes both the user input and the response that the model should
            emulate.

            These `examples` are treated identically to conversation messages except
            that they take precedence over the history in `messages`:
            If the total input size exceeds the model's `input_token_limit` the input
            will be truncated. Items will be dropped from `messages` before `examples`
        messages: A snapshot of the conversation history sorted chronologically.

            Turns alternate between two authors.

            If the total input size exceeds the model's `input_token_limit` the input
            will be truncated: The oldest items will be dropped from `messages`.
        temperature: Controls the randomness of the output. Must be positive.

            Typical values are in the range: `[0.0,1.0]`. Higher values produce a
            more random and varied response. A temperature of zero will be deterministic.
        candidate_count: The **maximum** number of generated response messages to return.

            This value must be between `[1, 8]`, inclusive. If unset, this
            will default to `1`.

            Note: Only unique candidates are returned. Higher temperatures are more
            likely to produce unique candidates. Setting `temperature=0.0` will always
            return 1 candidate regardless of the `candidate_count`.
        top_k: The API uses combined [nucleus](https://arxiv.org/abs/1904.09751) and
            top-k sampling.

            `top_k` sets the maximum number of tokens to sample from on each step.
        top_p: The API uses combined [nucleus](https://arxiv.org/abs/1904.09751) and
           top-k sampling.

           `top_p` configures the nucleus sampling. It sets the maximum cumulative
            probability of tokens to sample from.

            For example, if the sorted probabilities are
            `[0.5, 0.2, 0.1, 0.1, 0.05, 0.05]` a `top_p` of `0.8` will sample
            as `[0.625, 0.25, 0.125, 0, 0, 0]`.

            Typical values are in the `[0.9, 1.0]` range.
        prompt: You may pass a `types.MessagePromptOptions` **instead** of a
            setting `context`/`examples`/`messages`, but not both.
        client: If you're not relying on the default client, you pass a
            `glm.DiscussServiceClient` instead.

    Returns:
        A `types.ChatResponse` containing the model's reply.
    """
    request = _make_generate_message_request(
        model=model,
        context=context,
        examples=examples,
        messages=messages,
        temperature=temperature,
        candidate_count=candidate_count,
        top_p=top_p,
        top_k=top_k,
        prompt=prompt,
    )

    return _generate_response(client=client, request=request)


@string_utils.set_doc(chat.__doc__)
async def chat_async(
    *,
    model: model_types.AnyModelNameOptions | None = "models/chat-bison-001",
    context: str | None = None,
    examples: discuss_types.ExamplesOptions | None = None,
    messages: discuss_types.MessagesOptions | None = None,
    temperature: float | None = None,
    candidate_count: int | None = None,
    top_p: float | None = None,
    top_k: float | None = None,
    prompt: discuss_types.MessagePromptOptions | None = None,
    client: glm.DiscussServiceAsyncClient | None = None,
) -> discuss_types.ChatResponse:
    request = _make_generate_message_request(
        model=model,
        context=context,
        examples=examples,
        messages=messages,
        temperature=temperature,
        candidate_count=candidate_count,
        top_p=top_p,
        top_k=top_k,
        prompt=prompt,
    )

    return await _generate_response_async(client=client, request=request)
if (sys.version_info.major, sys.version_info.minor) >= (3, 10):
    DATACLASS_KWARGS = {"kw_only": True}
else:
    DATACLASS_KWARGS = {}


@string_utils.prettyprint
@string_utils.set_doc(discuss_types.ChatResponse.__doc__)
@dataclasses.dataclass(**DATACLASS_KWARGS, init=False)
class ChatResponse(discuss_types.ChatResponse):
    _client: glm.DiscussServiceClient | None = dataclasses.field(default=lambda: None, repr=False)

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    @property
    @string_utils.set_doc(discuss_types.ChatResponse.last.__doc__)
    def last(self) -> str | None:
        if self.messages[-1]:
            return self.messages[-1]["content"]
        else:
            return None

    @last.setter
    def last(self, message: discuss_types.MessageOptions):
        message = _make_message(message)
        message = type(message).to_dict(message)
        self.messages[-1] = message

    @string_utils.set_doc(discuss_types.ChatResponse.reply.__doc__)
    def reply(self, message: discuss_types.MessageOptions) -> discuss_types.ChatResponse:
        if isinstance(self._client, glm.DiscussServiceAsyncClient):
            raise TypeError("reply can't be called on an async client, use reply_async instead.")
        if self.last is None:
            raise ValueError(
                "The last response from the model did not return any candidates.\n"
                "Check the `.filters` attribute to see why the responses were filtered:\n\n"
                f"{self.filters}"
            )

        request = self.to_dict()
        request.pop("candidates")
        request.pop("filters", None)
        request["messages"] = list(request["messages"])
        request["messages"].append(_make_message(message))
        request = _make_generate_message_request(**request)
        return _generate_response(request=request, client=self._client)

    @string_utils.set_doc(discuss_types.ChatResponse.reply.__doc__)
    async def reply_async(
        self, message: discuss_types.MessageOptions
    ) -> discuss_types.ChatResponse:
        if isinstance(self._client, glm.DiscussServiceClient):
            raise TypeError(
                "reply_async can't be called on a non-async client, use reply instead."
            )
        request = self.to_dict()
        request.pop("candidates")
        request.pop("filters", None)
        request["messages"] = list(request["messages"])
        request["messages"].append(_make_message(message))
        request = _make_generate_message_request(**request)
        return await _generate_response_async(request=request, client=self._client)


def _build_chat_response(
    request: glm.GenerateMessageRequest,
    response: glm.GenerateMessageResponse,
    client: glm.DiscussServiceClient | glm.DiscussServiceAsyncClient,
) -> ChatResponse:
    request = type(request).to_dict(request)
    prompt = request.pop("prompt")
    request["examples"] = prompt["examples"]
    request["context"] = prompt["context"]
    request["messages"] = prompt["messages"]

    response = type(response).to_dict(response)
    response.pop("messages")

    response["filters"] = safety_types.convert_filters_to_enums(response["filters"])

    if response["candidates"]:
        last = response["candidates"][0]
    else:
        last = None
    # Append the model's reply (or None) so `ChatResponse.last` reflects this turn.
    request["messages"].append(last)
    request.setdefault("temperature", None)
    request.setdefault("candidate_count", None)

    return ChatResponse(_client=client, **response, **request)


def _generate_response(
    request: glm.GenerateMessageRequest,
    client: glm.DiscussServiceClient | None = None,
) -> ChatResponse:
    if client is None:
        client = get_default_discuss_client()

    response = client.generate_message(request)

    return _build_chat_response(request, response, client)


async def _generate_response_async(
    request: glm.GenerateMessageRequest,
    client: glm.DiscussServiceAsyncClient | None = None,
) -> ChatResponse:
    if client is None:
        client = get_default_discuss_async_client()

    response = await client.generate_message(request)

    return _build_chat_response(request, response, client)


def count_message_tokens(
    *,
    prompt: discuss_types.MessagePromptOptions = None,
    context: str | None = None,
    examples: discuss_types.ExamplesOptions | None = None,
    messages: discuss_types.MessagesOptions | None = None,
    model: model_types.AnyModelNameOptions = DEFAULT_DISCUSS_MODEL,
    client: glm.DiscussServiceAsyncClient | None = None,
) -> discuss_types.TokenCount:
    model = model_types.make_model_name(model)
    prompt = _make_message_prompt(prompt, context=context, examples=examples, messages=messages)

    if client is None:
        client = get_default_discuss_client()

    result = client.count_message_tokens(model=model, prompt=prompt)

    return type(result).to_dict(result)