import os
from typing import Any, Callable, Dict, Optional, Sequence

from clarifai.client.model import Model
from llama_index.core.base.llms.types import (
    ChatMessage,
    ChatResponse,
    ChatResponseAsyncGen,
    ChatResponseGen,
    CompletionResponse,
    CompletionResponseAsyncGen,
    CompletionResponseGen,
    LLMMetadata,
)
from llama_index.core.bridge.pydantic import Field, PrivateAttr
from llama_index.core.callbacks import CallbackManager
from llama_index.core.constants import DEFAULT_CONTEXT_WINDOW
from llama_index.core.llms.callbacks import llm_chat_callback, llm_completion_callback
from llama_index.core.llms.llm import LLM
from llama_index.core.types import BaseOutputParser, PydanticProgramMode

EXAMPLE_URL = "https://clarifai.com/clarifai/ml/models/llama2-7b-alternative-4k"


class Clarifai(LLM):
    """Clarifai LLM.

    Examples:
        `pip install llama-index-llms-clarifai`

        ```python
        from llama_index.llms.clarifai import Clarifai

        # Identify the model by name, together with its user and app IDs ...
        llm = Clarifai(
            user_id="clarifai",
            app_id="ml",
            model_name="llama2-7b-alternative-4k",
        )
        # ... or by its full URL (`model_name` and `model_url` are mutually
        # exclusive):
        # llm = Clarifai(
        #     model_url="https://clarifai.com/clarifai/ml/models/llama2-7b-alternative-4k"
        # )

        response = llm.complete("Hello World!")
        print(response)
        ```
    """

    model_url: Optional[str] = Field(
        description=f"Full URL of the model. e.g. `{EXAMPLE_URL}`"
    )
    model_version_id: Optional[str] = Field(description="Model Version ID.")
    app_id: Optional[str] = Field(description="Clarifai application ID of the model.")
    user_id: Optional[str] = Field(description="Clarifai user ID of the model.")
    pat: Optional[str] = Field(
        description="Personal Access Token (PAT) used to validate requests."
    )
    # Declared as pydantic fields so the values forwarded through
    # `super().__init__` resolve and `metadata` can read them back.
    model_name: Optional[str] = Field(
        default=None, description="The name of the model to use."
    )
    temperature: float = Field(description="The temperature to use for sampling.")
    max_tokens: int = Field(description="The maximum number of tokens to generate.")
    context_window: int = Field(
        default=DEFAULT_CONTEXT_WINDOW,
        description="Maximum number of context tokens for the model.",
    )
    additional_kwargs: Dict[str, Any] = Field(
        default_factory=dict, description="Additional kwargs for the Clarifai API."
    )

    _model: Any = PrivateAttr()
    _is_chat_model: bool = PrivateAttr()

    def __init__(
        self,
        model_name: Optional[str] = None,
        model_url: Optional[str] = None,
        model_version_id: Optional[str] = "",
        app_id: Optional[str] = None,
        user_id: Optional[str] = None,
        pat: Optional[str] = None,
        temperature: float = 0.1,
        max_tokens: int = 512,
        additional_kwargs: Optional[Dict[str, Any]] = None,
        callback_manager: Optional[CallbackManager] = None,
        system_prompt: Optional[str] = None,
        messages_to_prompt: Optional[Callable[[Sequence[ChatMessage]], str]] = None,
        completion_to_prompt: Optional[Callable[[str], str]] = None,
        pydantic_program_mode: PydanticProgramMode = PydanticProgramMode.DEFAULT,
        output_parser: Optional[BaseOutputParser] = None,
    ):
        # Fall back to the CLARIFAI_PAT environment variable.
        if pat is None:
            pat = os.environ.get("CLARIFAI_PAT")
        if not pat:
            raise ValueError(
                "Set `CLARIFAI_PAT` as an env variable or pass `pat` as a "
                "constructor argument"
            )

        if model_url is not None and model_name is not None:
            raise ValueError("You can only specify one of model_url or model_name.")
        if model_url is None and model_name is None:
            raise ValueError("You must specify one of model_url or model_name.")

        model = None
        if model_name is not None:
            if app_id is None or user_id is None:
                raise ValueError(
                    f"Missing app ID or user ID of the model: {app_id=}, {user_id=}"
                )
            model = Model(
                user_id=user_id,
                app_id=app_id,
                model_id=model_name,
                model_version={"id": model_version_id},
                pat=pat,
            )

        if model_url is not None:
            model = Model(model_url, pat=pat)
            model_name = model.id

        # Heuristic: treat the model as chat-capable if "chat" appears in its
        # app ID or model ID.
        is_chat_model = "chat" in model.app_id or "chat" in model.id

        additional_kwargs = additional_kwargs or {}

        super().__init__(
            temperature=temperature,
            max_tokens=max_tokens,
            additional_kwargs=additional_kwargs,
            callback_manager=callback_manager,
            model_name=model_name,
            system_prompt=system_prompt,
            messages_to_prompt=messages_to_prompt,
            completion_to_prompt=completion_to_prompt,
            pydantic_program_mode=pydantic_program_mode,
            output_parser=output_parser,
        )
        self._model = model
        self._is_chat_model = is_chat_model

    @classmethod
    def class_name(cls) -> str:
        return "ClarifaiLLM"

    @property
    def metadata(self) -> LLMMetadata:
        """LLM metadata."""
        return LLMMetadata(
            context_window=self.context_window,
            num_output=self.max_tokens,
            # Report the model's name (a string), not the `Model` client object.
            model_name=self.model_name,
            is_chat_model=self._is_chat_model,
        )

    # TODO: When the Clarifai python SDK supports inference params, add here.
    def chat(
        self,
        messages: Sequence[ChatMessage],
        inference_params: Optional[Dict] = None,
        **kwargs: Any,
    ) -> ChatResponse:
        """Chat endpoint for LLM."""
        # Naive prompt construction: concatenate the string form of each message.
        prompt = "".join([str(m) for m in messages])
        inference_params = inference_params or {}
        try:
            response = (
                self._model.predict_by_bytes(
                    input_bytes=prompt.encode(encoding="utf-8"),
                    input_type="text",
                    inference_params=inference_params,
                )
                .outputs[0]
                .data.text.raw
            )
        except Exception as e:
            raise Exception(f"Prediction failed: {e}") from e
        return ChatResponse(message=ChatMessage(content=response))

    def complete(
        self,
        prompt: str,
        formatted: bool = False,
        inference_params: Optional[Dict] = None,
        **kwargs: Any,
    ) -> CompletionResponse:
        """Completion endpoint for LLM."""
        inference_params = inference_params or {}
        try:
            response = (
                self._model.predict_by_bytes(
                    input_bytes=prompt.encode(encoding="utf-8"),
                    input_type="text",
                    inference_params=inference_params,
                )
                .outputs[0]
                .data.text.raw
            )
        except Exception as e:
            raise Exception(f"Prediction failed: {e}") from e
        return CompletionResponse(text=response)

    def stream_chat(
        self, messages: Sequence[ChatMessage], **kwargs: Any
    ) -> ChatResponseGen:
        raise NotImplementedError("Clarifai does not currently support streaming chat.")

    def stream_complete(
        self, prompt: str, formatted: bool = False, **kwargs: Any
    ) -> CompletionResponseGen:
        raise NotImplementedError(
            "Clarifai does not currently support streaming completion."
        )

    @llm_chat_callback()
    async def achat(
        self, messages: Sequence[ChatMessage], **kwargs: Any
    ) -> ChatResponse:
        raise NotImplementedError("Currently not supported.")

    @llm_completion_callback()
    async def acomplete(
        self, prompt: str, formatted: bool = False, **kwargs: Any
    ) -> CompletionResponse:
        # Fall back to the synchronous implementation.
        return self.complete(prompt, **kwargs)

    @llm_chat_callback()
    async def astream_chat(
        self, messages: Sequence[ChatMessage], **kwargs: Any
    ) -> ChatResponseAsyncGen:
        raise NotImplementedError("Currently not supported.")

    @llm_completion_callback()
    async def astream_complete(
        self, prompt: str, formatted: bool = False, **kwargs: Any
    ) -> CompletionResponseAsyncGen:
        raise NotImplementedError("Clarifai does not currently support this function.")