class CustomLLM(LLM):
    """Simple abstract base class for custom LLMs.

    Subclasses must implement the `__init__`, `_complete`,
    `_stream_complete`, and `metadata` methods.
    """

    @llm_chat_callback()
    def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse:
        """Chat by rendering the messages to a prompt and completing it."""
        prompt = self.messages_to_prompt(messages)
        # formatted=True: the prompt was already built from the messages,
        # so the completion endpoint must not re-apply prompt formatting.
        response = self.complete(prompt, formatted=True, **kwargs)
        return completion_response_to_chat_response(response)

    @llm_chat_callback()
    def stream_chat(
        self, messages: Sequence[ChatMessage], **kwargs: Any
    ) -> ChatResponseGen:
        """Streaming chat, implemented on top of streaming completion."""
        prompt = self.messages_to_prompt(messages)
        response_gen = self.stream_complete(prompt, formatted=True, **kwargs)
        return stream_completion_response_to_chat_response(response_gen)

    @llm_chat_callback()
    async def achat(
        self,
        messages: Sequence[ChatMessage],
        **kwargs: Any,
    ) -> ChatResponse:
        """Async chat; delegates to the synchronous `chat` (blocking)."""
        return self.chat(messages, **kwargs)

    @llm_chat_callback()
    async def astream_chat(
        self,
        messages: Sequence[ChatMessage],
        **kwargs: Any,
    ) -> ChatResponseAsyncGen:
        """Async streaming chat; wraps the sync generator from `stream_chat`."""

        async def gen() -> ChatResponseAsyncGen:
            # NOTE: convert generator to async generator
            for message in self.stream_chat(messages, **kwargs):
                yield message

        return gen()

    @llm_completion_callback()
    async def acomplete(
        self, prompt: str, formatted: bool = False, **kwargs: Any
    ) -> CompletionResponse:
        """Async completion; delegates to the synchronous `complete` (blocking)."""
        return self.complete(prompt, formatted=formatted, **kwargs)

    @llm_completion_callback()
    async def astream_complete(
        self, prompt: str, formatted: bool = False, **kwargs: Any
    ) -> CompletionResponseAsyncGen:
        """Async streaming completion; wraps the sync `stream_complete` generator."""

        async def gen() -> CompletionResponseAsyncGen:
            # NOTE: convert generator to async generator
            for message in self.stream_complete(prompt, formatted=formatted, **kwargs):
                yield message

        return gen()

    @classmethod
    def class_name(cls) -> str:
        """Return the serialization class name for this LLM type."""
        return "custom_llm"