OpenaiAPIWiseAgentLLM

Bases: WiseAgentRemoteLLM

A class to define a WiseAgentLLM that uses the OpenAI API.
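A minimal usage sketch (the import path follows the source file shown below; the model name is an assumption, and the remote address should point at whichever OpenAI-compatible server you actually run):

from wiseagents.llm.openai_API_wise_agent_LLM import OpenaiAPIWiseAgentLLM

llm = OpenaiAPIWiseAgentLLM(
    system_message="You are a helpful assistant.",
    model_name="llama-3-8b-instruct",           # assumed model name
    remote_address="http://localhost:8001/v1",  # the documented default
)
reply = llm.process_single_prompt("Summarize what a wise agent is in one sentence.")
print(reply.content)  # process_single_prompt returns the first choice's message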

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
class OpenaiAPIWiseAgentLLM(WiseAgentRemoteLLM):
    '''A class to define a WiseAgentLLM that uses the OpenAI API.'''
    client = None
    yaml_tag = u'!OpenaiAPIWiseAgentLLM'



    def __new__(cls, *args, **kwargs):
        '''Create a new instance of the class, setting default values for the instance variables.'''
        obj = super().__new__(cls)
        obj._api_key = "sk-no-key-required"
        obj._remote_address = "http://localhost:8001/v1"
        obj.chain = None
        return obj

    def __init__(self, system_message, model_name, remote_address = "http://localhost:8001/v1", api_key: Optional[str]="sk-no-key-required"):
        '''Initialize the agent.

        Args:
            system_message (str): the system message
            model_name (str): the model name
            remote_address (str): the remote address of the agent. Default is "http://localhost:8001/v1"
            api_key (str): the API key. Default is "sk-no-key-required"'''

        super().__init__(system_message, model_name, remote_address)
        self._api_key = api_key
        self.chain = None


    def __repr__(self):
        '''Return a string representation of the agent.'''
        return (f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name},"
                f"remote_address={self.remote_address}, api_key={self.api_key})")

    def __getstate__(self) -> object:
        '''Return the state of the agent. Removing the instance variable client to avoid it is serialized/deserialized by pyyaml.'''
        state = self.__dict__.copy()
        if 'client' in state.keys():
            del state['client']
        return state 

    def connect(self):
        '''Connect to the remote machine.'''
        self.client = openai.OpenAI(base_url=self.remote_address, 
                api_key=self.api_key)


    def process_single_prompt(self, prompt):
        '''Process a single prompt. This method is implemented from superclass WiseAgentLLM.
        The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

        Args:
            prompt (str): the prompt to process'''
        print(f"Executing WiseAgentLLM on remote machine at {self.remote_address}")
        if (self.client is None):
            self.connect()
        messages = []
        messages.append({"role": "system", "content": self.system_message})
        messages.append({"role": "user", "content": prompt})
        response = self.client.chat.completions.create(
            messages=messages,
            model=self.model_name,
            #tools=tools,
            tool_choice="auto",  # auto is default, but we'll be explicit
            )
        return response.choices[0].message

    def process_chat_completion(self, 
                                messages: Iterable[ChatCompletionMessageParam], 
                                tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
        '''Process a chat completion. This method is implemented from superclass WiseAgentLLM.
        The context and state is passed in input and returned as part of the output.
        Deal with the messages and tools is responsibility of the caller.

        Args:
            messages (Iterable[ChatCompletionMessageParam]): the messages to process
            tools (Iterable[ChatCompletionToolParam]): the tools to use

        Returns:
                ChatCompletion: the chat completion result'''
        print(f"Executing WiseAgentLLM on remote machine at {self.remote_address}")
        if (self.client is None):
            self.connect()
        #messages = []
        #messages.append({"role": "system", "content": self.system_message})
        #messages.append({"role": "user", "content": message})
        response = self.client.chat.completions.create(
            messages=messages,
            model=self.model_name,
            tools=tools,
            tool_choice="auto",  # auto is default, but we'll be explicit
            )
        return response

    @property
    def api_key(self):
        '''Get the API key.'''
        return self._api_key

api_key property

Get the API key.

__getstate__()

Return the state of the agent, removing the client instance variable so that it is not serialized/deserialized by PyYAML.

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def __getstate__(self) -> object:
    '''Return the state of the agent. Removing the instance variable client to avoid it is serialized/deserialized by pyyaml.'''
    state = self.__dict__.copy()
    if 'client' in state.keys():
        del state['client']
    return state 
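A hedged round-trip sketch showing the effect of __getstate__ when the agent is dumped with PyYAML (the model name is illustrative; this assumes the default YAMLObject dump/load behaviour):

import yaml
from wiseagents.llm.openai_API_wise_agent_LLM import OpenaiAPIWiseAgentLLM

llm = OpenaiAPIWiseAgentLLM("You are a helpful assistant.", "llama-3-8b-instruct")
llm.connect()                      # creates the non-serializable OpenAI client
dumped = yaml.dump(llm)            # __getstate__ drops 'client', so it never reaches the YAML
restored = yaml.load(dumped, Loader=yaml.Loader)
# The restored agent has no client yet; the process_* methods reconnect lazily on first use.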

__init__(system_message, model_name, remote_address='http://localhost:8001/v1', api_key='sk-no-key-required')

Initialize the agent.

Parameters:
  • system_message (str) –

    the system message

  • model_name (str) –

    the model name

  • remote_address (str, default: 'http://localhost:8001/v1') –

    the remote address of the agent. Default is "http://localhost:8001/v1"

  • api_key (str, default: 'sk-no-key-required') –

    the API key. Default is "sk-no-key-required"

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def __init__(self, system_message, model_name, remote_address = "http://localhost:8001/v1", api_key: Optional[str]="sk-no-key-required"):
    '''Initialize the agent.

    Args:
        system_message (str): the system message
        model_name (str): the model name
        remote_address (str): the remote address of the agent. Default is "http://localhost:8001/v1"
        api_key (str): the API key. Default is "sk-no-key-required"'''

    super().__init__(system_message, model_name, remote_address)
    self._api_key = api_key
    self.chain = None

__new__(*args, **kwargs)

Create a new instance of the class, setting default values for the instance variables.

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def __new__(cls, *args, **kwargs):
    '''Create a new instance of the class, setting default values for the instance variables.'''
    obj = super().__new__(cls)
    obj._api_key = "sk-no-key-required"
    obj._remote_address = "http://localhost:8001/v1"
    obj.chain = None
    return obj
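A hedged sketch of why these defaults matter: PyYAML constructs YAMLObject instances without calling __init__, so fields omitted from a YAML definition keep the values assigned here (the attribute keys mirror the underscore-prefixed instance-variable names, and the model name is illustrative):

import yaml
from wiseagents.llm.openai_API_wise_agent_LLM import OpenaiAPIWiseAgentLLM  # importing registers the yaml_tag

config = """
!OpenaiAPIWiseAgentLLM
_system_message: You are a helpful assistant.
_model_name: llama-3-8b-instruct
"""
llm = yaml.load(config, Loader=yaml.Loader)
print(llm.remote_address)  # "http://localhost:8001/v1", the default set in __new__
print(llm.api_key)         # "sk-no-key-required", also from __new__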

__repr__()

Return a string representation of the agent.

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def __repr__(self):
    '''Return a string representation of the agent.'''
    return (f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name},"
            f"remote_address={self.remote_address}, api_key={self.api_key})")

connect()

Connect to the remote machine.

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def connect(self):
    '''Connect to the remote machine.'''
    self.client = openai.OpenAI(base_url=self.remote_address, 
            api_key=self.api_key)
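The base URL and key come from the constructor arguments, so pointing the agent at OpenAI's hosted endpoint instead of a local server is just a matter of construction. A hedged sketch (the environment-variable name is the usual convention, not something this class reads for you; the model name is illustrative):

import os
from wiseagents.llm.openai_API_wise_agent_LLM import OpenaiAPIWiseAgentLLM

llm = OpenaiAPIWiseAgentLLM(
    system_message="You are a helpful assistant.",
    model_name="gpt-4o-mini",                    # assumed model name
    remote_address="https://api.openai.com/v1",
    api_key=os.environ["OPENAI_API_KEY"],
)
llm.connect()  # optional: the process_* methods call it lazily when client is None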

process_chat_completion(messages, tools)

Process a chat completion. This method implements the abstract method declared in the superclass WiseAgentLLM. The context and state are passed in as input and returned as part of the output; handling the messages and tools is the caller's responsibility.

Parameters:
  • messages (Iterable[ChatCompletionMessageParam]) –

    the messages to process

  • tools (Iterable[ChatCompletionToolParam]) –

    the tools to use

Returns:
  • ChatCompletion –

    the chat completion result

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def process_chat_completion(self, 
                            messages: Iterable[ChatCompletionMessageParam], 
                            tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
    '''Process a chat completion. This method is implemented from superclass WiseAgentLLM.
    The context and state is passed in input and returned as part of the output.
    Deal with the messages and tools is responsibility of the caller.

    Args:
        messages (Iterable[ChatCompletionMessageParam]): the messages to process
        tools (Iterable[ChatCompletionToolParam]): the tools to use

    Returns:
            ChatCompletion: the chat completion result'''
    print(f"Executing WiseAgentLLM on remote machine at {self.remote_address}")
    if (self.client is None):
        self.connect()
    #messages = []
    #messages.append({"role": "system", "content": self.system_message})
    #messages.append({"role": "user", "content": message})
    response = self.client.chat.completions.create(
        messages=messages,
        model=self.model_name,
        tools=tools,
        tool_choice="auto",  # auto is default, but we'll be explicit
        )
    return response
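A hedged sketch of driving process_chat_completion directly; the tool definition follows the OpenAI function-calling schema, and both the tool and model names are purely illustrative:

from wiseagents.llm.openai_API_wise_agent_LLM import OpenaiAPIWiseAgentLLM

llm = OpenaiAPIWiseAgentLLM("You are a helpful assistant.", "llama-3-8b-instruct")
messages = [
    {"role": "system", "content": llm.system_message},
    {"role": "user", "content": "What is the weather in Boston?"},
]
tools = [{
    "type": "function",
    "function": {
        "name": "get_current_weather",   # hypothetical tool
        "description": "Get the current weather for a city",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}]
completion = llm.process_chat_completion(messages, tools)
message = completion.choices[0].message
if message.tool_calls:  # the model asked to call a tool
    print(message.tool_calls[0].function.name, message.tool_calls[0].function.arguments)
else:
    print(message.content)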

process_single_prompt(prompt)

Process a single prompt. This method implements the abstract method declared in the superclass WiseAgentLLM. The prompt is processed and the result is returned; all context and state are maintained locally within the method.

Parameters:
  • prompt (str) –

    the prompt to process

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def process_single_prompt(self, prompt):
    '''Process a single prompt. This method is implemented from superclass WiseAgentLLM.
    The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

    Args:
        prompt (str): the prompt to process'''
    print(f"Executing WiseAgentLLM on remote machine at {self.remote_address}")
    if (self.client is None):
        self.connect()
    messages = []
    messages.append({"role": "system", "content": self.system_message})
    messages.append({"role": "user", "content": prompt})
    response = self.client.chat.completions.create(
        messages=messages,
        model=self.model_name,
        #tools=tools,
        tool_choice="auto",  # auto is default, but we'll be explicit
        )
    return response.choices[0].message
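Because the message list is rebuilt inside the method from the system message and the prompt alone, successive calls are independent and share no conversation history. A hedged sketch (the model name is illustrative):

from wiseagents.llm.openai_API_wise_agent_LLM import OpenaiAPIWiseAgentLLM

llm = OpenaiAPIWiseAgentLLM("You are a helpful assistant.", "llama-3-8b-instruct")
first = llm.process_single_prompt("My name is Ada.")
second = llm.process_single_prompt("What is my name?")  # no memory of the first call
print(second.content)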

WiseAgentLLM

Bases: YAMLObject

Abstract class to define the interface for a WiseAgentLLM.
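A minimal subclass sketch (the class name is illustrative): a concrete implementation only needs to provide the two abstract methods.

from typing import Iterable

from openai.types.chat import ChatCompletion, ChatCompletionMessageParam, ChatCompletionToolParam
from wiseagents.llm.wise_agent_LLM import WiseAgentLLM


class EchoWiseAgentLLM(WiseAgentLLM):  # hypothetical subclass
    yaml_tag = u'!EchoWiseAgentLLM'

    def process_single_prompt(self, prompt):
        # Trivial stand-in: echo the prompt instead of calling a model.
        return prompt

    def process_chat_completion(self,
                                messages: Iterable[ChatCompletionMessageParam],
                                tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
        raise NotImplementedError("Left out of this sketch.")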

Source code in wiseagents/llm/wise_agent_LLM.py
class WiseAgentLLM(yaml.YAMLObject):
    """Abstract class to define the interface for a WiseAgentLLM."""
    yaml_tag = u'!WiseAgentLLM'    
    def __init__(self, system_message, model_name):
        '''Initialize the agent.

        Args:
            system_message (str): the system message
            model_name (str): the model name
        '''
        super().__init__()
        self._system_message = system_message
        self._model_name = model_name

    def __repr__(self):
        '''Return a string representation of the agent.'''
        return f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name})"    

    @property  
    def system_message(self):
        '''Get the system message.'''
        return self._system_message

    @property
    def model_name(self):
        '''Get the model name.'''
        return self._model_name     

    @abstractmethod
    def process_single_prompt(self, prompt):
        '''Process a single prompt. This method should be implemented by subclasses.
        The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

        Args:
            prompt (str): the prompt to process'''

        ...

    @abstractmethod
    def process_chat_completion(self, 
                                messages: Iterable[ChatCompletionMessageParam], 
                                tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
        '''Process a chat completion. This method should be implemented by subclasses.
        The context and state is passed in input and returned as part of the output.
        Deal with the messages and tools is responsibility of the caller.

        Args:
            messages (Iterable[ChatCompletionMessageParam]): the messages to process
            tools (Iterable[ChatCompletionToolParam]): the tools to use

        Returns:
                ChatCompletion: the chat completion result'''
        ...

model_name property

Get the model name.

system_message property

Get the system message.

__init__(system_message, model_name)

Initialize the agent.

Parameters:
  • system_message (str) –

    the system message

  • model_name (str) –

    the model name

Source code in wiseagents/llm/wise_agent_LLM.py
def __init__(self, system_message, model_name):
    '''Initialize the agent.

    Args:
        system_message (str): the system message
        model_name (str): the model name
    '''
    super().__init__()
    self._system_message = system_message
    self._model_name = model_name

__repr__()

Return a string representation of the agent.

Source code in wiseagents/llm/wise_agent_LLM.py
def __repr__(self):
    '''Return a string representation of the agent.'''
    return f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name})"    

process_chat_completion(messages, tools) abstractmethod

Process a chat completion. This method should be implemented by subclasses. The context and state are passed in as input and returned as part of the output; handling the messages and tools is the caller's responsibility.

Parameters:
  • messages (Iterable[ChatCompletionMessageParam]) –

    the messages to process

  • tools (Iterable[ChatCompletionToolParam]) –

    the tools to use

Returns:
  • ChatCompletion –

    the chat completion result

Source code in wiseagents/llm/wise_agent_LLM.py
@abstractmethod
def process_chat_completion(self, 
                            messages: Iterable[ChatCompletionMessageParam], 
                            tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
    '''Process a chat completion. This method should be implemented by subclasses.
    The context and state is passed in input and returned as part of the output.
    Deal with the messages and tools is responsibility of the caller.

    Args:
        messages (Iterable[ChatCompletionMessageParam]): the messages to process
        tools (Iterable[ChatCompletionToolParam]): the tools to use

    Returns:
            ChatCompletion: the chat completion result'''
    ...

process_single_prompt(prompt) abstractmethod

Process a single prompt. This method should be implemented by subclasses. The prompt is processed and the result is returned; all context and state are maintained locally within the method.

Parameters:
  • prompt (str) –

    the prompt to process

Source code in wiseagents/llm/wise_agent_LLM.py
@abstractmethod
def process_single_prompt(self, prompt):
    '''Process a single prompt. This method should be implemented by subclasses.
    The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

    Args:
        prompt (str): the prompt to process'''

    ...

WiseAgentRemoteLLM

Bases: WiseAgentLLM

Extends WiseAgentLLM to support running the LLM on a remote machine.

Source code in wiseagents/llm/wise_agent_remote_LLM.py
class WiseAgentRemoteLLM(WiseAgentLLM):
    """Extend WiseAgentLLM to support remote execution of WiseAgentLLM on a remote machine."""
    yaml_tag = u'!WiseAgentRemoteLLM'    

    def __init__(self, system_message, model_name, remote_address):
        super().__init__(system_message, model_name)
        self._remote_address = remote_address

    def __repr__(self):
        '''Return a string representation of the agent.'''
        return f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name}, remote_address={self.remote_address})"

    @property
    def remote_address(self):
        '''Get the remote address.'''
        return self._remote_address

    @abstractmethod
    def process_single_prompt(self, prompt):
        '''Process a single prompt. This method should be implemented by subclasses.
        The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

        Args:
            prompt (str): the prompt to process'''

        ...

    @abstractmethod
    def process_chat_completion(self, 
                                messages: Iterable[ChatCompletionMessageParam], 
                                tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
        '''Process a chat completion. This method should be implemented by subclasses.
        The context and state is passed in input and returned as part of the output.
        Deal with the messages and tools is responsibility of the caller.

        Args:
            messages (Iterable[ChatCompletionMessageParam]): the messages to process
            tools (Iterable[ChatCompletionToolParam]): the tools to use

        Returns:
                ChatCompletion: the chat completion result'''
        ...

remote_address property

Get the remote address.

__repr__()

Return a string representation of the agent.

Source code in wiseagents/llm/wise_agent_remote_LLM.py
def __repr__(self):
    '''Return a string representation of the agent.'''
    return f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name}, remote_address={self.remote_address})"

process_chat_completion(messages, tools) abstractmethod

Process a chat completion. This method should be implemented by subclasses. The context and state are passed in as input and returned as part of the output; handling the messages and tools is the caller's responsibility.

Parameters:
  • messages (Iterable[ChatCompletionMessageParam]) –

    the messages to process

  • tools (Iterable[ChatCompletionToolParam]) –

    the tools to use

Returns:
  • ChatCompletion –

    the chat completion result

Source code in wiseagents/llm/wise_agent_remote_LLM.py
@abstractmethod
def process_chat_completion(self, 
                            messages: Iterable[ChatCompletionMessageParam], 
                            tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
    '''Process a chat completion. This method should be implemented by subclasses.
    The context and state is passed in input and returned as part of the output.
    Deal with the messages and tools is responsibility of the caller.

    Args:
        messages (Iterable[ChatCompletionMessageParam]): the messages to process
        tools (Iterable[ChatCompletionToolParam]): the tools to use

    Returns:
            ChatCompletion: the chat completion result'''
    ...

process_single_prompt(prompt) abstractmethod

Process a single prompt. This method should be implemented by subclasses. The prompt is processed and the result is returned; all context and state are maintained locally within the method.

Parameters:
  • prompt (str) –

    the prompt to process

Source code in wiseagents/llm/wise_agent_remote_LLM.py
@abstractmethod
def process_single_prompt(self, prompt):
    '''Process a single prompt. This method should be implemented by subclasses.
    The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

    Args:
        prompt (str): the prompt to process'''

    ...