OpenaiAPIWiseAgentLLM

Bases: WiseAgentRemoteLLM

A class to define a WiseAgentLLM that uses the OpenAI API.

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
class OpenaiAPIWiseAgentLLM(WiseAgentRemoteLLM):
    '''A class to define a WiseAgentLLM that uses the OpenAI API.'''
    client = None
    yaml_tag = u'!wiseagents.llm.OpenaiAPIWiseAgentLLM'



    def __new__(cls, *args, **kwargs):
        '''Create a new instance of the class, setting default values for the instance variables.'''
        obj = super().__new__(cls)
        obj._api_key = "sk-no-key-required"
        obj._remote_address = "http://localhost:8001/v1"
        obj._openai_config = {}
        obj._system_message = None
        return obj

    def __init__(self, model_name, remote_address = "http://localhost:8001/v1", api_key: Optional[str]="sk-no-key-required",
                 openai_config: Optional[Dict[str,str]]={}, system_message: Optional[str] = None):
        '''Initialize the agent.

        Args:
            model_name (str): the model name
            remote_address (str): the remote address of the agent. Default is "http://localhost:8001/v1"
            api_key (str): the API key. Default is "sk-no-key-required"
            system_message (Optional[str]): the optional system message
        '''

        super().__init__(model_name=model_name, remote_address=remote_address, system_message=system_message)
        self._api_key = api_key
        self._openai_config = openai_config


    def __repr__(self):
        '''Return a string representation of the agent.'''
        return (f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name},"
                f"remote_address={self.remote_address}, api_key={self.api_key})")

    def __getstate__(self) -> object:
        '''Return the state of the agent. Removing the instance variable client to avoid it is serialized/deserialized by pyyaml.'''
        state = super().__getstate__()
        if 'client' in state.keys():
            del state['client']
        return state 

    def connect(self):
        '''Connect to the remote machine.'''
        logging.getLogger(__name__).info(f"Connecting to {self._agent_name} on remote machine at {self.remote_address} with API key ***********")
        self.client = openai.OpenAI(base_url=self.remote_address, 
                api_key=self.api_key)


    def process_single_prompt(self, prompt):
        '''Process a single prompt. This method is implemented from superclass WiseAgentLLM.
        The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

        Args:
            prompt (str): the prompt to process'''
        logging.getLogger(__name__).info(f"Executing {self._agent_name} on remote machine at {self.remote_address}")
        if (self.client is None):
            self.connect()
        messages = []
        if self.system_message:
            messages.append({"role": "system", "content": self.system_message})
        messages.append({"role": "user", "content": prompt})
        response = self.client.chat.completions.create(
            messages=messages,
            model=self.model_name,
            #tools=tools,
            tool_choice="auto",  # auto is default, but we'll be explicit
            **self.openai_config
            )
        return response.choices[0].message

    def process_chat_completion(self, 
                                messages: Iterable[ChatCompletionMessageParam], 
                                tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
        '''Process a chat completion. This method is implemented from superclass WiseAgentLLM.
        The context and state is passed in input and returned as part of the output.
        Deal with the messages and tools is responsibility of the caller.

        Args:
            messages (Iterable[ChatCompletionMessageParam]): the messages to process
            tools (Iterable[ChatCompletionToolParam]): the tools to use

        Returns:
                ChatCompletion: the chat completion result'''
        logging.getLogger(__name__).info(f"Executing {self._agent_name} on remote machine at {self.remote_address}")
        if (self.client is None):
            self.connect()
        #messages = []
        #messages.append({"role": "system", "content": self.system_message})
        #messages.append({"role": "user", "content": message})
        response = self.client.chat.completions.create(
            messages=messages,
            model=self.model_name,
            tools=tools,
            tool_choice="auto",  # auto is default, but we'll be explicit
            **self.openai_config
            )
        return response

    @property
    def api_key(self):
        '''Get the API key.'''
        return self._api_key
    @property
    def openai_config(self):
        '''Get the OpenAI configuration.'''
        return self._openai_config
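
For a quick orientation, here is a minimal usage sketch. It assumes the class is importable from wiseagents.llm and that an OpenAI-compatible server (for example llama.cpp or vLLM) is listening at the default remote address; the model name is illustrative.

from wiseagents.llm import OpenaiAPIWiseAgentLLM  # assumed import path

# Hypothetical model name; use whatever your OpenAI-compatible server serves.
llm = OpenaiAPIWiseAgentLLM(
    model_name="granite-7b-lab",
    remote_address="http://localhost:8001/v1",
    api_key="sk-no-key-required",
    system_message="You are a concise assistant.",
)

# Normally the owning agent calls this; it is set here because the
# logging in connect and process_single_prompt reads _agent_name.
llm.set_agent_name("example-agent")

# connect() is invoked lazily on first use, so no explicit call is needed.
reply = llm.process_single_prompt("Summarize what a vector database is.")
print(reply.content)

The return value is the message of the first completion choice, so the generated text is available on its content attribute.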

api_key property

Get the API key.

openai_config property

Get the OpenAI configuration.

__getstate__()

Return the state of the agent, removing the instance variable client so that it is not serialized/deserialized by PyYAML.

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def __getstate__(self) -> object:
    '''Return the state of the agent. Removing the instance variable client to avoid it is serialized/deserialized by pyyaml.'''
    state = super().__getstate__()
    if 'client' in state.keys():
        del state['client']
    return state 
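
Because client is removed from the state, instances can be serialized with PyYAML without dragging the live connection along. A minimal sketch, assuming WiseAgentsYAMLObject builds on PyYAML's YAMLObject machinery (which the yaml_tag suggests) and reusing the illustrative names from the sketch above:

import yaml

llm = OpenaiAPIWiseAgentLLM(model_name="granite-7b-lab")  # illustrative name
llm.set_agent_name("example-agent")
llm.connect()  # creates the client attribute

# __getstate__ drops 'client', so the dump contains only plain fields
# such as _model_name, _remote_address, _api_key and _system_message.
print(yaml.dump(llm))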

__init__(model_name, remote_address='http://localhost:8001/v1', api_key='sk-no-key-required', openai_config={}, system_message=None)

Initialize the agent.

Parameters:
  • model_name (str) –

    the model name

  • remote_address (str, default: 'http://localhost:8001/v1' ) –

    the remote address of the agent. Default is "http://localhost:8001/v1"

  • api_key (str, default: 'sk-no-key-required' ) –

    the API key. Default is "sk-no-key-required"

  • openai_config (Optional[Dict[str, str]], default: {} ) –

    additional options forwarded as keyword arguments to the OpenAI chat completion call

  • system_message (Optional[str], default: None ) –

    the optional system message

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def __init__(self, model_name, remote_address = "http://localhost:8001/v1", api_key: Optional[str]="sk-no-key-required",
             openai_config: Optional[Dict[str,str]]={}, system_message: Optional[str] = None):
    '''Initialize the agent.

    Args:
        model_name (str): the model name
        remote_address (str): the remote address of the agent. Default is "http://localhost:8001/v1"
        api_key (str): the API key. Default is "sk-no-key-required"
        system_message (Optional[str]): the optional system message
    '''

    super().__init__(model_name=model_name, remote_address=remote_address, system_message=system_message)
    self._api_key = api_key
    self._openai_config = openai_config
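
Entries in openai_config are forwarded verbatim as keyword arguments to client.chat.completions.create, so despite the Dict[str, str] annotation any parameter the backing server accepts can be supplied. A hedged example using standard OpenAI sampling options (model name illustrative):

llm = OpenaiAPIWiseAgentLLM(
    model_name="granite-7b-lab",
    remote_address="http://localhost:8001/v1",
    # Forwarded unchanged to chat.completions.create on every call.
    openai_config={"temperature": 0.2, "max_tokens": 512},
)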

__new__(*args, **kwargs)

Create a new instance of the class, setting default values for the instance variables.

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def __new__(cls, *args, **kwargs):
    '''Create a new instance of the class, setting default values for the instance variables.'''
    obj = super().__new__(cls)
    obj._api_key = "sk-no-key-required"
    obj._remote_address = "http://localhost:8001/v1"
    obj._openai_config = {}
    obj._system_message = None
    return obj

__repr__()

Return a string representation of the agent.

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def __repr__(self):
    '''Return a string representation of the agent.'''
    return (f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name},"
            f"remote_address={self.remote_address}, api_key={self.api_key})")

connect()

Connect to the remote machine.

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def connect(self):
    '''Connect to the remote machine.'''
    logging.getLogger(__name__).info(f"Connecting to {self._agent_name} on remote machine at {self.remote_address} with API key ***********")
    self.client = openai.OpenAI(base_url=self.remote_address, 
            api_key=self.api_key)

process_chat_completion(messages, tools)

Process a chat completion. This method implements the corresponding abstract method of the superclass WiseAgentLLM. The context and state are passed in as input and returned as part of the output. Handling the messages and tools is the responsibility of the caller.

Parameters:
  • messages (Iterable[ChatCompletionMessageParam]) –

    the messages to process

  • tools (Iterable[ChatCompletionToolParam]) –

    the tools to use

Returns:
  • ChatCompletion( ChatCompletion ) –

    the chat completion result

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def process_chat_completion(self, 
                            messages: Iterable[ChatCompletionMessageParam], 
                            tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
    '''Process a chat completion. This method is implemented from superclass WiseAgentLLM.
    The context and state is passed in input and returned as part of the output.
    Deal with the messages and tools is responsibility of the caller.

    Args:
        messages (Iterable[ChatCompletionMessageParam]): the messages to process
        tools (Iterable[ChatCompletionToolParam]): the tools to use

    Returns:
            ChatCompletion: the chat completion result'''
    logging.getLogger(__name__).info(f"Executing {self._agent_name} on remote machine at {self.remote_address}")
    if (self.client is None):
        self.connect()
    #messages = []
    #messages.append({"role": "system", "content": self.system_message})
    #messages.append({"role": "user", "content": message})
    response = self.client.chat.completions.create(
        messages=messages,
        model=self.model_name,
        tools=tools,
        tool_choice="auto",  # auto is default, but we'll be explicit
        **self.openai_config
        )
    return response
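
Below is a sketch of a caller-managed, tool-enabled request. The message and tool dictionaries follow the standard OpenAI chat-completions schema; the get_weather tool is hypothetical and llm is an already constructed OpenaiAPIWiseAgentLLM.

messages = [
    {"role": "system", "content": "You may call tools when they help."},
    {"role": "user", "content": "What's the weather in Boston?"},
]
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",  # hypothetical tool
            "description": "Get the current weather for a city",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        },
    },
]

response = llm.process_chat_completion(messages, tools)
choice = response.choices[0].message
if choice.tool_calls:
    # The caller executes the tool and appends the result to messages
    # before calling process_chat_completion again.
    call = choice.tool_calls[0]
    print(call.function.name, call.function.arguments)
else:
    print(choice.content)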

process_single_prompt(prompt)

Process a single prompt. This method implements the corresponding abstract method of the superclass WiseAgentLLM. The single prompt is processed and the result is returned; all context and state are maintained locally within the method.

Parameters:
  • prompt (str) –

    the prompt to process

Source code in wiseagents/llm/openai_API_wise_agent_LLM.py
def process_single_prompt(self, prompt):
    '''Process a single prompt. This method is implemented from superclass WiseAgentLLM.
    The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

    Args:
        prompt (str): the prompt to process'''
    logging.getLogger(__name__).info(f"Executing {self._agent_name} on remote machine at {self.remote_address}")
    if (self.client is None):
        self.connect()
    messages = []
    if self.system_message:
        messages.append({"role": "system", "content": self.system_message})
    messages.append({"role": "user", "content": prompt})
    response = self.client.chat.completions.create(
        messages=messages,
        model=self.model_name,
        #tools=tools,
        tool_choice="auto",  # auto is default, but we'll be explicit
        **self.openai_config
        )
    return response.choices[0].message

WiseAgentLLM

Bases: WiseAgentsYAMLObject

Abstract class to define the interface for a WiseAgentLLM.

Source code in wiseagents/llm/wise_agent_LLM.py
class WiseAgentLLM(WiseAgentsYAMLObject):
    """Abstract class to define the interface for a WiseAgentLLM."""
    def __init__(self, model_name, system_message: Optional[str] = None):
        '''Initialize the agent.

        Args:
            model_name (str): the model name
            system_message (Optional[str]): the optional system message
        '''
        super().__init__()
        enforce_no_abstract_class_instances(self.__class__, WiseAgentLLM)
        self._system_message = system_message
        self._model_name = model_name

    def __repr__(self):
        '''Return a string representation of the agent.'''
        return f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name})"    

    @property  
    def system_message(self) -> Optional[str]:
        '''Get the system message or None if no system message has been defined.'''
        return self._system_message

    @property
    def model_name(self):
        '''Get the model name.'''
        return self._model_name


    def set_agent_name(self, agent_name: str) :
        self._agent_name = agent_name

    @abstractmethod
    def process_single_prompt(self, prompt):
        '''Process a single prompt. This method should be implemented by subclasses.
        The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

        Args:
            prompt (str): the prompt to process'''

        ...

    @abstractmethod
    def process_chat_completion(self, 
                                messages: Iterable[ChatCompletionMessageParam], 
                                tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
        '''Process a chat completion. This method should be implemented by subclasses.
        The context and state is passed in input and returned as part of the output.
        Deal with the messages and tools is responsibility of the caller.

        Args:
            messages (Iterable[ChatCompletionMessageParam]): the messages to process
            tools (Iterable[ChatCompletionToolParam]): the tools to use

        Returns:
                ChatCompletion: the chat completion result'''
        ...

model_name property

Get the model name.

system_message: Optional[str] property

Get the system message or None if no system message has been defined.

__init__(model_name, system_message=None)

Initialize the agent.

Parameters:
  • model_name (str) –

    the model name

  • system_message (Optional[str], default: None ) –

    the optional system message

Source code in wiseagents/llm/wise_agent_LLM.py
def __init__(self, model_name, system_message: Optional[str] = None):
    '''Initialize the agent.

    Args:
        model_name (str): the model name
        system_message (Optional[str]): the optional system message
    '''
    super().__init__()
    enforce_no_abstract_class_instances(self.__class__, WiseAgentLLM)
    self._system_message = system_message
    self._model_name = model_name

__repr__()

Return a string representation of the agent.

Source code in wiseagents/llm/wise_agent_LLM.py
def __repr__(self):
    '''Return a string representation of the agent.'''
    return f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name})"    

process_chat_completion(messages, tools) abstractmethod

Process a chat completion. This method should be implemented by subclasses. The context and state are passed in as input and returned as part of the output. Handling the messages and tools is the responsibility of the caller.

Parameters:
  • messages (Iterable[ChatCompletionMessageParam]) –

    the messages to process

  • tools (Iterable[ChatCompletionToolParam]) –

    the tools to use

Returns:
  • ChatCompletion( ChatCompletion ) –

    the chat completion result

Source code in wiseagents/llm/wise_agent_LLM.py
@abstractmethod
def process_chat_completion(self, 
                            messages: Iterable[ChatCompletionMessageParam], 
                            tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
    '''Process a chat completion. This method should be implemented by subclasses.
    The context and state is passed in input and returned as part of the output.
    Deal with the messages and tools is responsibility of the caller.

    Args:
        messages (Iterable[ChatCompletionMessageParam]): the messages to process
        tools (Iterable[ChatCompletionToolParam]): the tools to use

    Returns:
            ChatCompletion: the chat completion result'''
    ...

process_single_prompt(prompt) abstractmethod

Process a single prompt. This method should be implemented by subclasses. The single prompt is processed and the result is returned; all context and state are maintained locally within the method.

Parameters:
  • prompt (str) –

    the prompt to process

Source code in wiseagents/llm/wise_agent_LLM.py
@abstractmethod
def process_single_prompt(self, prompt):
    '''Process a single prompt. This method should be implemented by subclasses.
    The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

    Args:
        prompt (str): the prompt to process'''

    ...
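
Swapping in a different backend only requires implementing the two abstract methods. A minimal sketch of a toy subclass, assuming the same import path as above; a real subclass may also need a yaml_tag for YAML round-tripping:

from typing import Iterable

from openai.types.chat import ChatCompletion, ChatCompletionMessageParam, ChatCompletionToolParam

from wiseagents.llm import WiseAgentLLM  # assumed import path


class EchoWiseAgentLLM(WiseAgentLLM):
    '''Toy LLM that simply echoes the prompt; handy for tests.'''

    def process_single_prompt(self, prompt):
        # A real implementation would call a model here.
        return prompt

    def process_chat_completion(self,
                                messages: Iterable[ChatCompletionMessageParam],
                                tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
        # A real implementation would return a ChatCompletion from its backend.
        raise NotImplementedError("omitted from this sketch")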

WiseAgentRemoteLLM

Bases: WiseAgentLLM

Extends WiseAgentLLM to support executing a WiseAgentLLM on a remote machine.

Source code in wiseagents/llm/wise_agent_remote_LLM.py
class WiseAgentRemoteLLM(WiseAgentLLM):
    """Extend WiseAgentLLM to support remote execution of WiseAgentLLM on a remote machine."""

    def __init__(self, model_name, remote_address, system_message: Optional[str] = None):
        super().__init__(model_name=model_name, system_message=system_message)
        enforce_no_abstract_class_instances(self.__class__, WiseAgentRemoteLLM)
        self._remote_address = remote_address

    def __repr__(self):
        '''Return a string representation of the agent.'''
        return f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name}, remote_address={self.remote_address})"

    @property
    def remote_address(self):
        '''Get the remote address.'''
        return self._remote_address

    @abstractmethod
    def process_single_prompt(self, prompt):
        '''Process a single prompt. This method should be implemented by subclasses.
        The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

        Args:
            prompt (str): the prompt to process'''

        ...

    @abstractmethod
    def process_chat_completion(self, 
                                messages: Iterable[ChatCompletionMessageParam], 
                                tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
        '''Process a chat completion. This method should be implemented by subclasses.
        The context and state is passed in input and returned as part of the output.
        Deal with the messages and tools is responsibility of the caller.

        Args:
            messages (Iterable[ChatCompletionMessageParam]): the messages to process
            tools (Iterable[ChatCompletionToolParam]): the tools to use

        Returns:
                ChatCompletion: the chat completion result'''
        ...

remote_address property

Get the remote address.

__repr__()

Return a string representation of the agent.

Source code in wiseagents/llm/wise_agent_remote_LLM.py
def __repr__(self):
    '''Return a string representation of the agent.'''
    return f"{self.__class__.__name__}(system_message={self.system_message}, model_name={self.model_name}, remote_address={self.remote_address})"

process_chat_completion(messages, tools) abstractmethod

Process a chat completion. This method should be implemented by subclasses. The context and state are passed in as input and returned as part of the output. Handling the messages and tools is the responsibility of the caller.

Parameters:
  • messages (Iterable[ChatCompletionMessageParam]) –

    the messages to process

  • tools (Iterable[ChatCompletionToolParam]) –

    the tools to use

Returns:
  • ChatCompletion( ChatCompletion ) –

    the chat completion result

Source code in wiseagents/llm/wise_agent_remote_LLM.py
@abstractmethod
def process_chat_completion(self, 
                            messages: Iterable[ChatCompletionMessageParam], 
                            tools: Iterable[ChatCompletionToolParam]) -> ChatCompletion:
    '''Process a chat completion. This method should be implemented by subclasses.
    The context and state is passed in input and returned as part of the output.
    Deal with the messages and tools is responsibility of the caller.

    Args:
        messages (Iterable[ChatCompletionMessageParam]): the messages to process
        tools (Iterable[ChatCompletionToolParam]): the tools to use

    Returns:
            ChatCompletion: the chat completion result'''
    ...

process_single_prompt(prompt) abstractmethod

Process a single prompt. This method should be implemented by subclasses. The single prompt is processed and the result is returned; all context and state are maintained locally within the method.

Parameters:
  • prompt (str) –

    the prompt to process

Source code in wiseagents/llm/wise_agent_remote_LLM.py
@abstractmethod
def process_single_prompt(self, prompt):
    '''Process a single prompt. This method should be implemented by subclasses.
    The single prompt is processed and the result is returned, all the context and state is maintained locally in the method

    Args:
        prompt (str): the prompt to process'''

    ...