diff --git a/camel/model_backend.py b/camel/model_backend.py
index 18f3542e..d23330ef 100644
--- a/camel/model_backend.py
+++ b/camel/model_backend.py
@@ -69,7 +69,7 @@ class OpenAIModel(ModelBackend):
             num_max_completion_tokens = num_max_token - num_prompt_tokens
             self.model_config_dict['max_tokens'] = num_max_completion_tokens
         print("using model: {}".format(self.model_type.value))
-        response = openai.chat.completions.create(*args, **kwargs,
+        response = openai.ChatCompletion.create(*args, **kwargs,
                                                 model=self.model_type.value,
                                                 **self.model_config_dict)
         cost = prompt_cost(