Mirror of https://github.com/oddluck/limnoria-plugins.git (synced 2025-04-29 15:01:11 -05:00)
Conditionally exclude the frequency_penalty parameter when the model is Gemini
This commit is contained in:
parent
4604606a01
commit
3725ecbd59
@ -61,29 +61,38 @@ class ChatGPT(callbacks.Plugin):
|
|||||||
|
|
||||||
# Initialize client
|
# Initialize client
|
||||||
client = OpenAI(
|
client = OpenAI(
|
||||||
api_key=self.registryValue("api_key"),
|
api_key=self.registryValue("api_key"),
|
||||||
base_url=self.registryValue("base_url")
|
base_url=self.registryValue("base_url")
|
||||||
)
|
)
|
||||||
self.history.setdefault(channel, None)
|
self.history.setdefault(channel, None)
|
||||||
max_history = self.registryValue("max_history", msg.channel)
|
max_history = self.registryValue("max_history", msg.channel)
|
||||||
prompt = self.registryValue("prompt", msg.channel).replace("$botnick", irc.nick)
|
prompt = self.registryValue("prompt", msg.channel).replace("$botnick", irc.nick)
|
||||||
|
|
||||||
if not self.history[channel] or max_history < 1:
|
if not self.history[channel] or max_history < 1:
|
||||||
self.history[channel] = []
|
self.history[channel] = []
|
||||||
|
|
||||||
completion = client.chat.completions.create(
|
model_name = self.registryValue("model", msg.channel)
|
||||||
model=self.registryValue("model", msg.channel),
|
|
||||||
messages=self.history[channel][-max_history:] + [
|
# Base request parameters
|
||||||
|
request_params = {
|
||||||
|
"model": model_name,
|
||||||
|
"messages": self.history[channel][-max_history:] + [
|
||||||
{"role": "system", "content": prompt},
|
{"role": "system", "content": prompt},
|
||||||
{"role": "user", "content": text}
|
{"role": "user", "content": text}
|
||||||
],
|
],
|
||||||
temperature=self.registryValue("temperature", msg.channel),
|
"temperature": self.registryValue("temperature", msg.channel),
|
||||||
top_p=self.registryValue("top_p", msg.channel),
|
"top_p": self.registryValue("top_p", msg.channel),
|
||||||
max_tokens=self.registryValue("max_tokens", msg.channel),
|
"max_tokens": self.registryValue("max_tokens", msg.channel),
|
||||||
presence_penalty=self.registryValue("presence_penalty", msg.channel),
|
"presence_penalty": self.registryValue("presence_penalty", msg.channel),
|
||||||
frequency_penalty=self.registryValue("frequency_penalty", msg.channel),
|
"user": msg.nick,
|
||||||
user=msg.nick,
|
}
|
||||||
)
|
|
||||||
|
|
||||||
|
# Gemini models fail if frequency_penalty is included
|
||||||
|
if "gemini" not in model_name.lower():
|
||||||
|
request_params["frequency_penalty"] = self.registryValue("frequency_penalty", msg.channel)
|
||||||
|
|
||||||
|
completion = client.chat.completions.create(**request_params)
|
||||||
|
|
||||||
if self.registryValue("nick_strip", msg.channel):
|
if self.registryValue("nick_strip", msg.channel):
|
||||||
content = re.sub(
|
content = re.sub(
|
||||||
r"^%s: " % (irc.nick), "", completion.choices[0].message.content
|
r"^%s: " % (irc.nick), "", completion.choices[0].message.content
|
||||||
|
Loading…
x
Reference in New Issue
Block a user