fix: Remove Temperature from OpenAI Integration (#6023)

This commit is contained in:
Michael Genson 2025-08-25 01:36:15 -05:00 committed by GitHub
commit 323a8100db
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 4 additions and 8 deletions

View file

@@ -131,7 +131,7 @@ For custom mapping variables (e.g. OPENAI_CUSTOM_HEADERS) you should pass values
| OPENAI_ENABLE_IMAGE_SERVICES | True | Whether to enable OpenAI image services, such as creating recipes via image. Leave this enabled unless your custom model doesn't support it, or you want to reduce costs |
| OPENAI_WORKERS | 2 | Number of OpenAI workers per request. Higher values may increase processing speed, but will incur additional API costs |
| OPENAI_SEND_DATABASE_DATA | True | Whether to send Mealie data to OpenAI to improve request accuracy. This will incur additional API costs |
-| OPENAI_REQUEST_TIMEOUT | 60 | The number of seconds to wait for an OpenAI request to complete before cancelling the request. Leave this empty unless you're running into timeout issues on slower hardware |
+| OPENAI_REQUEST_TIMEOUT | 300 | The number of seconds to wait for an OpenAI request to complete before cancelling the request. Leave this empty unless you're running into timeout issues on slower hardware |
### Theming

View file

@@ -398,7 +398,7 @@ class AppSettings(AppLoggingSettings):
Sending database data may increase accuracy in certain requests,
but will incur additional API costs
"""
-    OPENAI_REQUEST_TIMEOUT: int = 60
+    OPENAI_REQUEST_TIMEOUT: int = 300
"""
The number of seconds to wait for an OpenAI request to complete before cancelling the request
"""

View file

@@ -135,9 +135,7 @@ class OpenAIService(BaseService):
)
return "\n".join(content_parts)
-    async def _get_raw_response(
-        self, prompt: str, content: list[dict], temperature=0.2, force_json_response=True
-    ) -> ChatCompletion:
+    async def _get_raw_response(self, prompt: str, content: list[dict], force_json_response=True) -> ChatCompletion:
client = self.get_client()
return await client.chat.completions.create(
messages=[
@@ -151,7 +149,6 @@ class OpenAIService(BaseService):
},
],
model=self.model,
-            temperature=temperature,
response_format={"type": "json_object"} if force_json_response else NOT_GIVEN,
)
@@ -161,7 +158,6 @@ class OpenAIService(BaseService):
message: str,
*,
images: list[OpenAIImageBase] | None = None,
-        temperature=0.2,
force_json_response=True,
) -> str | None:
"""Send data to OpenAI and return the response message content"""
@@ -174,7 +170,7 @@ class OpenAIService(BaseService):
for image in images or []:
user_messages.append(image.build_message())
-        response = await self._get_raw_response(prompt, user_messages, temperature, force_json_response)
+        response = await self._get_raw_response(prompt, user_messages, force_json_response)
if not response.choices:
return None
return response.choices[0].message.content