Merge pull request #2421 from hlohaus/model

Fix optional fields in API
This commit is contained in:
H Lohaus 2024-11-25 10:42:23 +01:00 committed by GitHub
commit a722abb8c2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 21 additions and 18 deletions

View File

@ -24,6 +24,7 @@ jobs:
python -m etc.tool.openapi
- uses: actions/upload-artifact@v4
with:
name: openapi
path: openapi.json
publish:
runs-on: ubuntu-latest

View File

@ -91,15 +91,15 @@ def create_app_debug(g4f_api_key: str = None):
class ChatCompletionsConfig(BaseModel):
messages: Messages = Field(examples=[[{"role": "system", "content": ""}, {"role": "user", "content": ""}]])
model: str = Field(default="")
provider: Optional[str] = Field(examples=[None])
provider: Optional[str] = None
stream: bool = False
temperature: Optional[float] = Field(examples=[None])
max_tokens: Optional[int] = Field(examples=[None])
stop: Union[list[str], str, None] = Field(examples=[None])
api_key: Optional[str] = Field(examples=[None])
web_search: Optional[bool] = Field(examples=[None])
proxy: Optional[str] = Field(examples=[None])
conversation_id: Optional[str] = Field(examples=[None])
temperature: Optional[float] = None
max_tokens: Optional[int] = None
stop: Union[list[str], str, None] = None
api_key: Optional[str] = None
web_search: Optional[bool] = None
proxy: Optional[str] = None
conversation_id: Optional[str] = None
class ImageGenerationConfig(BaseModel):
prompt: str

View File

@ -152,8 +152,7 @@ async def async_iter_response(
content = filter_json(content)
yield ChatCompletion.model_construct(content, finish_reason, completion_id, int(time.time()))
finally:
if hasattr(response, 'aclose'):
await safe_aclose(response)
await safe_aclose(response)
async def async_iter_append_model_and_provider(
response: AsyncChatCompletionResponseType
@ -167,8 +166,7 @@ async def async_iter_append_model_and_provider(
chunk.provider = last_provider.get("name")
yield chunk
finally:
if hasattr(response, 'aclose'):
await safe_aclose(response)
await safe_aclose(response)
class Client(BaseClient):
def __init__(
@ -292,7 +290,7 @@ class Images:
proxy = self.client.proxy
response = None
if isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider):
if hasattr(provider_handler, "create_async_generator"):
messages = [{"role": "user", "content": f"Generate a image: {prompt}"}]
async for item in provider_handler.create_async_generator(model, messages, prompt=prompt, **kwargs):
if isinstance(item, ImageResponse):
@ -354,7 +352,7 @@ class Images:
if proxy is None:
proxy = self.client.proxy
if isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider):
if hasattr(provider, "create_async_generator"):
messages = [{"role": "user", "content": "create a variation of this image"}]
generator = None
try:
@ -364,8 +362,7 @@ class Images:
response = chunk
break
finally:
if generator and hasattr(generator, 'aclose'):
await safe_aclose(generator)
await safe_aclose(generator)
elif hasattr(provider, 'create_variation'):
if asyncio.iscoroutinefunction(provider.create_variation):
response = await provider.create_variation(image, model=model, response_format=response_format, proxy=proxy, **kwargs)
@ -454,7 +451,11 @@ class AsyncCompletions:
)
stop = [stop] if isinstance(stop, str) else stop
response = provider.create_completion(
if hasattr(provider, "create_async_generator"):
create_handler = provider.create_async_generator
else:
create_handler = provider.create_completion
response = create_handler(
model,
messages,
stream=stream,

View File

@ -46,7 +46,8 @@ def filter_none(**kwargs) -> dict:
async def safe_aclose(generator: AsyncGenerator) -> None:
try:
await generator.aclose()
if generator and hasattr(generator, 'aclose'):
await generator.aclose()
except Exception as e:
logging.warning(f"Error while closing generator: {e}")