Sort .har files by date, filter None from result

Heiner Lohaus 2024-11-26 19:28:41 +01:00
parent cab71ca8b6
commit 4ae3d98df8
7 changed files with 18 additions and 19 deletions
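Taken together, the diff below does two things: the `.har` lookup now returns paths sorted by file modification time, and the response iterators drop `None` values instead of passing them through. A minimal standalone sketch of the sorting half, mirroring the new `get_har_files()` in har_file.py but with the cookies directory passed in as a parameter and a plain `FileNotFoundError` so the snippet runs on its own:

```python
import os

def get_har_files(cookies_dir: str) -> list:
    # Collect every .har file below the cookies directory.
    har_paths = []
    for root, _, files in os.walk(cookies_dir):
        for file in files:
            if file.endswith(".har"):
                har_paths.append(os.path.join(root, file))
    if not har_paths:
        # The real code raises NoValidHarFileError here.
        raise FileNotFoundError("No .har file found")
    # Oldest first, so callers that overwrite values while iterating
    # end up keeping the data from the most recently modified file.
    har_paths.sort(key=lambda path: os.path.getmtime(path))
    return har_paths
```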

View File

@@ -205,6 +205,7 @@ class ChatGpt(AbstractProvider, ProviderModelMixin):
         response = session.post('https://chatgpt.com/backend-anon/conversation',
                                 headers=headers, json=json_data, stream=True)
         response.raise_for_status()
+        replace = ''
         for line in response.iter_lines():

View File

@@ -23,7 +23,7 @@ from ..typing import CreateResult, Messages, ImageType
 from ..errors import MissingRequirementsError
 from ..requests.raise_for_status import raise_for_status
 from ..providers.asyncio import get_running_loop
-from ..Provider.openai.har_file import NoValidHarFileError, get_headers
+from ..Provider.openai.har_file import NoValidHarFileError, get_headers, get_har_files
 from ..requests import get_nodriver
 from ..image import ImageResponse, to_bytes, is_accepted_format
 from ..cookies import get_cookies_dir
@@ -188,16 +188,9 @@ class Copilot(AbstractProvider):
     return access_token, cookies
 
 def readHAR():
-    harPath = []
-    for root, _, files in os.walk(get_cookies_dir()):
-        for file in files:
-            if file.endswith(".har"):
-                harPath.append(os.path.join(root, file))
-    if not harPath:
-        raise NoValidHarFileError("No .har file found")
     api_key = None
     cookies = None
-    for path in harPath:
+    for path in get_har_files():
         with open(path, 'rb') as file:
             try:
                 harFile = json.loads(file.read())

View File

@@ -424,10 +424,10 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                 for element in c.get("parts"):
                     if isinstance(element, dict) and element.get("content_type") == "image_asset_pointer":
                         image = cls.get_generated_image(session, cls._headers, element)
-                        if image is not None:
-                            generated_images.append(image)
+                        generated_images.append(image)
                 for image_response in await asyncio.gather(*generated_images):
-                    yield image_response
+                    if image_response is not None:
+                        yield image_response
         if m.get("author", {}).get("role") == "assistant":
             fields.message_id = v.get("message", {}).get("id")
         return
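The hunk above moves the `None` check from the append to the consumption side: every pending image download is handed to `asyncio.gather`, and results that came back as `None` are skipped rather than yielded. A small illustration of that pattern with made-up coroutines (not the provider's actual download code):

```python
import asyncio
from typing import Optional

async def fetch_image(url: str) -> Optional[str]:
    # Stand-in for a download that may fail and resolve to None.
    await asyncio.sleep(0)
    return None if url.endswith("broken") else f"image from {url}"

async def main() -> None:
    pending = [fetch_image(u) for u in ("https://a.example/ok", "https://b.example/broken")]
    for result in await asyncio.gather(*pending):
        if result is not None:  # filter None from the gathered results
            print(result)

asyncio.run(main())
```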

View File

@@ -45,7 +45,7 @@ class arkReq:
         self.arkCookies = arkCookies
         self.userAgent = userAgent
 
-def readHAR():
+def get_har_files():
     harPath = []
     for root, _, files in os.walk(get_cookies_dir()):
         for file in files:
@@ -53,7 +53,11 @@ def readHAR():
                 harPath.append(os.path.join(root, file))
     if not harPath:
         raise NoValidHarFileError("No .har file found")
-    for path in harPath:
+    harPath.sort(key=lambda x: os.path.getmtime(x))
+    return harPath
+
+def readHAR():
+    for path in get_har_files():
         with open(path, 'rb') as file:
             try:
                 harFile = json.loads(file.read())

View File

@@ -58,7 +58,7 @@ def iter_response(
         elif isinstance(chunk, BaseConversation):
             yield chunk
             continue
-        elif isinstance(chunk, SynthesizeData):
+        elif isinstance(chunk, SynthesizeData) or chunk is None:
            continue
 
         chunk = str(chunk)
@@ -121,7 +121,7 @@ async def async_iter_response(
         elif isinstance(chunk, BaseConversation):
             yield chunk
             continue
-        elif isinstance(chunk, SynthesizeData):
+        elif isinstance(chunk, SynthesizeData) or chunk is None:
             continue
 
         chunk = str(chunk)
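The `or chunk is None` guard is the "filter None from result" part of the commit title: a `None` chunk no longer reaches `chunk = str(chunk)`, which would otherwise inject the literal text "None" into the assembled response. A tiny self-contained illustration (the chunk values are invented):

```python
from typing import Iterator, Optional

def iter_text(chunks: Iterator[Optional[str]]) -> Iterator[str]:
    for chunk in chunks:
        if chunk is None:  # skip None instead of stringifying it to "None"
            continue
        yield str(chunk)

print("".join(iter_text(["Hel", None, "lo"])))  # prints "Hello"
```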

View File

@@ -90,9 +90,9 @@ def get_model_and_provider(model : Union[Model, str],
         raise StreamNotSupportedError(f'{provider.__name__} does not support "stream" argument')
 
     if model:
-        debug.log(f'Using {provider.__name__} provider and {model} model')
+        debug.log(f'Using {type(provider).__name__} provider and {model} model')
     else:
-        debug.log(f'Using {provider.__name__} provider')
+        debug.log(f'Using {type(provider).__name__} provider')
 
     debug.last_provider = provider
     debug.last_model = model
@@ -115,7 +115,7 @@ def get_last_provider(as_dict: bool = False) -> Union[ProviderType, dict[str, str]]:
     if as_dict:
         if last:
             return {
-                "name": last.__name__,
+                "name": type(last).__name__,
                 "url": last.url,
                 "model": debug.last_model,
                 "label": getattr(last, "label", None) if hasattr(last, "label") else None

View File

@@ -477,6 +477,7 @@ body.white .gradient{
     right: 8px;
     top: 8px;
     z-index: 1000;
+    cursor: pointer;
 }
 
 .count_total {