Mirror of https://github.com/xtekky/gpt4free.git
Add message options in the GUI, save generated images locally
parent c8d61a08b7
commit 80b2e9b807
.gitignore (vendored)
@@ -65,3 +65,4 @@ x.txt
bench.py
to-reverse.txt
g4f/Provider/OpenaiChat2.py
generated_images/

@@ -92,7 +92,12 @@ As per the survey, here is a list of improvements to come

```sh
docker pull hlohaus789/g4f
docker run -p 8080:8080 -p 1337:1337 -p 7900:7900 --shm-size="2g" -v ${PWD}/har_and_cookies:/app/har_and_cookies hlohaus789/g4f:latest
docker run \
  -p 8080:8080 -p 1337:1337 -p 7900:7900 \
  --shm-size="2g" \
  -v ${PWD}/har_and_cookies:/app/har_and_cookies \
  -v ${PWD}/generated_images:/app/generated_images \
  hlohaus789/g4f:latest
```

3. **Access the Client:**

@@ -4,7 +4,6 @@ import os
import json
import random
import re
import base64

from aiohttp import ClientSession, BaseConnector

@@ -193,14 +192,10 @@ class Gemini(AsyncGeneratorProvider):
        yield content
        if image_prompt:
            images = [image[0][3][3] for image in response_part[4][0][12][7][0]]
            resolved_images = []
            if response_format == "b64_json":
                for image in images:
                    async with client.get(image) as response:
                        data = base64.b64encode(await response.content.read()).decode()
                        resolved_images.append(data)
                yield ImageDataResponse(resolved_images, image_prompt)
            yield ImageResponse(images, image_prompt, {"cookies": cls._cookies})
            else:
                resolved_images = []
                preview = []
                for image in images:
                    async with client.get(image, allow_redirects=False) as fetch:

@@ -171,7 +171,8 @@ async def iter_image_response(
    if isinstance(chunk, ImageProviderResponse):
        if response_format == "b64_json":
            async with ClientSession(
                connector=get_connector(connector, proxy)
                connector=get_connector(connector, proxy),
                cookies=chunk.options.get("cookies")
            ) as session:
                async def fetch_image(image):
                    async with session.get(image) as response:

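The pattern in this hunk, and in the Gemini change above, is that the provider attaches the cookies it used to the ImageResponse options, and the code that later downloads the image URLs replays those cookies on its own ClientSession; without them the image host can reject the request. A minimal standalone sketch of that hand-off, assuming aiohttp and a plain dict of cookies (the function name `fetch_images_as_b64` is illustrative, not g4f's API):

```python
import asyncio
import base64
from typing import Optional

from aiohttp import ClientSession


async def fetch_images_as_b64(urls: list[str], cookies: Optional[dict] = None) -> list[str]:
    """Download image URLs, reusing the provider's cookies, and return base64 strings."""
    async with ClientSession(cookies=cookies) as session:
        async def fetch(url: str) -> str:
            async with session.get(url) as response:
                response.raise_for_status()
                return base64.b64encode(await response.read()).decode()
        # Fetch all images concurrently on the same cookie-carrying session.
        return await asyncio.gather(*[fetch(url) for url in urls])
```

The cookies would come from `chunk.options.get("cookies")`, exactly as the hunk above reads them.
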
@@ -32,10 +32,10 @@
    <script type="module" src="https://cdn.jsdelivr.net/npm/mistral-tokenizer-js" async>
        import mistralTokenizer from "mistral-tokenizer-js"
    </script>
    <script type="module" src="https://belladoreai.github.io/llama-tokenizer-js/llama-tokenizer.js" async>
    <script type="module" src="https://cdn.jsdelivr.net/gh/belladoreai/llama-tokenizer-js@master/llama-tokenizer.js" async>
        import llamaTokenizer from "llama-tokenizer-js"
    </script>
    <script src="https://unpkg.com/gpt-tokenizer/dist/cl100k_base.js" async></script>
    <script src="https://cdn.jsdelivr.net/npm/gpt-tokenizer/dist/cl100k_base.js" async></script>
    <script src="/static/js/text_to_speech/index.js" async></script>
    <!--
    <script src="/static/js/whisper-web/index.js" async></script>

@@ -265,6 +265,14 @@ body {
    padding-bottom: 0;
}

.message.print {
    height: 100%;
    position: absolute;
    background-color: #fff;
    z-index: 100;
    top: 0;
}

.message.regenerate {
    opacity: 0.75;
}

@@ -339,14 +347,14 @@ body {
    flex-wrap: wrap;
}

.message .content,
.message .content a:link,
.message .content a:visited{
.message .content_inner,
.message .content_inner a:link,
.message .content_inner a:visited{
    font-size: 15px;
    line-height: 1.3;
    color: var(--colour-3);
}
.message .content pre{
.message .content_inner pre{
    white-space: pre-wrap;
}

@@ -389,19 +397,19 @@ body {

.message .count .fa-clipboard,
.message .count .fa-volume-high,
.message .count .fa-rotate {
.message .count .fa-rotate,
.message .count .fa-print {
    z-index: 1000;
    cursor: pointer;
}

.message .count .fa-clipboard {
.message .count .fa-clipboard,
.message .count .fa-whatsapp {
    color: var(--colour-3);
}

.message .count .fa-clipboard.clicked {
    color: var(--accent);
}

.message .count .fa-clipboard.clicked,
.message .count .fa-print.clicked,
.message .count .fa-volume-high.active {
    color: var(--accent);
}

@@ -1122,3 +1130,17 @@ a:-webkit-any-link {
        opacity: 0;
    }
}

@media print {
    #systemPrompt:placeholder-shown,
    .conversations,
    .conversation .user-input,
    .conversation .buttons,
    .conversation .toolbar,
    .conversation .slide-systemPrompt,
    .message .count i,
    .message .assistant,
    .message .user {
        display: none;
    }
}

@@ -192,6 +192,26 @@ const register_message_buttons = async () => {
            })
        }
    });
    document.querySelectorAll(".message .fa-whatsapp").forEach(async (el) => {
        if (!el.parentElement.href) {
            const text = el.parentElement.parentElement.parentElement.innerText;
            el.parentElement.href = `https://wa.me/?text=${encodeURIComponent(text)}`;
        }
    });
    document.querySelectorAll(".message .fa-print").forEach(async (el) => {
        if (!("click" in el.dataset)) {
            el.dataset.click = "true";
            el.addEventListener("click", async () => {
                const message_el = el.parentElement.parentElement.parentElement;
                el.classList.add("clicked");
                message_box.scrollTop = 0;
                message_el.classList.add("print");
                setTimeout(() => el.classList.remove("clicked"), 1000);
                setTimeout(() => message_el.classList.remove("print"), 1000);
                window.print()
            })
        }
    });
}

const delete_conversations = async () => {

@@ -253,6 +273,8 @@ const handle_ask = async () => {
                    ${count_words_and_tokens(message, get_selected_model())}
                    <i class="fa-solid fa-volume-high"></i>
                    <i class="fa-regular fa-clipboard"></i>
                    <a><i class="fa-brands fa-whatsapp"></i></a>
                    <i class="fa-solid fa-print"></i>
                </div>
            </div>
        </div>

@@ -625,6 +647,8 @@ const load_conversation = async (conversation_id, scroll=true) => {
                    ${count_words_and_tokens(item.content, next_provider?.model)}
                    <i class="fa-solid fa-volume-high"></i>
                    <i class="fa-regular fa-clipboard"></i>
                    <a><i class="fa-brands fa-whatsapp"></i></a>
                    <i class="fa-solid fa-print"></i>
                </div>
            </div>
        </div>

@@ -1,18 +1,27 @@
from __future__ import annotations

import logging
import json
from typing import Iterator
import os
import os.path
import uuid
import asyncio
import time
from aiohttp import ClientSession
from typing import Iterator, Optional
from flask import send_from_directory

from g4f import version, models
from g4f import get_last_provider, ChatCompletion
from g4f.errors import VersionNotFoundError
from g4f.image import ImagePreview
from g4f.typing import Cookies
from g4f.image import ImagePreview, ImageResponse, is_accepted_format
from g4f.requests.aiohttp import get_connector
from g4f.Provider import ProviderType, __providers__, __map__
from g4f.providers.base_provider import ProviderModelMixin, FinishReason
from g4f.providers.conversation import BaseConversation

conversations: dict[dict[str, BaseConversation]] = {}
images_dir = "./generated_images"

class Api():

@@ -110,14 +119,8 @@ class Api():
            "latest_version": version.utils.latest_version,
        }

    def generate_title(self):
        """
        Generates and returns a title based on the request data.

        Returns:
            dict: A dictionary with the generated title.
        """
        return {'title': ''}
    def serve_images(self, name):
        return send_from_directory(os.path.abspath(images_dir), name)

    def _prepare_conversation_kwargs(self, json_data: dict, kwargs: dict):
        """

@@ -185,6 +188,27 @@ class Api():
                    yield self._format_json("message", get_error_message(chunk))
                elif isinstance(chunk, ImagePreview):
                    yield self._format_json("preview", chunk.to_string())
                elif isinstance(chunk, ImageResponse):
                    async def copy_images(images: list[str], cookies: Optional[Cookies] = None):
                        async with ClientSession(
                            connector=get_connector(None, os.environ.get("G4F_PROXY")),
                            cookies=cookies
                        ) as session:
                            async def copy_image(image):
                                async with session.get(image) as response:
                                    target = os.path.join(images_dir, f"{int(time.time())}_{str(uuid.uuid4())}")
                                    with open(target, "wb") as f:
                                        async for chunk in response.content.iter_any():
                                            f.write(chunk)
                                    with open(target, "rb") as f:
                                        extension = is_accepted_format(f.read(12)).split("/")[-1]
                                    extension = "jpg" if extension == "jpeg" else extension
                                    new_target = f"{target}.{extension}"
                                    os.rename(target, new_target)
                                    return f"/images/{os.path.basename(new_target)}"
                            return await asyncio.gather(*[copy_image(image) for image in images])
                    images = asyncio.run(copy_images(chunk.get_list(), chunk.options.get("cookies")))
                    yield self._format_json("content", str(ImageResponse(images, chunk.alt)))
                elif not isinstance(chunk, FinishReason):
                    yield self._format_json("content", str(chunk))
        except Exception as e:

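Condensed, the new saving path does four things per image: stream the bytes into generated_images/ under a unique name, sniff the real format from the first bytes, rename the file with a matching extension, and hand back an /images/ URL for the GUI. A rough standalone sketch of the same flow; `detect_extension` is a simplified stand-in for g4f.image.is_accepted_format (which returns a MIME type), and the snippet as a whole is illustrative rather than the project's exact code:

```python
import asyncio
import os
import time
import uuid
from typing import Optional

from aiohttp import ClientSession

images_dir = "./generated_images"


def detect_extension(header: bytes) -> str:
    """Tiny magic-byte sniffer (stand-in for g4f.image.is_accepted_format)."""
    if header.startswith(b"\x89PNG\r\n\x1a\n"):
        return "png"
    if header.startswith(b"\xff\xd8\xff"):
        return "jpg"
    if header[:6] in (b"GIF87a", b"GIF89a"):
        return "gif"
    if header[:4] == b"RIFF" and header[8:12] == b"WEBP":
        return "webp"
    return "bin"


async def copy_images(images: list[str], cookies: Optional[dict] = None) -> list[str]:
    """Download each image URL into images_dir and return the /images/ paths."""
    os.makedirs(images_dir, exist_ok=True)
    async with ClientSession(cookies=cookies) as session:
        async def copy_image(url: str) -> str:
            # Unique name first; the extension is added once the format is known.
            target = os.path.join(images_dir, f"{int(time.time())}_{uuid.uuid4()}")
            async with session.get(url) as response:
                with open(target, "wb") as f:
                    async for chunk in response.content.iter_any():
                        f.write(chunk)
            with open(target, "rb") as f:
                extension = detect_extension(f.read(12))
            new_target = f"{target}.{extension}"
            os.rename(target, new_target)
            return f"/images/{os.path.basename(new_target)}"
        return await asyncio.gather(*[copy_image(url) for url in images])
```

Called as `asyncio.run(copy_images(urls, cookies))`, it returns paths of the form `/images/<timestamp>_<uuid>.<ext>` that the frontend can embed directly.
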
@@ -47,13 +47,13 @@ class Backend_Api(Api):
                'function': self.handle_conversation,
                'methods': ['POST']
            },
            '/backend-api/v2/gen.set.summarize:title': {
                'function': self.generate_title,
                'methods': ['POST']
            },
            '/backend-api/v2/error': {
                'function': self.handle_error,
                'methods': ['POST']
            },
            '/images/<path:name>': {
                'function': self.serve_images,
                'methods': ['GET']
            }
        }

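The new `/images/<path:name>` route simply maps those URLs back onto files in generated_images/. A minimal Flask sketch of an equivalent route, kept separate from the project's Backend_Api wiring for clarity (the port is illustrative):

```python
import os

from flask import Flask, send_from_directory

app = Flask(__name__)
images_dir = "./generated_images"


@app.route("/images/<path:name>")
def serve_images(name: str):
    # send_from_directory refuses paths that escape images_dir (path traversal guard).
    return send_from_directory(os.path.abspath(images_dir), name)


if __name__ == "__main__":
    app.run(port=1337)  # mirrors the port mapping in the docker command above; adjust as needed
```
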
generated_images/.gitkeep (new empty file)