add reka core model (vision)

This commit is contained in:
abc 2024-04-25 00:06:11 +01:00
parent 431621b274
commit 2b271013fb
5 changed files with 197 additions and 2 deletions

View File

@ -304,8 +304,11 @@ While we wait for gpt-5, here is a list of new models that are at least better t
| ------ | ------- | ------ | ------ |
| [mixtral-8x22b](https://huggingface.co/mistral-community/Mixtral-8x22B-v0.1) | `g4f.Provider.DeepInfra` | 176B / 44B active | gpt-3.5-turbo |
| [dbrx-instruct](https://www.databricks.com/blog/introducing-dbrx-new-state-art-open-llm) | `g4f.Provider.DeepInfra` | 132B / 36B active | gpt-3.5-turbo |
| [command-r+](https://txt.cohere.com/command-r-plus-microsoft-azure/) | `g4f.Provider.HuggingChat` | 104B | gpt-4-0314 |
| [reka-core](https://chat.reka.ai/) | `g4f.Provider.Reka` | 104B | gpt-4-vision |
| [claude-3-opus](https://anthropic.com/) | `g4f.Provider.You` | ?B | gpt-4-0125-preview |
| [claude-3-sonnet](https://anthropic.com/) | `g4f.Provider.You` | ?B | gpt-4-0314 |
| [llama-3-70b](https://meta.ai/) | `g4f.Provider.Llama` or `DeepInfra` | ?B | gpt-4-0314 |

### GPT-3.5

View File

@ -0,0 +1,27 @@
# Image Chat with Reka
# !! YOU NEED COOKIES / BE LOGGED IN TO chat.reka.ai
# download an image and save it as test.png in the same folder
from g4f.client import Client
from g4f.Provider import Reka

client = Client(
    provider = Reka # optional if you set the model name to reka-core
)

completion = client.chat.completions.create(
    model = "reka-core",
    messages = [
        {
            "role": "user",
            "content": "What can you see in the image?"
        }
    ],
    stream = True,
    image = open("test.png", "rb") # pass the file object itself, do not call .read() on it
)

for message in completion:
    print(message.choices[0].delta.content or "")

# >>> In the image there is ...
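If you already have a bearer token for chat.reka.ai, the provider added in this commit also accepts it directly through the `bearer_auth` parameter, which skips the cookie lookup. A minimal sketch calling the provider class itself, based on the `Reka.create_completion` signature shown below (the token string is a placeholder):

from g4f.Provider import Reka

# stream the answer; bearer_auth bypasses get_cookies("chat.reka.ai")
for chunk in Reka.create_completion(
    model = "reka-core",
    messages = [{"role": "user", "content": "What can you see in the image?"}],
    stream = True,
    bearer_auth = "YOUR_ACCESS_TOKEN", # placeholder: copy it from your logged-in session
    image = open("test.png", "rb"),
):
    print(chunk, end="")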

g4f/Provider/Reka.py Normal file
View File

@ -0,0 +1,148 @@
from __future__ import annotations

import os, requests, time, json

from ..typing import CreateResult, Messages, ImageType
from .base_provider import AbstractProvider
from ..cookies import get_cookies

class Reka(AbstractProvider):
    url             = "https://chat.reka.ai/"
    working         = True
    supports_stream = True
    cookies         = {}

    @classmethod
    def create_completion(
        cls,
        model: str,
        messages: Messages,
        stream: bool,
        proxy: str = None,
        timeout: int = 180,
        bearer_auth: str = None,
        image: ImageType = None, **kwargs) -> CreateResult:

        # requests expects a mapping for proxies, so wrap the single proxy URL
        cls.proxy = {"http": proxy, "https": proxy} if proxy else None

        if not bearer_auth:
            # no token supplied: fall back to browser cookies and exchange them for an access token
            cls.cookies = get_cookies("chat.reka.ai")
            if not cls.cookies:
                raise ValueError("No cookies found for chat.reka.ai")
            elif "appSession" not in cls.cookies:
                raise ValueError("No appSession found in cookies for chat.reka.ai, log in or provide bearer_auth")

            bearer_auth = cls.get_access_token()

        # Reka expects the whole history as {"type": "human", "text": ...} turns
        conversation = []
        for message in messages:
            conversation.append({
                "type": "human",
                "text": message["content"],
            })

        if image:
            # upload the image first, then attach its URL to the last turn
            image_url = cls.upload_image(bearer_auth, image)
            conversation[-1]["image_url"] = image_url
            conversation[-1]["media_type"] = "image"

        headers = {
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'authorization': f'Bearer {bearer_auth}',
            'cache-control': 'no-cache',
            'content-type': 'application/json',
            'origin': 'https://chat.reka.ai',
            'pragma': 'no-cache',
            'priority': 'u=1, i',
            'sec-ch-ua': '"Chromium";v="124", "Google Chrome";v="124", "Not-A.Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36',
        }

        json_data = {
            'conversation_history': conversation,
            'stream': True,
            'use_search_engine': False,
            'use_code_interpreter': False,
            'model_name': 'reka-core',
            'random_seed': int(time.time() * 1000),
        }

        tokens = ''

        response = requests.post('https://chat.reka.ai/api/chat',
            cookies=cls.cookies, headers=headers, json=json_data, proxies=cls.proxy, stream=True)

        # the endpoint streams the cumulative answer on each "data:" line,
        # so only the new tail is yielded as a delta
        for completion in response.iter_lines():
            if b'data' in completion:
                token_data = json.loads(completion.decode('utf-8')[5:])['text']

                yield token_data.replace(tokens, '')

                tokens = token_data

    @classmethod
    def upload_image(cls, access_token, image: ImageType) -> str:
        boundary_token = os.urandom(8).hex()

        headers = {
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'cache-control': 'no-cache',
            'authorization': f'Bearer {access_token}',
            'content-type': f'multipart/form-data; boundary=----WebKitFormBoundary{boundary_token}',
            'origin': 'https://chat.reka.ai',
            'pragma': 'no-cache',
            'priority': 'u=1, i',
            'referer': 'https://chat.reka.ai/chat/hPReZExtDOPvUfF8vCPC',
            'sec-ch-ua': '"Chromium";v="124", "Google Chrome";v="124", "Not-A.Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36',
        }

        # build the multipart/form-data body by hand so the boundary matches the content-type header
        image_data = image.read()

        boundary = f'----WebKitFormBoundary{boundary_token}'
        data = f'--{boundary}\r\nContent-Disposition: form-data; name="image"; filename="image.png"\r\nContent-Type: image/png\r\n\r\n'
        data += image_data.decode('latin-1')
        data += f'\r\n--{boundary}--\r\n'

        response = requests.post('https://chat.reka.ai/api/upload-image',
            cookies=cls.cookies, headers=headers, proxies=cls.proxy, data=data.encode('latin-1'))

        return response.json()['media_url']

    @classmethod
    def get_access_token(cls):
        headers = {
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'cache-control': 'no-cache',
            'pragma': 'no-cache',
            'priority': 'u=1, i',
            'referer': 'https://chat.reka.ai/chat',
            'sec-ch-ua': '"Chromium";v="124", "Google Chrome";v="124", "Not-A.Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36',
        }

        try:
            response = requests.get('https://chat.reka.ai/bff/auth/access_token',
                cookies=cls.cookies, headers=headers)

            return response.json()['accessToken']
        except Exception as e:
            raise ValueError(f"Failed to get access token: {e}, refresh your cookies or log in to chat.reka.ai again")

View File

@ -50,6 +50,7 @@ from .ReplicateImage import ReplicateImage
from .Vercel import Vercel
from .WhiteRabbitNeo import WhiteRabbitNeo
from .You import You
from .Reka import Reka
import sys

View File

@ -29,6 +29,7 @@ from .Provider import (
    Pi,
    Vercel,
    You,
    Reka
)
@ -306,6 +307,12 @@ blackbox = Model(
    best_provider = Blackbox
)

reka_core = Model(
    name = 'reka-core',
    base_provider = 'Reka AI',
    best_provider = Reka
)

class ModelUtils:
    """
    Utility class for mapping string identifiers to Model instances.
@ -333,8 +340,12 @@ class ModelUtils:
        'llama2-7b' : llama2_7b,
        'llama2-13b': llama2_13b,
        'llama2-70b': llama2_70b,
        'llama3-8b' : llama3_8b_instruct, # alias
        'llama3-70b': llama3_70b_instruct, # alias
        'llama3-8b-instruct' : llama3_8b_instruct,
        'llama3-70b-instruct': llama3_70b_instruct,

        'codellama-34b-instruct': codellama_34b_instruct,
        'codellama-70b-instruct': codellama_70b_instruct,
@ -359,6 +370,11 @@ class ModelUtils:
        'claude-3-opus': claude_3_opus,
        'claude-3-sonnet': claude_3_sonnet,

        # reka core
        'reka-core': reka_core,
        'reka': reka_core,
        'Reka Core': reka_core,

        # other
        'blackbox': blackbox,
        'command-r+': command_r_plus,
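With these aliases registered, callers can select the model by name alone and `ModelUtils` resolves the provider (Reka) for them, which is what makes the `provider = Reka` argument in the example above optional. A minimal sketch, assuming you are logged in to chat.reka.ai as described in the example file:

from g4f.client import Client

client = Client() # no explicit provider: "reka-core" is mapped to the Reka provider

completion = client.chat.completions.create(
    model = "reka-core",
    messages = [{"role": "user", "content": "Introduce yourself in one sentence."}],
    stream = True,
)

for message in completion:
    print(message.choices[0].delta.content or "", end="")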