Mirror of https://github.com/xtekky/gpt4free.git (synced 2024-12-24 03:23:49 +03:00)

Update (g4f/Provider/Cloudflare.py)

This commit is contained in:
parent 9a0346199b
commit c74a6943a8
@@ -21,10 +21,7 @@ class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin):
     supports_message_history = True

     default_model = '@cf/meta/llama-3.1-8b-instruct-awq'
     models = [
-        '@cf/tiiuae/falcon-7b-instruct', # Specific answer
-
-
         '@hf/google/gemma-7b-it',

         '@cf/meta/llama-2-7b-chat-fp16',
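This hunk prunes the provider's model list: the Falcon entry (and two adjacent blank lines) is removed, while the Llama 3.1 default and the Gemma and Llama 2 entries remain. For context, here is a minimal usage sketch of selecting one of these models through g4f's ChatCompletion interface (the call below reflects g4f's documented API and is not part of this commit):

import g4f
from g4f.Provider import Cloudflare

# Ask the Cloudflare provider for a completion using its default model.
response = g4f.ChatCompletion.create(
    model="@cf/meta/llama-3.1-8b-instruct-awq",  # the default_model above
    provider=Cloudflare,
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)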
@@ -120,9 +117,12 @@ class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin):

         scraper = cloudscraper.create_scraper()

+
+        prompt = messages[-1]['content']
+
         data = {
             "messages": [
-                {"role": "user", "content": format_prompt(messages)}
+                {"role": "user", "content": prompt}
             ],
             "lora": None,
             "model": model,
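This hunk changes what gets sent to the Cloudflare endpoint: instead of flattening the entire conversation with format_prompt(messages), the provider now sends only the content of the last message. A rough sketch of the behavioral difference follows (the format_prompt body here is an illustrative stand-in, not g4f's actual implementation):

messages = [
    {"role": "user", "content": "What is 2 + 2?"},
    {"role": "assistant", "content": "4"},
    {"role": "user", "content": "And times 3?"},
]

def format_prompt_sketch(messages):
    # Stand-in for g4f's format_prompt: joins the whole history into one string.
    return "\n".join(f"{m['role'].capitalize()}: {m['content']}" for m in messages)

old_content = format_prompt_sketch(messages)  # full history in a single user turn
new_content = messages[-1]["content"]         # only "And times 3?"

With the new form, earlier turns are no longer included in the request, so multi-turn context is lost unless the endpoint tracks it server-side.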
@@ -147,7 +147,7 @@ class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin):

         response.raise_for_status()

-        skip_tokens = ["</s>", "<s>", "[DONE]", "<|endoftext|>", "<|end|>"]
+        skip_tokens = ["</s>", "<s>", "</s>", "[DONE]", "<|endoftext|>", "<|end|>"]
         filtered_response = ""

         for line in response.iter_lines():
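The final hunk extends skip_tokens, the list of end-of-sequence markers stripped from the streamed output (note the new list contains "</s>" twice, which is redundant but harmless). The loop body is truncated in this view, so the following is only a plausible sketch of how such a filter over response.iter_lines() could work; the "data: " framing and the "response" JSON field are assumptions, and response is the streaming object obtained from scraper.post(...) in the surrounding code:

import json

skip_tokens = ["</s>", "<s>", "[DONE]", "<|endoftext|>", "<|end|>"]
filtered_response = ""

for line in response.iter_lines():
    # Skip keep-alive blanks and non-data lines (SSE framing is an assumption).
    if not line or not line.startswith(b"data: "):
        continue
    payload = line[len(b"data: "):]
    if payload.strip() == b"[DONE]":
        break
    # The "response" field name is an assumption about the chunk schema.
    chunk = json.loads(payload).get("response", "")
    for token in skip_tokens:
        chunk = chunk.replace(token, "")
    filtered_response += chunk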