From 493c37a81ecc34247189f7339632518268524655 Mon Sep 17 00:00:00 2001
From: "t.me/xtekky" <98614666+xtekky@users.noreply.github.com>
Date: Sat, 1 Apr 2023 15:08:23 +0200
Subject: [PATCH] gpt 3.5 / customize models (site: ora.sh)

---
 ora/__init__.py | 45 +++++++++++++++++++++++++++++++++++++++++++++
 ora/model.py    | 38 ++++++++++++++++++++++++++++++++++++++
 ora/typing.py   | 41 +++++++++++++++++++++++++++++++++++++++++
 3 files changed, 124 insertions(+)
 create mode 100644 ora/__init__.py
 create mode 100644 ora/model.py
 create mode 100644 ora/typing.py

diff --git a/ora/__init__.py b/ora/__init__.py
new file mode 100644
index 00000000..546e0940
--- /dev/null
+++ b/ora/__init__.py
@@ -0,0 +1,45 @@
+from typing import Optional
+
+from ora.model import CompletionModel
+from ora.typing import OraResponse
+from requests import post
+from time import time
+
+class Completion:
+    """Client for the ora.sh conversation endpoint."""
+
+    @staticmethod
+    def create(
+        model         : CompletionModel,
+        prompt        : str,
+        conversationId: Optional[str] = None) -> OraResponse:
+        """Send `prompt` to the ora.sh conversation API; return an OraResponse."""
+
+        # Only send a conversationId when continuing an existing conversation.
+        extra = {'conversationId': conversationId} if conversationId else {}
+
+        # dict union (|) requires Python 3.9+
+        response = post('https://ora.sh/api/conversation', json = extra | {
+            'chatbotId': model.id,
+            'input'    : prompt,
+            'userId'   : model.createdBy}).json()
+
+        text = response['response']
+
+        # NOTE: "token" counts below are character counts, not real tokens.
+        return OraResponse({
+            'id'     : response['conversationId'],
+            'object' : 'text_completion',
+            'created': int(time()),
+            'model'  : model.slug,
+            'choices': [{
+                'text'         : text,
+                'index'        : 0,
+                'logprobs'     : None,
+                'finish_reason': 'stop'
+            }],
+            'usage': {
+                'prompt_tokens'    : len(prompt),
+                'completion_tokens': len(text),
+                'total_tokens'     : len(prompt) + len(text)
+            }
+        })
diff --git a/ora/model.py b/ora/model.py
new file mode 100644
index 00000000..69eca526
--- /dev/null
+++ b/ora/model.py
@@ -0,0 +1,38 @@
+from uuid import uuid4
+from requests import post
+
+class CompletionModel:
+    """Class-level record of the chatbot last registered on ora.sh."""
+
+    system_prompt = None
+    description   = None
+    createdBy     = None
+    createdAt     = None
+    slug          = None
+    id            = None
+
+    @staticmethod
+    def create(
+        system_prompt: str = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
+        description  : str = 'ChatGPT Openai Language Model',
+        name         : str = 'gpt-3.5'):
+        """Register a chatbot on ora.sh and cache its ids on the class."""
+
+        CompletionModel.system_prompt = system_prompt
+        CompletionModel.description   = description
+        CompletionModel.slug          = name
+
+        response = post('https://ora.sh/api/assistant', json = {
+            'prompt'     : system_prompt,
+            'userId'     : f'auto:{uuid4()}',
+            'name'       : name,
+            'description': description})
+
+        # Parse the JSON body once instead of once per field.
+        data = response.json()
+
+        CompletionModel.id        = data['id']
+        CompletionModel.createdBy = data['createdBy']
+        CompletionModel.createdAt = data['createdAt']
+
+        return CompletionModel
diff --git a/ora/typing.py b/ora/typing.py
new file mode 100644
index 00000000..f3f0aebf
--- /dev/null
+++ b/ora/typing.py
@@ -0,0 +1,41 @@
+class OraResponse:
+    """Attribute-style wrapper around the raw completion dict from ora.sh."""
+
+    class Completion:
+
+        class Choices:
+            def __init__(self, choice: dict) -> None:
+                self.text          = choice['text']
+                self.content       = self.text.encode()
+                self.index         = choice['index']
+                self.logprobs      = choice['logprobs']
+                self.finish_reason = choice['finish_reason']
+
+            def __repr__(self) -> str:
+                return f'''<__main__.APIResponse.Completion.Choices(\n    text = {self.text.encode()},\n    index = {self.index},\n    logprobs = {self.logprobs},\n    finish_reason = {self.finish_reason})object at 0x1337>'''
+
+        def __init__(self, choices: dict) -> None:
+            self.choices = [self.Choices(choice) for choice in choices]
+
+    class Usage:
+        def __init__(self, usage_dict: dict) -> None:
+            self.prompt_tokens     = usage_dict['prompt_tokens']
+            self.completion_tokens = usage_dict['completion_tokens']
+            self.total_tokens      = usage_dict['total_tokens']
+
+        def __repr__(self):
+            return f'''<__main__.APIResponse.Usage(\n    prompt_tokens = {self.prompt_tokens},\n    completion_tokens = {self.completion_tokens},\n    total_tokens = {self.total_tokens})object at 0x1337>'''
+
+    def __init__(self, response_dict: dict) -> None:
+        """Unpack `response_dict` into attributes; keep the raw dict too."""
+        self.response_dict = response_dict
+        self.id            = response_dict['id']
+        self.object        = response_dict['object']
+        self.created       = response_dict['created']
+        self.model         = response_dict['model']
+        self.completion    = self.Completion(response_dict['choices'])
+        self.usage         = self.Usage(response_dict['usage'])
+
+    def json(self) -> dict:
+        """Return the raw response dict ora.sh sent back."""
+        return self.response_dict