mirror of https://github.com/leon-ai/leon.git synced 2024-09-11 10:25:40 +03:00

Merge branch 'develop'

louistiti 2023-05-01 10:15:43 +08:00
commit 9c5d2ea0c8
No known key found for this signature in database
GPG Key ID: 92CD6A2E497E1669
313 changed files with 11587 additions and 43566 deletions


@ -1,23 +0,0 @@
{
"presets": [
["@babel/preset-env", {
"targets": {
"node": "current"
}
}]
],
"plugins": [
["module-resolver", {
"alias": {
"@@": ".",
"@": "./server/src"
},
"compilerOptions": {
"paths": {
"@@": ".",
"@": "./server/src"
}
}
}]
]
}


@ -1,2 +1,9 @@
node_modules/
bridges/python/.venv/*
__pycache__/
**/dist/*
**/build/
**/node_modules/
**/tmp/*
**/src/.venv/*
logs/*
*.pyc
.DS_Store


@ -31,24 +31,15 @@ LEON_HTTP_API_KEY=
# Language used for the HTTP API
LEON_HTTP_API_LANG=en-US
# Enable/disable collaborative logger
LEON_LOGGER=true
# Enable/disable telemetry
LEON_TELEMETRY=true
# Python TCP server
LEON_PY_TCP_SERVER_HOST=0.0.0.0
LEON_PY_TCP_SERVER_PORT=1342
# Path to the Pipfile
PIPENV_PIPFILE=bridges/python/Pipfile
PIPENV_PIPFILE=bridges/python/src/Pipfile
# Path to the virtual env in .venv/
PIPENV_VENV_IN_PROJECT=true
# Fix https://click.palletsprojects.com/en/7.x/python3/#python-3-surrogate-handling
# If Leon replies with something like "Sorry, it seems I have a problem with the ... skill" but
# still gives you the right answer, then:
## 1. Run `locale -a`
## 2. Pick a locale
## 3. Replace the LC_ALL and LANG values with the needed locale
LC_ALL=C.UTF-8
LANG=C.UTF-8


@ -1,9 +1,21 @@
{
"extends": "airbnb-base",
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/recommended",
"plugin:import/recommended",
"plugin:import/typescript",
"prettier"
],
"settings": {
"import/resolver": {
"typescript": true,
"node": true
}
},
"parser": "@typescript-eslint/parser",
"env": {
"node": true,
"browser": true,
"jest/globals": true
"browser": true
},
"parserOptions": {
"ecmaVersion": 2021
@ -11,37 +23,55 @@
"globals": {
"io": true
},
"plugins": [
"import",
"jest"
],
"settings": {
"import/resolver": {
"babel-module": {
"alias": {
"@@": ".",
"@": "./server/src"
}
}
}
},
"plugins": ["@typescript-eslint", "unicorn", "import"],
"ignorePatterns": "*.spec.js",
"rules": {
"no-multi-assign": "off",
"no-trailing-spaces": "off",
"linebreak-style": "off",
"indent": "off",
"strict": "off",
"no-console": "off",
"no-param-reassign": "off",
"no-shadow": "off",
"import/no-extraneous-dependencies": "off",
"import/no-dynamic-require": "off",
"prefer-promise-reject-errors": ["off"],
"@typescript-eslint/no-non-null-assertion": ["off"],
"no-async-promise-executor": ["off"],
"no-underscore-dangle": ["error", { "allowAfterThis": true }],
"space-before-function-paren": ["error", "always"],
"prefer-destructuring": ["error", { "array": true, "object": true }],
"prefer-destructuring": ["error"],
"comma-dangle": ["error", "never"],
"semi": ["error", "never"]
}
"semi": ["error", "never"],
"object-curly-spacing": ["error", "always"],
"unicorn/prefer-node-protocol": "error",
"@typescript-eslint/member-delimiter-style": [
"error",
{
"multiline": {
"delimiter": "none",
"requireLast": true
},
"singleline": {
"delimiter": "comma",
"requireLast": false
}
}
],
"@typescript-eslint/explicit-function-return-type": "off",
"@typescript-eslint/consistent-type-definitions": "error",
"import/no-named-as-default": "off",
"import/no-named-as-default-member": "off",
"import/order": [
"error",
{
"groups": [
"builtin",
"external",
"internal",
"parent",
"sibling",
"index"
],
"newlines-between": "always"
}
]
},
"overrides": [
{
"files": ["*.ts"],
"rules": {
"@typescript-eslint/explicit-function-return-type": "error"
}
}
]
}


@ -14,21 +14,21 @@ appearance, race, religion, or sexual identity and orientation.
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
- The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
- Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities


@ -19,10 +9,9 @@ Here are a few examples of how you could help Leon:
- [Improving the documentation](https://github.com/leon-ai/docs.getleon.ai) (translations, typos, better writing, etc.).
- [Sponsoring Leon](http://sponsor.getleon.ai).
## Pull Requests
**Working on your first Pull Request?** You can learn how from this *free* series [How to Contribute to an Open Source Project on GitHub](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github).
**Working on your first Pull Request?** You can learn how from this _free_ series [How to Contribute to an Open Source Project on GitHub](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github).
- **Please first discuss** the change you wish to make via [issue](https://github.com/leon-ai/leon/issues),
email, or any other method with the owners of this repository before making a change.
@ -32,18 +31,18 @@ Here are a few examples of how you could help Leon:
against the `master` branch**.
- Ensure your code **respect our coding standards** (cf. [.eslintrc.json](https://github.com/leon-ai/leon/blob/develop/.eslintrc.json)).
To do so, you can run:
```sh
npm run lint
```
- Make sure your **code passes the tests**. You can run the tests via the following command:
```sh
npm test
```
If you're adding new features to Leon, please include tests.
## Development Setup
@ -52,7 +51,7 @@ Choose the setup method you want to go for.
### Single-Click
Gitpod will automatically setup an environment and run an instance for you.
Gitpod will automatically set up an environment and run an instance for you.
[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/leon-ai/leon)
@ -106,27 +105,32 @@ The commit message guideline is adapted from the [AngularJS Git Commit Guideline
Types define which kind of changes you made to the project.
| Types | Description |
| -------- | -------------------------------------------------------------------------------------------------------- |
| BREAKING | Changes including breaking changes. |
| build | New build version. |
| chore | Changes to the build process or auxiliary tools such as changelog generation. No production code change. |
| ci | Changes related to continuous integration only (GitHub Actions, CircleCI, etc.). |
| docs | Documentation only changes. |
| feat | A new feature. |
| fix | A bug fix. |
| perf | A code change that improves performance. |
| refactor | A code change that neither fixes a bug nor adds a feature. |
| style | Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc.). |
| test | Adding missing or correcting existing tests. |
### Scopes
Scopes define high-level nodes of Leon.
- bridge/python
- bridge/nodejs
- docker
- hotword
- scripts
- server
- tcp server
- web app
- skill/skill_name
### Examples
@ -148,6 +152,77 @@ By sponsoring the project you make the project sustainable and faster to develop
The focus is not limited to the activity you see on GitHub; it also involves a lot of thinking about the direction of the project, which naturally relates to the overall design, architecture, vision, learning process and so on...
## Contributing to the Python Bridge or TCP Server
Leon makes use of two binaries, the Python bridge and the TCP server. These binaries are compiled from Python sources.
The Python bridge is used to communicate between the core and skills made with Python.
The TCP server is used to communicate between the core and third-party nodes, such as spaCy.
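To get a feel for this IPC layer, here is a minimal sketch of a client talking to the Python TCP server over a raw socket. The host and port mirror `LEON_PY_TCP_SERVER_HOST` and `LEON_PY_TCP_SERVER_PORT` from `.env.sample`; the `topic`/`data` payload is a purely hypothetical placeholder, as the actual wire format is defined by the core's TCP client and the `tcp_server` sources.
```python
# Minimal sketch of a raw-socket exchange with the Python TCP server.
# Host/port mirror LEON_PY_TCP_SERVER_HOST / LEON_PY_TCP_SERVER_PORT in .env.sample.
# The "topic"/"data" payload is a hypothetical placeholder, not Leon's actual wire format.
import json
import socket

HOST, PORT = '127.0.0.1', 1342

with socket.create_connection((HOST, PORT), timeout=5) as conn:
    request = {'topic': 'get-spacy-entities', 'data': {'utterance': 'I was born in Shenzhen'}}
    conn.sendall(json.dumps(request).encode('utf-8'))

    # Print whatever the server replies with, as-is
    print(conn.recv(8192).decode('utf-8'))
```
The point is simply that the core and the binaries exchange serialized messages over a local socket; the real client implementation lives in the server sources.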
### Set Up the Python Environment
To contribute to these parts, you need to set up a Python environment with a specific Python version and a specific Pipenv version.
It is recommended to use Pyenv to manage your Python versions.
If you are on GNU/Linux Ubuntu, you can run the following to install Pyenv; otherwise, please refer to the [Pyenv documentation to install it](https://github.com/pyenv/pyenv#installation):
```bash
# Update registry
sudo apt-get update
# Install Pyenv deps
sudo apt-get install make build-essential libssl-dev zlib1g-dev \
libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm \
libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev
# Install Pyenv
curl https://pyenv.run | bash
# Add output lines to .bashrc
# Restart shell
exec "$SHELL"
```
Once Pyenv is installed, run:
```bash
# Install Python
pyenv install 3.9.10 --force
pyenv global 3.9.10
# Install Pipenv
pip install pipenv==2022.7.24
```
Your Python environment should now be ready. You can then set up and build the environment for the part you are going to contribute to:
```bash
# Set up the Python bridge environment
npm run setup:python-bridge
# Set up the TCP server environment
npm run setup:tcp-server
# If you are in China, you can run this to download models faster:
npm run setup:tcp-server cn
# Once your code changes are done, you can build via:
# Build the Python bridge
npm run build:python-bridge
# Build the TCP server
npm run build:tcp-server
# Run the Python bridge
./bridges/python/dist/{OS-CPU_ARCH}/leon-python-bridge server/src/intent-object.sample.json
# Run the TCP server
./tcp_server/dist/{OS-CPU_ARCH}/leon-tcp-server en
```
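The run command above feeds the Python bridge an intent object. If you are curious about what that object looks like before writing your own skill, a short Python snippet run from the repository root can pretty-print the sample file referenced in the command:
```python
# Pretty-print the sample intent object that the Python bridge consumes.
# Run from the repository root; the path is the one used in the run command above.
import json

with open('server/src/intent-object.sample.json', encoding='utf-8') as f:
    print(json.dumps(json.load(f), indent=2))
```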
## Spread the Word
Use [#LeonAI](https://twitter.com/search?f=live&q=%23LeonAI%20(from%3Agrenlouis%20OR%20from%3Alouistiti_fr)&src=typed_query) if you tweet about Leon and/or mention [@grenlouis](https://twitter.com/grenlouis).
Use [#LeonAI](<https://twitter.com/search?f=live&q=%23LeonAI%20(from%3Agrenlouis%20OR%20from%3Alouistiti_fr)&src=typed_query>) if you tweet about Leon and/or mention [@grenlouis](https://twitter.com/grenlouis).


@ -1,7 +0,0 @@
<!--
Thanks for your interest in Leon! ❤️
Please check that a similar issue does not already exist before creating this one.
-->


@ -12,6 +12,7 @@ Please place an x (no spaces - [x]) in all [ ] that apply.
-->
### Documentation Is:
- [ ] Missing
- [ ] Needed
- [ ] Confusing


@ -14,6 +14,7 @@ Please place an x (no spaces - [x]) in all [ ] that apply.
-->
### What type of change does this PR introduce?
- [ ] Bugfix
- [ ] Feature
- [ ] Refactor
@ -21,10 +22,10 @@ Please place an x (no spaces - [x]) in all [ ] that apply.
- [ ] Not Sure?
### Does this PR introduce breaking changes?
- [ ] Yes
- [ ] No
### List any relevant issue numbers:
### Description:


@ -0,0 +1,76 @@
name: Pre-release Node.js bridge
on: workflow_dispatch
jobs:
build:
name: Build
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04]
runs-on: ${{ matrix.os }}
steps:
- name: Clone repository
uses: actions/checkout@v3
- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version: lts/*
- name: Set Node.js bridge version
working-directory: bridges/nodejs/src
run: |
echo "NODEJS_BRIDGE_VERSION=$(node --require fs --eval "const fs = require('node:fs'); const [, VERSION] = fs.readFileSync('version.ts', 'utf8').split(\"'\"); console.log(VERSION)")" >> $GITHUB_ENV
- name: Display Node.js bridge version
run: |
echo "Node.js bridge version: ${{ env.NODEJS_BRIDGE_VERSION }}"
- name: Install core
run: npm install
- name: Build Node.js bridge
run: npm run build:nodejs-bridge
- name: Upload Node.js bridge
uses: actions/upload-artifact@v3
with:
path: bridges/nodejs/dist/*.zip
draft-release:
name: Draft-release
needs: [build]
runs-on: ubuntu-20.04
steps:
- name: Clone repository
uses: actions/checkout@v3
- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version: lts/*
- name: Set Node.js bridge version
working-directory: bridges/nodejs/src
run: |
echo "NODEJS_BRIDGE_VERSION=$(node --require fs --eval "const fs = require('node:fs'); const [, VERSION] = fs.readFileSync('version.ts', 'utf8').split(\"'\"); console.log(VERSION)")" >> $GITHUB_ENV
- name: Download Node.js bridge
uses: actions/download-artifact@v3
with:
path: bridges/nodejs/dist
- uses: marvinpinto/action-automatic-releases@latest
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
automatic_release_tag: nodejs-bridge_v${{ env.NODEJS_BRIDGE_VERSION }}
draft: true
prerelease: false
title: Node.js Bridge ${{ env.NODEJS_BRIDGE_VERSION }}
files: bridges/nodejs/dist/artifact/*.zip


@ -0,0 +1,91 @@
name: Pre-release Python bridge
on: workflow_dispatch
env:
PIPENV_PIPFILE: bridges/python/src
PIPENV_VENV_IN_PROJECT: true
jobs:
build:
name: Build
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- name: Clone repository
uses: actions/checkout@v3
- name: Install Python
uses: actions/setup-python@v4
with:
python-version: 3.9.10
- name: Install Pipenv
run: pip install --upgrade pip && pip install pipenv==2022.7.24
- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version: lts/*
- name: Set Python bridge version
working-directory: bridges/python/src
run: |
echo "PYTHON_BRIDGE_VERSION=$(python -c "from version import __version__; print(__version__)")" >> $GITHUB_ENV
- name: Display Python bridge version
run: |
echo "Python bridge version: ${{ env.PYTHON_BRIDGE_VERSION }}"
- name: Install core
run: npm install
- name: Set up Python bridge
run: npm run setup:python-bridge
- name: Build Python bridge
run: npm run build:python-bridge
- name: Upload Python bridge
uses: actions/upload-artifact@v3
with:
path: bridges/python/dist/*.zip
draft-release:
name: Draft-release
needs: [build]
runs-on: ubuntu-20.04
steps:
- name: Clone repository
uses: actions/checkout@v3
- name: Install Python
uses: actions/setup-python@v4
with:
python-version: 3.9.10
- name: Set Python bridge version
working-directory: bridges/python/src
run: |
echo "PYTHON_BRIDGE_VERSION=$(python -c "from version import __version__; print(__version__)")" >> $GITHUB_ENV
- name: Download Python bridge
uses: actions/download-artifact@v3
with:
path: bridges/python/dist
- uses: marvinpinto/action-automatic-releases@latest
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
automatic_release_tag: python-bridge_v${{ env.PYTHON_BRIDGE_VERSION }}
draft: true
prerelease: false
title: Python Bridge ${{ env.PYTHON_BRIDGE_VERSION }}
files: bridges/python/dist/artifact/*.zip


@ -0,0 +1,91 @@
name: Pre-release TCP server
on: workflow_dispatch
env:
PIPENV_PIPFILE: tcp_server/src
PIPENV_VENV_IN_PROJECT: true
jobs:
build:
name: Build
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- name: Clone repository
uses: actions/checkout@v3
- name: Install Python
uses: actions/setup-python@v4
with:
python-version: 3.9.10
- name: Install Pipenv
run: pip install --upgrade pip && pip install pipenv==2022.7.24
- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version: lts/*
- name: Set TCP server version
working-directory: tcp_server/src
run: |
echo "TCP_SERVER_VERSION=$(python -c "from version import __version__; print(__version__)")" >> $GITHUB_ENV
- name: Display TCP server version
run: |
echo "TCP server version: ${{ env.TCP_SERVER_VERSION }}"
- name: Install core
run: npm install
- name: Set up TCP server
run: npm run setup:tcp-server
- name: Build TCP server
run: npm run build:tcp-server
- name: Upload TCP server
uses: actions/upload-artifact@v3
with:
path: tcp_server/dist/*.zip
draft-release:
name: Draft-release
needs: [build]
runs-on: ubuntu-20.04
steps:
- name: Clone repository
uses: actions/checkout@v3
- name: Install Python
uses: actions/setup-python@v4
with:
python-version: 3.9.10
- name: Set TCP server version
working-directory: tcp_server/src
run: |
echo "TCP_SERVER_VERSION=$(python -c "from version import __version__; print(__version__)")" >> $GITHUB_ENV
- name: Download TCP server
uses: actions/download-artifact@v3
with:
path: tcp_server/dist
- uses: marvinpinto/action-automatic-releases@latest
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
automatic_release_tag: tcp-server_v${{ env.TCP_SERVER_VERSION }}
draft: true
prerelease: false
title: TCP Server ${{ env.TCP_SERVER_VERSION }}
files: tcp_server/dist/artifact/*.zip

.gitignore

@ -1,25 +1,32 @@
__pycache__/
.idea/
.fleet/
.vscode/
**/dist/
**/dist/*
**/build/
**/node_modules/
test/coverage/
**/tmp/*
bridges/python/.venv/*
**/src/.venv/*
downloads/*
logs/*
core/config/**/*.json
bin/coqui/*
bin/flite/*
package-lock.json
*.pyc
.DS_Store
*.sublime-workspace
npm-debug.log
debug.log
.env
!**/.gitkeep
!bridges/python/.venv/.gitkeep
leon.json
bridges/python/src/Pipfile.lock
tcp_server/src/Pipfile.lock
!tcp_server/**/.gitkeep
!bridges/python/**/.gitkeep
!bridges/nodejs/**/.gitkeep
!**/*.sample*
packages/**/config/config.json
skills/**/src/config.json
@ -28,3 +35,4 @@ skills/**/memory/*.json
core/data/models/*.nlp
package.json.backup
.python-version
schemas/**/*.json


@ -6,6 +6,6 @@ if ! [ -x "$(command -v npm)" ]; then
echo "If you use a version manager tool such as nvm and a git GUI such as GitKraken, please read: https://typicode.github.io/husky/#/?id=command-not-found" >&2
exit 1
else
npm run lint && node_modules/@babel/node/bin/babel-node.js scripts/commit-msg.js
npx ts-node scripts/commit-msg.js
fi

.husky/pre-commit

@ -0,0 +1,4 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"
npx lint-staged

.lintstagedrc

@ -0,0 +1,3 @@
{
"*": ["npm run lint"]
}

.npmrc

@ -1 +1,3 @@
engine-strict=true
package-lock=false
save-exact=true

.prettierrc.json

@ -0,0 +1,5 @@
{
"semi": false,
"trailingComma": "none",
"singleQuote": true
}


@ -1,327 +1,362 @@
# [1.0.0-beta.8](https://github.com/leon-ai/leon/compare/v1.0.0-beta.7...v1.0.0-beta.8) (2023-05-01) / Binaries and TypeScript Rewrite
_Please refer to [our latest blog post](https://blog.getleon.ai/binaries-and-typescript-rewrite-1-0-0-beta-8/) for more information on the new release of our dear Leon._
# [1.0.0-beta.7](https://github.com/leon-ai/leon/compare/v1.0.0-beta.6...v1.0.0-beta.7) (2022-08-24) / A Much Better NLP
*Please [read this blog post](https://blog.getleon.ai/a-much-better-nlp-and-future-1-0-0-beta-7/) to know more about all the new features and the exciting future of Leon.*
_Please [read this blog post](https://blog.getleon.ai/a-much-better-nlp-and-future-1-0-0-beta-7/) to know more about all the new features and the exciting future of Leon._
### BREAKING CHANGES
- remove legacy packages [07743657](https://github.com/leon-ai/leon/commit/07743657cd2954e7f850c08eea7c032c24b28a96)
### Features
- create new NLP skills resolvers model + NLP global resolvers model [602604e4](https://github.com/leon-ai/leon/commit/602604e43788c6b6be8c402d54fe54342d0cd5d6)
- better isolate skill resolvers from global resolvers + finish up Akinator skill [905d248e](https://github.com/leon-ai/leon/commit/905d248ebf7e84b1ccc74450520228aef9a8804a)
- transfer language from core to skills + support thematics on Akinator skill [b35a249b](https://github.com/leon-ai/leon/commit/b35a249bf68000d6708aaee4abc4cd97f5b80035)
- actions on slot level + akinator skill progress [7101b8b4](https://github.com/leon-ai/leon/commit/7101b8b4b828b49e009da2fcdac7c5ed2e48c8f8)
- add Cartesian sample training on resolvers + enum entities [6ed88a59](https://github.com/leon-ai/leon/commit/6ed88a5946c77b356e49fe8b9cbe890b8dd1f037)
- map skills resolvers intents [eb5ade76](https://github.com/leon-ai/leon/commit/eb5ade76844dd14f5d5a5c5eeb434eed70fe62f4)
- train skills resolvers and remap as per changes [82df0a3c](https://github.com/leon-ai/leon/commit/82df0a3c235fbd50ad0cfe12e23a51f777dcd658)
- achieve Cartesian training [a1e9011d](https://github.com/leon-ai/leon/commit/a1e9011d5db48ed8e9f49cef2d813ee7e2400ec2)
- introduce suggestions [dcddacca](https://github.com/leon-ai/leon/commit/dcddacca2956529de0aea8ff98e1e6f16104966a)
- communicate suggestions to the client [4b5a8835](https://github.com/leon-ai/leon/commit/4b5a883510fd4421a491f999cc21d8f7dd369a03)
- shared skills memory [795acc5b](https://github.com/leon-ai/leon/commit/795acc5bdd29e9a27d1cf3b4407453648d573973)
- support dynamic variables on skill NLU settings for logic type [10d10a16](https://github.com/leon-ai/leon/commit/10d10a1690cb65970932ee7230e3f324ec67dbce)
- tmp resolvers mapping [b1a332ba](https://github.com/leon-ai/leon/commit/b1a332bab6af8b74a8c58c07bac3ef3a1cebad89)
- start to map resolvers between the core and skills [e88495a9](https://github.com/leon-ai/leon/commit/e88495a9a94e86026fd0c7c4c44f3ff06edb2e80)
- train affirmation and denial resolver [993d52e8](https://github.com/leon-ai/leon/commit/993d52e8686f335039ff3d5e2a82c1a37efb1825)
- Python TCP server and Node.js TCP client for IPC (wip) [5970ec9e](https://github.com/leon-ai/leon/commit/5970ec9e8e4c2784c50e2ddc76b34b71aa4310e6)
- introduce spaCy for complete NER (wip) [caa86fc8](https://github.com/leon-ai/leon/commit/caa86fc8a6850b18f67ba7bedb423be693a88d17)
- slot filling (wip) [76547d94](https://github.com/leon-ai/leon/commit/76547d9411c32e0eb2ccfdac3a4901d2d2fb37f6)
- share data across domains [f4f9fff9](https://github.com/leon-ai/leon/commit/f4f9fff9783861be183990d7869973c7a30c8104)
- dynamic variable binding on NLG [0367b44f](https://github.com/leon-ai/leon/commit/0367b44f211c1629fffe6981a730f171707bf0c0)
- context and slot filling preparation (wip) [975b8ebc](https://github.com/leon-ai/leon/commit/975b8ebcf00db91b44dd067be6dde5c1bf32fff1)
- annotate entities on the fly + prepare for dialog skill type and cross-domains data [4107932d](https://github.com/leon-ai/leon/commit/4107932d000086188d6f44ef67b73cc322fc58e5)
- new NLP training [d8023308](https://github.com/leon-ai/leon/commit/d8023308d0ef1f3eede37f21f45daa2f893031b0)
- **server:**
- trigger next action suggestions or current ones [244d08c0](https://github.com/leon-ai/leon/commit/244d08c0bd0fea315269f52ab899f9b7fe083f51)
- introduce main NLP model and resolvers NLP model [e37526d9](https://github.com/leon-ai/leon/commit/e37526d9056d858ebcf17b81f6714f47b67c77cb)
- change log emojis [843bc428](https://github.com/leon-ai/leon/commit/843bc428b8deb397e2d051a8e0bfaf1b82b459a2)
- provide nextAction even when no slot is set and clean up NLU object on context switch [8377c63d](https://github.com/leon-ai/leon/commit/8377c63db4e4e42ed929171cd8b9abdb13c44b2a)
- report full traceback from skills execution [b69b1fea](https://github.com/leon-ai/leon/commit/b69b1fea16250421bc7d5def1c973dd43e453071)
- support on-the-fly entity annotation for built-in entities [567b030c](https://github.com/leon-ai/leon/commit/567b030c4fcf8df266c39cca61a146fb33b9e0fc)
- save slots within conversation context [fce47cdb](https://github.com/leon-ai/leon/commit/fce47cdbd570993ac5cca2b4ff5bc97969df4e40)
- resolve resolvers tmp [ceea47ff](https://github.com/leon-ai/leon/commit/ceea47ff7dd536bfd3adf3cc355e90e3e94b1cbd)
- prepare the next action on non-slot-filled skills [0acb31a9](https://github.com/leon-ai/leon/commit/0acb31a9c61c1c094b29f3d0ff2647d625eab0be)
- add more affirmative utterance samples [870ab2e8](https://github.com/leon-ai/leon/commit/870ab2e87eba2c548d38dc90d30553e7fa380c1e)
- restart a skill with the original utterance saved in context [f4446ef1](https://github.com/leon-ai/leon/commit/f4446ef17796d38d0f98d5b7e889503622a1a998)
- clean up context if the action loop does not meet the expected items [035c9d52](https://github.com/leon-ai/leon/commit/035c9d5240472ac19a84ae8c1a87844fa0d0af5d)
- add handsigns custom entity [1529c720](https://github.com/leon-ai/leon/commit/1529c72039092c7b8f37304d6064e04f2dc7b795)
- reprocess NLU in case of slot filling interruption [9e242d77](https://github.com/leon-ai/leon/commit/9e242d77d32109e9355eec422790a5a66fd18f9c)
- handle action loop when slots have all been filled at once [f8830502](https://github.com/leon-ai/leon/commit/f88305020a5bc79056b7ff9c1a31f8d3c3a7cdce)
- break the action loop from the skill [27dc801c](https://github.com/leon-ai/leon/commit/27dc801cf53de5af3d54b95f42d2b9e627090867)
- stop action loop from skill to core [99681e25](https://github.com/leon-ai/leon/commit/99681e257795a18361be379b93244088401f640b)
- introduce basic concept of action loop [c5b38400](https://github.com/leon-ai/leon/commit/c5b38400821e5bc5edc4402d007f815f24319d44)
- prepare action loop feature [19e1aa22](https://github.com/leon-ai/leon/commit/19e1aa22f6e989e90eb745e3a7b7ccb8ff5adbfa)
- add current utterance entities to differentiate from the whole context [8b56a185](https://github.com/leon-ai/leon/commit/8b56a1850c9d76e335f1bad1b4395d73ddc5ea19)
- when a context is activated, pick up the most probable classification [8e186879](https://github.com/leon-ai/leon/commit/8e1868798c8750c19b1719a44dc6fb8bca68b250)
- persist entities into contexts [87575773](https://github.com/leon-ai/leon/commit/875757739f6701f54805eeff2c7c350cff36c4ac)
- forward slots to skill + add original utterance [68e40f65](https://github.com/leon-ai/leon/commit/68e40f65df0d1fe29ccad991868a2408c6e1015e)
- handle case when all slots have been filled in one utterance [22e9234b](https://github.com/leon-ai/leon/commit/22e9234b3d2c97e83eaafaeeb5aa9d27c351c95a)
- trigger next action once all slots have been filled [9b870010](https://github.com/leon-ai/leon/commit/9b870010dd929bc1aed6d87696f1cc4e9f177c0b)
- complete slot filling before triggering the next action [9124687e](https://github.com/leon-ai/leon/commit/9124687eb0e17295a30f860752ee622ba44d1440)
- from modules to skills with type at the actions level [77ebaf4a](https://github.com/leon-ai/leon/commit/77ebaf4a9c78b2e471d39872e361ea05b163580d)
- verify if all slots are filled [e27c1b9c](https://github.com/leon-ai/leon/commit/e27c1b9c8f5c2f668f464f152ad227d65ba5ef6b)
- context and slot filling, keep context and await for entities [25adf406](https://github.com/leon-ai/leon/commit/25adf406c810e48b1277105dd6c269a2ed601d28)
- unstack oldest context [1ece25a4](https://github.com/leon-ai/leon/commit/1ece25a497acc9f9876fe158ace5da38beec31e6)
- context setup with slot for each conversation (wip) [8257eb87](https://github.com/leon-ai/leon/commit/8257eb8792c9f4fc90bcc1b393d3fddf8ff541dc)
- resolve slots from slot filling [960a6dc7](https://github.com/leon-ai/leon/commit/960a6dc71c2efb50ad6a8448d447ebd79c559c41)
- pickup questions for slot filling [3bbc2f8a](https://github.com/leon-ai/leon/commit/3bbc2f8a254d10f0c37cdb7abf016b3e418f594a)
- main slots structure (wip) [1d9b1809](https://github.com/leon-ai/leon/commit/1d9b18093b6e042ae49f557149a7822b4420cdb8)
- introduce resolvers for slot filling (wip) [334bf393](https://github.com/leon-ai/leon/commit/334bf393f2c43edd326d9de2e93c037ffeebeab5)
- slot filling PoC (tmp wip) [95bfcfe4](https://github.com/leon-ai/leon/commit/95bfcfe422f21a2946e50031a3623675dfe81b9d)
- slot filling (wip) [969a83e6](https://github.com/leon-ai/leon/commit/969a83e6081de20ec5e2bdd0329a21a3fe448f13)
- trigger unsupported language [1845eed7](https://github.com/leon-ai/leon/commit/1845eed71dadd5f693d76abd7633864014bf8af1)
- context (wip) [d1c2a11d](https://github.com/leon-ai/leon/commit/d1c2a11d8284ca4e1d4563b871c50c006e8ef8a0)
- context (wip) [a9a43ac4](https://github.com/leon-ai/leon/commit/a9a43ac478c46f3832d2af49c287bb574a70cc14)
- differentiate cities from countries for location entities [bf9bf231](https://github.com/leon-ai/leon/commit/bf9bf231f714e1edc1417e43af12fa54c00ba064)
- auto restart the TCP server when language is switching [9be7c700](https://github.com/leon-ai/leon/commit/9be7c700767672ac6e0c875d3b5ae7fa6414e4fa)
- support multi languages on TCP server [a808742c](https://github.com/leon-ai/leon/commit/a808742c927d45c18df45af133e67c98d4a0415a)
- add auto reconnect on TCP client [cbe89ed6](https://github.com/leon-ai/leon/commit/cbe89ed6ccfd727356eb34078a8a4348b2fd696f)
- make TCP client global [006e9fb0](https://github.com/leon-ai/leon/commit/006e9fb01148c2107f6acc6a562ace4809da92be)
- fully implement low-level networking for IPC [8acb82da](https://github.com/leon-ai/leon/commit/8acb82da9bacdb9b7952c4a4d130d094e07def5e)
- more accurate NLG [d5577b1e](https://github.com/leon-ai/leon/commit/d5577b1ef5cf1b8b4a924636ba4425b8b4ae133d)
- unknown_answers fallback on dialog type [28efe6e7](https://github.com/leon-ai/leon/commit/28efe6e7d542f19bf12ddede1815f7fa8cf01036)
- deep data mapping on enum NER [3ca48265](https://github.com/leon-ai/leon/commit/3ca48265e7115c8e0f02c65ba92d90412325ad76)
- NLG and entities mapping [8f2f935b](https://github.com/leon-ai/leon/commit/8f2f935b949ceb965941460d4ff1ed0084b72442)
- bootstrap skill structure [fe90c68e](https://github.com/leon-ai/leon/commit/fe90c68ea0e9b0e857b62aa9f3b0a42ba1ffed6b)
- on-the-fly language switching [f24513a2](https://github.com/leon-ai/leon/commit/f24513a22395d1903e485883f4813cdceccdbd18)
- new NLP containers [34b2aa56](https://github.com/leon-ai/leon/commit/34b2aa5655e55284d59db4569960c49965a0483c)
- (WIP) NLU refactoring [ca3f5f42](https://github.com/leon-ai/leon/commit/ca3f5f42da26eb634e10b56e9b84bd45b5543024)
- add skills domains [cf2a28aa](https://github.com/leon-ai/leon/commit/cf2a28aac2d936cc15e6aa9aa13747015d952053)
- **skill/akinator:**
- finish up [79e7df02](https://github.com/leon-ai/leon/commit/79e7df022f7daedf43db7f892e049a31924ce985)
- finished main business logic [76cae42f](https://github.com/leon-ai/leon/commit/76cae42fdeac0edcd3ebd6aa7718728617687b1b)
- backbone [02a2f714](https://github.com/leon-ai/leon/commit/02a2f71470bb4c0c6ca04526e89461d863d17145)
- **skill/birthday:**
remove birthday skill [be0b345d](https://github.com/leon-ai/leon/commit/be0b345d3f7fea562548e3fbed62b65c32eff4c0)
- **skill/color:**
introduce color skill [ce00989b](https://github.com/leon-ai/leon/commit/ce00989b01f65c5cbb5a2e13f454207c1ba7741c)
- **skill/guess_the_number:**
introduce the Guess the Number skill [fba80966](https://github.com/leon-ai/leon/commit/fba80966c937a32182e48670c47358babb539d64)
- **skill/introduction:**
- add one utterance sample [af0fdd1e](https://github.com/leon-ai/leon/commit/af0fdd1e18975bf8b60abb2957ddf79831281817)
- ask about owner info if necessary [c5cc9bdd](https://github.com/leon-ai/leon/commit/c5cc9bdd52afaaa710f9476d1e9918f3d168e243)
- **skill/mbti:**
- complete form resolver [aad9f3f1](https://github.com/leon-ai/leon/commit/aad9f3f1ef61499d438ea40c9d2d95764667678d)
- finish business logic [99a3f103](https://github.com/leon-ai/leon/commit/99a3f103e00b5a58745ee851d2fa95c61871f75a)
- questions mapping [ae4f69f7](https://github.com/leon-ai/leon/commit/ae4f69f7c7189ff75e004f68c9a2a8b6bb37b6bd)
- complete questionnaire [7f1f8871](https://github.com/leon-ai/leon/commit/7f1f8871598746c5475b24e086ea6e581f2a988e)
- main logic backbone [33109a4c](https://github.com/leon-ai/leon/commit/33109a4c8b5df82e7b98e48e66f8d53f0cc114fb)
- main NLU structure [skip ci] [86d5040a](https://github.com/leon-ai/leon/commit/86d5040a7dc2006036c7e67a2cf54a4c992e64aa)
- **skill/rochambeau:**
- add start answers [192dd0a8](https://github.com/leon-ai/leon/commit/192dd0a87ab5dc025bb90b20b187e36a58be54ea)
- introduce paper scissors rock [57370470](https://github.com/leon-ai/leon/commit/573704706c843d870f2498146bc3cd659bab4f06)
- init [7f5e30ac](https://github.com/leon-ai/leon/commit/7f5e30ac82f2a2d7579e361229a4044348915867)
- **web app:**
- join us on Discord [141c89ec](https://github.com/leon-ai/leon/commit/141c89ecbfd329a8e63d5a603d0ae6b42f9abf38)
- wait for TCP client to be connected first [bc228a68](https://github.com/leon-ai/leon/commit/bc228a68600c07871c489d6624bbc837971079a6)
### Bug Fixes
- check script with new intent-object format [fdf0a389](https://github.com/leon-ai/leon/commit/fdf0a389b76caba5dd47996a43a34c0c7821c70a)
- check new resolvers paths [cfd8f7cb](https://github.com/leon-ai/leon/commit/cfd8f7cbe5e8fd9ce3d1659c725d7af261db8d71)
- use ports.ubuntu.com mirror for the offline TTS [skip ci] [3dd90396](https://github.com/leon-ai/leon/commit/3dd9039678820fceb7ccbb1c96358c8d2f188ede)
- set skill config only when a bridge is set [7513aa7d](https://github.com/leon-ai/leon/commit/7513aa7d20fee1fe9ca5442a7909d22fd1c3b39e)
- only set skill config when it is a logic type [9ce9a8bc](https://github.com/leon-ai/leon/commit/9ce9a8bc4fe0864730a08d8e9a436982f1365aa5)
- **docker:**
- usage of Ubuntu base image with pyenv and nvm (#408) [f507f6f7](https://github.com/leon-ai/leon/commit/f507f6f7e499f56768b3e624164cbcd58193b153)
- check should not allocate a pseudo-TTY (#359) [4372b45f](https://github.com/leon-ai/leon/commit/4372b45fc605893d4130cf7110dd87519b934345)
- **server:**
- make leon handle multiple socket.io-client instances [6e7c0aac](https://github.com/leon-ai/leon/commit/6e7c0aac57008b152b45f1b0f3886ae38777467b)
- fallback on global resolver during resolver classification [ec77dd0f](https://github.com/leon-ai/leon/commit/ec77dd0f02a8ae94fb3f02c7b7847b5509d71406)
- make use of current entities to match global entities [a8d82050](https://github.com/leon-ai/leon/commit/a8d82050c86b5c24c4c898c06e5ffc3882524c0b)
- multiple slots filling [2ac1bc63](https://github.com/leon-ai/leon/commit/2ac1bc63ccd11757d586adfb2e75ce04e3ffbcb5)
- context switching on action loop [6712ae55](https://github.com/leon-ai/leon/commit/6712ae5539ef44ed33e360cfcad71c760c4b13b1)
- check one-shot slot filling case causing infinite loop [782a3aaa](https://github.com/leon-ai/leon/commit/782a3aaa0a07dda667557bc84db906b3fa9b237c)
- clean up active context after all slots have been filled [faabc2c7](https://github.com/leon-ai/leon/commit/faabc2c7b0992fcea035eedf66103d84b101e1a7)
- correctly extract all spaCy entities [6aa60bfb](https://github.com/leon-ai/leon/commit/6aa60bfbd8c72e678fe3faf5e7f9dbd37dfd209f)
- intent not found [8280c658](https://github.com/leon-ai/leon/commit/8280c65897dba0fe470a3589d151b391c51e344e)
- fallback due to modules to skills refactoring [ef0c54b2](https://github.com/leon-ai/leon/commit/ef0c54b22667ef2bd1d2c07003f6b4beb5fa25c0)
- NER due to modules to skills refactoring [e4d3904c](https://github.com/leon-ai/leon/commit/e4d3904ceeb2a3ee2c0187a1817331fac916e1a7)
- **skill/akinator:**
remove direct end on guess action [f6461f73](https://github.com/leon-ai/leon/commit/f6461f733b4a5d944dfa4a987dd1109628c6cbca)
- **skill/color:**
more appropriate answer [cb18ed63](https://github.com/leon-ai/leon/commit/cb18ed6397cb0e0ad8fbea30c57d7d40137441ee)
- **skill/rochambeau:**
final logic [0ebc0518](https://github.com/leon-ai/leon/commit/0ebc0518e61b899c35dd13df65a43f69399e784d)
### Performance Improvements
- check Pipfile instead of Pipfile.lock to judge whether Python packages must be installed [afdb71f7](https://github.com/leon-ai/leon/commit/afdb71f766f2956c5cb4a5e0be9025340d1a89db)
### Documentation Changes
- change newsletter link [4bf2a9af](https://github.com/leon-ai/leon/commit/4bf2a9af963f75aeff96f4a43da8ec1024ac583a)
- README - Edited sentence for clarity (#389) [e83a1c42](https://github.com/leon-ai/leon/commit/e83a1c4230897e8b63251ef86225cf773148c38e)
- edit newsletter link [fa558a44](https://github.com/leon-ai/leon/commit/fa558a447ade4071f352d56f14602690ed90f521)
- update sponsor [skip ci] [f30ddb6b](https://github.com/leon-ai/leon/commit/f30ddb6be5f531df2b0042be0ed5ffbe79f73b07)
- remove sponsor [skip ci] [5dbc010f](https://github.com/leon-ai/leon/commit/5dbc010fa643279a24081f3148022e2211af63f4)
- remove sponsor [skip ci] [f36dd20f](https://github.com/leon-ai/leon/commit/f36dd20f822cd33c9e8a03efc2849c8d8d1fc75e)
- remove sponsor [skip ci] [5ee57ddf](https://github.com/leon-ai/leon/commit/5ee57ddf2a9f7817ec35b2e70d49e5bb422d8f78)
- add @ant-media sponsor [skip ci] [b47cbc3a](https://github.com/leon-ai/leon/commit/b47cbc3a5ecb6591f7abb4f62feae8102b9a6468)
- add long dev notice to README [skip ci] [499be77d](https://github.com/leon-ai/leon/commit/499be77d509231b853f591e27f726381da5a50d8)
- move sponsor to new section [skip ci] [8825d687](https://github.com/leon-ai/leon/commit/8825d6877c19d86495e89a858b859b7ab1f9ae37)
- change Twitter handle [skip ci] [c1afc11c](https://github.com/leon-ai/leon/commit/c1afc11cdb283526540d0fecdf83efddf3f3a9f7)
- remove sponsor [skip ci] [99b401a6](https://github.com/leon-ai/leon/commit/99b401a668a6fb248e33c22782940402be7c9b17)
- add new sponsor self-hosted img [skip ci] [238d928c](https://github.com/leon-ai/leon/commit/238d928cace13d4ecd174ca14b136967d8845e0f)
- remove new sponsor link (broken) [skip ci] [254f2848](https://github.com/leon-ai/leon/commit/254f2848aab622b79cce16d10c58d53ff6db9a8f)
- in GitHub BUG.md from modules to skills [4a5480a3](https://github.com/leon-ai/leon/commit/4a5480a3ccc54ee34d42f6edcec2a40224dee7ed)
- change @FluxIndustries sponsorship [skip ci] [1a118b71](https://github.com/leon-ai/leon/commit/1a118b718e5d4ade123756ac94758a01c50b12ae)
- add @FluxIndustries sponsor [skip ci] [9a604d7c](https://github.com/leon-ai/leon/commit/9a604d7ccc0c6aaec257299078141dd0c3077933)
- new #LeonAI link [skip ci] [a0107d62](https://github.com/leon-ai/leon/commit/a0107d629473f7fd057d367926e83822d46f1227)
- changelog new version diff link fix [skip ci] [e14c2498](https://github.com/leon-ai/leon/commit/e14c249826db92af7b85422e566be6aa834a7fb7)
# [1.0.0-beta.6](https://github.com/leon-ai/leon/compare/v1.0.0-beta.5...v1.0.0-beta.6) (2022-02-07) / Leon Over HTTP + Making Friends with Coqui STT
### Features
- simple coqui-ai stt integration [86a4816b](https://github.com/leon-ai/leon/commit/86a4816b777fee8ec9c89648c5866a75de56c017)
- HTTP API key generator [d10a7fa7](https://github.com/leon-ai/leon/commit/d10a7fa7880a0bf2fb1cae7904d1ef4257f05257)
- avoid unnecessary routes generation
- **server:**
- make Coqui STT the default STT solution [70399187](https://github.com/leon-ai/leon/commit/7039918760c0ef7ba93bf45820e3cae774c42d8c)
- add HTTP API key middleware [cdf41499](https://github.com/leon-ai/leon/commit/cdf4149939cbe3f3ae81039957dba3377a78f5a6)
- expose queries over HTTP [b6428d03](https://github.com/leon-ai/leon/commit/b6428d038452619f1682c863892cd8f376efca84)
- add timeout action over HTTP [115f9c16](https://github.com/leon-ai/leon/commit/115f9c164559d761625cc6f362749f7d2417d300)
- handle built-in and trim entities over HTTP + add "disabled" HTTP API action option [82fb967a](https://github.com/leon-ai/leon/commit/82fb967af8f49421e3b2474184da3d34fb17294f)
- execute modules over HTTP [2e5b2c59](https://github.com/leon-ai/leon/commit/2e5b2c59da0bafe3acd966773c6fac3611b3bd0c)
- generate Fastify routes on the file to expose packages over HTTP [5b41713a](https://github.com/leon-ai/leon/commit/5b41713a68ee628e695212dbebc88f6b9a94b461)
### Bug Fixes
- do not ask to regenerate the HTTP API key if this one isn't available yet [d265377a](https://github.com/leon-ai/leon/commit/d265377a43fd4506cf12db46f261b891f2054ed2)
- Python deps tree check [c6c01291](https://github.com/leon-ai/leon/commit/c6c012915824227efdf0c50df6a8f1cd8d70ed42)
- hotword offline (#342) [f563d01d](https://github.com/leon-ai/leon/commit/f563d01d077499c836e94c86f85cedc2ad4d56e6)
- addressed comments by @JRMeyer [b1c6f5c8](https://github.com/leon-ai/leon/commit/b1c6f5c883103d57d4fe566af640fc3ac5ce713d)
- allow to detect STT offline capabilities [04d62288](https://github.com/leon-ai/leon/commit/04d622884165e0bde65785569a659f59cf9e8582)
- Amazon Polly is always configured on check script due to new structure [e6246d1f](https://github.com/leon-ai/leon/commit/e6246d1f8f9ec15a4ebe9600764afffbaa7e62d9)
### Performance Improvements
- check if Python deps tree has been updated before going through deps install [2d0b0f13](https://github.com/leon-ai/leon/commit/2d0b0f1365d8e4d6eadf9f7cc0a16b7b4b4306f4)
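Since this release exposes skill actions and queries over HTTP behind an API key middleware, a third-party client call might look roughly like the sketch below. The base URL, the `/api/v1/query` route and the `X-API-Key` header name are assumptions for illustration only; check the generated routes and your `.env` (e.g. `LEON_HTTP_API_KEY`) for the real values.

```ts
// Hedged sketch: querying Leon's HTTP API from a third-party client.
// The '/api/v1/query' route and the 'X-API-Key' header name are assumptions,
// not confirmed by this changelog; adjust them to your generated routes.
import axios from 'axios'

const LEON_URL = 'http://localhost:1337' // assumed default host/port
const API_KEY = process.env.LEON_HTTP_API_KEY ?? '' // key produced by the HTTP API key generator

async function askLeon(utterance: string): Promise<unknown> {
  const { data } = await axios.post(
    `${LEON_URL}/api/v1/query`,
    { utterance },
    { headers: { 'X-API-Key': API_KEY } }
  )
  return data
}

askLeon('What time is it?').then(console.log).catch(console.error)
```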
# [1.0.0-beta.5](https://github.com/leon-ai/leon/compare/v1.0.0-beta.4...v1.0.0-beta.5) (2021-12-28) / Refocus
*This release marks a major turn in the future versions of the Leon core. Please [read this blog post](https://blog.getleon.ai/i-ran-away-from-open-source/) to know more.*
### BREAKING CHANGES
- Node.js 16+ and npm 8+ minimum requirements [2f66f1c1](https://github.com/leon-ai/leon/commit/2f66f1c17bb2e4a1c18b4251d49de252b8d87344)
### Features
- **server:** support arrays on NER between conditions [7cf7f979](https://github.com/leon-ai/leon/commit/7cf7f9791254e1950fe9128ce1b3a58079cc2ada)
### Bug Fixes
- jest-extended new setup due to latest update [02f766d6](https://github.com/leon-ai/leon/commit/02f766d6a8453609ebaec78356aa6e6d4df0967b)
### Performance Improvements
- Windows setup on DeepSpeech dep removal [13f5a49f](https://github.com/leon-ai/leon/commit/13f5a49f678f8f67a93b67d4f558cddcf237e204)
### Documentation Changes
- URL redirect managed by registrar [c16d5b28](https://github.com/leon-ai/leon/commit/c16d5b280b758f7e18305e30678adec79f0a0716)
# [1.0.0-beta.4](https://github.com/leon-ai/leon/compare/1.0.0-beta.2...v1.0.0-beta.4) (2021-05-01) / Getting Rid of Dust
*This release includes a lot of changes that are made under the hood and are not displayed here, please **[read the blog post](https://blog.getleon.ai/getting-rid-of-dust-1-0-0-beta-4/)** to know more.*
### BREAKING CHANGES
- **package/checker:** introduce Have I Been Pwned v3 API with API key ([0ca89fe3](https://github.com/leon-ai/leon/commit/0ca89fe32d51c80cec5f9446acf14990390a5917))
- **server:**
- AWS SDK new structure due to v3 and adapt Amazon Polly changes ([f15f2db7](https://github.com/leon-ai/leon/commit/f15f2db78e5781d05e5e2bcb186645966d17debf))
- IBM Watson TTS and STT new structure ([f41ea0e9](https://github.com/leon-ai/leon/commit/f41ea0e9a1479bfd6a1cb2e8d1f70aec744c685b) | [2668c295](https://github.com/leon-ai/leon/commit/2668c295880ee753ef7ca26a91dbc7e0901febff))
### Features
- **package/calendar:** introduce To-Do list module ([0cdd73d6](https://github.com/leon-ai/leon/commit/0cdd73d6c24a287915f691e3b12edacd75fd383a) | [857be947](https://github.com/leon-ai/leon/commit/857be947792c650ac35847e14fc41064008cef24) | [2041be14](https://github.com/leon-ai/leon/commit/2041be14dbc01640a61de96d1982cc20cd05a8b3) | [12e8f5c3](https://github.com/leon-ai/leon/commit/12e8f5c3bfb436aa212557cd99d9926aa431ab4f) | [8575e9e3](https://github.com/leon-ai/leon/commit/8575e9e3ef01499d9f7be6d313a85d48549e9107) | [5e128df0](https://github.com/leon-ai/leon/commit/5e128df023977525de3e66ce2826aace87569308) | [602aa694](https://github.com/leon-ai/leon/commit/602aa694ac49333f48c119cf2ca2aa7f54b8ae44) | [b9693df9](https://github.com/leon-ai/leon/commit/b9693df90cbc01067e18e64db4d377e41b3fd1d4) | [581da8cd](https://github.com/leon-ai/leon/commit/581da8cd9806323aabb0e85778d645df3c0948b9) | [53f7db55](https://github.com/leon-ai/leon/commit/53f7db55c6e916751f1d59c239628d5ea8914009) | [ae073971](https://github.com/leon-ai/leon/commit/ae0739717b6a17373d8f9bc69571c67c1c571b4a))
- **package/checker:** introduce Have I Been Pwned module ([61c1b55a](https://github.com/leon-ai/leon/commit/61c1b55af5691c03f6a6dae0cf3f236a374f1fe7) | [5a999bc6](https://github.com/leon-ai/leon/commit/5a999bc63aa0c667c4e3092daac6a05a6c4b4499) | [36368664](https://github.com/leon-ai/leon/commit/36368664fce8bcf0c17c4c83818aeb418f1e2f23) | [a7a6d885](https://github.com/leon-ai/leon/commit/a7a6d885a83455163eeca74a355177d65db156b8) | [c73ba52b](https://github.com/leon-ai/leon/commit/c73ba52ba8575a64b3329e59a50050d15281d0ec) | [8374e548](https://github.com/leon-ai/leon/commit/8374e5481022de9b134f49180a8dfe28db136261) | [a476fd0f](https://github.com/leon-ai/leon/commit/a476fd0f38f18bf8035db213be2c55f83871038d))
- **package/network:** add speedtest module ([09ad4340](https://github.com/leon-ai/leon/commit/09ad43406d3df8ca65f385a91c159def51f91811))
- **server:**
- add regex entity type [3fda3526](https://github.com/leon-ai/leon/commit/3fda3526c7425bdea4b669474fa77efd61c06a8e)
- catch unsupported action entity type [5bc6c3f1](https://github.com/leon-ai/leon/commit/5bc6c3f116d6b9ece2cc3bebdbdb08f019ee90b9)
- NER backbone [24cf3c9a](https://github.com/leon-ai/leon/commit/24cf3c9a4facd05a4c626ff9d2e7c83a5ae15298)
- introduce actions module [b449376f](https://github.com/leon-ai/leon/commit/b449376f61dc995e2e264c6a14ba123926f5cc58)
### Bug Fixes
- set correct status code for GET /downloads [690f1841](https://github.com/leon-ai/leon/commit/690f1841d681a1e48e1837e3e166228d6c2ddaf6)
- take `.env` in consideration when using Docker [d38e6095](https://github.com/leon-ai/leon/commit/d38e6095f9b71467b8486430fba4bb7007ec4c5a)
- spinner test [9071c927](https://github.com/leon-ai/leon/commit/9071c92790be674687590e4a896bbf44bc26fb43)
- e2e tests by adding modules actions level [5cf77d90](https://github.com/leon-ai/leon/commit/5cf77d9011a80b326f229b2309a6910ac0f1cfa2)
- **package/leon:** fix english translations [90225707](https://github.com/leon-ai/leon/commit/90225707f94154021cadeb9c61bdc48c3de5aa29)
- **package/network:** make use of new compatible speedtest lib [0c925626](https://github.com/leon-ai/leon/commit/0c925626df65858fa039972b3f3d5f38fde93eb6)
- **package/trend:**
- GitHub module new scraping [68414937](https://github.com/leon-ai/leon/commit/6841493740ca859000c1fd8d692b73fc79fcf500)
- when there is no star provided on the GitHub module [563fb409](https://github.com/leon-ai/leon/commit/563fb40955e2deb5c6d0bd064fc9cc8766a6fcaf)
- **server:**
- make use of Basic plugin from the main NLP container [e1d5bed3](https://github.com/leon-ai/leon/commit/e1d5bed3e688db566a0cb803dda5c2d57c599d8c)
- NER trim entity on after conditions [fa6a5a43](https://github.com/leon-ai/leon/commit/fa6a5a43a60b493aa403a44957082382494c129b)
### Documentation Changes
- add minimum Pipenv version requirement to README [72e46bd6](https://github.com/leon-ai/leon/commit/72e46bd6c175a4a149fb6b14522823b224d7c152)
- hunt broken links [b2a22792](https://github.com/leon-ai/leon/commit/b2a2279243e7566b57fb7f696024bdf08294e853)
- add "ci" commit type in CONTRIBUTING.md [09e2672b](https://github.com/leon-ai/leon/commit/09e2672b0b399f5ce9dd7cd446d04f4d6fd7c13a)
- use emojies in README [0ea7a78b](https://github.com/leon-ai/leon/commit/0ea7a78b7c94dc44c992913ae1c90fb1cf8a7692)
- add social badges to README [c55c7532](https://github.com/leon-ai/leon/commit/c55c7532b25bf420c4819be71b0f9c21ccc58711)
- Node.js 14 requirement [d1034bd1](https://github.com/leon-ai/leon/commit/d1034bd135fd5a6314a1571d4088fd85a8e6a1da)
# [1.0.0-beta.2](https://github.com/leon-ai/leon/compare/1.0.0-beta.1...1.0.0-beta.2) (2019-04-24)
### Features
- can send custom HTTP headers
([2685cdab](https://github.com/leon-ai/leon/commit/2685cdab07cc1a9ea418eab812e5163d2dd0da90))
- allow HTML output
([ec3f02df](https://github.com/leon-ai/leon/commit/ec3f02dfaf2f4b7623ce350350ebee28cf18740e))
- NLU improvement with node-nlp
([6585db71](https://github.com/leon-ai/leon/commit/6585db718ccae1d750a35783075cf61cc8fe84f1))
- **package/trend:**
- add answer when the Product Hunt developer token is not provided
([f40b479b](https://github.com/leon-ai/leon/commit/f40b479b295247c5a8a0e6ed81afe56fadfd2730))
- Product Hunt module done
([37794306](https://github.com/leon-ai/leon/commit/3779430621bef970be0e8d048eb0b4bf160ae8a4))
- basics done on the Product Hunt module
([32cc7dbe](https://github.com/leon-ai/leon/commit/32cc7dbe36592fb9618d9c10da5f05a4be7e41b6))
- complete dedicated answers according to the technology and given time
([8997d691](https://github.com/leon-ai/leon/commit/8997d6917445f837c9647a5a9b4d6998d2df4952))
- GitHub module done
([7c6f3922](https://github.com/leon-ai/leon/commit/7c6f3922f299193ee0fb54d0fc97f8b436fc706b))
- be able to choose a limit and a date range for the GitHub module
([3c088371](https://github.com/leon-ai/leon/commit/3c0883716e1c10371c399843a578095a1e16781d))
- format GitHub results in one message
([9d026b94](https://github.com/leon-ai/leon/commit/9d026b94efa8871d421ae2b593b96622a98537ac))
- simple GitHub module results
([5baec074](https://github.com/leon-ai/leon/commit/5baec07455f453d4ad003f1da360b2663b7e15e0))
- list GitHub trends in HTML raw
([3441629e](https://github.com/leon-ai/leon/commit/3441629e3cde933b322cb114d9f1bc3ef0eb3944) | [6b932e94](https://github.com/leon-ai/leon/commit/6b932e947fc365ea6435fda798b7cca32708b443))
- expressions dataset and structure
([f406a5a0](https://github.com/leon-ai/leon/commit/f406a5a09894e12c56a1e76dda609adada00b0d7) | [f54c2272](https://github.com/leon-ai/leon/commit/f54c2272b4b4dc5c56b512b0ccc1519d77ef15a3))
### Bug Fixes
- Leon was not fully installed with Docker if a `.env` file was existing
([c8a68ab0](https://github.com/leon-ai/leon/commit/c8a68ab02eec9ddaf803b6e36cd7e91a4989cdea))
- **package/trend:**
when there is no contributor on GitHub module
([d845e49b](https://github.com/leon-ai/leon/commit/d845e49b0f18caeb306e2d399c50a03883b2f55d))
- **server:**
- skip Pipenv locking until they fix it
([029381e3](https://github.com/leon-ai/leon/commit/029381e3256933f37f5c2950c4eb1f0192f55ec6) | [ecfdc73f](https://github.com/leon-ai/leon/commit/ecfdc73f8290dd9e1910df9519095516a1227763))
### Documentation Changes
- add `What is Leon able to do?` section in the readme
([87f53c91](https://github.com/leon-ai/leon/commit/87f53c91368141966959f3ad7299bb7b643828a5) | [d558fc8b](https://github.com/leon-ai/leon/commit/d558fc8b7c6494babf5dec799802227f77c33d8a))
- open-source != open source
([16a9372e](https://github.com/leon-ai/leon/commit/16a9372e05d4d31a7a39a65a52d4708b72499d4c) | [2155cd88](https://github.com/leon-ai/leon/commit/2155cd88decbbd671bd58840291d9330ce06ebba))
# [1.0.0-beta.1](https://github.com/leon-ai/leon/compare/1.0.0-beta.0...1.0.0-beta.1) (2019-02-24)
### Features
- add Docker support
([209760db](https://github.com/leon-ai/leon/commit/209760dba747001300692fb6a6af97543de584d6))
### Bug Fixes
- **package/checker:**
isitdown module fails with capital letters in URL
([ada6aaef](https://github.com/leon-ai/leon/commit/ada6aaef4bada47e87d28f9f6eaa05b9e23f58d2))
- **web app:**
enable environment variables
([a438d6f9](https://github.com/leon-ai/leon/commit/a438d6f942812f74e3dda75a9875609f8bea21cd))
### Performance Improvements
- **web app:**
favicon compression
([33dbcb42](https://github.com/leon-ai/leon/commit/33dbcb425eaafba90176ff64e5f689eb36bc6ce1))
### Documentation Changes
- update README to make the reader genderless
([58662658](https://github.com/leon-ai/leon/commit/586626586b7a2f84cb2cd84028111976bc5172f0))
- use "to rule them all" in README
- use "to rule them all" in README
([c74dda4c](https://github.com/leon-ai/leon/commit/c74dda4cb9acc78de143ae01fdc6b4ef0a5ec3ef))
- **readme:**
add story write-up
([08a68e37](https://github.com/leon-ai/leon/commit/08a68e376b6a9367425947380564120943376500))
# [1.0.0-beta.0](https://github.com/leon-ai/leon/compare/https://github.com/leon-ai/leon.git...1.0.0-beta.0) (2019-02-10)
Initial release.
@ -1,4 +1,4 @@
FROM ubuntu:latest
FROM ubuntu:20.04
ENV IS_DOCKER true
# Replace shell with bash so we can source files
@ -15,18 +15,23 @@ RUN apt-get update && apt-get install --yes -q --no-install-recommends \
curl \
git \
wget \
libssl-dev \
zlib1g-dev \
libbz2-dev \
libreadline-dev \
libsqlite3-dev \
llvm \
libncurses5-dev \
xz-utils \
tk-dev libxml2-dev \
libxmlsec1-dev \
libffi-dev \
liblzma-dev
libssl-dev \
openssl \
libz-dev \
zlib1g-dev \
libbz2-dev \
libreadline-dev \
libsqlite3-dev \
llvm \
libncurses5-dev \
xz-utils \
tk-dev libxml2-dev \
libxmlsec1-dev \
libffi-dev \
liblzma-dev \
libgdbm-dev \
libnss3-dev \
libc6-dev
# Run the container as an unprivileged user
RUN groupadd docker && useradd -g docker -s /bin/bash -m docker
@ -34,28 +39,14 @@ USER docker
WORKDIR /home/docker
# Install Node.js with nvm
ENV NVM_DIR /home/docker/nvm
ENV NODE_VERSION v16.17.0
ENV NVM_DIR /home/docker/.nvm
ENV NODE_VERSION v16.18.0
RUN curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.33.1/install.sh | bash
RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.2/install.sh | bash
RUN /bin/bash -c "source $NVM_DIR/nvm.sh && nvm install $NODE_VERSION && nvm use --delete-prefix $NODE_VERSION"
ENV NODE_PATH $NVM_DIR/versions/node/$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node/$NODE_VERSION/bin:$PATH
# Install Python with pyenv
RUN git clone --depth=1 https://github.com/pyenv/pyenv.git .pyenv
ENV PYENV_ROOT="/home/docker/.pyenv"
ENV PATH="${PYENV_ROOT}/shims:${PYENV_ROOT}/bin:${PATH}"
ENV PYTHON_VERSION=3.9.10
RUN pyenv install ${PYTHON_VERSION}
RUN pyenv global ${PYTHON_VERSION}
# Install Pipenv
ENV PYTHON_BIN_PATH /home/docker/.local/bin
ENV PATH="${PYTHON_BIN_PATH}:${PATH}"
RUN python -m pip install --user --force-reinstall pipenv virtualenv
ENV PATH $NVM_DIR/versions/node/$NODE_VERSION/bin:$PATH
# Install Leon
WORKDIR /home/docker/leon
README.md
@ -7,7 +7,7 @@
Leon
</h1>
*<p align="center">Your open-source personal assistant.</p>*
_<p align="center">Your open-source personal assistant.</p>_
<p align="center">
<a href="https://github.com/leon-ai/leon/blob/develop/LICENSE.md"><img src="https://img.shields.io/badge/license-MIT-blue.svg?label=License&style=flat" /></a>
@ -28,14 +28,47 @@
<a href="https://blog.getleon.ai/the-story-behind-leon/">Story</a>
</p>
<br>
<h2 align="center">📢 Notice 📢</h2>
<p align="center">
<a href="https://blog.getleon.ai/a-much-better-nlp-and-future-1-0-0-beta-7/"><img width="400" src="https://blog.getleon.ai/static/a62ac28a01cb6898e299dced40875a68/c1b63/beta-7.png" /></a>
<br>
Many exciting things are coming up, hence no new documentation or tests will be written until the official release of Leon. Feel free to <a href="https://discord.gg/MNQqqKg"><b>join us on Discord</b></a> to learn more and to read the <a href="https://blog.getleon.ai/a-much-better-nlp-and-future-1-0-0-beta-7/"><b>"A Much Better NLP and Future" blog post</b></a>.
</p>
<br><br>
---
## Current State
### Why is there a small amount of contributors?
I'm taking a lot of time to work on the new core of Leon due to personal reasons, and I can only work on it after work and on weekends. Hence, **I'm blocking any potential contribution as the whole core of Leon is coming with many breaking changes**. Many of you are willing to contribute to Leon (create new skills, help improve the core, translations and so on...). A big thanks to every one of you!
I wish I could work on Leon full time one day, but it's not possible at the moment as I need to pay my bills. I have some ideas about how to monetize Leon in the future (Leon's core will always remain open source), but there is still a long way to go before we get there.
Until then, any financial support by [sponsoring Leon](http://sponsor.getleon.ai) is much appreciated 🙂
### How about large language models and Leon?
Since AI has gained in popularity and large language models are getting more and more traction, many of you have joined our community. A huge welcome to all of you! 🤗
At the moment, Leon's NLU will remain intent-first, with his own model and without relying on an LLM. It is important that Leon can run 100% offline, and I'm confident that with downsizing techniques such as quantization, Leon will sooner or later work with LLMs at his core and still be able to run on the edge.
Here is how LLMs may help Leon in the future:
- Intent fallback: when an utterance cannot match an intent, then rely on an LLM to provide results.
- New named entity recognition engine: provide a better solution to extract entities from utterances such as fruits, numbers, cities, durations, persons, etc.
- Skill features: let skills leverage LLMs to provide out-of-the-box NLP features such as summarization, translation, sentiment analysis and so on...
- Skill building: LLMs can help to develop skills such as paraphrasing utterance samples, translate answers, convert code from our Python bridge to the upcoming JavaScript bridge and vice versa, etc.
- More...
### What's Next?
Once the new core is released, we'll work on the community aspect of Leon: for example, better organize [our Discord](https://discord.gg/MNQqqKg), plan regular calls, work on skills together, etc. It is very important for Leon to have a real community. By then, the skills platform will already be online, so it'll be easier to sync our progress and publish new skills.
- Feel free to check out the Git development branches and our [next major milestones](https://blog.getleon.ai/a-much-better-nlp-and-future-1-0-0-beta-7/#whats-next).
- And the [detailed roadmap](http://roadmap.getleon.ai).
- Many exciting things are coming up, hence no new documentation or tests will be written until the official release of Leon.
---
## Latest Release
Check out the [latest release blog post](https://blog.getleon.ai/binaries-and-typescript-rewrite-1-0-0-beta-8/).
<a href="https://blog.getleon.ai/binaries-and-typescript-rewrite-1-0-0-beta-8/"><img width="400" src="https://blog.getleon.ai/static/a0d1cbafd1968e7531dc17e229f8cc61/aa440/beta-8.png" /></a>
---
@ -52,10 +85,10 @@ If you want to, Leon can communicate with you by being **offline to protect your
### Why?
> 1. If you are a developer (or not), you may want to build many things that could help in your daily life.
> Instead of building a dedicated project for each of those ideas, Leon can help you with his
> Skills structure.
> 2. With this generic structure, everyone can create their own skills and share them with others.
> Therefore there is only one core (to rule them all).
> 3. Leon uses AI concepts, which is cool.
> 4. Privacy matters, you can configure Leon to talk with him offline. You can already text with him without any third party services.
> 5. Open source is great.
@ -63,21 +96,25 @@ If you want to, Leon can communicate with you by being **offline to protect your
### What is this repository for?
> This repository contains the following nodes of Leon:
>
> - The server
> - Skills
> - The web app
> - The hotword node
> - The TCP server (for inter-process communication between Leon and third-party nodes such as spaCy)
> - The Python bridge (the connector between the core and skills made with Python)
### What is Leon able to do?
> Today, the most interesting part is about his core and the way he can scale up. He is pretty young but can easily scale to have new features (skills).
> You can find what he is able to do by browsing the [packages list](https://github.com/leon-ai/leon/tree/develop/packages).
> You can find what he is able to do by browsing the [skills list](https://github.com/leon-ai/leon/tree/develop/skills).<br>
> Please do know that after the official release, we will build many skills along with the community. Feel free to [join us on Discord](https://discord.gg/MNQqqKg) to be part of the journey.
Sounds good for you? Then let's get started!
Sounds good to you? Then let's get started!
## ☁️ Try with a Single-Click
Gitpod will automatically setup an environment and run an instance for you.
Gitpod will automatically set up an environment and run an instance for you.
[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/leon-ai/leon)
@ -157,7 +194,7 @@ You'll find a write-up on this [blog post](https://blog.getleon.ai/the-story-beh
- [Blog](https://blog.getleon.ai)
- [GitHub issues](https://github.com/leon-ai/leon/issues)
- [YouTube](https://www.youtube.com/channel/UCW6mk6j6nQUzFYY97r47emQ)
- [#LeonAI](https://twitter.com/search?f=live&q=%23LeonAI%20(from%3Agrenlouis%20OR%20from%3Alouistiti_fr)&src=typed_query)
- [#LeonAI](<https://twitter.com/search?f=live&q=%23LeonAI%20(from%3Agrenlouis%20OR%20from%3Alouistiti_fr)&src=typed_query>)
## 👨 Author
@ -169,23 +206,22 @@ You'll find a write-up on this [blog post](https://blog.getleon.ai/the-story-beh
<tbody>
<tr>
<td align="center" valign="middle" width="128">
<a href="https://github.com/phareal">
<img src="https://github.com/phareal.png?size=128" />
phareal
<a href="https://github.com/Appwrite">
<img src="https://github.com/Appwrite.png?size=128" />
Appwrite
</a><br>
<sub><sup>30 USD / month</sup></sub>
<sub><sup>250 USD / month</sup></sub>
</td>
<td align="center" valign="middle" width="128">
<a href="https://antmedia.io/?utm_source=github&utm_medium=banner&utm_campaign=opensource&utm_id=1">
<img src="https://github.com/ant-media.png?size=128" />
Ant Media
</a><br>
<sub><sup>17 USD / month</sup></sub>
<img src="https://getleon.ai/img/anonymous.svg" width="128" />
Anonymous
<br>
<sub><sup>100 USD / month</sup></sub>
</td>
<td align="center" valign="middle" width="128">
<a href="https://github.com/KeithIMyers">
<img src="https://github.com/KeithIMyers.png?size=128" />
Keith Myers
<a href="https://github.com/herbundkraut">
<img src="https://github.com/herbundkraut.png?size=128" />
herbundkraut
</a><br>
<sub><sup>10 USD / month</sup></sub>
</td>
@ -208,17 +244,24 @@ The focus is not only limited to the activity you see on GitHub but also a lot o
### Special Thanks
<a href="https://www.aoz.studio">
<img src="https://user-images.githubusercontent.com/1731544/153794939-c42f1b10-a15d-4e82-b448-dc95bfe85b1c.png" alt="AOZ Studio" width="64" />
</a>
<a href="https://vercel.com/?utm_source=leon-ai&utm_campaign=oss">
<img src="https://i.imgur.com/S5olXWh.png" alt="Vercel" width="128" />
</a>
&nbsp; &nbsp; &nbsp;
<a href="https://www.macstadium.com/">
<img src="https://getleon.ai/img/thanks/mac-stadium.svg" alt="MacStadium" width="128" />
</a>
&nbsp; &nbsp; &nbsp;
<a href="https://www.aoz.studio">
<img src="https://getleon.ai/_next/image?url=%2Fimg%2Fthanks%2Faoz-studio.png&w=384&q=75" alt="AOZ Studio" width="128" />
</a>
## 📝 License
[MIT License](https://github.com/leon-ai/leon/blob/develop/LICENSE.md)
Copyright (c) 2019-present, Louis Grenard <louis@getleon.ai>
## Cheers!
![Cheers!](https://github.githubassets.com/images/icons/emoji/unicode/1f379.png "Cheers!")
![Cheers!](https://github.githubassets.com/images/icons/emoji/unicode/1f379.png 'Cheers!')
@ -1,18 +1,84 @@
@import url(https://fonts.googleapis.com/css?family=Open+Sans:400,600,700,800);
html, body, div, span, applet, object, iframes,
h1, h2, h3, h4, h5, h6, p, blockquote, pre,
a, abbr, acronym, address, big, cite, code,
del, dfn, em, img, ins, kbd, q, s, samp,
small, strike, sub, sup, tt, var,
u, i, center,
dl, dt, dd, ol, ul, li,
fieldset, form, label, legend,
table, caption, tbody, tfoot, thead, tr, th, td,
article, aside, canvas, details, embed,
figure, figcaption, footer, header, hgroup,
menu, nav, output, ruby, section, summary,
time, mark, audio, video {
html,
body,
div,
span,
applet,
object,
iframes,
h1,
h2,
h3,
h4,
h5,
h6,
p,
blockquote,
pre,
a,
abbr,
acronym,
address,
big,
cite,
code,
del,
dfn,
em,
img,
ins,
kbd,
q,
s,
samp,
small,
strike,
sub,
sup,
tt,
var,
u,
i,
center,
dl,
dt,
dd,
ol,
ul,
li,
fieldset,
form,
label,
legend,
table,
caption,
tbody,
tfoot,
thead,
tr,
th,
td,
article,
aside,
canvas,
details,
embed,
figure,
figcaption,
footer,
header,
hgroup,
menu,
nav,
output,
ruby,
section,
summary,
time,
mark,
audio,
video {
margin: 0;
padding: 0;
border: 0;
@ -21,17 +87,29 @@ time, mark, audio, video {
vertical-align: baseline;
}
article, aside, details, figcaption, figure,
footer, header, hgroup, menu, nav, section {
article,
aside,
details,
figcaption,
figure,
footer,
header,
hgroup,
menu,
nav,
section {
display: block;
}
blockquote, q {
blockquote,
q {
quotes: none;
}
blockquote:before, blockquote:after,
q:before, q:after {
blockquote:before,
blockquote:after,
q:before,
q:after {
content: '';
content: none;
}
@ -47,8 +125,10 @@ table {
}
:root {
--black-color: #151718;
--white-color: #FFF;
--black-color: #000;
--light-black-color: #222426;
--white-color: #fff;
--grey-color: #323739;
}
a {
@ -66,7 +146,7 @@ body {
font-weight: 400;
}
body > * {
transition: opacity .5s;
transition: opacity 0.5s;
}
body.settingup > * {
opacity: 0;
@ -81,14 +161,14 @@ body.settingup::after {
left: 50%;
transform: translate(-50%, -50%);
border-radius: 50%;
animation: scaleout .6s infinite ease-in-out;
animation: scaleout 0.6s infinite ease-in-out;
}
@keyframes scaleout {
0% {
transform: scale(0);
}
100% {
transform: scale(1.0);
transform: scale(1);
opacity: 0;
}
}
@ -115,7 +195,7 @@ input {
color: var(--white-color);
width: 100%;
border: none;
border-bottom: 2px solid var(--white-color);
border-bottom: 2px solid var(--grey-color);
background: none;
font-weight: 400;
font-size: 4em;
@ -124,7 +204,7 @@ input {
small {
color: var(--white-color);
font-size: .7em;
font-size: 0.7em;
}
.hide {
@ -144,14 +224,14 @@ small {
top: 10%;
height: 50%;
overflow-y: auto;
border: 2px solid var(--white-color);
border: 2px solid var(--grey-color);
border-radius: 12px;
}
#feed::-webkit-scrollbar {
width: 6px;
}
#feed::-webkit-scrollbar-thumb {
background-color: rgba(255, 255, 255, .2);
background-color: rgba(255, 255, 255, 0.2);
border-radius: 12px;
}
@ -166,7 +246,7 @@ small {
padding: 0 8px;
opacity: 0;
margin-top: 20px;
transition: opacity .3s;
transition: opacity 0.3s;
}
#is-typing.on {
opacity: 1;
@ -181,16 +261,16 @@ small {
transform: scale(1);
}
#is-typing .circle:nth-child(1) {
animation: typing .2s linear infinite alternate;
background-color: #0071F0;
animation: typing 0.2s linear infinite alternate;
background-color: #0071f0;
}
#is-typing .circle:nth-child(2) {
animation: typing .2s .2s linear infinite alternate;
animation: typing 0.2s 0.2s linear infinite alternate;
background-color: var(--white-color);
}
#is-typing .circle:nth-child(3) {
animation: typing .2s linear infinite alternate;
background-color: #EC297A;
animation: typing 0.2s linear infinite alternate;
background-color: #ec297a;
}
@keyframes typing {
100% {
@ -216,16 +296,16 @@ small {
word-break: break-word;
text-align: left;
opacity: 0;
animation: fadeIn .2s ease-in forwards;
animation: fadeIn 0.2s ease-in forwards;
}
#feed .me .bubble {
background-color: #1C75DB;
background-color: #1c75db;
color: var(--white-color);
right: 0;
}
#feed .leon .bubble {
background-color: #EEE;
color: var(--black-color);
background-color: var(--light-black-color);
color: var(--white-color);
}
@keyframes fadeIn {
100% {
@ -251,7 +331,7 @@ small {
padding: 2px 8px;
font-size: inherit;
cursor: pointer;
transition: background-color .2s, color .2s;
transition: background-color 0.2s, color 0.2s;
}
.suggestion:hover {
color: var(--black-color);
@ -284,7 +364,7 @@ small {
background-color: #888;
-webkit-mask-image: url(../img/mic.svg);
mask-image: url(../img/mic.svg);
transition: background-color .2s;
transition: background-color 0.2s;
}
#mic-button:not(.enabled) {
margin-left: -26px;
@ -293,13 +373,13 @@ small {
background-color: var(--white-color);
}
#mic-button.enabled {
background-color: #00E676;
background-color: #00e676;
}
#mic-button.enabled + #sonar {
width: 26px;
height: 26px;
border-radius: 50%;
opacity: .3;
opacity: 0.3;
background-color: #575757;
pointer-events: none;
animation: sonar 1.3s linear infinite
@ -1,50 +1,52 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="stylesheet" href="/css/style.css" />
<link rel="icon" type="image/png" href="/img/favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Leon</title>
</head>
<body class="settingup">
<main>
<div id="feed">
<p id="no-bubble" class="hide">
You can start to interact with Leon, don't be shy.
</p>
</div>
<div id="suggestions-container"></div>
<div id="is-typing">
<div class="circle"></div>
<div class="circle"></div>
<div class="circle"></div>
</div>
<div id="input-container">
<div id="mic-container">
<button id="mic-button"></button>
<div id="sonar"></div>
<head>
<meta charset="utf-8" />
<link rel="stylesheet" href="/css/style.css" />
<link rel="icon" type="image/png" href="/img/favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Leon</title>
</head>
<body class="settingup">
<main>
<div id="feed">
<p id="no-bubble" class="hide">
You can start to interact with Leon, don't be shy.
</p>
</div>
<label for="utterance"></label>
<input type="text" id="utterance" autocomplete="off" autofocus>
<small>
Use <kbd></kbd> <kbd></kbd> to browse history;
<kbd></kbd> to submit;
<kbd>alt + c to listen.</kbd>
</small>
</div>
</main>
<footer>
<div id="logo"></div>
<div id="version">
<small>v</small>
</div>
<div id="discord">
<small class="italic">
<a href="https://discord.gg/MNQqqKg" target="_blank">Join us on Discord</a>
</small>
</div>
</footer>
<script type="module" src="/js/main.js"></script>
</body>
<div id="suggestions-container"></div>
<div id="is-typing">
<div class="circle"></div>
<div class="circle"></div>
<div class="circle"></div>
</div>
<div id="input-container">
<div id="mic-container">
<button id="mic-button"></button>
<div id="sonar"></div>
</div>
<label for="utterance"></label>
<input type="text" id="utterance" autocomplete="off" autofocus />
<small>
Use <kbd></kbd> <kbd></kbd> to browse history; <kbd></kbd> to
submit;
<kbd>alt + c to listen.</kbd>
</small>
</div>
</main>
<footer>
<div id="logo"></div>
<div id="version">
<small>v</small>
</div>
<div id="discord">
<small class="italic">
<a href="https://discord.gg/MNQqqKg" target="_blank"
>Join us on Discord</a
>
</small>
</div>
</footer>
<script type="module" src="/js/main.js"></script>
</body>
</html>
@ -1,5 +1,5 @@
export default class Chatbot {
constructor () {
constructor() {
this.et = new EventTarget()
this.feed = document.querySelector('#feed')
this.typing = document.querySelector('#is-typing')
@ -8,7 +8,7 @@ export default class Chatbot {
this.parsedBubbles = JSON.parse(this.bubbles)
}
async init () {
async init() {
await this.loadFeed()
this.scrollDown()
@ -21,19 +21,19 @@ export default class Chatbot {
})
}
sendTo (who, string) {
sendTo(who, string) {
if (who === 'leon') {
this.et.dispatchEvent(new CustomEvent('to-leon', { detail: string }))
}
}
receivedFrom (who, string) {
receivedFrom(who, string) {
if (who === 'leon') {
this.et.dispatchEvent(new CustomEvent('me-received', { detail: string }))
}
}
isTyping (who, value) {
isTyping(who, value) {
if (who === 'leon') {
if (value) {
this.enableTyping()
@ -43,23 +43,23 @@ export default class Chatbot {
}
}
enableTyping () {
enableTyping() {
if (!this.typing.classList.contains('on')) {
this.typing.classList.add('on')
}
}
disableTyping () {
disableTyping() {
if (this.typing.classList.contains('on')) {
this.typing.classList.remove('on')
}
}
scrollDown () {
scrollDown() {
this.feed.scrollTo(0, this.feed.scrollHeight)
}
loadFeed () {
loadFeed() {
return new Promise((resolve) => {
if (this.parsedBubbles === null || this.parsedBubbles.length === 0) {
this.noBubbleMessage.classList.remove('hide')
@ -72,7 +72,7 @@ export default class Chatbot {
this.createBubble(bubble.who, bubble.string, false)
if ((i + 1) === this.parsedBubbles.length) {
if (i + 1 === this.parsedBubbles.length) {
setTimeout(() => {
resolve()
}, 100)
@ -82,7 +82,7 @@ export default class Chatbot {
})
}
createBubble (who, string, save = true) {
createBubble(who, string, save = true) {
const container = document.createElement('div')
const bubble = document.createElement('p')
@ -97,7 +97,7 @@ export default class Chatbot {
}
}
saveBubble (who, string) {
saveBubble(who, string) {
if (!this.noBubbleMessage.classList.contains('hide')) {
this.noBubbleMessage.classList.add('hide')
}
@ -1,8 +1,9 @@
import { io } from 'socket.io-client'
import Chatbot from './chatbot'
export default class Client {
constructor (client, serverUrl, input, res) {
constructor(client, serverUrl, input, res) {
this.client = client
this._input = input
this._suggestionContainer = document.querySelector('#suggestions-container')
@ -12,25 +13,25 @@ export default class Client {
this.parsedHistory = []
this.info = res
this.chatbot = new Chatbot()
this._recorder = { }
this._recorder = {}
this._suggestions = []
}
set input (newInput) {
set input(newInput) {
if (typeof newInput !== 'undefined') {
this._input.value = newInput
}
}
set recorder (recorder) {
set recorder(recorder) {
this._recorder = recorder
}
get recorder () {
get recorder() {
return this._recorder
}
init (loader) {
init(loader) {
this.chatbot.init()
this.socket.on('connect', () => {
@ -46,7 +47,7 @@ export default class Client {
})
this.socket.on('suggest', (data) => {
data.forEach((suggestionText) => {
data?.forEach((suggestionText) => {
this.addSuggestion(suggestionText)
})
})
@ -99,7 +100,7 @@ export default class Client {
}
}
}
}, 1000)
}, 1_000)
}, data.duration + 500)
}
})
@ -116,9 +117,12 @@ export default class Client {
}
}
send (keyword) {
send(keyword) {
if (this._input.value !== '') {
this.socket.emit(keyword, { client: this.client, value: this._input.value.trim() })
this.socket.emit(keyword, {
client: this.client,
value: this._input.value.trim()
})
this.chatbot.sendTo('leon', this._input.value)
this._suggestions.forEach((suggestion) => {
@ -135,7 +139,7 @@ export default class Client {
return false
}
save () {
save() {
let val = this._input.value
if (localStorage.getItem('history') === null) {
@ -157,7 +161,7 @@ export default class Client {
this._input.value = ''
}
addSuggestion (text) {
addSuggestion(text) {
const newSuggestion = document.createElement('button')
newSuggestion.classList.add('suggestion')
newSuggestion.textContent = text
@ -1,7 +1,12 @@
const listener = { }
const listener = {}
listener.listening = (stream, minDecibels, maxBlankTime,
cbOnStart, cbOnEnd) => {
listener.listening = (
stream,
minDecibels,
maxBlankTime,
cbOnStart,
cbOnEnd
) => {
const ctx = new AudioContext()
const analyser = ctx.createAnalyser()
const streamNode = ctx.createMediaStreamSource(stream)
@ -26,7 +31,7 @@ listener.listening = (stream, minDecibels, maxBlankTime,
silenceStart = time
}
if (!triggered && (time - silenceStart) > maxBlankTime) {
if (!triggered && time - silenceStart > maxBlankTime) {
cbOnEnd()
triggered = true
@ -1,5 +1,5 @@
export default class Loader {
constructor () {
constructor() {
this.et = new EventTarget()
this.body = document.querySelector('body')
@ -12,11 +12,11 @@ export default class Loader {
})
}
start () {
start() {
this.et.dispatchEvent(new CustomEvent('settingup', { detail: true }))
}
stop () {
stop() {
this.et.dispatchEvent(new CustomEvent('settingup', { detail: false }))
}
}
@ -1,4 +1,4 @@
import request from 'superagent'
import axios from 'axios'
import Loader from './loader'
import Client from './client'
@ -11,58 +11,67 @@ const config = {
server_host: import.meta.env.VITE_LEON_HOST,
server_port: import.meta.env.VITE_LEON_PORT,
min_decibels: -40, // Noise detection sensitivity
max_blank_time: 1000 // Maximum time to consider a blank (ms)
max_blank_time: 1_000 // Maximum time to consider a blank (ms)
}
const serverUrl = import.meta.env.VITE_LEON_NODE_ENV === 'production' ? '' : `${config.server_host}:${config.server_port}`
const serverUrl =
import.meta.env.VITE_LEON_NODE_ENV === 'production'
? ''
: `${config.server_host}:${config.server_port}`
document.addEventListener('DOMContentLoaded', () => {
document.addEventListener('DOMContentLoaded', async () => {
const loader = new Loader()
loader.start()
request.get(`${serverUrl}/api/v1/info`)
.end((err, res) => {
if (err || !res.ok) {
console.error(err.response.error.message)
} else {
const input = document.querySelector('#utterance')
const mic = document.querySelector('#mic-button')
const v = document.querySelector('#version small')
const client = new Client(config.app, serverUrl, input, res.body)
let rec = { }
let chunks = []
try {
const response = await axios.get(`${serverUrl}/api/v1/info`)
const input = document.querySelector('#utterance')
const mic = document.querySelector('#mic-button')
const v = document.querySelector('#version small')
const client = new Client(config.app, serverUrl, input, response.data)
let rec = {}
let chunks = []
v.innerHTML += client.info.version
v.innerHTML += client.info.version
client.init(loader)
client.init(loader)
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
if (MediaRecorder) {
rec = new Recorder(stream, mic, client.info)
client.recorder = rec
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices
.getUserMedia({ audio: true })
.then((stream) => {
if (MediaRecorder) {
rec = new Recorder(stream, mic, client.info)
client.recorder = rec
rec.ondataavailable((e) => {
chunks.push(e.data)
})
rec.ondataavailable((e) => {
chunks.push(e.data)
})
rec.onstart(() => { /* */ })
rec.onstart(() => {
/* */
})
rec.onstop(() => {
const blob = new Blob(chunks)
chunks = []
rec.enabled = false
rec.onstop(() => {
const blob = new Blob(chunks)
chunks = []
rec.enabled = false
// Ensure there are some data
if (blob.size >= 1000) {
client.socket.emit('recognize', blob)
}
})
// Ensure there are some data
if (blob.size >= 1_000) {
client.socket.emit('recognize', blob)
}
})
listener.listening(stream, config.min_decibels, config.max_blank_time, () => {
listener.listening(
stream,
config.min_decibels,
config.max_blank_time,
() => {
// Noise detected
rec.noiseDetected = true
}, () => {
},
() => {
// Noise ended
rec.noiseDetected = false
@ -72,52 +81,63 @@ document.addEventListener('DOMContentLoaded', () => {
rec.hotwordTriggered = false
rec.countSilenceAfterTalk = 0
}
})
}
)
client.socket.on('enable-record', () => {
rec.hotwordTriggered = true
rec.start()
setTimeout(() => { rec.hotwordTriggered = false }, config.max_blank_time)
rec.enabled = true
})
} else {
console.error('MediaRecorder is not supported on your browser.')
}
}).catch((err) => {
console.error('MediaDevices.getUserMedia() threw the following error:', err)
})
} else {
console.error('MediaDevices.getUserMedia() is not supported on your browser.')
}
document.addEventListener('keydown', (e) => {
onkeydowndocument(e, () => {
if (rec.enabled === false) {
input.value = ''
client.socket.on('enable-record', () => {
rec.hotwordTriggered = true
rec.start()
setTimeout(() => {
rec.hotwordTriggered = false
}, config.max_blank_time)
rec.enabled = true
} else {
rec.stop()
rec.enabled = false
}
})
})
input.addEventListener('keydown', (e) => {
onkeydowninput(e, client)
})
mic.addEventListener('click', (e) => {
e.preventDefault()
if (rec.enabled === false) {
rec.start()
rec.enabled = true
})
} else {
rec.stop()
rec.enabled = false
console.error('MediaRecorder is not supported on your browser.')
}
})
.catch((err) => {
console.error(
'MediaDevices.getUserMedia() threw the following error:',
err
)
})
} else {
console.error(
'MediaDevices.getUserMedia() is not supported on your browser.'
)
}
document.addEventListener('keydown', (e) => {
onkeydowndocument(e, () => {
if (rec.enabled === false) {
input.value = ''
rec.start()
rec.enabled = true
} else {
rec.stop()
rec.enabled = false
}
})
})
input.addEventListener('keydown', (e) => {
onkeydowninput(e, client)
})
mic.addEventListener('click', (e) => {
e.preventDefault()
if (rec.enabled === false) {
rec.start()
rec.enabled = true
} else {
rec.stop()
rec.enabled = false
}
})
} catch (e) {
alert(`Error: ${e.message}; ${JSON.stringify(e.response?.data)}`)
console.error(e)
}
})
@ -14,13 +14,13 @@ const onkeydowninput = (e, client) => {
index = -1
}
} else if (localStorage.getItem('history') !== null) {
if (key === 38 && index < (parsedHistory.length - 1)) {
if (key === 38 && index < parsedHistory.length - 1) {
index += 1
client.input = parsedHistory[index]
} else if (key === 40 && (index - 1) >= 0) {
} else if (key === 40 && index - 1 >= 0) {
index -= 1
client.input = parsedHistory[index]
} else if (key === 40 && (index - 1) < 0) {
} else if (key === 40 && index - 1 < 0) {
client.input = ''
index = -1
}
@ -33,7 +33,4 @@ const onkeydowndocument = (e, cb) => {
}
}
export {
onkeydowninput,
onkeydowndocument
}
export { onkeydowninput, onkeydowndocument }
@ -2,7 +2,7 @@ import on from '../sounds/on.mp3'
import off from '../sounds/off.mp3'
export default class Recorder {
constructor (stream, el, info) {
constructor(stream, el, info) {
this.recorder = new MediaRecorder(stream, { audioBitsPerSecond: 16000 })
this.el = el
this.audioOn = new Audio(on)
@ -15,7 +15,7 @@ export default class Recorder {
this.countSilenceAfterTalk = 0
}
start (playSound = true) {
start(playSound = true) {
if (this.info.stt.enabled === false) {
console.warn('Speech-to-text disabled')
} else {
@ -24,7 +24,7 @@ export default class Recorder {
}
}
stop (playSound = true) {
stop(playSound = true) {
if (this.info.stt.enabled === false) {
console.warn('Speech-to-text disabled')
} else {
@ -33,7 +33,7 @@ export default class Recorder {
}
}
onstart (cb) {
onstart(cb) {
this.recorder.onstart = (e) => {
if (this.playSound) {
this.audioOn.play()
@ -44,7 +44,7 @@ export default class Recorder {
}
}
onstop (cb) {
onstop(cb) {
this.recorder.onstop = (e) => {
if (this.playSound) {
this.audioOff.play()
@ -55,7 +55,7 @@ export default class Recorder {
}
}
ondataavailable (cb) {
ondataavailable(cb) {
this.recorder.ondataavailable = (e) => {
cb(e)
}
@ -1,16 +1,24 @@
import dns from 'node:dns'
import dotenv from 'dotenv'
import { defineConfig } from 'vite'
dotenv.config()
dns.setDefaultResultOrder('verbatim')
// Map necessary Leon's env vars as Vite only expose VITE_*
process.env.VITE_LEON_NODE_ENV = process.env.LEON_NODE_ENV
process.env.VITE_LEON_HOST = process.env.LEON_HOST
process.env.VITE_LEON_PORT = process.env.LEON_PORT
export default {
export default defineConfig({
root: 'app/src',
build: {
outDir: '../dist',
emptyOutDir: true
},
server: {
port: 3000
}
}
})
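Because Vite only exposes variables prefixed with `VITE_`, the config above copies Leon's env vars into `VITE_*` equivalents at build time. A minimal sketch of how the web app code can then read them (mirroring what `main.js` already does):

```ts
// Minimal sketch: reading the mapped VITE_* variables from the web app.
// Vite statically replaces import.meta.env.* references at build time.
const host = import.meta.env.VITE_LEON_HOST // mapped from LEON_HOST
const port = import.meta.env.VITE_LEON_PORT // mapped from LEON_PORT

export const serverUrl =
  import.meta.env.VITE_LEON_NODE_ENV === 'production' ? '' : `${host}:${port}`
```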

bridges/nodejs/.npmrc
@ -0,0 +1,2 @@
package-lock=false
save-exact=true
@ -0,0 +1,15 @@
{
"name": "leon-nodejs-bridge",
"description": "Leon's Node.js bridge to communicate between the core and skills made with JavaScript",
"main": "dist/leon-nodejs-bridge.js",
"author": {
"name": "Louis Grenard",
"email": "louis@getleon.ai",
"url": "https://twitter.com/grenlouis"
},
"license": "MIT",
"homepage": "https://getleon.ai",
"bugs": {
"url": "https://github.com/leon-ai/leon/issues"
}
}
@ -0,0 +1,5 @@
import { VERSION } from './version'
console.log('[WIP] Node.js bridge', VERSION)
// TODO
@ -0,0 +1 @@
export const VERSION = '0.0.0'
@ -0,0 +1,23 @@
{
"extends": "@tsconfig/node16-strictest/tsconfig.json",
"compilerOptions": {
"rootDir": "./src",
"outDir": "./dist",
"baseUrl": ".",
"paths": {
"@/*": ["./src/*"]
},
"ignoreDeprecations": "5.0",
"allowJs": true,
"checkJs": false,
"resolveJsonModule": true
},
"ts-node": {
"swc": true,
"require": ["tsconfig-paths/register"],
"files": true
},
"files": [],
"include": ["src/**/*"],
"exclude": ["dist"]
}
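With the `@/*` path alias above resolving to `./src/*` (and `tsconfig-paths/register` wired into ts-node), bridge sources can use root-relative imports. A minimal sketch, assuming the `src/version.ts` module added in this commit:

```ts
// Minimal sketch: using the '@/*' alias inside the Node.js bridge.
// '@/version' resolves to './src/version' through the tsconfig paths above.
import { VERSION } from '@/version'

console.log(`Node.js bridge version: ${VERSION}`)
```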

@ -1,592 +0,0 @@
{
"_meta": {
"hash": {
"sha256": "2db39af98e7ccc96ec933a4b2cbe921addef26899daf69d7d4d8ddb01ef5f746"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.9.10"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"beautifulsoup4": {
"hashes": [
"sha256:034740f6cb549b4e932ae1ab975581e6103ac8f942200a0e9759065984391858",
"sha256:945065979fb8529dd2f37dbb58f00b661bdbcbebf954f93b32fdf5263ef35348",
"sha256:ba6d5c59906a85ac23dadfe5c88deaf3e179ef565f4898671253e50a78680718"
],
"index": "pypi",
"version": "==4.7.1"
},
"blis": {
"hashes": [
"sha256:0c3245e42c7c6ba0d68d7dff4171d11bb08174e639bc8edd52a1fd831de1d903",
"sha256:0f7bfdee74ac695c35360ace00f2630c1b47406dc0b99ba9211bfa8588bfbed9",
"sha256:159a1a9b32213d99d1415789ac66ed8d23442a696d9d376c66d7b791d3eae575",
"sha256:17df5ac7d9a9dbbf0415f8f8392fbdf1790fa394f89d695bae5e2e7e361c852b",
"sha256:1e970ba1eb12ca38fb5d57f379472125bc3f5106c8214dc847fe79b027212135",
"sha256:1f5fa330ab66d0e92a845b1db361ec8bf3dc4bc7e0dc0ded94f36b8e9f731650",
"sha256:2056b0d9722f5505cfa94904c6248021197c63ecf45804dcf117f8f1c6160ab6",
"sha256:25cdb9775699c1b926df514a5d4c28016c11722a66211f1024b2f21373f50de2",
"sha256:2778fe0ba0e25c157839fdd19ed66b9a340c92d4e92e707b7fa9aa21c51cb254",
"sha256:294421b720c2de904908de841464c667e1a5c5e9f3db6931dfa29cf369d3653a",
"sha256:2db369a4f95927be37e11790dd1ccbf99fd6201eaffbcf408546db847b7b5740",
"sha256:354cadff661a1452c886f541b84018770ddb4c134844c56e7c1a30aa4bcc473d",
"sha256:4e7b7b8bc8cf5e82958bbc393e0167318a930d394cbbf04c1ba18cfabaef5818",
"sha256:63735128c9cae44dc6cbf7557327385df0c4ed2dc2c45a00dabfde1e4d00802d",
"sha256:66b8ca1a2eb8f1e0563a592aae4b8682b66189ad560e3b8221d93eab0cb76582",
"sha256:90f17543e0aa3bc379d139867467df2c365ffaf5b61988de12dbba6dbbc9fab4",
"sha256:95d22d3007cb454d11a478331690629861f7d40b4668f9fccfd13b6507ed099b",
"sha256:96ff4c0c1ceab9f94c14b3281f3cef82f593c48c3b5f6169bd51cdcd315e0a6e",
"sha256:ae5b06fe3b94645ac5d93cbc7c0129639cc3e0d50b4efb361a20a9e160277a92",
"sha256:bf60f634481c3d0faf831ac4f2d1c75343e98f714dc88e3fb3c329758577e772",
"sha256:bfa56e7ef14ae607d8444eb344d22f252a2e0b0f9bfa4bdc9b0c48a9f96b5461",
"sha256:cac120e3c0517095d3c39278e8b6b1102b1add0e1f4e161a87f313d8ee7c12e1",
"sha256:f576ad64b772b6fd7df6ef94986235f321983dc870d0f76d78c931bafc41cfa4",
"sha256:f7d541bb06323aa350163ba4a3ad00e8effb3b53d4c58ee6228224f3928b6c57"
],
"version": "==0.7.8"
},
"catalogue": {
"hashes": [
"sha256:2d786e229d8d202b4f8a2a059858e45a2331201d831e39746732daa704b99f69",
"sha256:b325c77659208bfb6af1b0d93b1a1aa4112e1bb29a4c5ced816758a722f0e388"
],
"markers": "python_version >= '3.6'",
"version": "==2.0.8"
},
"certifi": {
"hashes": [
"sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d",
"sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"
],
"markers": "python_version >= '3.6'",
"version": "==2022.6.15"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"version": "==3.0.4"
},
"click": {
"hashes": [
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e",
"sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"
],
"markers": "python_version >= '3.7'",
"version": "==8.1.3"
},
"cymem": {
"hashes": [
"sha256:04676d696596b0db3f3c5a3936bab12fb6f24278921a6622bb185e61765b2b4d",
"sha256:169725b5816959d34de2545b33fee6a8021a6e08818794a426c5a4f981f17e5e",
"sha256:228bd261a85d92d870ed358f263ee028ac026302304f2186827377a3895c5819",
"sha256:2aa3fa467d906cd2c27fa0a2e2952dd7925f5fcc7973fab6d815ef6acb25aad8",
"sha256:38b51ac23f914d85b197dbd0fb2d3e2de9bf6112b9b30f16b45dbb6c9b4e509d",
"sha256:4749f220e4c06ec44eb10de13794ff0508cdc4f8eff656cf49cab2cdb3122c0c",
"sha256:492084aef23ac2ff3da3729e9d36340bc91a96c2dc8c3a82a1926e384ab52412",
"sha256:4f87fe087f2ae36c3e20e2b1a29d7f76a28c035372d0a97655f26223d975235a",
"sha256:5d631239bfb07293ee444b269656308da952b6b003b12332ccb1c624dbfcda4b",
"sha256:6b0d1a6b0a1296f31fa9e4b7ae5ea49394084ecc883b1ae6fec4844403c43468",
"sha256:700540b68e96a7056d0691d467df2bbaaf0934a3e6fe2383669998cbee19580a",
"sha256:971cf0a8437dfb4185c3049c086e463612fe849efadc0f5cc153fc81c501da7d",
"sha256:a261f51796a2705f3900ed22b8442519a0f230f50a816fb5bd89cb9b027dc5ac",
"sha256:a93fba62fe79dbf6fc4d5b6d804a6e114b44af3ff3d40a28833ee39f21bd336b",
"sha256:af3c01e6b20f9e6c07c7d7cdb7f710e49889d3906c9a3e039546ee6636a34b9a",
"sha256:b8e1c18bb00800425576710468299153caad20c64ddb6819d40a6a34e21ee21c",
"sha256:c59293b232b53ebb47427f16cf648e937022f489cff36c11d1d8a1f0075b6609",
"sha256:c59ca1072769cb6c3eade59de9e080ff2cecde0122f7e0ca0dc9ef2ed9240f0e",
"sha256:cd818356b635d8ae546e152a6f2b95f00e959d128a16155c275b0c202cd6312b",
"sha256:d4dc378fb9dda3b0529361fe32cfe1a6de0fc16bb40c710aaec8d217534928d2",
"sha256:d7a59cef8f2fa25d12e2c30138f8623acbd43ad2715e730a709e49c5eef8e1b0",
"sha256:dd52d8a81881804625df88453611175ab7e0099b34f52204da1f6940cf2e83c9",
"sha256:ea535f74ab6024e7416f93de564e5c81fb7c0964b96280de66f60aeb05f0cf53",
"sha256:ee73a48c5a7e0f2acf6830ddc958ffafd7a614cfb79d14017a459bc7a7145ecd"
],
"version": "==2.0.6"
},
"geonamescache": {
"hashes": [
"sha256:5a1fc774a3282d324952b87eb5c2c13684055e68e829526909c76c2fffb84f8f",
"sha256:efa1dd28a87632303c5d882cc52718f04cf28e85f1cc7d6afdf8a6ba7c5ab1dc"
],
"index": "pypi",
"version": "==1.3.0"
},
"idna": {
"hashes": [
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
"sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
],
"version": "==2.8"
},
"jinja2": {
"hashes": [
"sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852",
"sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"
],
"markers": "python_version >= '3.7'",
"version": "==3.1.2"
},
"langcodes": {
"hashes": [
"sha256:4d89fc9acb6e9c8fdef70bcdf376113a3db09b67285d9e1d534de6d8818e7e69",
"sha256:794d07d5a28781231ac335a1561b8442f8648ca07cd518310aeb45d6f0807ef6"
],
"markers": "python_version >= '3.6'",
"version": "==3.3.0"
},
"markupsafe": {
"hashes": [
"sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003",
"sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88",
"sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5",
"sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7",
"sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a",
"sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603",
"sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1",
"sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135",
"sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247",
"sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6",
"sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601",
"sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77",
"sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02",
"sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e",
"sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63",
"sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f",
"sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980",
"sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b",
"sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812",
"sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff",
"sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96",
"sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1",
"sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925",
"sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a",
"sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6",
"sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e",
"sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f",
"sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4",
"sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f",
"sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3",
"sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c",
"sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a",
"sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417",
"sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a",
"sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a",
"sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37",
"sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452",
"sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933",
"sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a",
"sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"
],
"markers": "python_version >= '3.7'",
"version": "==2.1.1"
},
"murmurhash": {
"hashes": [
"sha256:1b3d584705bf7be487c09a79f35a45a45f9b2a8a2120ccca47692d24bc6329ff",
"sha256:1fd2dac6ea401310d1245f68a758ff86bc58ba28fe3ec41c3fec42d4cea249a0",
"sha256:262abc461353bcd724daef11aa236fed95ce2c5642847fe18f835721ae0356d6",
"sha256:2d001688bd2106fbd84a99d4c0292339de7a0a96c8516596032a3e9389adba25",
"sha256:341a499cb30fae9f305479650728d832744a2e37f8e7fbdbfaedd8676b4e822d",
"sha256:37c8207377110fa791051e2998d6d8d84d0a066010be9e8e41d1af1c41a65f14",
"sha256:43d161ecdc1923ec401efb4e78d1da848c4aa957a49c08c29af276a7696c5336",
"sha256:469a5c8959988ce8c395be2c623a45df8112783d87445b6a230596807e7786d2",
"sha256:60637d74c73be1259d9c07d03726b75907ad9e8dd47f946b21244113cdc52843",
"sha256:66857bf301c676917d7ceba3fa42fb82aa34605d6b9788612c532bc6d69af9f9",
"sha256:67d48fc47edc40fef9eb19c6e9ee3d4321f864a3d6566217812615c79fa4ff16",
"sha256:79a3ef14cab2bdc2cf4814eb30d8237d28ddc9e6fd1859d3d61857162979a630",
"sha256:88c569636b0bad29a3854f1d324636e0561423da75ef6e5bc5b363f8d2143692",
"sha256:88eb9b866d2e6690249eb305cce509eeffbf2a59c42eb47e8aab6a3b46bd4a54",
"sha256:8bb03aad842837a64b0c1d2ed22b50eba85f9fa51476c8bc0a77c366a979f1f3",
"sha256:97231ac1f42d29b6e1a3e2e8d23ab32deb37ccd55b00554d74e4bd8f0e73b7d7",
"sha256:9fe289e88b18b388d6029a17839113a6090ead3b04c904cba0b64961f0db7d02",
"sha256:aaa8df50c83468a6be52e29a85d7d54b981b43cd1c9b501b3a2d80acbe87283e",
"sha256:ac0701b6823a8a88412c2a1410b1afbd25ee20666ea9d4ad1d3016c7d55ac433",
"sha256:b9bab3d46076ac65b38e9f2823ce3e45d7a2a798e3dd466eb594de1802732979",
"sha256:bafe99db2f05d8ecd9b39250cf37f561b4281d081b7c10bd283d6c1fd685bedf",
"sha256:bd4584443731a9d73b87970f9b08c86162d35acc48b63c78cf215bcebeb8b5b0",
"sha256:cbf5b97feaba000df8e5b16ce23e426ea9b52e0e442e6451b4687b6f7797804e",
"sha256:d3d64b6a24203da05964c708856f2ff46653dbd115d3bba326e0c6dc1d4ea511",
"sha256:d55e956cebe38ef96360949b130cbfb605c3ffc04e3b770c6e0299308c46c880"
],
"markers": "python_version >= '3.6'",
"version": "==1.0.8"
},
"numpy": {
"hashes": [
"sha256:17e5226674f6ea79e14e3b91bfbc153fdf3ac13f5cc54ee7bc8fdbe820a32da0",
"sha256:2bd879d3ca4b6f39b7770829f73278b7c5e248c91d538aab1e506c628353e47f",
"sha256:4f41f5bf20d9a521f8cab3a34557cd77b6f205ab2116651f12959714494268b0",
"sha256:5593f67e66dea4e237f5af998d31a43e447786b2154ba1ad833676c788f37cde",
"sha256:5e28cd64624dc2354a349152599e55308eb6ca95a13ce6a7d5679ebff2962913",
"sha256:633679a472934b1c20a12ed0c9a6c9eb167fbb4cb89031939bfd03dd9dbc62b8",
"sha256:806970e69106556d1dd200e26647e9bee5e2b3f1814f9da104a943e8d548ca38",
"sha256:806cc25d5c43e240db709875e947076b2826f47c2c340a5a2f36da5bb10c58d6",
"sha256:8247f01c4721479e482cc2f9f7d973f3f47810cbc8c65e38fd1bbd3141cc9842",
"sha256:8ebf7e194b89bc66b78475bd3624d92980fca4e5bb86dda08d677d786fefc414",
"sha256:8ecb818231afe5f0f568c81f12ce50f2b828ff2b27487520d85eb44c71313b9e",
"sha256:8f9d84a24889ebb4c641a9b99e54adb8cab50972f0166a3abc14c3b93163f074",
"sha256:909c56c4d4341ec8315291a105169d8aae732cfb4c250fbc375a1efb7a844f8f",
"sha256:9b83d48e464f393d46e8dd8171687394d39bc5abfe2978896b77dc2604e8635d",
"sha256:ac987b35df8c2a2eab495ee206658117e9ce867acf3ccb376a19e83070e69418",
"sha256:b78d00e48261fbbd04aa0d7427cf78d18401ee0abd89c7559bbf422e5b1c7d01",
"sha256:b8b97a8a87cadcd3f94659b4ef6ec056261fa1e1c3317f4193ac231d4df70215",
"sha256:bd5b7ccae24e3d8501ee5563e82febc1771e73bd268eef82a1e8d2b4d556ae66",
"sha256:bdc02c0235b261925102b1bd586579b7158e9d0d07ecb61148a1799214a4afd5",
"sha256:be6b350dfbc7f708d9d853663772a9310783ea58f6035eec649fb9c4371b5389",
"sha256:c403c81bb8ffb1c993d0165a11493fd4bf1353d258f6997b3ee288b0a48fce77",
"sha256:cf8c6aed12a935abf2e290860af8e77b26a042eb7f2582ff83dc7ed5f963340c",
"sha256:d98addfd3c8728ee8b2c49126f3c44c703e2b005d4a95998e2167af176a9e722",
"sha256:dc76bca1ca98f4b122114435f83f1fcf3c0fe48e4e6f660e07996abf2f53903c",
"sha256:dec198619b7dbd6db58603cd256e092bcadef22a796f778bf87f8592b468441d",
"sha256:df28dda02c9328e122661f399f7655cdcbcf22ea42daa3650a26bce08a187450",
"sha256:e603ca1fb47b913942f3e660a15e55a9ebca906857edfea476ae5f0fe9b457d5",
"sha256:ecfdd68d334a6b97472ed032b5b37a30d8217c097acfff15e8452c710e775524"
],
"markers": "python_version >= '3.8'",
"version": "==1.23.2"
},
"packaging": {
"hashes": [
"sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb",
"sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"
],
"markers": "python_version >= '3.6'",
"version": "==21.3"
},
"pathy": {
"hashes": [
"sha256:3178215bdadf3741107d987020be0fb5b59888f60f96de43cce5fe45d9d4b64a",
"sha256:a7aa9794fade161bb4c28a33c5bc2c6bf41f61ec5eee51cfa8914f0a433447e1"
],
"markers": "python_version >= '3.6'",
"version": "==0.6.2"
},
"preshed": {
"hashes": [
"sha256:0435d4647037fc534bf1bdcf87fdbf206a935abe5e68c9814277090c62e6b6ef",
"sha256:06be22a8a2757e61b2c3cc854e9c2d2361fb3b6528deb2af5b5079e6da670405",
"sha256:0feae8cfcf46fb97cd8118d150245ae31c68ca9c68e514ab33e1e33a90eee459",
"sha256:13140808a56ce67e30579a40e293d16dedeb85fd56c74cd024c44f7a9f966432",
"sha256:1573d42bdec168081242ec88d40288000f84a5afb66a60517e4220950fe62f17",
"sha256:1acc514a41142a086c2ebc19cd368534e27337511b3a2936c28455e0b42f3eb3",
"sha256:24935553ff18f39195b468984d784b2bb7825b6f43385a2d0d05b02e55090815",
"sha256:3741b459f50dde7a22b88d75d1fc4a7e6e36402db43c4f690df44e966ff1bd54",
"sha256:37900e1e923a35fc15f56cd02f4a899e5903790113256fedbf527a5b3f8e2214",
"sha256:39cd2a0ab1adb11452c617831ea0ccea7d1712f2812d1744738735987513113a",
"sha256:3b1d747ab1e233bc4bccdc4baee7531c5661459d7a8d67845833f7c857b371fb",
"sha256:3e7022fe473e677088cda6e0538669fe240943e3eb7ff88447d690a52c6d87ce",
"sha256:4a5825b80a450198cb582b3b4004c95e9d4eca268e89c126a9d71e8f26b69338",
"sha256:4a99d4f46a4e317245f2370eb13d0e3c6747f66752a8d88dbc9284f4fd25e05f",
"sha256:5eeafb832009a3e1ed92a412f19499c86e687308d6d56617772d42ee4de26ccf",
"sha256:6e1b61c9797443ee42cb314c91ab178791b80397c2e98410f5173806604458b1",
"sha256:6e518275d1b84bed779ddc69eabeb9283bf5f9f5482bcaf1b099d2877b050a0d",
"sha256:7cc3166ed48d603131912da40a7638b3655d6990032a770303346e2f5f4154d4",
"sha256:7f97c3d2dc9713139cc096af9e4455ced33f9e90ce9d29f739db5adff6ae6a06",
"sha256:8d57469e5e52d37342588804d3ce70128ab42f42e5babaeea0d29d3609ce7b30",
"sha256:9785e3de0dca8f4d6040eba89e50e72012ce94d269583502d14f958624daea73",
"sha256:9c700a0744f5ede86ff8754522bd54f92f5b75f58046bc3f17c60d1c25529f41",
"sha256:a9ed0dc685aa058a1a76111f476715ccfb4df12800c3681e401d6af7384f570d",
"sha256:ab4168fe1d31707e3473d1bb57d3c88dac9151b3cbe61d22f14bf691e04aaa1b",
"sha256:d2cc8d8df139d5935464530d45ff0e1209752e9748792cdf774b5d5fc230c07a"
],
"markers": "python_version >= '3.6'",
"version": "==3.0.7"
},
"pydantic": {
"hashes": [
"sha256:1061c6ee6204f4f5a27133126854948e3b3d51fcc16ead2e5d04378c199b2f44",
"sha256:19b5686387ea0d1ea52ecc4cffb71abb21702c5e5b2ac626fd4dbaa0834aa49d",
"sha256:2bd446bdb7755c3a94e56d7bdfd3ee92396070efa8ef3a34fab9579fe6aa1d84",
"sha256:328558c9f2eed77bd8fffad3cef39dbbe3edc7044517f4625a769d45d4cf7555",
"sha256:32e0b4fb13ad4db4058a7c3c80e2569adbd810c25e6ca3bbd8b2a9cc2cc871d7",
"sha256:3ee0d69b2a5b341fc7927e92cae7ddcfd95e624dfc4870b32a85568bd65e6131",
"sha256:4aafd4e55e8ad5bd1b19572ea2df546ccace7945853832bb99422a79c70ce9b8",
"sha256:4b3946f87e5cef3ba2e7bd3a4eb5a20385fe36521d6cc1ebf3c08a6697c6cfb3",
"sha256:4de71c718c9756d679420c69f216776c2e977459f77e8f679a4a961dc7304a56",
"sha256:5565a49effe38d51882cb7bac18bda013cdb34d80ac336428e8908f0b72499b0",
"sha256:5803ad846cdd1ed0d97eb00292b870c29c1f03732a010e66908ff48a762f20e4",
"sha256:5da164119602212a3fe7e3bc08911a89db4710ae51444b4224c2382fd09ad453",
"sha256:615661bfc37e82ac677543704437ff737418e4ea04bef9cf11c6d27346606044",
"sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e",
"sha256:7d0f183b305629765910eaad707800d2f47c6ac5bcfb8c6397abdc30b69eeb15",
"sha256:7ead3cd020d526f75b4188e0a8d71c0dbbe1b4b6b5dc0ea775a93aca16256aeb",
"sha256:84d76ecc908d917f4684b354a39fd885d69dd0491be175f3465fe4b59811c001",
"sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d",
"sha256:91089b2e281713f3893cd01d8e576771cd5bfdfbff5d0ed95969f47ef6d676c3",
"sha256:9c9e04a6cdb7a363d7cb3ccf0efea51e0abb48e180c0d31dca8d247967d85c6e",
"sha256:a8c5360a0297a713b4123608a7909e6869e1b56d0e96eb0d792c27585d40757f",
"sha256:afacf6d2a41ed91fc631bade88b1d319c51ab5418870802cedb590b709c5ae3c",
"sha256:b34ba24f3e2d0b39b43f0ca62008f7ba962cff51efa56e64ee25c4af6eed987b",
"sha256:bd67cb2c2d9602ad159389c29e4ca964b86fa2f35c2faef54c3eb28b4efd36c8",
"sha256:c0f5e142ef8217019e3eef6ae1b6b55f09a7a15972958d44fbd228214cede567",
"sha256:cdb4272678db803ddf94caa4f94f8672e9a46bae4a44f167095e4d06fec12979",
"sha256:d70916235d478404a3fa8c997b003b5f33aeac4686ac1baa767234a0f8ac2326",
"sha256:d8ce3fb0841763a89322ea0432f1f59a2d3feae07a63ea2c958b2315e1ae8adb",
"sha256:e0b214e57623a535936005797567231a12d0da0c29711eb3514bc2b3cd008d0f",
"sha256:e631c70c9280e3129f071635b81207cad85e6c08e253539467e4ead0e5b219aa",
"sha256:e78578f0c7481c850d1c969aca9a65405887003484d24f6110458fb02cca7747",
"sha256:f0ca86b525264daa5f6b192f216a0d1e860b7383e3da1c65a1908f9c02f42801",
"sha256:f1a68f4f65a9ee64b6ccccb5bf7e17db07caebd2730109cb8a95863cfa9c4e55",
"sha256:fafe841be1103f340a24977f61dee76172e4ae5f647ab9e7fd1e1fca51524f08",
"sha256:ff68fc85355532ea77559ede81f35fff79a6a5543477e168ab3a381887caea76"
],
"markers": "python_full_version >= '3.6.1'",
"version": "==1.9.2"
},
"pyparsing": {
"hashes": [
"sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb",
"sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"
],
"markers": "python_full_version >= '3.6.8'",
"version": "==3.0.9"
},
"python-dotenv": {
"hashes": [
"sha256:32b2bdc1873fd3a3c346da1c6db83d0053c3c62f28f1f38516070c4c8971b1d3",
"sha256:a5de49a31e953b45ff2d2fd434bbc2670e8db5273606c1e737cc6b93eff3655f"
],
"index": "pypi",
"version": "==0.19.2"
},
"pytube": {
"hashes": [
"sha256:2a32f3475f063d25e7b7a7434a93b51d59aadbbda7ed24af48f097b2876c0964",
"sha256:9190f5e13b05a4fc1586eedd46f7164c3a4318607d455a1f7c126699ddde724f"
],
"index": "pypi",
"version": "==9.5.0"
},
"requests": {
"hashes": [
"sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
"sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
],
"index": "pypi",
"version": "==2.21.0"
},
"setuptools": {
"hashes": [
"sha256:2347b2b432c891a863acadca2da9ac101eae6169b1d3dfee2ec605ecd50dbfe5",
"sha256:e4f30b9f84e5ab3decf945113119649fec09c1fc3507c6ebffec75646c56e62b"
],
"index": "pypi",
"version": "==60.9.3"
},
"smart-open": {
"hashes": [
"sha256:71d14489da58b60ce12fc3ecb823facc59a8b23cd1b58edb97175640350d3a62",
"sha256:75abf758717a92a8f53aa96953f0c245c8cedf8e1e4184903db3659b419d4c17"
],
"markers": "python_version >= '3.6' and python_version < '4.0'",
"version": "==5.2.1"
},
"soupsieve": {
"hashes": [
"sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759",
"sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"
],
"markers": "python_version >= '3.6'",
"version": "==2.3.2.post1"
},
"spacy": {
"hashes": [
"sha256:0720297328e32c1952a6c2eb9f5d9aa7b11b95588886c9117a7030c2dd1a0619",
"sha256:344f262344f1c078e6159543eecc09b32b70c138deb1f9fe01c49cd89002730f",
"sha256:3c99920830c1cbf3be169b7817863d88fa07c465540ebf0eed304ba106201cb4",
"sha256:3caaa36f8fc95776a558ec2a4625f5cd22c1fa7990b1d9ab650709851586cec4",
"sha256:3ccd1a9f56759e5f0f9efdf57119a06702ad59c045dde0b3830b5472593e09ef",
"sha256:4dc6ea142866f7042a710597ad2c3939bde3486c27d0eb11fcfb079245c72cb3",
"sha256:5b95eff2f9e012991c27e93c038ef8f3ca4132ec7e66e097833ef22f263a46da",
"sha256:6c45fe868973dd44afa4a200f72fd2d7288548ae6bc201fc5a6d5ffb0a4949b3",
"sha256:6d147595db91191a0a748e33629c99fb5e7849af5100237e058fd832b7254242",
"sha256:7039fe789caa9753fa0a66b9391f0f93d49e6cb1017336c6b0842220a04346a8",
"sha256:76ad38af7205dbc97e1a3dd0956f5d97b4d31542adbb039eb161bf5f08e386ec",
"sha256:9c9167631733f8618f8928745fc740f3fd680994993dce2db7a4dd2b85d8919e",
"sha256:9e631c07560457926860b1c64bf2a62e4e4c25359085024ea3847cc7e2fc39ee",
"sha256:9ff04736c88475a950726c92a8e87ea6f42ca49ae60e0dc9612b456333f25e9c",
"sha256:a10451847eb7983ba1fe05ab87a37367e00e2807fe633d17926626cb2dacd10f",
"sha256:a4fa15d13ef73089680506c91e27630005537b9b68d2d4e736cec26cb29a3f47",
"sha256:b350dc2321b4cbeac65419ec0cb0d0697f991c0a3b4ca4236438fbcce77e9421",
"sha256:b3b336aed79d7610ad33fac1557de8b96a1421aa8c4d41604700d1eb1c7e34fd",
"sha256:dbcf52b49a7221202f7a388993562c9dd610ed621697a4684583514aed63c06a",
"sha256:e0bec5befbb6bbc11d4d984e8f864dcbdf9e9274dbc5dbc349b931a2dd9ec84c",
"sha256:eb39f5551f4ef6475fa7f46fec347553680ab0e917d2a790384aa75d5797333d",
"sha256:eca827077f32965c1931ccb34375c29a7b9c321818e1773248004719b27b008f",
"sha256:f0b26babf0d7d9b07f8350c390b19445207a9f84cac7a81ac2b6d0cbcb47357a",
"sha256:ff0687a6f3e822655650a5ba45af6738b879962806f09ac3903768b0b67cf415"
],
"index": "pypi",
"version": "==3.4.0"
},
"spacy-legacy": {
"hashes": [
"sha256:16104595d8ab1b7267f817a449ad1f986eb1f2a2edf1050748f08739a479679a",
"sha256:8526a54d178dee9b7f218d43e5c21362c59056c5da23380b319b56043e9211f3"
],
"markers": "python_version >= '3.6'",
"version": "==3.0.10"
},
"spacy-loggers": {
"hashes": [
"sha256:00f6fd554db9fd1fde6501b23e1f0e72f6eef14bb1e7fc15456d11d1d2de92ca",
"sha256:f74386b390a023f9615dcb499b7b4ad63338236a8187f0ec4dfe265a9f665ee8"
],
"markers": "python_version >= '3.6'",
"version": "==1.0.3"
},
"srsly": {
"hashes": [
"sha256:1a90233d5ef978db5887d0d159fa5652bfecde353812c4b4e293791460db2211",
"sha256:1ce17dcf2acd26f3add82d94a29d264bd72364071a29dccdd335d4c1826e4e98",
"sha256:2702ab0858bb27cefe684cc01be5f28ca05d204dc60b52efd122434864a347bd",
"sha256:30eb733770b69593ac31ff84df41455e45bb2e48bf44c1382bc40f10be3e8936",
"sha256:372204bbb7089ecc99fa984f0ef6c06063dcf6ac1abf0d896605486a2cdf5779",
"sha256:47e6ce2aed95aa2f55b9a967f34f82cf170ff2c0e126d3422ede106dbfe4922c",
"sha256:4bb401bf8477b685d933670bcec6b430b8e8025cd68da1e6bacfd57f6d8191d5",
"sha256:4efa441fc54d3f2300c6ce48f9e44ed9850f2627c0ebeaa320b206b470679e63",
"sha256:5054c25a9b923d33f54e7bbab8c5be669db94b9bab87e348439bf85f3d644825",
"sha256:708623d4e4503fee4cd9c727d471ab6918b664e177fbe413b0ddd2debb45437a",
"sha256:74acd1e52235fa46a3ede5c34f33adf8bad4050bbf44ec04d76369c4aed9041e",
"sha256:8235975d943587b4d17fc10e860b11d9248f58c0b74e95f911bd70b24542c630",
"sha256:8abfca6b34c8a03608ca9c6e54e315c30d240cc3aae942a412c52742d15a089b",
"sha256:930164882db523020c7660b12a7f3b0d9266781012004fa2e8ad6150412493e1",
"sha256:964bdc3f8ff4a5e66ab2685e001b78f6ca2ce68fe4817842f4f342abad2fddbb",
"sha256:a6d03f65c079e98fcd635f19c65c0ddb7b06f1069332fb87c74e36d7c3ca883d",
"sha256:bd4f7e2b43187d4fc5095fb01fe7b7868a777eb12315ff1ac07e3243fd4493e8",
"sha256:c85962bf1d6f52b9d5d2e727a4e3c558a6b00cecadea29141f122f6c83147cca",
"sha256:c96963e1db238014525f924d5d5058ee9386e129c82f24cc63fead41902e1c06",
"sha256:cb65e2d67980419072379abef1e5b1d3b11931082bc038b0295d35a56376c3d5",
"sha256:d6733ddaf0b2db54f334a2e881f1112be0ff48f113c96bcd88d1aec1ff871ca8",
"sha256:e0c6510716e5cb2abf11abcbe06338283b01690955342e78a76fcb9953b7e3ba",
"sha256:e1866a1a5ea8c7bb213b0c3c7a7b97cd6d9a3ca26f2f59e441d47fa9720fb399",
"sha256:e8a06581627b6712f19c60241b7c14c2bb29ce86ef04f791379a79f1b249a128"
],
"markers": "python_version >= '3.6'",
"version": "==2.4.4"
},
"thinc": {
"hashes": [
"sha256:133f585941801382dd52201eb5b857426dfa1adca298b052875c9a07943c18b0",
"sha256:21ec24cdb3248617b41d4bdf13de557fdd994bca73b107c30530b7fcd590c9c2",
"sha256:2766914adae8768aaad0d18c3968779bebe3d366f94ebca3b42b3f8cc290c5e3",
"sha256:2c40cbc4c630e6201983af88ee9c4fd9f1d8c1d41545c78a861e9911fcb23b7b",
"sha256:478f32fcf2042ead1bfbba7a3c77e2fc574c1f65967efc137156130bfd02c056",
"sha256:5ab8dcf427184d5573494061d4cb7201ae4d7303433f81af359b6bd48b19515f",
"sha256:619776f6070a00a06713ef14f67e03af5bea16afda1897e7936ba8bbf0b3dba6",
"sha256:6934566d3bca7cd0d2912fbb8966882fd7b43b8ec0139b0c7797814e11555be0",
"sha256:6e293ea5141767817c26f085a26fcd3c451c15c1902c5f2a7bdb9a7fe57ebdef",
"sha256:74fa81f69e67363c4d3b4d60a0608adddadb0f2e2f4cc8c1f2dc2b083747fd69",
"sha256:77c7f6984dcaa007bcba8ff67e2e3c7a71a237b63193e5c14fe832493e53976a",
"sha256:7a261b55d72c266d467423966aa48ebf73326736155307c5265a1c99f163f2d7",
"sha256:7dc9a3a108f1730f6cd65a68ae67ea2eb10bd314cffe8dca1afe693c50e84b9b",
"sha256:7ded7df0174040ccd8be60780f43fd3d18bf675ac1170d82d09985d214896521",
"sha256:9096bea0f7aa29e52a3a23759cd6cabee98366a41a2c880db949fcf00f936aa4",
"sha256:9097836ba9a9f631736ab86bb622dcc6155476c7ea0f55486dc2d9b22c07f5d6",
"sha256:b33b2b71ccf6689968a8452e0c1beadf385511df9b55e36d83e688a2832f6b0a",
"sha256:bb45c8aabb3d4e646a25939096cc751cb4e0e4ff9d3bfdcce9fa64ff0622d348",
"sha256:bf9e0ac84a8f473ad723059ac561c307682015a083a8b9b7ff26808780715666",
"sha256:c01ca2b364ef7b02c2ffbed0b423597c9999131f83a1878548d36666ca2a27ed",
"sha256:d6738b13d0f7ddaf02f43ce71353fe079d461bf9bec76f48406071de4a89a1dd",
"sha256:e22998c2d914bee02dc58aa791bc7df6f9733554b244e94b33ff4a491322c142",
"sha256:eaaea91dc56c041516a829c460423a8aef5357001610c8d6395bce95d8254a0b",
"sha256:f8b673775f120d7f54b5f3230742239e21fc5c4e3138372dad87b82617677509"
],
"markers": "python_version >= '3.6'",
"version": "==8.1.0"
},
"tinydb": {
"hashes": [
"sha256:357eb7383dee6915f17b00596ec6dd2a890f3117bf52be28a4c516aeee581100",
"sha256:e2cdf6e2dad49813e9b5fceb3c7943387309a8738125fbff0b58d248a033f7a9"
],
"index": "pypi",
"version": "==4.7.0"
},
"torch": {
"hashes": [
"sha256:03e31c37711db2cd201e02de5826de875529e45a55631d317aadce2f1ed45aa8",
"sha256:0b44601ec56f7dd44ad8afc00846051162ef9c26a8579dda0a02194327f2d55e",
"sha256:42e115dab26f60c29e298559dbec88444175528b729ae994ec4c65d56fe267dd",
"sha256:42f639501928caabb9d1d55ddd17f07cd694de146686c24489ab8c615c2871f2",
"sha256:4e1b9c14cf13fd2ab8d769529050629a0e68a6fc5cb8e84b4a3cc1dd8c4fe541",
"sha256:68104e4715a55c4bb29a85c6a8d57d820e0757da363be1ba680fa8cc5be17b52",
"sha256:69fe2cae7c39ccadd65a123793d30e0db881f1c1927945519c5c17323131437e",
"sha256:6cf6f54b43c0c30335428195589bd00e764a6d27f3b9ba637aaa8c11aaf93073",
"sha256:743784ccea0dc8f2a3fe6a536bec8c4763bd82c1352f314937cb4008d4805de1",
"sha256:8a34a2fbbaa07c921e1b203f59d3d6e00ed379f2b384445773bd14e328a5b6c8",
"sha256:976c3f997cea38ee91a0dd3c3a42322785414748d1761ef926b789dfa97c6134",
"sha256:9b356aea223772cd754edb4d9ecf2a025909b8615a7668ac7d5130f86e7ec421",
"sha256:9c038662db894a23e49e385df13d47b2a777ffd56d9bcd5b832593fab0a7e286",
"sha256:a8320ba9ad87e80ca5a6a016e46ada4d1ba0c54626e135d99b2129a4541c509d",
"sha256:b5dbcca369800ce99ba7ae6dee3466607a66958afca3b740690d88168752abcf",
"sha256:bfec2843daa654f04fda23ba823af03e7b6f7650a873cdb726752d0e3718dada",
"sha256:cd26d8c5640c3a28c526d41ccdca14cf1cbca0d0f2e14e8263a7ac17194ab1d2",
"sha256:e9c8f4a311ac29fc7e8e955cfb7733deb5dbe1bdaabf5d4af2765695824b7e0d",
"sha256:f00c721f489089dc6364a01fd84906348fe02243d0af737f944fddb36003400d",
"sha256:f3b52a634e62821e747e872084ab32fbcb01b7fa7dbb7471b6218279f02a178a"
],
"index": "pypi",
"version": "==1.12.1"
},
"tqdm": {
"hashes": [
"sha256:40be55d30e200777a307a7585aee69e4eabb46b4ec6a4b4a5f2d9f11e7d5408d",
"sha256:74a2cdefe14d11442cedf3ba4e21a3b84ff9a2dbdc6cfae2c34addb2a14a5ea6"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==4.64.0"
},
"typer": {
"hashes": [
"sha256:023bae00d1baf358a6cc7cea45851639360bb716de687b42b0a4641cd99173f1",
"sha256:b8261c6c0152dd73478b5ba96ba677e5d6948c715c310f7c91079f311f62ec03"
],
"markers": "python_version >= '3.6'",
"version": "==0.4.2"
},
"typing-extensions": {
"hashes": [
"sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02",
"sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"
],
"markers": "python_version >= '3.7'",
"version": "==4.3.0"
},
"urllib3": {
"hashes": [
"sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4",
"sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' and python_version < '4.0'",
"version": "==1.24.3"
},
"wasabi": {
"hashes": [
"sha256:c8e372781be19272942382b14d99314d175518d7822057cb7a97010c4259d249",
"sha256:fe862cc24034fbc9f04717cd312ab884f71f51a8ecabebc3449b751c2a649d83"
],
"version": "==0.10.1"
},
"wheel": {
"hashes": [
"sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a",
"sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4"
],
"index": "pypi",
"version": "==0.37.1"
}
},
"develop": {}
}

0
bridges/python/dist/.gitkeep vendored Normal file
View File

View File

@ -0,0 +1,16 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[requires]
python_version = "3.9.10"
[packages]
setuptools = "*"
wheel = "*"
cx-freeze = "==6.11.1"
requests = "==2.28.1"
pytube = "==9.5.0"
tinydb = "==4.7.0"
beautifulsoup4 = "==4.7.1"

View File

@ -0,0 +1,29 @@
from cx_Freeze import setup, Executable
import requests.certs
from version import __version__
options = {
'build_exe': {
# Add common dependencies for skills
'includes': [
'bs4',
'pytube'
],
'include_files': [(requests.certs.where(), 'cacert.pem')]
}
}
executables = [
Executable(
script='bridges/python/src/main.py',
target_name='leon-python-bridge'
)
]
setup(
name='leon-python-bridge',
version=__version__,
executables=executables,
options=options
)

View File

@ -2,20 +2,19 @@
# -*- coding:utf-8 -*-
from json import loads, dumps
from os import path, environ
from pathlib import Path
from random import choice
from sys import argv, stdout
from vars import useragent
from tinydb import TinyDB, Query, table, operations
from time import sleep
import sqlite3
import sys
import os
import requests
import re
dirname = path.dirname(path.realpath(__file__))
dirname = os.path.dirname(os.path.realpath(__file__))
intent_object_path = argv[1]
intent_object_path = sys.argv[1]
codes = []
intent_obj_file = open(intent_object_path, 'r', encoding = 'utf8')
@ -37,7 +36,7 @@ def translate(key, dict = { }):
output = ''
variables = { }
file = open(path.join(dirname, '../../skills', intent_obj['domain'], intent_obj['skill'], 'config', intent_obj['lang'] + '.json'), 'r', encoding = 'utf8')
file = open(os.path.join(os.getcwd(), 'skills', intent_obj['domain'], intent_obj['skill'], 'config', intent_obj['lang'] + '.json'), 'r', encoding = 'utf8')
obj = loads(file.read())
file.close()
@ -92,7 +91,7 @@ def output(type, content = '', core = { }):
}))
if (type == 'inter'):
stdout.flush()
sys.stdout.flush()
def http(method, url, headers = None):
"""Send HTTP request with the Leon user agent"""
@ -108,7 +107,7 @@ def http(method, url, headers = None):
def config(key):
"""Get a skill configuration value"""
file = open(path.join(dirname, '../../skills', intent_obj['domain'], intent_obj['skill'], 'src/config.json'), 'r', encoding = 'utf8')
file = open(os.path.join(os.getcwd(), 'skills', intent_obj['domain'], intent_obj['skill'], 'src/config.json'), 'r', encoding = 'utf8')
obj = loads(file.read())
file.close()
@ -117,8 +116,9 @@ def config(key):
def create_dl_dir():
"""Create the downloads folder of a current skill"""
dl_dir = path.dirname(path.realpath(__file__)) + '/../../downloads/'
skill_dl_dir = path.join(dl_dir, intent_obj['domain'], intent_obj['skill'])
dl_dir = os.path.join(os.getcwd(), 'downloads')
# dl_dir = os.path.dirname(os.path.realpath(__file__)) + '/../../../../downloads/'
skill_dl_dir = os.path.join(dl_dir, intent_obj['domain'], intent_obj['skill'])
Path(skill_dl_dir).mkdir(parents = True, exist_ok = True)
@ -129,8 +129,8 @@ def db(db_type = 'tinydb'):
for a specific skill"""
if db_type == 'tinydb':
ext = '.json' if environ.get('LEON_NODE_ENV') != 'testing' else '.spec.json'
db = TinyDB(path.join(dirname, '../../skills', intent_obj['domain'], intent_obj['skill'], 'memory/db' + ext))
ext = '.json' if os.environ.get('LEON_NODE_ENV') != 'testing' else '.spec.json'
db = TinyDB(os.path.join(os.getcwd(), 'skills', intent_obj['domain'], intent_obj['skill'], 'memory/db' + ext))
return {
'db': db,
'query': Query,
@ -142,6 +142,6 @@ def get_table(slug):
"""Get a table from a specific skill"""
domain, skill, table = slug.split('.')
ext = '.json' if environ.get('LEON_NODE_ENV') != 'testing' else '.spec.json'
db = TinyDB(path.join(dirname, '../../skills', domain, skill, 'memory/db' + ext))
ext = '.json' if os.environ.get('LEON_NODE_ENV') != 'testing' else '.spec.json'
db = TinyDB(os.path.join(os.getcwd(), 'skills', domain, skill, 'memory/db' + ext))
return db.table(table)

View File

@ -1,10 +1,11 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from os import path
import sys
import os
from json import loads
packagejsonfile = open(path.dirname(path.realpath(__file__)) + '/../../package.json', 'r', encoding = 'utf8')
packagejsonfile = open(os.path.join(os.getcwd(), 'package.json'), 'r', encoding = 'utf8')
packagejson = loads(packagejsonfile.read())
packagejsonfile.close()

View File

@ -0,0 +1 @@
__version__ = '1.0.1'

View File

@ -1,4 +1,5 @@
{
"$schema": "../../../schemas/voice-config-schemas/amazon.json",
"credentials": {
"accessKeyId": "",
"secretAccessKey": ""

View File

@ -1,12 +1,13 @@
{
"type": "",
"$schema": "../../../schemas/voice-config-schemas/google-cloud.json",
"type": "service_account",
"project_id": "",
"private_key_id": "",
"private_key": "",
"client_email": "",
"client_email": "example@iam.gserviceaccount.com",
"client_id": "",
"auth_uri": "",
"token_uri": "",
"auth_provider_x509_cert_url": "",
"client_x509_cert_url": ""
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://accounts.google.com/o/oauth2/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/"
}

View File

@ -1,4 +1,5 @@
{
"$schema": "../../../schemas/voice-config-schemas/watson-stt.json",
"apikey": "",
"url": ""
"url": "https://stream.watsonplatform.net/speech-to-text/api"
}

View File

@ -1,4 +1,5 @@
{
"$schema": "../../../schemas/voice-config-schemas/watson-tts.json",
"apikey": "",
"url": ""
"url": "https://stream.watsonplatform.net/text-to-speech/api"
}

View File

@ -1,45 +1,43 @@
{
"answers": {
"success": {
},
"errors": {
"not_found": "Sorry, it seems I cannot find that",
"nlu": "It might come from my natural language understanding, the error returned is: \"%error%\""
},
"$schema": "../../../schemas/global-data/global-answers.json",
"answers": {
"success": {},
"errors": {
"not_found": "Sorry, it seems I cannot find that"
},
"synchronizer": {
"syncing_direct": "I will now synchronize the downloaded content on your current device. Don't worry, I will let you know once I'm done",
"synced_direct": "The new content has been synchronized on your device",
"syncing_google_drive": "I will now synchronize the downloaded content on Google Drive. Don't worry, I will let you know once I'm done",
"synced_google_drive": "The new content is now available on Google Drive"
},
"random_errors": [
"Sorry, there is a problem with my system",
"Sorry, I don't work correctly",
"Sorry, you need to fix me",
"Sorry, I cannot do that because I'm broken"
],
"random_skill_errors": [
"Sorry, it seems I have a problem with the \"%skill_name%\" skill from the \"%domain_name%\" domain",
"Sorry, I have an issue with the \"%skill_name%\" skill from the \"%domain_name%\" domain",
"Sorry, I've got an error with the \"%skill_name%\" skill from the \"%domain_name%\" domain",
"Sorry, the \"%skill_name%\" skill from the \"%domain_name%\" domain is broken"
],
"random_unknown_intents": [
"Sorry, I still don't know this, but you can help me to understand by <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">creating a pull request</a>",
"Sorry, you should teach me this request. You can teach me by <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">creating a pull request</a>",
"Sorry, I cannot answer that. Let me answer you in the future by <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">creating a pull request</a>",
"Sorry, you have to educate me more. You can help me with that by <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">contributing to my code</a>",
"Sorry, I don't understand your query",
"random_errors": [
"Sorry, there is a problem with my system. Please check my logs for further details",
"Sorry, I don't work correctly. Please look at my logs for more information",
"Sorry, you need to fix me. Please take a look at my logs for further information",
"Sorry, I cannot do that because I'm broken. Please check my logs for further details"
],
"random_skill_errors": [
"Sorry, it seems I have a problem with the \"%skill_name%\" skill from the \"%domain_name%\" domain",
"Sorry, I have an issue with the \"%skill_name%\" skill from the \"%domain_name%\" domain",
"Sorry, I've got an error with the \"%skill_name%\" skill from the \"%domain_name%\" domain",
"Sorry, the \"%skill_name%\" skill from the \"%domain_name%\" domain is broken"
],
"random_unknown_intents": [
"Sorry, I still don't know this, but you can help me to understand by <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">creating a pull request</a>",
"Sorry, you should teach me this request. You can teach me by <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">creating a pull request</a>",
"Sorry, I cannot answer that. Let me answer you in the future by <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">creating a pull request</a>",
"Sorry, you have to educate me more. You can help me with that by <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">contributing to my code</a>",
"Sorry, I don't understand your query",
"Sorry, I'm still very young, I didn't get your point"
],
"random_not_sure": [
"Sorry, you may repeat in an another way",
"Sorry, I'm not sure I understood correctly",
"Sorry, I'm not sure for what you asked, please repeat with a different way",
"Sorry, please repeat again by formulating differently",
"Sorry, I didn't correctly clean my ears today! Oh wait, I'm your personal assistant then please try again with a new way"
],
],
"random_not_sure": [
"Sorry, you may repeat in an another way",
"Sorry, I'm not sure I understood correctly",
"Sorry, I'm not sure for what you asked, please repeat with a different way",
"Sorry, please repeat again by formulating differently",
"Sorry, I didn't correctly clean my ears today! Oh wait, I'm your personal assistant then please try again with a new way"
],
"random_not_able": [
"Sorry, I'm not able to answer. I understand what you said, but please repeat in another way",
"Sorry, I have a blackout, I cannot answer that. I understand what you said, but try to repeat in another way"
@ -63,5 +61,5 @@
"Aah, you want to change the subject, sure",
"Mmmh, as you wish, let's switch conversation"
]
}
}
}

View File

@ -1,4 +1,5 @@
{
"$schema": "../../../../schemas/global-data/global-entity.json",
"options": {
"red": {
"synonyms": ["red"],

View File

@ -1,4 +1,5 @@
{
"$schema": "../../../../schemas/global-data/global-entity.json",
"options": {
"LOW": {
"synonyms": ["low"]

View File

@ -1,4 +1,5 @@
{
"$schema": "../../../../schemas/global-data/global-entity.json",
"options": {
"Alexa": {
"synonyms": ["Alexa"],

View File

@ -1,4 +1,5 @@
{
"$schema": "../../../../schemas/global-data/global-resolver.json",
"name": "affirmation_denial",
"intents": {
"affirmation": {
@ -22,7 +23,14 @@
"That works",
"Go ahead",
"Why not",
"Please"
"Please",
"Absolutely",
"Precisely",
"Spot on",
"Undoubtedly",
"Certainly",
"Without a doubt",
"Definitely"
],
"value": true
},
@ -36,7 +44,10 @@
"No thanks",
"No I'm fine",
"Hell no",
"Please do not"
"Please do not",
"I disagree",
"Negative",
"Not at all"
],
"value": false
}

View File

@ -1,45 +1,44 @@
{
"answers": {
"success": {
},
"errors": {
"not_found": "Désolé, il semblerait que je n'arrive pas à trouver ça",
"nlu": "L'erreur semble provenir de ma compréhension de langage naturel. Voici plus de détails au sujet de cette dernière : \"%error%\""
},
"$schema": "../../../schemas/global-data/global-answers.json",
"answers": {
"success": {},
"errors": {
"not_found": "Désolé, il semblerait que je n'arrive pas à trouver ça",
"nlu": "L'erreur semble provenir de ma compréhension de langage naturel. Voici plus de détails au sujet de cette dernière : \"%error%\""
},
"synchronizer": {
"syncing_direct": "Je vais maintenant synchroniser le contenu téléchargé sur votre appareil actuel. Ne vous inquiétez pas, je vous préviendrai lorsque j'aurai terminé",
"synced_direct": "Le nouveau contenu a été synchronisé sur votre appareil",
"syncing_google_drive": "Je vais maintenant synchroniser le contenu téléchargé sur Google Drive. Ne vous inquiétez pas, je vous préviendrai lorsque j'aurai terminé",
"synced_google_drive": "Le nouveau contenu est maintenant disponible sur Google Drive"
},
"random_errors": [
"Désolé, il y a un problème avec mon système",
"Désolé, je ne fonctionne pas correctement",
"Désolé, vous devez me réparer",
"Désolé, je ne peux aboutir à votre demande parce que je suis cassé"
],
"random_skill_errors": [
"Désolé, il semblerait y avoir un problème avec le skill \"%skill_name%\" du domaine \"%domain_name%\"",
"Désolé, j'ai un problème avec le skill \"%skill_name%\" du domaine \"%domain_name%\"",
"Désolé, j'ai une erreur avec le skill \"%skill_name%\" du domaine \"%domain_name%\"",
"Désolé, le skill \"%skill_name%\" du domaine \"%domain_name%\" est cassé"
],
"random_unknown_intents": [
"Désolé, je ne connais pas encore ça, mais vous pouvez m'aider à comprendre en <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">créant une pull request</a>",
"Désolé, vous devriez m'apprendre cette requête. Vous pouvez m'apprendre en <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">créant une pull request</a>",
"Désolé, je ne peux pas répondre à ça. Laissez moi vous répondre à l'avenir en <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">créant une pull request</a>",
"Désolé, vous devez m'éduquer un peu plus. Vous pouvez m'aider avec ça en <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">contribuant à mon code</a>",
"Désolé, je ne comprends pas votre requête",
"random_errors": [
"Désolé, il y a un problème avec mon système. Veuillez consulter mes logs pour plus de détails",
"Désolé, je ne fonctionne pas correctement. Merci de regarder mes logs pour plus d'information",
"Désolé, vous devez me réparer. Veuillez vérifier mes logs pour en savoir plus",
"Désolé, je ne peux aboutir à votre demande parce que je suis cassé. Regardez mes logs pour plus de détails"
],
"random_skill_errors": [
"Désolé, il semblerait y avoir un problème avec le skill \"%skill_name%\" du domaine \"%domain_name%\"",
"Désolé, j'ai un problème avec le skill \"%skill_name%\" du domaine \"%domain_name%\"",
"Désolé, j'ai une erreur avec le skill \"%skill_name%\" du domaine \"%domain_name%\"",
"Désolé, le skill \"%skill_name%\" du domaine \"%domain_name%\" est cassé"
],
"random_unknown_intents": [
"Désolé, je ne connais pas encore ça, mais vous pouvez m'aider à comprendre en <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">créant une pull request</a>",
"Désolé, vous devriez m'apprendre cette requête. Vous pouvez m'apprendre en <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">créant une pull request</a>",
"Désolé, je ne peux pas répondre à ça. Laissez moi vous répondre à l'avenir en <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">créant une pull request</a>",
"Désolé, vous devez m'éduquer un peu plus. Vous pouvez m'aider avec ça en <a href=\"https://github.com/leon-ai/leon/blob/develop/.github/CONTRIBUTING.md\" target=\"_blank\">contribuant à mon code</a>",
"Désolé, je ne comprends pas votre requête",
"Désolé, je suis encore très jeune, je n'ai pas compris votre demande"
],
"random_not_sure": [
"Désolé, vous pouvez répéter d'une autre façon",
"Désolé, je ne suis pas sûr de comprendre",
"Désolé, je ne suis pas certain de votre demande, merci de répéter d'une manière différente",
"Désolé, merci de répéter à nouveau en formulant différemment",
"Désolé, je n'ai pas nettoyé mes oreilles correctement ! Attendez-voir, je suis votre assistant personnel, je vous prie donc de répéter d'une nouvelle façon"
],
],
"random_not_sure": [
"Désolé, vous pouvez répéter d'une autre façon",
"Désolé, je ne suis pas sûr de comprendre",
"Désolé, je ne suis pas certain de votre demande, merci de répéter d'une manière différente",
"Désolé, merci de répéter à nouveau en formulant différemment",
"Désolé, je n'ai pas nettoyé mes oreilles correctement ! Attendez-voir, je suis votre assistant personnel, je vous prie donc de répéter d'une nouvelle façon"
],
"random_not_able": [
"Désolé, je ne suis pas capable de répondre. J'ai compris ce que vous avez dit, mais je vous prie de répéter d'une autre façon",
"Désolé, j'ai un trou de mémoire, je ne peux pas répondre à ça. J'ai compris ce que vous disiez, mais essayez voir d'une autre façon s'il vous plaît"
@ -58,5 +57,5 @@
"Vous êtes génial, mais je n'ai pas encore appris cette langue",
"Ça ressemble à une lautre langue que je ne peux pas comprendre pour le moment"
]
}
}
}

View File

@ -1,19 +1,16 @@
{
"$schema": "../../../../schemas/global-data/global-entity.json",
"options": {
"rouge": {
"synonyms": ["rouge"],
"data": {
"usage": [
"..."
]
"usage": ["..."]
}
},
"bleu": {
"synonyms": ["bleu"],
"data": {
"usage": [
"..."
]
"usage": ["..."]
}
}
}

View File

@ -1,27 +1,22 @@
{
"$schema": "../../../../schemas/global-data/global-entity.json",
"options": {
"bas": {
"synonyms": ["bas", "basse"],
"data": {
"value": [
"LOW"
]
"value": ["LOW"]
}
},
"moyen": {
"synonyms": ["moyen"],
"data": {
"value": [
"MEDIUM"
]
"value": ["MEDIUM"]
}
},
"haut": {
"synonyms": ["haut", "haute"],
"data": {
"value": [
"HIGH"
]
"value": ["HIGH"]
}
}
}

View File

@ -1,4 +1,5 @@
{
"$schema": "../../../../schemas/global-data/global-entity.json",
"options": {
"Alexa": {
"synonyms": ["Alexa"],

View File

@ -1,4 +1,5 @@
{
"$schema": "../../../../schemas/global-data/global-resolver.json",
"name": "affirmation_denial",
"intents": {
"affirmation": {

View File

@ -1,10 +1,9 @@
{
"langs": {
"langs": {
"en-US": {
"short": "en",
"min_confidence": 0.5,
"fallbacks": [
]
"fallbacks": []
},
"fr-FR": {
"short": "fr",
@ -18,5 +17,5 @@
}
]
}
}
}
}
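This map appears to associate each long language code with a short ISO code, a minimum NLU confidence and optional fallback languages. A tiny illustrative lookup against that structure (the file path is an assumption):

```js
// Illustrative only: resolve the short code and confidence threshold for a language
const { langs } = require('./langs.json') // path is an assumption

const { short, min_confidence: minConfidence } = langs['en-US']
console.log(short, minConfidence) // 'en' 0.5
```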

View File

@ -1,11 +1,98 @@
{
"endpoints": [
{
"method": "POST",
"route": "/api/action/news/github_trends/run",
"params": ["number", "daterange"],
"entitiesType": "builtIn"
},
{
"method": "GET",
"route": "/api/action/news/product_hunt_trends/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/create_list",
"params": ["list"],
"entitiesType": "trim"
},
{
"method": "GET",
"route": "/api/action/productivity/todo_list/view_lists",
"params": []
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/view_list",
"params": ["list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/rename_list",
"params": ["old_list", "new_list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/delete_list",
"params": ["list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/add_todos",
"params": ["todos", "list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/complete_todos",
"params": ["todos", "list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/uncheck_todos",
"params": ["todos", "list"],
"entitiesType": "trim"
},
{
"method": "GET",
"route": "/api/action/social_communication/mbti/setup",
"params": []
},
{
"method": "GET",
"route": "/api/action/social_communication/mbti/quiz",
"params": []
},
{
"method": "GET",
"route": "/api/action/utilities/have_i_been_pwned/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/utilities/is_it_down/run",
"params": ["url"],
"entitiesType": "builtIn"
},
{
"method": "GET",
"route": "/api/action/utilities/speed_test/run",
"params": []
},
{
"method": "GET",
"route": "/api/action/utilities/youtube_downloader/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/games/akinator/choose_thematic",
"params": [
"thematic"
],
"params": ["thematic"],
"entitiesType": "trim"
},
{
@ -46,9 +133,7 @@
{
"method": "POST",
"route": "/api/action/games/rochambeau/play",
"params": [
"handsign"
],
"params": ["handsign"],
"entitiesType": "trim"
},
{
@ -120,118 +205,6 @@
"method": "GET",
"route": "/api/action/leon/welcome/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/news/github_trends/run",
"params": [
"number",
"daterange"
],
"entitiesType": "builtIn"
},
{
"method": "GET",
"route": "/api/action/news/product_hunt_trends/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/create_list",
"params": [
"list"
],
"entitiesType": "trim"
},
{
"method": "GET",
"route": "/api/action/productivity/todo_list/view_lists",
"params": []
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/view_list",
"params": [
"list"
],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/rename_list",
"params": [
"old_list",
"new_list"
],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/delete_list",
"params": [
"list"
],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/add_todos",
"params": [
"todos",
"list"
],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/complete_todos",
"params": [
"todos",
"list"
],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/uncheck_todos",
"params": [
"todos",
"list"
],
"entitiesType": "trim"
},
{
"method": "GET",
"route": "/api/action/social_communication/mbti/setup",
"params": []
},
{
"method": "GET",
"route": "/api/action/social_communication/mbti/quiz",
"params": []
},
{
"method": "GET",
"route": "/api/action/utilities/have_i_been_pwned/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/utilities/is_it_down/run",
"params": [
"url"
],
"entitiesType": "builtIn"
},
{
"method": "GET",
"route": "/api/action/utilities/speed_test/run",
"params": []
},
{
"method": "GET",
"route": "/api/action/utilities/youtube_downloader/run",
"params": []
}
]
}
}
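Each entry above corresponds to an HTTP route exposed by the server, so skills can be triggered over plain HTTP. A minimal sketch of such a call follows; the port, the API key value and the `X-API-Key` header name are assumptions taken from common development defaults, not guaranteed by this file.

```js
// Illustrative call to one of the GET endpoints listed above
const API_KEY = 'your-leon-http-api-key' // value of LEON_HTTP_API_KEY in .env

async function runWelcome() {
  const res = await fetch('http://localhost:1337/api/action/leon/welcome/run', {
    headers: { 'X-API-Key': API_KEY } // header name is an assumption
  })
  return res.json()
}

runWelcome().then(console.log).catch(console.error)
```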

View File

@ -12,6 +12,6 @@ services:
tty: true
command: 'npm run dev:server && npm run dev:app'
volumes:
- './:/app'
- '/app/node_modules'
- './:/home/docker/leon'
- '/home/docker/leon/node_modules'
network_mode: 'host'

View File

@ -2,6 +2,7 @@
This node enables the wake word "Leon". Once this is running, you can
call Leon by saying his name according to the language you chose.
## Getting Started
### Installation

View File

@ -3,9 +3,7 @@
* You can consider running this file on different hardware
*/
/* eslint-disable import/no-unresolved */
const request = require('superagent')
const axios = require('axios')
const record = require('node-record-lpcm16')
const { Detector, Models } = require('@bugsounet/snowboy')
const { io } = require('socket.io-client')
@ -23,63 +21,63 @@ socket.on('connect', () => {
console.log('Connected to the server')
console.log('Waiting for hotword...')
})
;(async () => {
try {
await axios.get(`${url}/api/v1/info`)
request.get(`${url}/api/v1/info`)
.end((err, res) => {
if (err || !res.ok) {
if (!err.response) {
console.error(`Failed to reach the server: ${err}`)
} else {
console.error(err.response.error.message)
}
const models = new Models()
models.add({
file: `${__dirname}/models/leon-${lang}.pmdl`,
sensitivity: '0.5',
hotwords: `leon-${lang}`
})
const detector = new Detector({
resource: `${__dirname}/node_modules/@bugsounet/snowboy/resources/common.res`,
models,
audioGain: 2.0,
applyFrontend: true
})
/*detector.on('silence', () => {
})*/
detector.on('sound', (/* buffer */) => {
/**
* <buffer> contains the last chunk of the audio that triggers the "sound" event.
* It could be written to a wav stream
*/
})
detector.on('error', () => {
console.error('error')
})
detector.on('hotword', (index, hotword, buffer) => {
/**
* <buffer> contains the last chunk of the audio that triggers the "hotword" event.
* It could be written to a wav stream. You will have to use it
* together with the <buffer> in the "sound" event if you want to get audio
* data after the hotword
*/
const obj = { hotword, buffer }
console.log('Hotword detected', obj)
socket.emit('hotword-detected', obj)
})
const mic = record.start({
threshold: 0,
verbose: false
})
mic.pipe(detector)
} catch (e) {
if (!e.response) {
console.error(`Failed to reach the server: ${e}`)
} else {
const models = new Models()
models.add({
file: `${__dirname}/models/leon-${lang}.pmdl`,
sensitivity: '0.5',
hotwords: `leon-${lang}`
})
const detector = new Detector({
resource: `${__dirname}/node_modules/@bugsounet/snowboy/resources/common.res`,
models,
audioGain: 2.0,
applyFrontend: true
})
detector.on('silence', () => {
})
detector.on('sound', (/* buffer */) => {
/**
* <buffer> contains the last chunk of the audio that triggers the "sound" event.
* It could be written to a wav stream
*/
})
detector.on('error', () => {
console.error('error')
})
detector.on('hotword', (index, hotword, buffer) => {
/**
* <buffer> contains the last chunk of the audio that triggers the "hotword" event.
* It could be written to a wav stream. You will have to use it
* together with the <buffer> in the "sound" event if you want to get audio
* data after the hotword
*/
const obj = { hotword, buffer }
console.log('Hotword detected', obj)
socket.emit('hotword-detected', obj)
})
const mic = record.start({
threshold: 0,
verbose: false
})
mic.pipe(detector)
console.error(e)
}
})
}
})()

1449
hotword/package-lock.json generated

File diff suppressed because it is too large

View File

@ -13,10 +13,10 @@
},
"license": "MIT",
"dependencies": {
"node-record-lpcm16": "^0.3.0",
"@mapbox/node-pre-gyp": "^1.0.8",
"@bugsounet/snowboy": "^2.2.3",
"socket.io-client": "^4.0.0",
"superagent": "^3.5.2"
"@bugsounet/snowboy": "^2.2.5",
"@mapbox/node-pre-gyp": "^1.0.10",
"node-record-lpcm16": "^0.3.1",
"socket.io-client": "^4.5.2",
"superagent": "^8.0.0"
}
}

7
nodemon.json Normal file
View File

@ -0,0 +1,7 @@
{
"verbose": false,
"watch": ["server/src"],
"ext": "ts,js,json",
"ignore": [".git", "node_modules", "server/src/tmp", "server/dist"],
"exec": "ts-node server/src/index.ts"
}

35751
package-lock.json generated

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "leon",
"version": "1.0.0-beta.7",
"version": "1.0.0-beta.8",
"description": "Server, skills and web app of the Leon personal assistant",
"author": {
"name": "Louis Grenard",
@ -21,92 +21,125 @@
"npm": ">=8.0.0"
},
"scripts": {
"lint": "babel-node scripts/lint.js",
"lint": "ts-node scripts/lint.js",
"test": "npm run test:json && npm run test:over-http && npm run test:unit && npm run test:e2e",
"test:unit": "npm run train en && cross-env PIPENV_PIPFILE=bridges/python/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --projects test/unit/unit.jest.json && npm run train",
"test:unit": "npm run train en && cross-env PIPENV_PIPFILE=bridges/python/src/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --projects test/unit/unit.jest.json && npm run train",
"test:e2e": "npm run test:e2e:nlp-modules && npm run test:e2e:modules",
"test:e2e:modules": "babel-node scripts/run-clean-test-dbs.js && npm run train en && cross-env PIPENV_PIPFILE=bridges/python/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --verbose --projects test/e2e/modules/e2e.modules.jest.json && babel-node scripts/run-clean-test-dbs.js && npm run train",
"test:e2e:nlp-modules": "npm run train en && cross-env PIPENV_PIPFILE=bridges/python/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --verbose --setupTestFrameworkScriptFile=./test/paths.setup.js test/e2e/nlp-modules.spec.js && npm run train",
"test:e2e:modules": "ts-node scripts/run-clean-test-dbs.js && npm run train en && cross-env PIPENV_PIPFILE=bridges/python/src/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --verbose --projects test/e2e/modules/e2e.modules.jest.json && ts-node scripts/run-clean-test-dbs.js && npm run train",
"test:e2e:nlp-modules": "npm run train en && cross-env PIPENV_PIPFILE=bridges/python/src/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --verbose --setupTestFrameworkScriptFile=./test/paths.setup.js test/e2e/nlp-modules.spec.js && npm run train",
"test:json": "jest --silent --projects test/json/json.jest.json",
"test:over-http": "npm run generate:skills-endpoints && npm run train && cross-env PIPENV_PIPFILE=bridges/python/Pipfile LEON_NODE_ENV=testing LEON_HOST=http://localhost LEON_PORT=1338 LEON_HTTP_API_KEY=72aeb5ba324580963114481144385d7179c106fc jest --forceExit --silent --verbose --notify=false --bail --collectCoverage=false test/e2e/over-http.spec.js",
"test:module": "babel-node scripts/test-module.js",
"setup:offline": "babel-node scripts/setup-offline/setup-offline.js",
"setup:offline-stt": "babel-node scripts/setup-offline/run-setup-stt.js",
"setup:offline-tts": "babel-node scripts/setup-offline/run-setup-tts.js",
"setup:offline-hotword": "babel-node scripts/setup-offline/run-setup-hotword.js",
"test:over-http": "npm run generate:skills-endpoints && npm run train && cross-env PIPENV_PIPFILE=bridges/python/src/Pipfile LEON_NODE_ENV=testing LEON_HOST=http://localhost LEON_PORT=1338 LEON_HTTP_API_KEY=72aeb5ba324580963114481144385d7179c106fc jest --forceExit --silent --verbose --notify=false --bail --collectCoverage=false test/e2e/over-http.spec.js",
"test:module": "ts-node scripts/test-module.js",
"setup:offline": "ts-node scripts/setup-offline/setup-offline.js",
"setup:offline-stt": "ts-node scripts/setup-offline/run-setup-stt.js",
"setup:offline-tts": "ts-node scripts/setup-offline/run-setup-tts.js",
"setup:offline-hotword": "ts-node scripts/setup-offline/run-setup-hotword.js",
"setup:python-bridge": "ts-node scripts/setup/setup-python-dev-env.js python-bridge",
"setup:tcp-server": "ts-node scripts/setup/setup-python-dev-env.js tcp-server",
"preinstall": "node scripts/setup/preinstall.js",
"postinstall": "babel-node scripts/setup/setup.js",
"postinstall": "ts-node scripts/setup/setup.js",
"dev:app": "vite --config app/vite.config.js",
"dev:server": "npm run train && npm run generate:skills-endpoints && cross-env LEON_NODE_ENV=development nodemon --watch server ./server/src/index.js --ignore server/src/tmp/ --exec babel-node",
"dev:server": "npm run train && npm run generate:skills-endpoints && cross-env LEON_NODE_ENV=development tsc-watch --noClear --onSuccess \"nodemon\"",
"dev:server:no-lint": "npm run train && npm run generate:skills-endpoints && cross-env LEON_NODE_ENV=development \"nodemon\"",
"wake": "cross-env LEON_HOST=http://localhost LEON_PORT=1337 node hotword/index.js",
"delete-dist:server": "shx rm -rf ./server/dist",
"clean:python-deps": "shx rm -rf ./bridges/python/src/.venv && npm run postinstall",
"prepare": "husky install",
"generate:skills-endpoints": "babel-node scripts/generate/run-generate-skills-endpoints.js",
"generate:http-api-key": "babel-node scripts/generate/run-generate-http-api-key.js",
"generate:skills-endpoints": "ts-node scripts/generate/run-generate-skills-endpoints.js",
"generate:http-api-key": "ts-node scripts/generate/run-generate-http-api-key.js",
"generate:json-schemas": "ts-node scripts/generate/run-generate-json-schemas.js",
"build": "npm run build:app && npm run build:server",
"build:app": "cross-env LEON_NODE_ENV=production babel-node scripts/app/run-build-app.js",
"build:server": "npm run delete-dist:server && npm run train && npm run generate:skills-endpoints && babel ./server/src -d ./server/dist --copy-files && shx mkdir -p server/dist/tmp",
"start:tcp-server": "cross-env PIPENV_PIPFILE=bridges/python/Pipfile pipenv run python bridges/python/tcp_server/main.py",
"start": "cross-env LEON_NODE_ENV=production node ./server/dist/index.js",
"train": "babel-node scripts/train/run-train.js",
"prepare-release": "babel-node scripts/release/prepare-release.js",
"check": "babel-node scripts/run-check.js",
"build:app": "cross-env LEON_NODE_ENV=production ts-node scripts/app/run-build-app.js",
"build:server": "npm run delete-dist:server && npm run train && npm run generate:skills-endpoints && tsc --project tsconfig.json && resolve-tspaths && shx rm -rf server/dist/core server/dist/package.json && shx mv -f server/dist/server/src/* server/dist && shx rm -rf server/dist/server && shx mkdir -p server/dist/tmp",
"build:nodejs-bridge": "ts-node scripts/build-binaries.js nodejs-bridge",
"build:python-bridge": "ts-node scripts/build-binaries.js python-bridge",
"build:tcp-server": "ts-node scripts/build-binaries.js tcp-server",
"start:tcp-server": "cross-env PIPENV_PIPFILE=tcp_server/src/Pipfile pipenv run python tcp_server/src/main.py",
"start": "cross-env LEON_NODE_ENV=production node server/dist/pre-check.js && node server/dist/index.js",
"python-bridge": "cross-env PIPENV_PIPFILE=bridges/python/src/Pipfile pipenv run python bridges/python/src/main.py server/src/intent-object.sample.json",
"train": "ts-node scripts/train/run-train.js",
"prepare-release": "ts-node scripts/release/prepare-release.js",
"pre-release:nodejs-bridge": "ts-node scripts/release/pre-release-binaries.js nodejs-bridge",
"pre-release:python-bridge": "ts-node scripts/release/pre-release-binaries.js python-bridge",
"pre-release:tcp-server": "ts-node scripts/release/pre-release-binaries.js tcp-server",
"check": "ts-node scripts/check.js",
"docker:build": "docker build -t leon-ai/leon .",
"docker:run": "docker compose up",
"docker:dev": "docker compose --file=docker-compose.dev.yml up",
"docker:check": "docker run --rm --interactive leon-ai/leon npm run check"
},
"dependencies": {
"@aws-sdk/client-polly": "^3.18.0",
"@ffmpeg-installer/ffmpeg": "^1.1.0",
"@ffprobe-installer/ffprobe": "^1.3.0",
"@google-cloud/speech": "^4.2.0",
"@google-cloud/text-to-speech": "^3.2.1",
"@nlpjs/builtin-microsoft": "^4.22.7",
"@nlpjs/core-loader": "^4.22.7",
"@nlpjs/lang-all": "^4.22.12",
"@nlpjs/nlp": "^4.22.17",
"archiver": "^5.3.0",
"async": "^3.2.0",
"cross-env": "^7.0.3",
"dotenv": "^10.0.0",
"execa": "^5.0.0",
"fastify": "^3.25.2",
"fastify-static": "^4.5.0",
"fluent-ffmpeg": "^2.1.2",
"googleapis": "^67.1.1",
"ibm-watson": "^6.1.1",
"moment-timezone": "^0.5.33",
"@aws-sdk/client-polly": "3.18.0",
"@fastify/static": "6.9.0",
"@ffmpeg-installer/ffmpeg": "1.1.0",
"@ffprobe-installer/ffprobe": "1.4.1",
"@google-cloud/speech": "4.2.0",
"@google-cloud/text-to-speech": "3.2.1",
"@nlpjs/builtin-microsoft": "4.22.7",
"@nlpjs/core-loader": "4.22.7",
"@nlpjs/lang-all": "4.22.12",
"@nlpjs/nlp": "4.22.17",
"@segment/ajv-human-errors": "2.1.2",
"@sinclair/typebox": "0.25.8",
"ajv": "8.11.0",
"ajv-formats": "2.1.1",
"archiver": "5.3.1",
"async": "3.2.4",
"axios": "1.1.2",
"cross-env": "7.0.3",
"dayjs": "1.11.5",
"dotenv": "10.0.0",
"execa": "5.0.0",
"extract-zip": "2.0.1",
"fastify": "4.15.0",
"fluent-ffmpeg": "2.1.2",
"getos": "3.2.1",
"googleapis": "67.1.1",
"ibm-watson": "6.1.1",
"node-wav": "0.0.2",
"socket.io": "^4.4.0",
"socket.io-client": "^4.4.0",
"stt": "^1.1.0",
"superagent": "^6.1.0",
"tree-kill": "^1.2.2"
"os-name": "4.0.1",
"pretty-bytes": "5.6.0",
"pretty-ms": "7.0.1",
"socket.io": "4.5.2",
"socket.io-client": "4.5.2",
"stt": "1.4.0",
"tree-kill": "1.2.2"
},
"devDependencies": {
"@babel/cli": "^7.16.0",
"@babel/core": "^7.16.0",
"@babel/node": "^7.16.0",
"@babel/preset-env": "^7.16.0",
"@nlpjs/utils": "^4.24.1",
"babel-plugin-module-resolver": "^4.1.0",
"cli-spinner": "^0.2.10",
"eslint": "^7.28.0",
"eslint-config-airbnb-base": "^14.2.1",
"eslint-import-resolver-babel-module": "^5.3.1",
"eslint-plugin-import": "^2.23.2",
"eslint-plugin-jest": "^24.3.6",
"git-changelog": "^2.0.0",
"husky": "^7.0.0",
"inquirer": "^8.1.0",
"jest": "^27.4.7",
"jest-canvas-mock": "^2.3.1",
"jest-extended": "^2.0.0",
"json": "^10.0.0",
"nodemon": "^2.0.7",
"semver": "^7.3.5",
"shx": "^0.3.3",
"vite": "^2.7.7"
"@nlpjs/utils": "4.24.1",
"@swc/core": "1.3.14",
"@tsconfig/node16-strictest": "1.0.4",
"@types/archiver": "5.3.2",
"@types/cli-spinner": "0.2.1",
"@types/fluent-ffmpeg": "2.1.20",
"@types/getos": "3.0.1",
"@types/node": "18.7.13",
"@types/node-wav": "0.0.0",
"@typescript-eslint/eslint-plugin": "5.55.0",
"@typescript-eslint/parser": "5.55.0",
"cli-spinner": "0.2.10",
"eslint": "8.22.0",
"eslint-config-prettier": "8.5.0",
"eslint-import-resolver-typescript": "3.5.1",
"eslint-plugin-import": "2.26.0",
"eslint-plugin-unicorn": "43.0.2",
"git-changelog": "2.0.0",
"husky": "7.0.0",
"inquirer": "8.1.0",
"jest": "27.4.7",
"jest-canvas-mock": "2.3.1",
"jest-extended": "2.0.0",
"json": "10.0.0",
"lint-staged": "13.0.3",
"nodemon": "2.0.19",
"prettier": "2.8.7",
"resolve-tspaths": "0.8.8",
"semver": "7.3.5",
"shx": "0.3.3",
"ts-node": "10.9.1",
"tsc-watch": "6.0.0",
"tsconfig-paths": "4.1.2",
"typescript": "5.0.2",
"vite": "3.0.9"
}
}

View File

@ -1,13 +1,17 @@
import { command } from 'execa'
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
/**
* Build web app
*/
export default () => new Promise(async (resolve) => {
await command('vite --config app/vite.config.js build', { shell: true, stdout: 'inherit' })
export default () =>
new Promise(async (resolve) => {
await command('vite --config app/vite.config.js build', {
shell: true,
stdout: 'inherit'
})
log.success('Web app built')
resolve()
})
LogHelper.success('Web app built')
resolve()
})

View File

@ -1,14 +1,14 @@
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
import buildApp from './build-app'
/**
* Execute the building app script
*/
(async () => {
;(async () => {
try {
await buildApp()
} catch (e) {
log.error(`Failed to build: ${e}`)
LogHelper.error(`Failed to build: ${e}`)
}
})()

View File

@ -2,4 +2,4 @@
<% _.forEach(sections, (section) => { if(section.commitsCount > 0) { %>### <%= section.title %>
<% _.forEach(section.commits, (commit) => { %> - <%= printCommit(commit, true) %><% }) %>
<% _.forEach(section.components.sort((a, b) => a !== b ? a < b ? -1 : 0 : 1), (component) => { %> - **<%= component.name %>:**
<% _.forEach(component.commits, (commit) => { %> <%= (component.commits.length > 1) ? ' -' : '' %> <%= printCommit(commit, true) %><% }) %><% }) %><% } %><% }) %>
<% _.forEach(component.commits, (commit) => { %> <%= (component.commits.length > 1) ? ' -' : '' %> <%= printCommit(commit, true) %><% }) %><% }) %><% } %><% }) %>

View File

@ -1 +1,12 @@
{"lang":"en","domain":"leon","skill":"random_number","action":"run","utterance":"Give me a random number","slots":{},"entities":[],"current_entities":[],"resolvers":[],"current_resolvers":[]}
{
"lang": "en",
"domain": "leon",
"skill": "random_number",
"action": "run",
"utterance": "Give me a random number",
"slots": {},
"entities": [],
"current_entities": [],
"resolvers": [],
"current_resolvers": []
}

195
scripts/build-binaries.js Normal file
View File

@ -0,0 +1,195 @@
import path from 'node:path'
import fs from 'node:fs'
import { command } from 'execa'
import archiver from 'archiver'
import prettyBytes from 'pretty-bytes'
import {
PYTHON_BRIDGE_SRC_PATH,
TCP_SERVER_SRC_PATH,
BINARIES_FOLDER_NAME,
NODEJS_BRIDGE_DIST_PATH,
PYTHON_BRIDGE_DIST_PATH,
TCP_SERVER_DIST_PATH,
NODEJS_BRIDGE_BIN_NAME,
PYTHON_BRIDGE_BIN_NAME,
TCP_SERVER_BIN_NAME,
NODEJS_BRIDGE_ROOT_PATH
} from '@/constants'
import { OSTypes } from '@/types'
import { LogHelper } from '@/helpers/log-helper'
import { LoaderHelper } from '@/helpers/loader-helper'
import { SystemHelper } from '@/helpers/system-helper'
/**
* Build binaries for the given OS according to the given build target
* 1. Get the correct OS platform and CPU architecture
* 2. If Linux, install the required dependencies
* 3. Build the given build target
* 4. Pack the distribution entities into a ZIP file
*/
const BUILD_TARGETS = new Map()
BUILD_TARGETS.set('nodejs-bridge', {
name: 'Node.js bridge',
needsPythonEnv: false,
distPath: NODEJS_BRIDGE_DIST_PATH,
archiveName: `${NODEJS_BRIDGE_BIN_NAME.split('.')[0]}.zip`
})
BUILD_TARGETS.set('python-bridge', {
name: 'Python bridge',
needsPythonEnv: true,
pipfilePath: path.join(PYTHON_BRIDGE_SRC_PATH, 'Pipfile'),
setupFilePath: path.join(PYTHON_BRIDGE_SRC_PATH, 'setup.py'),
distPath: PYTHON_BRIDGE_DIST_PATH,
archiveName: `${PYTHON_BRIDGE_BIN_NAME}-${BINARIES_FOLDER_NAME}.zip`,
dotVenvPath: path.join(PYTHON_BRIDGE_SRC_PATH, '.venv')
})
BUILD_TARGETS.set('tcp-server', {
name: 'TCP server',
needsPythonEnv: true,
pipfilePath: path.join(TCP_SERVER_SRC_PATH, 'Pipfile'),
setupFilePath: path.join(TCP_SERVER_SRC_PATH, 'setup.py'),
distPath: TCP_SERVER_DIST_PATH,
archiveName: `${TCP_SERVER_BIN_NAME}-${BINARIES_FOLDER_NAME}.zip`,
dotVenvPath: path.join(TCP_SERVER_SRC_PATH, '.venv')
})
;(async () => {
LoaderHelper.start()
const { argv } = process
const givenBuildTarget = argv[2].toLowerCase()
if (!BUILD_TARGETS.has(givenBuildTarget)) {
LogHelper.error(
`Invalid build target: ${givenBuildTarget}. Valid targets are: ${Array.from(
BUILD_TARGETS.keys()
).join(', ')}`
)
process.exit(1)
}
const {
name: buildTarget,
needsPythonEnv,
pipfilePath,
setupFilePath,
distPath,
archiveName,
dotVenvPath
} = BUILD_TARGETS.get(givenBuildTarget)
const buildPath = needsPythonEnv
? path.join(distPath, BINARIES_FOLDER_NAME)
: distPath
const { type: osType } = SystemHelper.getInformation()
/**
* Install requirements
*/
try {
if (needsPythonEnv && osType === OSTypes.Linux) {
LogHelper.info('Checking whether the "patchelf" utility can be found...')
await command('patchelf --version', { shell: true })
LogHelper.success('The "patchelf" utility has been found')
}
} catch (e) {
const installPatchelfCommand = 'sudo apt install patchelf'
LogHelper.error(
`The "patchelf" utility is not installed. Please run the following command: "${installPatchelfCommand}" or install it via a packages manager supported by your Linux distribution such as DNF, YUM, etc. Then try again`
)
process.exit(1)
}
LogHelper.info(`Building the ${buildTarget}...`)
if (needsPythonEnv) {
/**
* Build for binaries requiring a Python environment
*/
try {
// Required environment variables to set up
process.env.PIPENV_PIPFILE = pipfilePath
process.env.PIPENV_VENV_IN_PROJECT = true
await command(
`pipenv run python ${setupFilePath} build --build-exe ${buildPath}`,
{
shell: true,
stdio: 'inherit'
}
)
LogHelper.success(`The ${buildTarget} has been built`)
} catch (e) {
LogHelper.error(
`An error occurred while building the ${buildTarget}. Try to delete the ${dotVenvPath} folder, run the setup command then build again: ${e}`
)
process.exit(1)
}
} else {
/**
* Build for binaries not requiring a Python environment
*/
try {
const tsconfigPath = path.join(NODEJS_BRIDGE_ROOT_PATH, 'tsconfig.json')
const distMainFilePath = path.join(NODEJS_BRIDGE_DIST_PATH, 'main.js')
const distRenamedMainFilePath = path.join(
NODEJS_BRIDGE_DIST_PATH,
NODEJS_BRIDGE_BIN_NAME
)
await command(`tsc --project ${tsconfigPath}`, {
shell: true,
stdio: 'inherit'
})
await fs.promises.rename(distMainFilePath, distRenamedMainFilePath)
LogHelper.success(`The ${buildTarget} has been built`)
} catch (e) {
LogHelper.error(
`An error occurred while building the ${buildTarget}: ${e}`
)
process.exit(1)
}
}
/**
* Pack distribution entities into a ZIP archive
*/
const archivePath = path.join(distPath, archiveName)
LogHelper.info(`Packing to ${archivePath}...`)
const output = fs.createWriteStream(archivePath)
const archive = archiver('zip')
output.on('close', () => {
const size = prettyBytes(archive.pointer())
LogHelper.info(`Total archive size: ${size}`)
LogHelper.success(`${buildTarget} has been packed to ${archivePath}`)
process.exit(0)
})
archive.on('error', (err) => {
LogHelper.error(
`An error occurred while packing the ${buildTarget}: ${err}`
)
})
archive.pipe(output)
if (needsPythonEnv) {
archive.directory(buildPath, BINARIES_FOLDER_NAME)
} else {
archive.glob(`**/!(${archiveName})`, { cwd: distPath })
}
await archive.finalize()
})()

View File

@ -1,54 +1,59 @@
import execa from 'execa'
import log from '@/helpers/log'
import os from '@/helpers/os'
import { LogHelper } from '@/helpers/log-helper'
import { SystemHelper } from '@/helpers/system-helper'
/**
* Check OS environment
*/
export default () => new Promise(async (resolve, reject) => {
log.info('Checking OS environment...')
export default () =>
new Promise(async (resolve, reject) => {
LogHelper.info('Checking OS environment...')
const info = os.get()
const info = SystemHelper.getInformation()
if (info.type === 'windows') {
log.error('Voice offline mode is not available on Windows')
reject()
} else if (info.type === 'unknown') {
log.error('This OS is unknown, please open an issue to let us know about it')
reject()
} else {
try {
log.success(`You are running ${info.name}`)
log.info('Checking tools...')
if (info.type === 'windows') {
LogHelper.error('Voice offline mode is not available on Windows')
reject()
} else if (info.type === 'unknown') {
LogHelper.error(
'This OS is unknown, please open an issue to let us know about it'
)
reject()
} else {
try {
LogHelper.success(`You are running ${info.name}`)
LogHelper.info('Checking tools...')
await execa('tar', ['--version'])
log.success('"tar" found')
await execa('make', ['--version'])
log.success('"make" found')
await execa('tar', ['--version'])
LogHelper.success('"tar" found')
await execa('make', ['--version'])
LogHelper.success('"make" found')
if (info.type === 'macos') {
await execa('brew', ['--version'])
log.success('"brew" found')
await execa('curl', ['--version'])
log.success('"curl" found')
} else if (info.type === 'linux') {
await execa('apt-get', ['--version'])
log.success('"apt-get" found')
await execa('wget', ['--version'])
log.success('"wget" found')
if (info.type === 'macos') {
await execa('brew', ['--version'])
LogHelper.success('"brew" found')
await execa('curl', ['--version'])
LogHelper.success('"curl" found')
} else if (info.type === 'linux') {
await execa('apt-get', ['--version'])
LogHelper.success('"apt-get" found')
await execa('wget', ['--version'])
LogHelper.success('"wget" found')
}
resolve()
} catch (e) {
if (e.cmd) {
const cmd = e.cmd.substr(0, e.cmd.indexOf(' '))
LogHelper.error(
          `The following command has failed: "${e.cmd}". "${cmd}" may be missing. To continue this setup, please install the required tool. More details about the failure: ${e}`
)
} else {
LogHelper.error(`Failed to prepare the environment: ${e}`)
}
reject(e)
}
resolve()
} catch (e) {
if (e.cmd) {
const cmd = e.cmd.substr(0, e.cmd.indexOf(' '))
log.error(`The following command has failed: "${e.cmd}". "${cmd}" is maybe missing. To continue this setup, please install the required tool. More details about the failure: ${e}`)
} else {
log.error(`Failed to prepare the environment: ${e}`)
}
reject(e)
}
}
})
})

View File

@ -1,241 +1,556 @@
import fs from 'node:fs'
import path from 'node:path'
import os from 'node:os'
import { spawn } from 'node:child_process'
import dotenv from 'dotenv'
import fs from 'fs'
import { command } from 'execa'
import semver from 'semver'
import kill from 'tree-kill'
import axios from 'axios'
import osName from 'os-name'
import getos from 'getos'
import { version } from '@@/package.json'
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
import { SystemHelper } from '@/helpers/system-helper'
import {
MINIMUM_REQUIRED_RAM,
LEON_VERSION,
PYTHON_BRIDGE_BIN_PATH,
TCP_SERVER_BIN_PATH,
TCP_SERVER_VERSION,
PYTHON_BRIDGE_VERSION,
INSTANCE_ID
} from '@/constants'
dotenv.config()
/**
* Checking script
* Help to figure out what is installed or not
 * Helps to figure out the setup state
*/
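// Typically invoked through the "check" npm script ("ts-node scripts/check.js"),
// e.g. `npm run check`, or inside Docker via `npm run docker:check`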
export default () => new Promise(async (resolve, reject) => {
;(async () => {
try {
const nodeMinRequiredVersion = '10'
const npmMinRequiredVersion = '5'
const pythonMinRequiredVersion = '3'
const nodeMinRequiredVersion = '16'
const npmMinRequiredVersion = '8'
const flitePath = 'bin/flite/flite'
const coquiLanguageModelPath = 'bin/coqui/huge-vocabulary.scorer'
const amazonPath = 'core/config/voice/amazon.json'
const googleCloudPath = 'core/config/voice/google-cloud.json'
const watsonSttPath = 'core/config/voice/watson-stt.json'
const watsonTtsPath = 'core/config/voice/watson-tts.json'
const globalResolversNlpModelPath = 'core/data/models/leon-global-resolvers-model.nlp'
const skillsResolversNlpModelPath = 'core/data/models/leon-skills-resolvers-model.nlp'
const globalResolversNlpModelPath =
'core/data/models/leon-global-resolvers-model.nlp'
const skillsResolversNlpModelPath =
'core/data/models/leon-skills-resolvers-model.nlp'
const mainNlpModelPath = 'core/data/models/leon-main-model.nlp'
const report = {
can_run: { title: 'Run', type: 'error', v: true },
can_run_skill: { title: 'Run skills', type: 'error', v: true },
can_text: { title: 'Reply you by texting', type: 'error', v: true },
can_amazon_polly_tts: { title: 'Amazon Polly text-to-speech', type: 'warning', v: true },
can_google_cloud_tts: { title: 'Google Cloud text-to-speech', type: 'warning', v: true },
can_watson_tts: { title: 'Watson text-to-speech', type: 'warning', v: true },
can_offline_tts: { title: 'Offline text-to-speech', type: 'warning', v: true },
can_google_cloud_stt: { title: 'Google Cloud speech-to-text', type: 'warning', v: true },
can_watson_stt: { title: 'Watson speech-to-text', type: 'warning', v: true },
can_offline_stt: { title: 'Offline speech-to-text', type: 'warning', v: true }
can_start_tcp_server: {
title: 'Start the TCP server',
type: 'error',
v: true
},
can_amazon_polly_tts: {
title: 'Amazon Polly text-to-speech',
type: 'warning',
v: true
},
can_google_cloud_tts: {
title: 'Google Cloud text-to-speech',
type: 'warning',
v: true
},
can_watson_tts: {
title: 'Watson text-to-speech',
type: 'warning',
v: true
},
can_offline_tts: {
title: 'Offline text-to-speech',
type: 'warning',
v: true
},
can_google_cloud_stt: {
title: 'Google Cloud speech-to-text',
type: 'warning',
v: true
},
can_watson_stt: {
title: 'Watson speech-to-text',
type: 'warning',
v: true
},
can_offline_stt: {
title: 'Offline speech-to-text',
type: 'warning',
v: true
}
}
let reportDataInput = {
leonVersion: null,
instanceID: INSTANCE_ID || null,
environment: {
osDetails: null,
nodeVersion: null,
npmVersion: null
},
nlpModels: {
globalResolversModelState: null,
skillsResolversModelState: null,
mainModelState: null
},
pythonBridge: {
version: null,
executionTime: null,
command: null,
output: null,
error: null
},
tcpServer: {
version: null,
startTime: null,
command: null,
output: null,
error: null
},
report: null
}
log.title('Checking')
LogHelper.title('Checking')
// Leon version checking
/**
* Leon version checking
*/
log.info('Leon version')
log.success(`${version}\n`);
LogHelper.info('Leon version')
LogHelper.success(`${LEON_VERSION}\n`)
reportDataInput.leonVersion = LEON_VERSION
// Environment checking
/**
* Environment checking
*/
(await Promise.all([
command('node --version', { shell: true }),
command('npm --version', { shell: true }),
command('pipenv --version', { shell: true })
])).forEach((p) => {
log.info(p.command)
LogHelper.info('Environment')
if (p.command.indexOf('node --version') !== -1
&& !semver.satisfies(semver.clean(p.stdout), `>=${nodeMinRequiredVersion}`)) {
Object.keys(report).forEach((item) => { if (report[item].type === 'error') report[item].v = false })
log.error(`${p.stdout}\nThe Node.js version must be >=${nodeMinRequiredVersion}. Please install it: https://nodejs.org (or use nvm)\n`)
} else if (p.command.indexOf('npm --version') !== -1
&& !semver.satisfies(semver.clean(p.stdout), `>=${npmMinRequiredVersion}`)) {
Object.keys(report).forEach((item) => { if (report[item].type === 'error') report[item].v = false })
log.error(`${p.stdout}\nThe npm version must be >=${npmMinRequiredVersion}. Please install it: https://www.npmjs.com/get-npm (or use nvm)\n`)
const osInfo = {
type: os.type(),
platform: os.platform(),
arch: os.arch(),
cpus: os.cpus().length,
release: os.release(),
osName: osName(),
distro: null
}
const totalRAMInGB = SystemHelper.getTotalRAM()
if (Math.round(totalRAMInGB) < MINIMUM_REQUIRED_RAM) {
report.can_run.v = false
LogHelper.error(
`Total RAM: ${totalRAMInGB} GB. Leon needs at least ${MINIMUM_REQUIRED_RAM} GB of RAM`
)
} else {
LogHelper.success(`Total RAM: ${totalRAMInGB} GB`)
}
if (osInfo.platform === 'linux') {
getos((e, os) => {
osInfo.distro = os
LogHelper.success(`${JSON.stringify(osInfo)}\n`)
})
} else {
LogHelper.success(`${JSON.stringify(osInfo)}\n`)
}
reportDataInput.environment.osDetails = osInfo
reportDataInput.environment.totalRAMInGB = totalRAMInGB
;(
await Promise.all([
command('node --version', { shell: true }),
command('npm --version', { shell: true })
])
).forEach((p) => {
LogHelper.info(p.command)
if (
p.command.indexOf('node --version') !== -1 &&
!semver.satisfies(semver.clean(p.stdout), `>=${nodeMinRequiredVersion}`)
) {
Object.keys(report).forEach((item) => {
if (report[item].type === 'error') report[item].v = false
})
LogHelper.error(
`${p.stdout}\nThe Node.js version must be >=${nodeMinRequiredVersion}. Please install it: https://nodejs.org (or use nvm)\n`
)
} else if (
p.command.indexOf('npm --version') !== -1 &&
!semver.satisfies(semver.clean(p.stdout), `>=${npmMinRequiredVersion}`)
) {
Object.keys(report).forEach((item) => {
if (report[item].type === 'error') report[item].v = false
})
LogHelper.error(
`${p.stdout}\nThe npm version must be >=${npmMinRequiredVersion}. Please install it: https://www.npmjs.com/get-npm (or use nvm)\n`
)
} else {
log.success(`${p.stdout}\n`)
}
});
(await Promise.all([
command('pipenv --where', { shell: true }),
command('pipenv run python --version', { shell: true })
])).forEach((p) => {
log.info(p.command)
if (p.command.indexOf('pipenv run python --version') !== -1
&& !semver.satisfies(p.stdout.split(' ')[1], `>=${pythonMinRequiredVersion}`)) {
Object.keys(report).forEach((item) => { if (report[item].type === 'error') report[item].v = false })
log.error(`${p.stdout}\nThe Python version must be >=${pythonMinRequiredVersion}. Please install it: https://www.python.org/downloads\n`)
} else {
log.success(`${p.stdout}\n`)
LogHelper.success(`${p.stdout}\n`)
if (p.command.includes('node --version')) {
reportDataInput.environment.nodeVersion = p.stdout
} else if (p.command.includes('npm --version')) {
reportDataInput.environment.npmVersion = p.stdout
}
}
})
// Skill execution checking
/**
* Skill execution checking
*/
LogHelper.success(`Python bridge version: ${PYTHON_BRIDGE_VERSION}`)
reportDataInput.pythonBridge.version = PYTHON_BRIDGE_VERSION
LogHelper.info('Executing a skill...')
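    // The bridge receives an intent object like the sample shown earlier
    // (lang, domain, skill, action, utterance, slots, entities, resolvers),
    // here loaded from scripts/assets/intent-object.json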
try {
const p = await command('pipenv run python bridges/python/main.py scripts/assets/intent-object.json', { shell: true })
log.info(p.command)
log.success(`${p.stdout}\n`)
const executionStart = Date.now()
const p = await command(
`${PYTHON_BRIDGE_BIN_PATH} "${path.join(
process.cwd(),
'scripts',
'assets',
'intent-object.json'
)}"`,
{ shell: true }
)
const executionEnd = Date.now()
const executionTime = executionEnd - executionStart
LogHelper.info(p.command)
reportDataInput.pythonBridge.command = p.command
LogHelper.success(p.stdout)
reportDataInput.pythonBridge.output = p.stdout
LogHelper.info(`Skill execution time: ${executionTime}ms\n`)
reportDataInput.pythonBridge.executionTime = `${executionTime}ms`
} catch (e) {
log.info(e.command)
LogHelper.info(e.command)
report.can_run_skill.v = false
log.error(`${e}\n`)
LogHelper.error(`${e}\n`)
reportDataInput.pythonBridge.error = JSON.stringify(e)
}
// Global resolvers NLP model checking
/**
* TCP server startup checking
*/
log.info('Global resolvers NLP model state')
if (!fs.existsSync(globalResolversNlpModelPath)
|| !Object.keys(fs.readFileSync(globalResolversNlpModelPath)).length) {
report.can_text.v = false
Object.keys(report).forEach((item) => { if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1) report[item].v = false })
log.error('Global resolvers NLP model not found or broken. Try to generate a new one: "npm run train"\n')
} else {
log.success('Found and valid\n')
LogHelper.success(`TCP server version: ${TCP_SERVER_VERSION}`)
reportDataInput.tcpServer.version = TCP_SERVER_VERSION
LogHelper.info('Starting the TCP server...')
const tcpServerCommand = `${TCP_SERVER_BIN_PATH} en`
const tcpServerStart = Date.now()
const p = spawn(tcpServerCommand, { shell: true })
const ignoredWarnings = [
'UserWarning: Unable to retrieve source for @torch.jit._overload function'
]
LogHelper.info(tcpServerCommand)
reportDataInput.tcpServer.command = tcpServerCommand
if (osInfo.platform === 'darwin') {
LogHelper.info(
        'For the first start, it may take a few minutes to cold start the TCP server on macOS. No worries, it is a one-time thing'
)
}
// Skills resolvers NLP model checking
let tcpServerOutput = ''
log.info('Skills resolvers NLP model state')
if (!fs.existsSync(skillsResolversNlpModelPath)
|| !Object.keys(fs.readFileSync(skillsResolversNlpModelPath)).length) {
report.can_text.v = false
Object.keys(report).forEach((item) => { if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1) report[item].v = false })
log.error('Skills resolvers NLP model not found or broken. Try to generate a new one: "npm run train"\n')
} else {
log.success('Found and valid\n')
}
p.stdout.on('data', (data) => {
const newData = data.toString()
tcpServerOutput += newData
// Main NLP model checking
log.info('Main NLP model state')
if (!fs.existsSync(mainNlpModelPath)
|| !Object.keys(fs.readFileSync(mainNlpModelPath)).length) {
report.can_text.v = false
Object.keys(report).forEach((item) => { if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1) report[item].v = false })
log.error('Main NLP model not found or broken. Try to generate a new one: "npm run train"\n')
} else {
log.success('Found and valid\n')
}
// TTS checking
log.info('Amazon Polly TTS')
try {
const json = JSON.parse(fs.readFileSync(amazonPath))
if (json.credentials.accessKeyId === '' || json.credentials.secretAccessKey === '') {
report.can_amazon_polly_tts.v = false
log.warning('Amazon Polly TTS is not yet configured\n')
} else {
log.success('Configured\n')
if (newData?.toLowerCase().includes('waiting for')) {
kill(p.pid)
LogHelper.success('The TCP server can successfully start')
}
} catch (e) {
report.can_amazon_polly_tts.v = false
log.warning(`Amazon Polly TTS is not yet configured: ${e}\n`)
}
})
log.info('Google Cloud TTS/STT')
try {
const json = JSON.parse(fs.readFileSync(googleCloudPath))
const results = []
Object.keys(json).forEach((item) => { if (json[item] === '') results.push(false) })
if (results.includes(false)) {
p.stderr.on('data', (data) => {
const newData = data.toString()
// Ignore given warnings on stderr output
if (!ignoredWarnings.some((w) => newData.includes(w))) {
tcpServerOutput += newData
report.can_start_tcp_server.v = false
reportDataInput.tcpServer.error = newData
LogHelper.error(`Cannot start the TCP server: ${newData}`)
}
})
const timeout = 3 * 60_000
// In case it takes too long, force kill
setTimeout(() => {
kill(p.pid)
const error = `The TCP server timed out after ${timeout}ms`
LogHelper.error(error)
reportDataInput.tcpServer.error = error
report.can_start_tcp_server.v = false
}, timeout)
p.stdout.on('end', async () => {
const tcpServerEnd = Date.now()
reportDataInput.tcpServer.output = tcpServerOutput
reportDataInput.tcpServer.startTime = `${tcpServerEnd - tcpServerStart}ms`
LogHelper.info(
`TCP server startup time: ${reportDataInput.tcpServer.startTime}\n`
)
/**
* Global resolvers NLP model checking
*/
LogHelper.info('Global resolvers NLP model state')
if (
!fs.existsSync(globalResolversNlpModelPath) ||
!Object.keys(await fs.promises.readFile(globalResolversNlpModelPath))
.length
) {
const state = 'Global resolvers NLP model not found or broken'
report.can_text.v = false
Object.keys(report).forEach((item) => {
if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1)
report[item].v = false
})
LogHelper.error(
`${state}. Try to generate a new one: "npm run train"\n`
)
reportDataInput.nlpModels.globalResolversModelState = state
} else {
const state = 'Found and valid'
LogHelper.success(`${state}\n`)
reportDataInput.nlpModels.globalResolversModelState = state
}
/**
* Skills resolvers NLP model checking
*/
LogHelper.info('Skills resolvers NLP model state')
if (
!fs.existsSync(skillsResolversNlpModelPath) ||
!Object.keys(await fs.promises.readFile(skillsResolversNlpModelPath))
.length
) {
const state = 'Skills resolvers NLP model not found or broken'
report.can_text.v = false
Object.keys(report).forEach((item) => {
if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1)
report[item].v = false
})
LogHelper.error(
`${state}. Try to generate a new one: "npm run train"\n`
)
reportDataInput.nlpModels.skillsResolversModelState = state
} else {
const state = 'Found and valid'
LogHelper.success(`${state}\n`)
reportDataInput.nlpModels.skillsResolversModelState = state
}
/**
* Main NLP model checking
*/
LogHelper.info('Main NLP model state')
if (
!fs.existsSync(mainNlpModelPath) ||
!Object.keys(await fs.promises.readFile(mainNlpModelPath)).length
) {
const state = 'Main NLP model not found or broken'
report.can_text.v = false
Object.keys(report).forEach((item) => {
if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1)
report[item].v = false
})
LogHelper.error(
`${state}. Try to generate a new one: "npm run train"\n`
)
reportDataInput.nlpModels.mainModelState = state
} else {
const state = 'Found and valid'
LogHelper.success(`${state}\n`)
reportDataInput.nlpModels.mainModelState = state
}
/**
* TTS/STT checking
*/
LogHelper.info('Amazon Polly TTS')
try {
const json = JSON.parse(await fs.promises.readFile(amazonPath))
if (
json.credentials.accessKeyId === '' ||
json.credentials.secretAccessKey === ''
) {
report.can_amazon_polly_tts.v = false
LogHelper.warning('Amazon Polly TTS is not yet configured\n')
} else {
LogHelper.success('Configured\n')
}
} catch (e) {
report.can_amazon_polly_tts.v = false
LogHelper.warning(`Amazon Polly TTS is not yet configured: ${e}\n`)
}
LogHelper.info('Google Cloud TTS/STT')
try {
const json = JSON.parse(await fs.promises.readFile(googleCloudPath))
const results = []
Object.keys(json).forEach((item) => {
if (json[item] === '') results.push(false)
})
if (results.includes(false)) {
report.can_google_cloud_tts.v = false
report.can_google_cloud_stt.v = false
LogHelper.warning('Google Cloud TTS/STT is not yet configured\n')
} else {
LogHelper.success('Configured\n')
}
} catch (e) {
report.can_google_cloud_tts.v = false
report.can_google_cloud_stt.v = false
log.warning('Google Cloud TTS/STT is not yet configured\n')
} else {
log.success('Configured\n')
LogHelper.warning(`Google Cloud TTS/STT is not yet configured: ${e}\n`)
}
} catch (e) {
report.can_google_cloud_tts.v = false
report.can_google_cloud_stt.v = false
log.warning(`Google Cloud TTS/STT is not yet configured: ${e}\n`)
}
log.info('Watson TTS')
try {
const json = JSON.parse(fs.readFileSync(watsonTtsPath))
const results = []
Object.keys(json).forEach((item) => { if (json[item] === '') results.push(false) })
if (results.includes(false)) {
LogHelper.info('Watson TTS')
try {
const json = JSON.parse(await fs.promises.readFile(watsonTtsPath))
const results = []
Object.keys(json).forEach((item) => {
if (json[item] === '') results.push(false)
})
if (results.includes(false)) {
report.can_watson_tts.v = false
LogHelper.warning('Watson TTS is not yet configured\n')
} else {
LogHelper.success('Configured\n')
}
} catch (e) {
report.can_watson_tts.v = false
log.warning('Watson TTS is not yet configured\n')
} else {
log.success('Configured\n')
LogHelper.warning(`Watson TTS is not yet configured: ${e}\n`)
}
} catch (e) {
report.can_watson_tts.v = false
log.warning(`Watson TTS is not yet configured: ${e}\n`)
}
log.info('Offline TTS')
if (!fs.existsSync(flitePath)) {
report.can_offline_tts.v = false
log.warning(`Cannot find ${flitePath}. You can setup the offline TTS by running: "npm run setup:offline-tts"\n`)
} else {
log.success(`Found Flite at ${flitePath}\n`)
}
LogHelper.info('Offline TTS')
log.info('Watson STT')
try {
const json = JSON.parse(fs.readFileSync(watsonSttPath))
const results = []
Object.keys(json).forEach((item) => { if (json[item] === '') results.push(false) })
if (results.includes(false)) {
if (!fs.existsSync(flitePath)) {
report.can_offline_tts.v = false
LogHelper.warning(
`Cannot find ${flitePath}. You can set up the offline TTS by running: "npm run setup:offline-tts"\n`
)
} else {
LogHelper.success(`Found Flite at ${flitePath}\n`)
}
LogHelper.info('Watson STT')
try {
const json = JSON.parse(await fs.promises.readFile(watsonSttPath))
const results = []
Object.keys(json).forEach((item) => {
if (json[item] === '') results.push(false)
})
if (results.includes(false)) {
report.can_watson_stt.v = false
LogHelper.warning('Watson STT is not yet configured\n')
} else {
LogHelper.success('Configured\n')
}
} catch (e) {
report.can_watson_stt.v = false
log.warning('Watson STT is not yet configured\n')
} else {
log.success('Configured\n')
LogHelper.warning(`Watson STT is not yet configured: ${e}`)
}
} catch (e) {
report.can_watson_stt.v = false
log.warning(`Watson STT is not yet configured: ${e}`)
}
log.info('Offline STT')
if (!fs.existsSync(coquiLanguageModelPath)) {
report.can_offline_stt.v = false
log.warning(`Cannot find ${coquiLanguageModelPath}. You can setup the offline STT by running: "npm run setup:offline-stt"`)
} else {
log.success(`Found Coqui language model at ${coquiLanguageModelPath}`)
}
LogHelper.info('Offline STT')
// Report
log.title('Report')
log.info('Here is the diagnosis about your current setup')
Object.keys(report).forEach((item) => {
if (report[item].v === true) {
log.success(report[item].title)
if (!fs.existsSync(coquiLanguageModelPath)) {
report.can_offline_stt.v = false
LogHelper.warning(
          `Cannot find ${coquiLanguageModelPath}. You can set up the offline STT by running: "npm run setup:offline-stt"`
)
} else {
log[report[item].type](report[item].title)
LogHelper.success(
`Found Coqui language model at ${coquiLanguageModelPath}`
)
}
/**
* Report
*/
LogHelper.title('Report')
LogHelper.info('Here is the diagnosis about your current setup')
Object.keys(report).forEach((item) => {
if (report[item].v === true) {
LogHelper.success(report[item].title)
} else {
LogHelper[report[item].type](report[item].title)
}
})
LogHelper.default('')
if (
report.can_run.v &&
report.can_run_skill.v &&
report.can_text.v &&
report.can_start_tcp_server.v
) {
LogHelper.success('Hooray! Leon can run correctly')
LogHelper.info(
'If you have some yellow warnings, it is all good. It means some entities are not yet configured'
)
} else {
LogHelper.error('Please fix the errors above')
}
reportDataInput.report = report
reportDataInput = JSON.parse(
SystemHelper.sanitizeUsername(JSON.stringify(reportDataInput))
)
LogHelper.title('REPORT URL')
LogHelper.info('Sending report...')
try {
const { data } = await axios.post('https://getleon.ai/api/report', {
report: reportDataInput
})
const { data: responseReportData } = data
LogHelper.success(`Report URL: ${responseReportData.reportUrl}`)
} catch (e) {
LogHelper.error(`Failed to send report: ${e}`)
}
process.exit(0)
})
log.default('')
if (report.can_run.v && report.can_run_skill.v && report.can_text.v) {
log.success('Hooray! Leon can run correctly')
log.info('If you have some yellow warnings, it is all good. It means some entities are not yet configured')
} else {
log.error('Please fix the errors above')
}
resolve()
} catch (e) {
log.error(e)
reject()
LogHelper.error(e)
}
})
})()

View File

@ -1,41 +1,43 @@
import fs from 'fs'
import { join } from 'path'
import fs from 'node:fs'
import { join } from 'node:path'
import log from '@/helpers/log'
import domain from '@/helpers/domain'
import { LogHelper } from '@/helpers/log-helper'
import { SkillDomainHelper } from '@/helpers/skill-domain-helper'
/**
 * This script deletes test DB files if they exist
*/
export default () => new Promise(async (resolve, reject) => {
log.info('Cleaning test DB files...')
export default () =>
new Promise(async (resolve, reject) => {
LogHelper.info('Cleaning test DB files...')
const [domainKeys, domains] = await Promise.all([domain.list(), domain.getDomainsObj()])
const skillDomains = await SkillDomainHelper.getSkillDomains()
for (let i = 0; i < domainKeys.length; i += 1) {
const currentDomain = domains[domainKeys[i]]
const skillKeys = Object.keys(currentDomain.skills)
for (const currentDomain of skillDomains.values()) {
const skillKeys = Object.keys(currentDomain.skills)
for (let j = 0; j < skillKeys.length; j += 1) {
const currentSkill = currentDomain.skills[skillKeys[j]]
for (let j = 0; j < skillKeys.length; j += 1) {
const currentSkill = currentDomain.skills[skillKeys[j]]
try {
// TODO: handle case where the memory folder contain multiple DB nodes
const dbFolder = join(currentSkill.path, 'memory')
const dbTestFiles = fs.readdirSync(dbFolder).filter((entity) => entity.indexOf('.spec.json') !== -1)
try {
        // TODO: handle the case where the memory folder contains multiple DB nodes
const dbFolder = join(currentSkill.path, 'memory')
const dbTestFiles = (await fs.promises.readdir(dbFolder)).filter(
(entity) => entity.indexOf('.spec.json') !== -1
)
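        // e.g. a test DB file such as <skill path>/memory/<db name>.spec.json
        // (skill paths are resolved by SkillDomainHelper above)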
if (dbTestFiles.length > 0) {
log.info(`Deleting ${dbTestFiles[0]}...`)
fs.unlinkSync(join(dbFolder, dbTestFiles[0]))
log.success(`${dbTestFiles[0]} deleted`)
if (dbTestFiles.length > 0) {
LogHelper.info(`Deleting ${dbTestFiles[0]}...`)
await fs.promises.unlink(join(dbFolder, dbTestFiles[0]))
LogHelper.success(`${dbTestFiles[0]} deleted`)
}
} catch (e) {
LogHelper.error(`Failed to clean: "${skillKeys[j]}" test DB file`)
reject(e)
}
} catch (e) {
log.error(`Failed to clean: "${skillKeys[j]}" test DB file`)
reject(e)
}
}
}
log.success('Cleaning done')
resolve()
})
LogHelper.success('Cleaning done')
resolve()
})

View File

@ -1,28 +1,34 @@
import fs from 'fs'
import fs from 'node:fs'
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
/**
 * This script is executed after "git commit" or "git merge" (Git hook: https://git-scm.com/docs/githooks#_commit_msg).
 * It ensures commit messages follow the expected format
*/
log.info('Checking commit message...')
;(async () => {
LogHelper.info('Checking commit message...')
const commitEditMsgFile = '.git/COMMIT_EDITMSG'
const commitEditMsgFile = '.git/COMMIT_EDITMSG'
if (fs.existsSync(commitEditMsgFile)) {
try {
const commitMessage = fs.readFileSync(commitEditMsgFile, 'utf8')
const regex = '(build|BREAKING|chore|ci|docs|feat|fix|perf|refactor|style|test)(\\((web app|docker|server|hotword|skill\\/([\\w-]+)))?\\)?: .{1,50}' // eslint-disable-line no-useless-escape
if (fs.existsSync(commitEditMsgFile)) {
try {
const commitMessage = await fs.promises.readFile(
commitEditMsgFile,
'utf8'
)
const regex =
'(build|BREAKING|chore|ci|docs|feat|fix|perf|refactor|style|test)(\\((web app|scripts|docker|server|hotword|tcp server|bridge\\/(python|nodejs)|skill\\/([\\w-]+)))?\\)?: .{1,50}'
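      // For instance, "feat(server): add TCP server support" matches this pattern,
      // while a message such as "added some stuff" does not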
if (commitMessage.match(regex) !== null) {
log.success('Commit message validated')
} else {
log.error(`Commit message does not match the format: ${regex}`)
if (commitMessage.match(regex) !== null) {
LogHelper.success('Commit message validated')
} else {
LogHelper.error(`Commit message does not match the format: ${regex}`)
process.exit(1)
}
} catch (e) {
LogHelper.error(e.message)
process.exit(1)
}
} catch (e) {
log.error(e.message)
process.exit(1)
}
}
})()

View File

@ -1,11 +1,12 @@
import dotenv from 'dotenv'
import crypto from 'crypto'
import fs from 'fs'
import { prompt } from 'inquirer'
import path from 'path'
import fs from 'node:fs'
import path from 'node:path'
import crypto from 'node:crypto'
import log from '@/helpers/log'
import string from '@/helpers/string'
import dotenv from 'dotenv'
import { prompt } from 'inquirer'
import { LogHelper } from '@/helpers/log-helper'
import { StringHelper } from '@/helpers/string-helper'
dotenv.config()
@ -13,59 +14,64 @@ dotenv.config()
 * Generate an HTTP API key
 * and save it in the .env file
*/
const generateHttpApiKey = () => new Promise(async (resolve, reject) => {
log.info('Generating the HTTP API key...')
const generateHTTPAPIKey = () =>
new Promise(async (resolve, reject) => {
LogHelper.info('Generating the HTTP API key...')
try {
const shasum = crypto.createHash('sha1')
const str = string.random(11)
const dotEnvPath = path.join(process.cwd(), '.env')
const envVarKey = 'LEON_HTTP_API_KEY'
let content = fs.readFileSync(dotEnvPath, 'utf8')
try {
const shasum = crypto.createHash('sha1')
const str = StringHelper.random(11)
const dotEnvPath = path.join(process.cwd(), '.env')
const envVarKey = 'LEON_HTTP_API_KEY'
let content = await fs.promises.readFile(dotEnvPath, 'utf8')
shasum.update(str)
const sha1 = shasum.digest('hex')
shasum.update(str)
const sha1 = shasum.digest('hex')
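      // The resulting key is the 40-character hexadecimal SHA-1 digest of a random
      // string; it is written back below as LEON_HTTP_API_KEY=<sha1> in the .env file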
let lines = content.split('\n')
lines = lines.map((line) => {
if (line.indexOf(`${envVarKey}=`) !== -1) {
line = `${envVarKey}=${sha1}`
}
let lines = content.split('\n')
lines = lines.map((line) => {
if (line.indexOf(`${envVarKey}=`) !== -1) {
line = `${envVarKey}=${sha1}`
}
return line
})
content = lines.join('\n')
fs.writeFileSync(dotEnvPath, content)
log.success('HTTP API key generated')
resolve()
} catch (e) {
log.error(e.message)
reject(e)
}
})
export default () => new Promise(async (resolve, reject) => {
try {
if (!process.env.LEON_HTTP_API_KEY || process.env.LEON_HTTP_API_KEY === '') {
await generateHttpApiKey()
} else if (!process.env.IS_DOCKER) {
const answer = await prompt({
type: 'confirm',
name: 'generate.httpApiKey',
message: 'Do you want to regenerate the HTTP API key?',
default: false
return line
})
if (answer.generate.httpApiKey === true) {
await generateHttpApiKey()
}
}
content = lines.join('\n')
resolve()
} catch (e) {
reject(e)
}
})
await fs.promises.writeFile(dotEnvPath, content)
LogHelper.success('HTTP API key generated')
resolve()
} catch (e) {
LogHelper.error(e.message)
reject(e)
}
})
export default () =>
new Promise(async (resolve, reject) => {
try {
if (
!process.env.LEON_HTTP_API_KEY ||
process.env.LEON_HTTP_API_KEY === ''
) {
await generateHTTPAPIKey()
} else if (!process.env.IS_DOCKER) {
const answer = await prompt({
type: 'confirm',
name: 'generate.httpAPIKey',
message: 'Do you want to regenerate the HTTP API key?',
default: false
})
if (answer.generate.httpAPIKey === true) {
await generateHTTPAPIKey()
}
}
resolve()
} catch (e) {
reject(e)
}
})

View File

@ -0,0 +1,80 @@
import fs from 'node:fs'
import path from 'node:path'
import { LogHelper } from '@/helpers/log-helper'
import {
domainSchemaObject,
skillSchemaObject,
skillConfigSchemaObject
} from '@/schemas/skill-schemas'
import {
globalEntitySchemaObject,
globalResolverSchemaObject,
globalAnswersSchemaObject
} from '@/schemas/global-data-schemas'
import {
amazonVoiceConfiguration,
googleCloudVoiceConfiguration,
watsonVoiceConfiguration
} from '@/schemas/voice-config-schemas'
/**
* Generate JSON schemas
* @param {string} categoryName
* @param {Map<string, Object>} schemas
*/
export const generateSchemas = async (categoryName, schemas) => {
const categorySchemasPath = path.join(process.cwd(), 'schemas', categoryName)
await fs.promises.mkdir(categorySchemasPath, { recursive: true })
for (const [schemaName, schemaObject] of schemas.entries()) {
const schemaPath = path.join(categorySchemasPath, `${schemaName}.json`)
await fs.promises.writeFile(
schemaPath,
JSON.stringify(
{
$schema: 'https://json-schema.org/draft-07/schema',
...schemaObject
},
null,
2
)
)
}
}
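// For example, the 'skill-schemas' category below produces files such as
// schemas/skill-schemas/domain.json, each carrying the draft-07 "$schema" field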
export default async () => {
LogHelper.info('Generating the JSON schemas...')
await Promise.all([
generateSchemas(
'global-data',
new Map([
['global-entity', globalEntitySchemaObject],
['global-resolver', globalResolverSchemaObject],
['global-answers', globalAnswersSchemaObject]
])
),
generateSchemas(
'skill-schemas',
new Map([
['domain', domainSchemaObject],
['skill', skillSchemaObject],
['skill-config', skillConfigSchemaObject]
])
),
generateSchemas(
'voice-config-schemas',
new Map([
['amazon', amazonVoiceConfiguration],
['google-cloud', googleCloudVoiceConfiguration],
['watson-stt', watsonVoiceConfiguration],
['watson-tts', watsonVoiceConfiguration]
])
)
])
LogHelper.success('JSON schemas generated')
}

View File

@ -1,11 +1,11 @@
import dotenv from 'dotenv'
import fs from 'fs'
import path from 'path'
import fs from 'node:fs'
import path from 'node:path'
import log from '@/helpers/log'
import dotenv from 'dotenv'
import { langs } from '@@/core/langs.json'
import domain from '@/helpers/domain'
import { LogHelper } from '@/helpers/log-helper'
import { SkillDomainHelper } from '@/helpers/skill-domain-helper'
dotenv.config()
@ -14,121 +14,151 @@ dotenv.config()
* Parse and convert skills config into a JSON file understandable by Fastify
* to dynamically generate endpoints so skills can be accessible over HTTP
*/
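// A generated entry in core/skills-endpoints.json looks, for instance, like:
// { "method": "GET", "route": "/api/action/leon/random_number/run", "params": [] }
// (assuming the random_number "run" action declares no entities)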
export default () => new Promise(async (resolve, reject) => {
const supportedMethods = ['DELETE', 'GET', 'HEAD', 'PATCH', 'POST', 'PUT', 'OPTIONS']
const outputFile = '/core/skills-endpoints.json'
const outputFilePath = path.join(__dirname, `../..${outputFile}`)
const lang = langs[process.env.LEON_HTTP_API_LANG].short
export default () =>
new Promise(async (resolve, reject) => {
const supportedMethods = [
'DELETE',
'GET',
'HEAD',
'PATCH',
'POST',
'PUT',
'OPTIONS'
]
const outputFile = '/core/skills-endpoints.json'
const outputFilePath = path.join(__dirname, `../..${outputFile}`)
const lang = langs[process.env.LEON_HTTP_API_LANG].short
try {
const [domainKeys, domains] = await Promise.all([domain.list(), domain.getDomainsObj()])
const finalObj = {
endpoints: []
}
let isFileNeedToBeGenerated = true
let loopIsBroken = false
try {
const skillDomains = await SkillDomainHelper.getSkillDomains()
const finalObj = {
endpoints: []
}
let isFileNeedToBeGenerated = true
let loopIsBroken = false
// Check if a new routing generation is necessary
if (fs.existsSync(outputFilePath)) {
const mtimeEndpoints = fs.statSync(outputFilePath).mtime.getTime()
// Check if a new routing generation is necessary
if (fs.existsSync(outputFilePath)) {
const mtimeEndpoints = (
await fs.promises.stat(outputFilePath)
).mtime.getTime()
for (let i = 0; i < domainKeys.length; i += 1) {
const currentDomain = domains[domainKeys[i]]
const skillKeys = Object.keys(currentDomain.skills)
let i = 0
for (const currentDomain of skillDomains.values()) {
const skillKeys = Object.keys(currentDomain.skills)
// Browse skills
for (let j = 0; j < skillKeys.length; j += 1) {
const skillFriendlyName = skillKeys[j]
const currentSkill = currentDomain.skills[skillFriendlyName]
const fileInfo = fs.statSync(path.join(currentSkill.path, 'config', `${lang}.json`))
const mtime = fileInfo.mtime.getTime()
// Browse skills
for (let j = 0; j < skillKeys.length; j += 1) {
const skillFriendlyName = skillKeys[j]
const currentSkill = currentDomain.skills[skillFriendlyName]
const fileInfo = await fs.promises.stat(
path.join(currentSkill.path, 'config', `${lang}.json`)
)
const mtime = fileInfo.mtime.getTime()
if (mtime > mtimeEndpoints) {
loopIsBroken = true
if (mtime > mtimeEndpoints) {
loopIsBroken = true
break
}
}
if (loopIsBroken) {
break
}
}
if (loopIsBroken) {
break
}
if (i + 1 === skillDomains.size) {
LogHelper.success(`${outputFile} is already up-to-date`)
isFileNeedToBeGenerated = false
}
if ((i + 1) === domainKeys.length) {
log.success(`${outputFile} is already up-to-date`)
isFileNeedToBeGenerated = false
i += 1
}
}
}
// Force if a language is given
if (isFileNeedToBeGenerated) {
log.info('Parsing skills configuration...')
// Force if a language is given
if (isFileNeedToBeGenerated) {
LogHelper.info('Parsing skills configuration...')
for (let i = 0; i < domainKeys.length; i += 1) {
const currentDomain = domains[domainKeys[i]]
const skillKeys = Object.keys(currentDomain.skills)
for (const currentDomain of skillDomains.values()) {
const skillKeys = Object.keys(currentDomain.skills)
// Browse skills
for (let j = 0; j < skillKeys.length; j += 1) {
const skillFriendlyName = skillKeys[j]
const currentSkill = currentDomain.skills[skillFriendlyName]
// Browse skills
for (let j = 0; j < skillKeys.length; j += 1) {
const skillFriendlyName = skillKeys[j]
const currentSkill = currentDomain.skills[skillFriendlyName]
const configFilePath = path.join(currentSkill.path, 'config', `${lang}.json`)
const { actions } = JSON.parse(fs.readFileSync(configFilePath, 'utf8'))
const actionsKeys = Object.keys(actions)
const configFilePath = path.join(
currentSkill.path,
'config',
`${lang}.json`
)
const { actions } = JSON.parse(
await fs.promises.readFile(configFilePath, 'utf8')
)
const actionsKeys = Object.keys(actions)
for (let k = 0; k < actionsKeys.length; k += 1) {
const action = actionsKeys[k]
const actionObj = actions[action]
const { entities, http_api } = actionObj // eslint-disable-line camelcase
let finalMethod = (entities || http_api?.entities) ? 'POST' : 'GET'
for (let k = 0; k < actionsKeys.length; k += 1) {
const action = actionsKeys[k]
const actionObj = actions[action]
const { entities, http_api } = actionObj
let finalMethod = entities || http_api?.entities ? 'POST' : 'GET'
// Only generate this route if it is not disabled from the skill config
if (!http_api?.disabled || (http_api?.disabled && http_api?.disabled === false)) {
if (http_api?.method) {
finalMethod = http_api.method.toUpperCase()
// Only generate this route if it is not disabled from the skill config
if (
!http_api?.disabled ||
(http_api?.disabled && http_api?.disabled === false)
) {
if (http_api?.method) {
finalMethod = http_api.method.toUpperCase()
}
if (!supportedMethods.includes(finalMethod)) {
reject(
`The "${finalMethod}" HTTP method of the ${currentDomain.name}/${currentSkill.name}/${action} action is not supported`
)
}
const endpoint = {
method: finalMethod.toUpperCase(),
route: `/api/action/${currentDomain.name}/${currentSkill.name}/${action}`,
params: []
}
if (http_api?.timeout) {
endpoint.timeout = http_api.timeout
}
if (entities) {
// Handle explicit trim entities
endpoint.entitiesType = 'trim'
endpoint.params = entities.map((entity) => entity.name)
} else if (http_api?.entities) {
// Handle built-in entities
endpoint.entitiesType = 'builtIn'
endpoint.params = http_api.entities.map(
(entity) => entity.entity
)
}
finalObj.endpoints.push(endpoint)
}
if (!supportedMethods.includes(finalMethod)) {
reject(`The "${finalMethod}" HTTP method of the ${currentDomain.name}/${currentSkill.name}/${action} action is not supported`)
}
const endpoint = {
method: finalMethod.toUpperCase(),
route: `/api/action/${currentDomain.name}/${currentSkill.name}/${action}`,
params: []
}
if (http_api?.timeout) {
endpoint.timeout = http_api.timeout
}
if (entities) {
// Handle explicit trim entities
endpoint.entitiesType = 'trim'
endpoint.params = entities.map((entity) => entity.name)
} else if (http_api?.entities) {
// Handle built-in entities
endpoint.entitiesType = 'builtIn'
endpoint.params = http_api.entities.map((entity) => entity.entity)
}
finalObj.endpoints.push(endpoint)
}
}
}
}
log.info(`Writing ${outputFile} file...`)
try {
fs.writeFileSync(outputFilePath, JSON.stringify(finalObj, null, 2))
log.success(`${outputFile} file generated`)
resolve()
} catch (e) {
reject(`Failed to generate ${outputFile} file: ${e.message}`)
LogHelper.info(`Writing ${outputFile} file...`)
try {
await fs.promises.writeFile(
outputFilePath,
JSON.stringify(finalObj, null, 2)
)
LogHelper.success(`${outputFile} file generated`)
resolve()
} catch (e) {
reject(`Failed to generate ${outputFile} file: ${e.message}`)
}
}
} catch (e) {
LogHelper.error(e.message)
reject(e)
}
} catch (e) {
log.error(e.message)
reject(e)
}
})
})

View File

@ -1,14 +1,14 @@
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
import generateHttpApiKey from './generate-http-api-key'
/**
* Execute the generating HTTP API key script
*/
(async () => {
;(async () => {
try {
await generateHttpApiKey()
} catch (e) {
log.error(`Failed to generate the HTTP API key: ${e}`)
LogHelper.error(`Failed to generate the HTTP API key: ${e}`)
}
})()

View File

@ -0,0 +1,14 @@
import { LogHelper } from '@/helpers/log-helper'
import generateJsonSchemas from './generate-json-schemas'
/**
* Execute the generating JSON schemas script
*/
;(async () => {
try {
await generateJsonSchemas()
} catch (error) {
LogHelper.error(`Failed to generate the json schemas: ${error}`)
}
})()

View File

@ -1,14 +1,14 @@
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
import generateSkillsEndpoints from './generate-skills-endpoints'
/**
* Execute the generating skills endpoints script
*/
(async () => {
;(async () => {
try {
await generateSkillsEndpoints()
} catch (e) {
log.error(`Failed to generate skills endpoints: ${e}`)
LogHelper.error(`Failed to generate skills endpoints: ${e}`)
}
})()

View File

@ -1,36 +1,55 @@
import { command } from 'execa'
import log from '@/helpers/log'
import loader from '@/helpers/loader'
import { LogHelper } from '@/helpers/log-helper'
import { LoaderHelper } from '@/helpers/loader-helper'
const globs = [
'"app/src/js/*.{ts,js}"',
// TODO: deal with it once handling new hotword
// '"hotword/index.{ts,js}"',
// TODO: put it back once tests have been reintroduced into skills
// '"skills/**/*.js"',
'"scripts/**/*.{ts,js}"',
'"server/src/**/*.{ts,js}"'
// TODO: put it back once tests need to be written
/*'"test/!*.js"',
'"test/e2e/!**!/!*.js"',
'"test/json/!**!/!*.js"',
'"test/unit/!**!/!*.js"'*/
]
const src = globs.join(' ')
async function prettier() {
await command('prettier --write . --ignore-path .gitignore', {
shell: true
})
await command(`prettier --check ${src} --ignore-path .gitignore`, {
shell: true,
stdio: 'inherit'
})
}
/**
* This script ensures the correct coding syntax of the whole project
*/
(async () => {
loader.start()
log.info('Linting...')
;(async () => {
LoaderHelper.start()
LogHelper.info('Linting...')
try {
const globs = [
'"app/src/js/*.js"',
'"hotword/index.js"',
// TODO: put it back once tests have been reintroduced into skills
// '"skills/**/*.js"',
'"scripts/**/*.js"',
'"server/src/**/*.js"',
'"test/*.js"',
'"test/e2e/**/*.js"',
'"test/json/**/*.js"',
'"test/unit/**/*.js"'
]
await Promise.all([
prettier(),
command(`eslint ${src} --ignore-path .gitignore`, {
shell: true,
stdio: 'inherit'
})
])
await command(`npx eslint ${globs.join(' ')}`, { shell: true })
log.success('Looks great')
loader.stop()
LogHelper.success('Looks great')
LoaderHelper.stop()
} catch (e) {
log.error(`Does not look great: ${e.stdout}`)
loader.stop()
LogHelper.error(`Does not look great: ${e.message}`)
LoaderHelper.stop()
process.exit(1)
}
})()
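With src = globs.join(' '), the refactored script effectively runs the following commands (derived directly from the globs array above):
// prettier --write . --ignore-path .gitignore
// prettier --check "app/src/js/*.{ts,js}" "scripts/**/*.{ts,js}" "server/src/**/*.{ts,js}" --ignore-path .gitignore
// eslint "app/src/js/*.{ts,js}" "scripts/**/*.{ts,js}" "server/src/**/*.{ts,js}" --ignore-path .gitignore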

View File

@ -1,53 +1,64 @@
import { command } from 'execa'
import fs from 'fs'
import fs from 'node:fs'
import log from '@/helpers/log'
import { command } from 'execa'
import { LogHelper } from '@/helpers/log-helper'
/**
* Update version number in files which need version number
*/
export default (version) => new Promise(async (resolve, reject) => {
const changelog = 'CHANGELOG.md'
const tmpChangelog = 'TMP-CHANGELOG.md'
export default (version) =>
new Promise(async (resolve, reject) => {
const changelog = 'CHANGELOG.md'
const tmpChangelog = 'TMP-CHANGELOG.md'
log.info(`Generating ${changelog}...`)
LogHelper.info(`Generating ${changelog}...`)
try {
await command(`git-changelog --changelogrc .changelogrc --template scripts/assets/CHANGELOG-TEMPLATE.md --file scripts/tmp/${tmpChangelog} --version_name ${version}`, { shell: true })
} catch (e) {
log.error(`Error during git-changelog: ${e}`)
reject(e)
}
try {
log.info('Getting remote origin URL...')
log.info('Getting previous tag...')
const sh = await command('git config --get remote.origin.url && git tag | tail -n1', { shell: true })
const repoUrl = sh.stdout.substr(0, sh.stdout.lastIndexOf('.git'))
const previousTag = sh.stdout.substr(sh.stdout.indexOf('\n') + 1).trim()
const changelogData = fs.readFileSync(changelog, 'utf8')
const compareUrl = `${repoUrl}/compare/${previousTag}...v${version}`
let tmpData = fs.readFileSync(`scripts/tmp/${tmpChangelog}`, 'utf8')
log.success(`Remote origin URL gotten: ${repoUrl}.git`)
log.success(`Previous tag gotten: ${previousTag}`)
if (previousTag !== '') {
tmpData = tmpData.replace(version, `[${version}](${compareUrl})`)
try {
await command(
`git-changelog --changelogrc .changelogrc --template scripts/assets/CHANGELOG-TEMPLATE.md --file scripts/tmp/${tmpChangelog} --version_name ${version}`,
{ shell: true }
)
} catch (e) {
LogHelper.error(`Error during git-changelog: ${e}`)
reject(e)
}
fs.writeFile(changelog, `${tmpData}${changelogData}`, (err) => {
if (err) log.error(`Failed to write into file: ${err}`)
else {
fs.unlinkSync(`scripts/tmp/${tmpChangelog}`)
log.success(`${changelog} generated`)
resolve()
try {
LogHelper.info('Getting remote origin URL...')
LogHelper.info('Getting previous tag...')
const sh = await command(
'git config --get remote.origin.url && git tag | tail -n1',
{ shell: true }
)
const repoUrl = sh.stdout.substr(0, sh.stdout.lastIndexOf('.git'))
const previousTag = sh.stdout.substr(sh.stdout.indexOf('\n') + 1).trim()
const changelogData = await fs.promises.readFile(changelog, 'utf8')
const compareUrl = `${repoUrl}/compare/${previousTag}...v${version}`
let tmpData = await fs.promises.readFile(
`scripts/tmp/${tmpChangelog}`,
'utf8'
)
LogHelper.success(`Remote origin URL gotten: ${repoUrl}.git`)
LogHelper.success(`Previous tag gotten: ${previousTag}`)
if (previousTag !== '') {
tmpData = tmpData.replace(version, `[${version}](${compareUrl})`)
}
})
} catch (e) {
log.error(`Error during git commands: ${e}`)
reject(e)
}
})
try {
await fs.promises.writeFile(changelog, `${tmpData}${changelogData}`)
await fs.promises.unlink(`scripts/tmp/${tmpChangelog}`)
LogHelper.success(`${changelog} generated`)
resolve()
} catch (error) {
LogHelper.error(`Failed to write into file: ${error}`)
}
} catch (e) {
LogHelper.error(`Error during git commands: ${e}`)
reject(e)
}
})
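As an illustration (the tag and version values are hypothetical), with a remote of https://github.com/leon-ai/leon.git and a previous tag of 1.12.1, releasing version 1.13.0 would give:
// repoUrl    = 'https://github.com/leon-ai/leon'
// compareUrl = 'https://github.com/leon-ai/leon/compare/1.12.1...v1.13.0'
// tmpData then has "1.13.0" replaced with the Markdown link
// [1.13.0](https://github.com/leon-ai/leon/compare/1.12.1...v1.13.0)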

View File

@ -0,0 +1,85 @@
import path from 'node:path'
import { prompt } from 'inquirer'
import { command } from 'execa'
import {
NODEJS_BRIDGE_SRC_PATH,
PYTHON_BRIDGE_SRC_PATH,
TCP_SERVER_SRC_PATH
} from '@/constants'
import { LogHelper } from '@/helpers/log-helper'
import { LoaderHelper } from '@/helpers/loader-helper'
/**
* Pre-release binaries via GitHub Actions
* 1. Ask for confirmation whether the binary version has been bumped
* 2. Trigger GitHub workflow to pre-release binaries
*/
const BUILD_TARGETS = new Map()
BUILD_TARGETS.set('nodejs-bridge', {
workflowFileName: 'pre-release-nodejs-bridge.yml',
versionFilePath: path.join(NODEJS_BRIDGE_SRC_PATH, 'version.ts')
})
BUILD_TARGETS.set('python-bridge', {
workflowFileName: 'pre-release-python-bridge.yml',
versionFilePath: path.join(PYTHON_BRIDGE_SRC_PATH, 'version.py')
})
BUILD_TARGETS.set('tcp-server', {
workflowFileName: 'pre-release-tcp-server.yml',
versionFilePath: path.join(TCP_SERVER_SRC_PATH, 'version.py')
})
;(async () => {
LoaderHelper.start()
const { argv } = process
const givenReleaseTarget = argv[2].toLowerCase()
const givenBranch = argv[3]?.toLowerCase()
const { workflowFileName, versionFilePath } =
BUILD_TARGETS.get(givenReleaseTarget)
LoaderHelper.stop()
const answer = await prompt({
type: 'confirm',
name: 'binary.bumped',
message: `Have you bumped the version number of the binary from the "${versionFilePath}" file?`,
default: false
})
LoaderHelper.start()
if (!answer.binary.bumped) {
LogHelper.info(
'Please bump the version number of the binary from the version file before continuing'
)
process.exit(0)
}
try {
LogHelper.info('Triggering the GitHub workflow...')
const runWorkflowCommand = !givenBranch
? `gh workflow run ${workflowFileName}`
: `gh workflow run ${workflowFileName} --ref ${givenBranch}`
await command(runWorkflowCommand, {
shell: true,
stdout: 'inherit'
})
LogHelper.success(
'GitHub workflow triggered. The pre-release is on its way!'
)
LogHelper.success(
'Once the pre-release is done, go to the GitHub releases to double-check information and hit release'
)
process.exit(0)
} catch (e) {
LogHelper.error(
`An error occurred while triggering the GitHub workflow: ${e}`
)
process.exit(1)
}
})()
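A hypothetical invocation of this script (the file path is an assumption; the build target comes from argv[2], the optional branch from argv[3]):
// node scripts/release/pre-release-binaries.js python-bridge develop
// -> asks to confirm the Python bridge version.py has been bumped, then runs:
//    gh workflow run pre-release-python-bridge.yml --ref develop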

View File

@ -1,5 +1,5 @@
import log from '@/helpers/log'
import loader from '@/helpers/loader'
import { LogHelper } from '@/helpers/log-helper'
import { LoaderHelper } from '@/helpers/loader-helper'
import updateVersion from './update-version'
import generateChangelog from './generate-changelog'
@ -7,27 +7,30 @@ import generateChangelog from './generate-changelog'
/**
* Main entry of the release preparation
*/
(async () => {
loader.start()
log.info('Preparing for release...')
;(async () => {
LoaderHelper.start()
LogHelper.info('Preparing for release...')
const { argv } = process
const version = argv[2].toLowerCase()
const semverRegex = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(-(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\+[0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*)?$/
const semverRegex =
/^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(-(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\+[0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*)?$/
if (version.match(semverRegex) !== null) {
try {
await updateVersion(version)
await generateChangelog(version)
log.success('Hooray! Leon is ready to be released!')
loader.stop()
LogHelper.success('Hooray! Leon is ready to be released!')
LoaderHelper.stop()
} catch (e) {
log.error(e)
loader.stop()
LogHelper.error(e)
LoaderHelper.stop()
}
} else {
log.error('The version number does not match the Semantic Versioning rules (https://semver.org)')
loader.stop()
LogHelper.error(
'The version number does not match the Semantic Versioning rules (https://semver.org)'
)
LoaderHelper.stop()
}
})()
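A few sample values against that SemVer regex (the standard semver.org pattern), for clarity:
// Accepted: '1.0.0', '2.13.0-beta.1', '1.0.0-rc.2+build.5'
// Rejected: '1.0', 'v1.0.0', '1.0.0.0'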

View File

@ -1,30 +1,33 @@
import { command } from 'execa'
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
/**
* Update version number in files which need version number
*/
export default (version) => new Promise(async (resolve, reject) => {
log.info('Updating version...')
export default (version) =>
new Promise(async (resolve, reject) => {
LogHelper.info('Updating version...')
const promises = []
const files = [
'package.json',
'package-lock.json'
]
const promises = []
// const files = ['package.json', 'package-lock.json']
const files = ['package.json']
for (let i = 0; i < files.length; i += 1) {
promises.push(command(`json -I -f ${files[i]} -e 'this.version="${version}"'`, { shell: true }))
}
for (let i = 0; i < files.length; i += 1) {
promises.push(
command(`json -I -f ${files[i]} -e 'this.version="${version}"'`, {
shell: true
})
)
}
try {
await Promise.all(promises)
try {
await Promise.all(promises)
log.success(`Version updated to ${version}`)
resolve()
} catch (e) {
log.error(`Error while updating version: ${e.stderr}`)
reject(e)
}
})
LogHelper.success(`Version updated to ${version}`)
resolve()
} catch (e) {
LogHelper.error(`Error while updating version: ${e.stderr}`)
reject(e)
}
})
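As a sketch of what each promise in that loop does (the version value is illustrative), the json CLI rewrites the version field in place:
// json -I -f package.json -e 'this.version="1.13.0"'
// -> edits package.json in place, setting "version": "1.13.0"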

View File

@ -1,16 +0,0 @@
import loader from '@/helpers/loader'
import check from './check'
/**
* Execute the checking script
*/
(async () => {
try {
loader.start()
await check()
loader.stop()
} catch (e) {
loader.stop()
}
})()

View File

@ -1,14 +1,14 @@
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
import cleanTestDbs from './clean-test-dbs'
/**
* Execute the cleaning test DBs script
*/
(async () => {
;(async () => {
try {
await cleanTestDbs()
} catch (e) {
log.error(`Failed to clean test DBs: ${e}`)
LogHelper.error(`Failed to clean test DBs: ${e}`)
}
})()

View File

@ -1,14 +1,14 @@
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
import setupHotword from './setup-hotword';
import setupHotword from './setup-hotword'
/**
* Execute the setup offline hotword script
*/
(async () => {
;(async () => {
try {
await setupHotword()
} catch (e) {
log.error(`Failed to setup offline hotword: ${e}`)
LogHelper.error(`Failed to set up offline hotword: ${e}`)
}
})()

View File

@ -1,14 +1,14 @@
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
import setupStt from './setup-stt';
import setupStt from './setup-stt'
/**
* Execute the setup offline STT script
*/
(async () => {
;(async () => {
try {
await setupStt()
} catch (e) {
log.error(`Failed to setup offline STT: ${e}`)
LogHelper.error(`Failed to set up offline STT: ${e}`)
}
})()

View File

@ -1,14 +1,14 @@
import log from '@/helpers/log'
import { LogHelper } from '@/helpers/log-helper'
import setupTts from './setup-tts';
import setupTts from './setup-tts'
/**
* Execute the setup offline TTS script
*/
(async () => {
;(async () => {
try {
await setupTts()
} catch (e) {
log.error(`Failed to setup offline TTS: ${e}`)
LogHelper.error(`Failed to set up offline TTS: ${e}`)
}
})()

View File

@ -1,48 +1,52 @@
import { command } from 'execa'
import log from '@/helpers/log'
import os from '@/helpers/os'
import { LogHelper } from '@/helpers/log-helper'
import { SystemHelper } from '@/helpers/system-helper'
/**
* Setup offline hotword detection
*/
export default () => new Promise(async (resolve, reject) => {
log.info('Setting up offline hotword detection...')
export default () =>
new Promise(async (resolve, reject) => {
LogHelper.info('Setting up offline hotword detection...')
const info = os.get()
let pkgm = 'apt-get install'
if (info.type === 'macos') {
pkgm = 'brew'
}
if (info.type === 'windows') {
log.error('Voice offline mode is not available on Windows')
reject()
} else {
try {
log.info('Installing dependencies...')
let cmd = `sudo ${pkgm} sox libsox-fmt-all -y`
if (info.type === 'linux') {
log.info(`Executing the following command: ${cmd}`)
await command(cmd, { shell: true })
} else if (info.type === 'macos') {
cmd = `${pkgm} install swig portaudio sox`
log.info(`Executing the following command: ${cmd}`)
await command(cmd, { shell: true })
}
log.success('System dependencies downloaded')
log.info('Installing hotword dependencies...')
await command('cd hotword && npm install', { shell: true })
log.success('Offline hotword detection installed')
await command('cd hotword/node_modules/@bugsounet/snowboy && CXXFLAGS="--std=c++17" ../../../node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp clean configure build', { shell: true })
log.success('Snowboy bindings compiled')
resolve()
} catch (e) {
log.error(`Failed to install offline hotword detection: ${e}`)
reject(e)
const info = SystemHelper.getInformation()
let pkgm = 'apt-get install'
if (info.type === 'macos') {
pkgm = 'brew'
}
}
})
if (info.type === 'windows') {
LogHelper.error('Voice offline mode is not available on Windows')
reject()
} else {
try {
LogHelper.info('Installing dependencies...')
let cmd = `sudo ${pkgm} sox libsox-fmt-all -y`
if (info.type === 'linux') {
LogHelper.info(`Executing the following command: ${cmd}`)
await command(cmd, { shell: true })
} else if (info.type === 'macos') {
cmd = `${pkgm} install swig portaudio sox`
LogHelper.info(`Executing the following command: ${cmd}`)
await command(cmd, { shell: true })
}
LogHelper.success('System dependencies downloaded')
LogHelper.info('Installing hotword dependencies...')
await command('cd hotword && npm install', { shell: true })
LogHelper.success('Offline hotword detection installed')
await command(
'cd hotword/node_modules/@bugsounet/snowboy && CXXFLAGS="--std=c++17" ../../../node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp clean configure build',
{ shell: true }
)
LogHelper.success('Snowboy bindings compiled')
resolve()
} catch (e) {
LogHelper.error(`Failed to install offline hotword detection: ${e}`)
reject(e)
}
}
})

View File

@ -1,32 +1,33 @@
import dotenv from 'dotenv'
import loader from '@/helpers/loader'
import log from '@/helpers/log'
import { LoaderHelper } from '@/helpers/loader-helper'
import { LogHelper } from '@/helpers/log-helper'
import checkOs from '../check-os'
import setupHotword from './setup-hotword'
import setupTts from './setup-tts'
import setupStt from './setup-stt'
dotenv.config();
dotenv.config()
/**
* Main entry to setup offline components
* Main entry to set up offline components
*/
(async () => {
;(async () => {
try {
loader.start()
LoaderHelper.start()
await checkOs()
loader.stop()
LoaderHelper.stop()
await setupHotword()
loader.start()
LoaderHelper.start()
await setupTts()
await setupStt()
loader.stop()
log.success('Hooray! Offline components are installed!')
LoaderHelper.stop()
LogHelper.success('Hooray! Offline components are installed!')
} catch (e) {
log.error(e)
loader.stop()
LogHelper.error(e)
LoaderHelper.stop()
}
})()

View File

@ -1,43 +1,57 @@
import { command } from 'execa'
import fs from 'fs'
import fs from 'node:fs'
import log from '@/helpers/log'
import os from '@/helpers/os'
import { command } from 'execa'
import { LogHelper } from '@/helpers/log-helper'
import { SystemHelper } from '@/helpers/system-helper'
/**
* Setup offline speech-to-text
* Set up offline speech-to-text
*/
export default () => new Promise(async (resolve, reject) => {
log.info('Setting up offline speech-to-text...')
export default () =>
new Promise(async (resolve, reject) => {
LogHelper.info('Setting up offline speech-to-text...')
const destCoquiFolder = 'bin/coqui'
const tmpDir = 'scripts/tmp'
// check this repo for updates: https://github.com/coqui-ai/STT-models/tree/main/english/coqui
const coquiModelVersion = '1.0.0'
let downloader = 'wget'
if (os.get().type === 'macos') {
downloader = 'curl -L -O'
}
if (!fs.existsSync(`${destCoquiFolder}/model.tflite`)) {
try {
log.info('Downloading pre-trained model...')
await command(`cd ${tmpDir} && ${downloader} https://github.com/coqui-ai/STT-models/releases/download/english/coqui/v${coquiModelVersion}-huge-vocab/model.tflite`, { shell: true })
await command(`cd ${tmpDir} && ${downloader} https://github.com/coqui-ai/STT-models/releases/download/english/coqui/v${coquiModelVersion}-huge-vocab/huge-vocabulary.scorer`, { shell: true })
log.success('Pre-trained model download done')
log.info('Moving...')
await command(`mv -f ${tmpDir}/model.tflite ${destCoquiFolder}/model.tflite`, { shell: true })
await command(`mv -f ${tmpDir}/huge-vocabulary.scorer ${destCoquiFolder}/huge-vocabulary.scorer`, { shell: true })
log.success('Move done')
log.success('Offline speech-to-text installed')
resolve()
} catch (e) {
log.error(`Failed to install offline speech-to-text: ${e}`)
reject(e)
const destCoquiFolder = 'bin/coqui'
const tmpDir = 'scripts/tmp'
// check this repo for updates: https://github.com/coqui-ai/STT-models/tree/main/english/coqui
const coquiModelVersion = '1.0.0'
let downloader = 'wget'
if (SystemHelper.getInformation().type === 'macos') {
downloader = 'curl -L -O'
}
} else {
log.success('Offline speech-to-text is already installed')
resolve()
}
})
if (!fs.existsSync(`${destCoquiFolder}/model.tflite`)) {
try {
LogHelper.info('Downloading pre-trained model...')
await command(
`cd ${tmpDir} && ${downloader} https://github.com/coqui-ai/STT-models/releases/download/english/coqui/v${coquiModelVersion}-huge-vocab/model.tflite`,
{ shell: true }
)
await command(
`cd ${tmpDir} && ${downloader} https://github.com/coqui-ai/STT-models/releases/download/english/coqui/v${coquiModelVersion}-huge-vocab/huge-vocabulary.scorer`,
{ shell: true }
)
LogHelper.success('Pre-trained model download done')
LogHelper.info('Moving...')
await command(
`mv -f ${tmpDir}/model.tflite ${destCoquiFolder}/model.tflite`,
{ shell: true }
)
await command(
`mv -f ${tmpDir}/huge-vocabulary.scorer ${destCoquiFolder}/huge-vocabulary.scorer`,
{ shell: true }
)
LogHelper.success('Move done')
LogHelper.success('Offline speech-to-text installed')
resolve()
} catch (e) {
LogHelper.error(`Failed to install offline speech-to-text: ${e}`)
reject(e)
}
} else {
LogHelper.success('Offline speech-to-text is already installed')
resolve()
}
})

View File

@ -1,52 +1,68 @@
import { command } from 'execa'
import fs from 'fs'
import fs from 'node:fs'
import log from '@/helpers/log'
import os from '@/helpers/os'
import { command } from 'execa'
import { LogHelper } from '@/helpers/log-helper'
import { SystemHelper } from '@/helpers/system-helper'
/**
* Setup offline text-to-speech
* Set up offline text-to-speech
*/
export default () => new Promise(async (resolve, reject) => {
log.info('Setting up offline text-to-speech...')
export default () =>
new Promise(async (resolve, reject) => {
LogHelper.info('Setting up offline text-to-speech...')
const destFliteFolder = 'bin/flite'
const tmpDir = 'scripts/tmp'
let makeCores = ''
if (os.cpus().length > 2) {
makeCores = `-j ${os.cpus().length - 2}`
}
let downloader = 'wget'
if (os.get().type === 'macos') {
downloader = 'curl -L -O'
}
if (!fs.existsSync(`${destFliteFolder}/flite`)) {
try {
log.info('Downloading run-time synthesis engine...')
await command(`cd ${tmpDir} && ${downloader} http://ports.ubuntu.com/pool/universe/f/flite/flite_2.1-release.orig.tar.bz2`, { shell: true })
log.success('Run-time synthesis engine download done')
log.info('Unpacking...')
await command(`cd ${tmpDir} && tar xfvj flite_2.1-release.orig.tar.bz2 && cp ../assets/leon.lv flite-2.1-release/config`, { shell: true })
log.success('Unpack done')
log.info('Configuring...')
await command(`cd ${tmpDir}/flite-2.1-release && ./configure --with-langvox=leon`, { shell: true })
log.success('Configure done')
log.info('Building...')
await command(`cd ${tmpDir}/flite-2.1-release && make ${makeCores}`, { shell: true })
log.success('Build done')
log.info('Cleaning...')
await command(`cp -f ${tmpDir}/flite-2.1-release/bin/flite ${destFliteFolder} && rm -rf ${tmpDir}/flite-2.1-release*`, { shell: true })
log.success('Clean done')
log.success('Offline text-to-speech installed')
resolve()
} catch (e) {
log.error(`Failed to install offline text-to-speech: ${e}`)
reject(e)
const destFliteFolder = 'bin/flite'
const tmpDir = 'scripts/tmp'
let makeCores = ''
if (SystemHelper.getNumberOfCPUCores() > 2) {
makeCores = `-j ${SystemHelper.getNumberOfCPUCores() - 2}`
}
} else {
log.success('Offline text-to-speech is already installed')
resolve()
}
})
let downloader = 'wget'
if (SystemHelper.getInformation().type === 'macos') {
downloader = 'curl -L -O'
}
if (!fs.existsSync(`${destFliteFolder}/flite`)) {
try {
LogHelper.info('Downloading run-time synthesis engine...')
await command(
`cd ${tmpDir} && ${downloader} http://ports.ubuntu.com/pool/universe/f/flite/flite_2.1-release.orig.tar.bz2`,
{ shell: true }
)
LogHelper.success('Run-time synthesis engine download done')
LogHelper.info('Unpacking...')
await command(
`cd ${tmpDir} && tar xfvj flite_2.1-release.orig.tar.bz2 && cp ../assets/leon.lv flite-2.1-release/config`,
{ shell: true }
)
LogHelper.success('Unpack done')
LogHelper.info('Configuring...')
await command(
`cd ${tmpDir}/flite-2.1-release && ./configure --with-langvox=leon`,
{ shell: true }
)
LogHelper.success('Configure done')
LogHelper.info('Building...')
await command(`cd ${tmpDir}/flite-2.1-release && make ${makeCores}`, {
shell: true
})
LogHelper.success('Build done')
LogHelper.info('Cleaning...')
await command(
`cp -f ${tmpDir}/flite-2.1-release/bin/flite ${destFliteFolder} && rm -rf ${tmpDir}/flite-2.1-release*`,
{ shell: true }
)
LogHelper.success('Clean done')
LogHelper.success('Offline text-to-speech installed')
resolve()
} catch (e) {
LogHelper.error(`Failed to install offline text-to-speech: ${e}`)
reject(e)
}
} else {
LogHelper.success('Offline text-to-speech is already installed')
resolve()
}
})

View File

@ -0,0 +1,31 @@
import fs from 'node:fs'
import { LEON_FILE_PATH } from '@/constants'
import { Telemetry } from '@/telemetry'
import { LogHelper } from '@/helpers/log-helper'
export default async () => {
try {
const { instanceID, birthDate } = await Telemetry.postInstall()
if (!fs.existsSync(LEON_FILE_PATH)) {
await fs.promises.writeFile(
LEON_FILE_PATH,
JSON.stringify(
{
instanceID,
birthDate
},
null,
2
)
)
LogHelper.success(`Instance ID created: ${instanceID}`)
} else {
LogHelper.success(`Instance ID already exists: ${instanceID}`)
}
} catch (e) {
LogHelper.warning(`Failed to create the instance ID: ${e}`)
}
}
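The file written to LEON_FILE_PATH would contain something along these lines (values and exact formats depend on what Telemetry.postInstall() returns; these are placeholders):
// {
//   "instanceID": "1f7b6a0e-0d2c-4c3e-9b1a-3e5f7a9c1d2b",
//   "birthDate": 1682906143000
// }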

View File

@ -1 +1 @@
console.info('\x1b[36m➡ %s\x1b[0m', 'Running Leon\'s installation...')
console.info('\x1b[36m➡ %s\x1b[0m', "Running Leon's installation...")

Some files were not shown because too many files have changed in this diff.