Mirror of https://github.com/leon-ai/leon.git (synced 2024-11-10 15:19:18 +03:00)

Merge branch 'python-bridge-tcp-server-binaries' into develop

This commit is contained in: commit 5acd22a885

@ -1,2 +1,3 @@
node_modules/
bridges/python/.venv/*
bridges/python/src/.venv/*
dist/*
.env.sample (11)

@ -39,16 +39,7 @@ LEON_PY_TCP_SERVER_HOST=0.0.0.0
LEON_PY_TCP_SERVER_PORT=1342

# Path to the Pipfile
PIPENV_PIPFILE=bridges/python/Pipfile
PIPENV_PIPFILE=bridges/python/src/Pipfile

# Path to the virtual env in .venv/
PIPENV_VENV_IN_PROJECT=true

# Fix https://click.palletsprojects.com/en/7.x/python3/#python-3-surrogate-handling
# If Leon replies you something like "Sorry, it seems I have a problem with the ... skill" but
# still gives you the right answer, then:
## 1. Run `locale -a`
## 2. Pick a locale
## 3. Replace the LC_ALL and LANG values with the needed locale
LC_ALL=C.UTF-8
LANG=C.UTF-8
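For context on the two PIPENV_* values above, here is a minimal Python sketch (not part of the diff; the paths simply mirror the new bridges/python/src layout) of how they could be exported before invoking Pipenv, which reads both variables from the environment:

    import os
    import subprocess

    env = dict(os.environ)
    env['PIPENV_PIPFILE'] = 'bridges/python/src/Pipfile'  # Pipfile moved under src/
    env['PIPENV_VENV_IN_PROJECT'] = 'true'                # keep the venv in .venv/

    # Pipenv resolves the Pipfile location and venv placement from these variables.
    subprocess.run(['pipenv', 'install'], env=env, check=True)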
.github/CONTRIBUTING.md (2)

@ -125,6 +125,8 @@ Scopes define high-level nodes of Leon.

- web app
- server
- tcp server
- python bridge
- hotword
- skill/skill_name
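The two added scopes (tcp server, python bridge) plug into the existing type(scope): description commit convention. Hypothetical commit subjects using them, for illustration only:

    feat(python bridge): distribute the bridge as a standalone binary
    feat(tcp server): distribute the server as a standalone binary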
.github/workflows/pre-release-python-bridge.yml (58)

@ -2,22 +2,32 @@ name: Pre-release Python bridge

on: workflow_dispatch

env:
  PIPENV_PIPFILE: bridges/python/src
  PIPENV_VENV_IN_PROJECT: true

jobs:
  pre-release:
    name: Pre-release
  build:
    name: Build

    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    runs-on: ${{ matrix.os }}

    steps:
      - name: Clone repository
        uses: actions/checkout@v3

      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9.10
          cache: pipenv

      - name: Install Pipenv
        run: pip install --upgrade pip && pip install pipenv==2022.7.24

      - name: Install Node.js
        uses: actions/setup-node@v3
@ -25,24 +35,53 @@ jobs:
          node-version: lts/*
          cache: npm

      - name: Clone repository
        uses: actions/checkout@v3

      - name: Set Python bridge version
        working-directory: ./bridges/python/src
        working-directory: bridges/python/src
        run: |
          echo "PYTHON_BRIDGE_VERSION=$(python -c "from version import __version__; print(__version__)")" >> $GITHUB_ENV

      - name: Display Python bridge version
        run: |
          echo "Python bridge version: ${{ env.PYTHON_BRIDGE_VERSION }}"

      - name: Install core
        run: npm ci

      - name: Setup Python bridge
      - name: Set up Python bridge
        run: npm run setup:python-bridge

      - name: Build Python bridge
        run: npm run build:python-bridge

      - name: Upload Python bridge
        uses: actions/upload-artifact@v3
        with:
          path: bridges/python/dist/*.zip

  draft-release:
    name: Draft-release
    needs: [build]
    runs-on: ubuntu-latest

    steps:
      - name: Clone repository
        uses: actions/checkout@v3

      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9.10

      - name: Set Python bridge version
        working-directory: bridges/python/src
        run: |
          echo "PYTHON_BRIDGE_VERSION=$(python -c "from version import __version__; print(__version__)")" >> $GITHUB_ENV

      - name: Download Python bridge
        uses: actions/download-artifact@v3
        with:
          path: bridges/python/dist

      - uses: marvinpinto/action-automatic-releases@latest
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
@ -50,5 +89,4 @@ jobs:
          draft: true
          prerelease: false
          title: Python Bridge ${{ env.PYTHON_BRIDGE_VERSION }}
          files: |
            bridges/python/dist/*.zip
          files: bridges/python/dist/artifact/*.zip
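The "Set Python bridge version" step above shells out to Python to read version.py and append the result to $GITHUB_ENV. A minimal Python-only sketch of the same idea, assuming it runs from bridges/python/src as the workflow's working-directory does:

    import os

    from version import __version__  # bridges/python/src/version.py

    github_env = os.environ.get('GITHUB_ENV')
    if github_env:
        # Later steps can then read ${{ env.PYTHON_BRIDGE_VERSION }}.
        with open(github_env, 'a', encoding='utf8') as f:
            f.write(f'PYTHON_BRIDGE_VERSION={__version__}\n')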
.github/workflows/pre-release-tcp-server.yml (58)

@ -2,22 +2,32 @@ name: Pre-release TCP server

on: workflow_dispatch

env:
  PIPENV_PIPFILE: tcp_server/src
  PIPENV_VENV_IN_PROJECT: true

jobs:
  pre-release:
    name: Pre-release
  build:
    name: Build

    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    runs-on: ${{ matrix.os }}

    steps:
      - name: Clone repository
        uses: actions/checkout@v3

      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9.10
          cache: pipenv

      - name: Install Pipenv
        run: pip install --upgrade pip && pip install pipenv==2022.7.24

      - name: Install Node.js
        uses: actions/setup-node@v3
@ -25,24 +35,53 @@ jobs:
          node-version: lts/*
          cache: npm

      - name: Clone repository
        uses: actions/checkout@v3

      - name: Set TCP server version
        working-directory: ./tcp_server/src
        working-directory: tcp_server/src
        run: |
          echo "TCP_SERVER_VERSION=$(python -c "from version import __version__; print(__version__)")" >> $GITHUB_ENV

      - name: Display TCP server version
        run: |
          echo "TCP server version: ${{ env.TCP_SERVER_VERSION }}"

      - name: Install core
        run: npm ci

      - name: Setup TCP server
      - name: Set up TCP server
        run: npm run setup:tcp-server

      - name: Build TCP server
        run: npm run build:tcp-server

      - name: Upload TCP server
        uses: actions/upload-artifact@v3
        with:
          path: tcp_server/dist/*.zip

  draft-release:
    name: Draft-release
    needs: [build]
    runs-on: ubuntu-latest

    steps:
      - name: Clone repository
        uses: actions/checkout@v3

      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9.10

      - name: Set TCP server version
        working-directory: tcp_server/src
        run: |
          echo "TCP_SERVER_VERSION=$(python -c "from version import __version__; print(__version__)")" >> $GITHUB_ENV

      - name: Download TCP server
        uses: actions/download-artifact@v3
        with:
          path: tcp_server/dist

      - uses: marvinpinto/action-automatic-releases@latest
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
@ -50,5 +89,4 @@ jobs:
          draft: true
          prerelease: false
          title: TCP Server ${{ env.TCP_SERVER_VERSION }}
          files: |
            tcp_server/dist/*.zip
          files: tcp_server/dist/artifact/*.zip
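Both workflows now glob */dist/artifact/*.zip in the release step because actions/download-artifact@v3 places an upload that was not given an explicit name into a folder called "artifact". A small Python sketch of what the draft-release job ends up picking up for the TCP server (illustrative only):

    from pathlib import Path

    # After download-artifact@v3 runs with path: tcp_server/dist,
    # the per-OS zips sit under tcp_server/dist/artifact/.
    for archive in sorted(Path('tcp_server/dist/artifact').glob('*.zip')):
        print(archive.name)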
.gitignore (12)

@ -1,11 +1,13 @@
__pycache__/
.idea/
.fleet/
.vscode/
**/dist/
**/dist/*
**/build/
**/node_modules/
test/coverage/
**/tmp/*
bridges/python/.venv/*
**/src/.venv/*
downloads/*
logs/*
core/config/**/*.json
@ -18,8 +20,10 @@ bin/flite/*
npm-debug.log
debug.log
.env
!**/.gitkeep
!bridges/python/.venv/.gitkeep
bridges/python/src/Pipfile.lock
tcp_server/src/Pipfile.lock
!tcp_server/**/.gitkeep
!bridges/python/**/.gitkeep
!**/*.sample*
packages/**/config/config.json
skills/**/src/config.json
Dockerfile (54)

@ -10,23 +10,23 @@ RUN echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selectio
# Install base dependencies
RUN apt-get update && apt-get install --yes -q --no-install-recommends \
  apt-transport-https \
  build-essential \
  ca-certificates \
  curl \
  git \
  wget \
  libssl-dev \
  zlib1g-dev \
  libbz2-dev \
  libreadline-dev \
  libsqlite3-dev \
  llvm \
  libncurses5-dev \
  xz-utils \
  tk-dev libxml2-dev \
  libxmlsec1-dev \
  libffi-dev \
  liblzma-dev
  build-essential \
  ca-certificates \
  curl \
  git \
  wget \
  libssl-dev \
  zlib1g-dev \
  libbz2-dev \
  libreadline-dev \
  libsqlite3-dev \
  llvm \
  libncurses5-dev \
  xz-utils \
  tk-dev libxml2-dev \
  libxmlsec1-dev \
  libffi-dev \
  liblzma-dev

# Run the container as an unprivileged user
RUN groupadd docker && useradd -g docker -s /bin/bash -m docker
@ -34,29 +34,15 @@ USER docker
WORKDIR /home/docker

# Install Node.js with nvm
ENV NVM_DIR /home/docker/nvm
ENV NODE_VERSION v16.17.0
ENV NVM_DIR /home/docker/.nvm
ENV NODE_VERSION v16.18.0

RUN curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.33.1/install.sh | bash
RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.2/install.sh | bash
RUN /bin/bash -c "source $NVM_DIR/nvm.sh && nvm install $NODE_VERSION && nvm use --delete-prefix $NODE_VERSION"

ENV NODE_PATH $NVM_DIR/versions/node/$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node/$NODE_VERSION/bin:$PATH

# Install Python with pyenv
RUN git clone --depth=1 https://github.com/pyenv/pyenv.git .pyenv
ENV PYENV_ROOT="/home/docker/.pyenv"
ENV PATH="${PYENV_ROOT}/shims:${PYENV_ROOT}/bin:${PATH}"

ENV PYTHON_VERSION=3.9.10
RUN pyenv install ${PYTHON_VERSION}
RUN pyenv global ${PYTHON_VERSION}

# Install Pipenv
ENV PYTHON_BIN_PATH /home/docker/.local/bin
ENV PATH="${PYTHON_BIN_PATH}:${PATH}"
RUN python -m pip install --user --force-reinstall pipenv virtualenv

# Install Leon
WORKDIR /home/docker/leon
USER root
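A quick, illustrative way to confirm inside the rebuilt image that the pinned toolchain (Node v16.18.0 via nvm, Python 3.9.10 via pyenv, Pipenv) ended up on PATH; this check is not part of the Dockerfile itself:

    import subprocess

    for cmd in (['node', '--version'], ['python', '--version'], ['pipenv', '--version']):
        result = subprocess.run(cmd, capture_output=True, text=True)
        print(' '.join(cmd), '->', (result.stdout or result.stderr).strip())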
@ -68,6 +68,8 @@ If you want to, Leon can communicate with you by being **offline to protect your
> - Skills
> - The web app
> - The hotword node
> - The TCP server (for inter-process communication between Leon and third-party processes such as spaCy)
> - The Python bridge (the connector between Python core and skills)

### What is Leon able to do?
@ -1,4 +1,4 @@
import request from 'superagent'
import axios from 'axios'

import Loader from './loader'
import Client from './client'
@ -18,119 +18,100 @@ const serverUrl =
? ''
: `${config.server_host}:${config.server_port}`

document.addEventListener('DOMContentLoaded', () => {
document.addEventListener('DOMContentLoaded', async () => {
const loader = new Loader()

loader.start()

request.get(`${serverUrl}/api/v1/info`).end((err, res) => {
if (err || !res.ok) {
console.error(err.response.error.message)
} else {
const input = document.querySelector('#utterance')
const mic = document.querySelector('#mic-button')
const v = document.querySelector('#version small')
const client = new Client(config.app, serverUrl, input, res.body)
let rec = {}
let chunks = []
try {
const response = await axios.get(`${serverUrl}/api/v1/info`)
const input = document.querySelector('#utterance')
const mic = document.querySelector('#mic-button')
const v = document.querySelector('#version small')
const client = new Client(config.app, serverUrl, input, response.data)
let rec = {}
let chunks = []

v.innerHTML += client.info.version
v.innerHTML += client.info.version

client.init(loader)
client.init(loader)

if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices
.getUserMedia({ audio: true })
.then((stream) => {
if (MediaRecorder) {
rec = new Recorder(stream, mic, client.info)
client.recorder = rec
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices
.getUserMedia({ audio: true })
.then((stream) => {
if (MediaRecorder) {
rec = new Recorder(stream, mic, client.info)
client.recorder = rec

rec.ondataavailable((e) => {
chunks.push(e.data)
})
rec.ondataavailable((e) => {
chunks.push(e.data)
})

rec.onstart(() => {
/* */
})
rec.onstart(() => {
/* */
})

rec.onstop(() => {
const blob = new Blob(chunks)
chunks = []
rec.enabled = false
rec.onstop(() => {
const blob = new Blob(chunks)
chunks = []
rec.enabled = false

// Ensure there are some data
if (blob.size >= 1000) {
client.socket.emit('recognize', blob)
}
})
// Ensure there are some data
if (blob.size >= 1000) {
client.socket.emit('recognize', blob)
}
})

listener.listening(
stream,
config.min_decibels,
config.max_blank_time,
() => {
// Noise detected
rec.noiseDetected = true
},
() => {
// Noise ended
listener.listening(
stream,
config.min_decibels,
config.max_blank_time,
() => {
// Noise detected
rec.noiseDetected = true
},
() => {
// Noise ended

rec.noiseDetected = false
if (rec.enabled && !rec.hotwordTriggered) {
rec.stop()
rec.enabled = false
rec.hotwordTriggered = false
rec.countSilenceAfterTalk = 0
}
}
)

client.socket.on('enable-record', () => {
rec.hotwordTriggered = true
rec.start()
setTimeout(() => {
rec.noiseDetected = false
if (rec.enabled && !rec.hotwordTriggered) {
rec.stop()
rec.enabled = false
rec.hotwordTriggered = false
}, config.max_blank_time)
rec.enabled = true
})
} else {
console.error('MediaRecorder is not supported on your browser.')
}
})
.catch((err) => {
console.error(
'MediaDevices.getUserMedia() threw the following error:',
err
rec.countSilenceAfterTalk = 0
}
}
)
})
} else {
console.error(
'MediaDevices.getUserMedia() is not supported on your browser.'
)
}

document.addEventListener('keydown', (e) => {
onkeydowndocument(e, () => {
if (rec.enabled === false) {
input.value = ''
rec.start()
rec.enabled = true
client.socket.on('enable-record', () => {
rec.hotwordTriggered = true
rec.start()
setTimeout(() => {
rec.hotwordTriggered = false
}, config.max_blank_time)
rec.enabled = true
})
} else {
rec.stop()
rec.enabled = false
console.error('MediaRecorder is not supported on your browser.')
}
})
})

input.addEventListener('keydown', (e) => {
onkeydowninput(e, client)
})

mic.addEventListener('click', (e) => {
e.preventDefault()
.catch((err) => {
console.error(
'MediaDevices.getUserMedia() threw the following error:',
err
)
})
} else {
console.error(
'MediaDevices.getUserMedia() is not supported on your browser.'
)
}

document.addEventListener('keydown', (e) => {
onkeydowndocument(e, () => {
if (rec.enabled === false) {
input.value = ''
rec.start()
rec.enabled = true
} else {
@ -138,6 +119,25 @@ document.addEventListener('DOMContentLoaded', () => {
rec.enabled = false
}
})
}
})
})

input.addEventListener('keydown', (e) => {
onkeydowninput(e, client)
})

mic.addEventListener('click', (e) => {
e.preventDefault()

if (rec.enabled === false) {
rec.start()
rec.enabled = true
} else {
rec.stop()
rec.enabled = false
}
})
} catch (e) {
alert(`Error: ${e.message}; ${JSON.stringify(e.response.data)}`)
console.error(e)
}
})
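The web app change above swaps superagent for axios but keeps hitting the same GET /api/v1/info endpoint. For a quick server-side smoke test of that endpoint, a sketch using requests (host and port are assumptions based on a default local setup):

    import requests

    response = requests.get('http://localhost:1337/api/v1/info', timeout=5)
    response.raise_for_status()
    print(response.json())  # info payload consumed by the web client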
bridges/python/Pipfile.lock (592, generated)

@ -1,592 +0,0 @@
Deleted generated lockfile (pipfile-spec 6, Python 3.9.10, source: pypi, hash sha256:2db39af98e7ccc96ec933a4b2cbe921addef26899daf69d7d4d8ddb01ef5f746). Pinned default packages removed with it (per-package sha256 hashes omitted):
beautifulsoup4 ==4.7.1, blis ==0.7.8, catalogue ==2.0.8, certifi ==2022.6.15, chardet ==3.0.4, click ==8.1.3, cymem ==2.0.6, geonamescache ==1.3.0, idna ==2.8, jinja2 ==3.1.2, langcodes ==3.3.0, markupsafe ==2.1.1, murmurhash ==1.0.8, numpy ==1.23.2, packaging ==21.3, pathy ==0.6.2, preshed ==3.0.7, pydantic ==1.9.2, pyparsing ==3.0.9, python-dotenv ==0.19.2, pytube ==9.5.0, requests ==2.21.0, setuptools ==60.9.3, smart-open ==5.2.1, soupsieve ==2.3.2.post1, spacy ==3.4.0, spacy-legacy ==3.0.10, spacy-loggers ==1.0.3, srsly ==2.4.4, thinc ==8.1.0, tinydb ==4.7.0, torch ==1.12.1, tqdm ==4.64.0, typer ==0.4.2, typing-extensions ==4.3.0, urllib3 ==1.24.3, wasabi ==0.10.1, wheel ==0.37.1. No develop packages.
bridges/python/src/Pipfile (16, new file)

@ -0,0 +1,16 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[requires]
python_version = "3.9.10"

[packages]
setuptools = "*"
wheel = "*"
cx-freeze = "==6.11.1"
requests = "==2.28.1"
pytube = "==9.5.0"
tinydb = "==4.7.0"
beautifulsoup4 = "==4.7.1"
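A minimal import check for the packages pinned in this new Pipfile, useful when setting up the bridge environment (the names below are the import names, not the package names):

    import importlib

    for module in ('bs4', 'pytube', 'tinydb', 'requests', 'cx_Freeze'):
        importlib.import_module(module)
        print(module, 'imports fine')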
bridges/python/src/setup.py (27, new file)

@ -0,0 +1,27 @@
from cx_Freeze import setup, Executable

from version import __version__

options = {
    'build_exe': {
        # Add common dependencies for skills
        'includes': [
            'bs4',
            'pytube'
        ]
    }
}

executables = [
    Executable(
        script='bridges/python/src/main.py',
        target_name='leon-python-bridge'
    )
]

setup(
    name='leon-python-bridge',
    version=__version__,
    executables=executables,
    options=options
)
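setup.py above is a cx_Freeze build script; npm run build:python-bridge presumably drives it, but run directly from the repository root the equivalent would be the standard build_exe command (the output folder name under build/ depends on the platform and Python version):

    import subprocess
    import sys

    # Freeze main.py into the leon-python-bridge executable declared in setup.py.
    subprocess.run(
        [sys.executable, 'bridges/python/src/setup.py', 'build_exe'],
        check=True
    )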
@ -2,20 +2,19 @@
# -*- coding:utf-8 -*-

from json import loads, dumps
from os import path, environ
from pathlib import Path
from random import choice
from sys import argv, stdout
from vars import useragent
from tinydb import TinyDB, Query, table, operations
from time import sleep
import sqlite3
import sys
import os
import requests
import re

dirname = path.dirname(path.realpath(__file__))
dirname = os.path.dirname(os.path.realpath(__file__))

intent_object_path = argv[1]
intent_object_path = sys.argv[1]
codes = []

intent_obj_file = open(intent_object_path, 'r', encoding = 'utf8')
@ -37,7 +36,7 @@ def translate(key, dict = { }):
    output = ''
    variables = { }

    file = open(path.join(dirname, '../../skills', intent_obj['domain'], intent_obj['skill'], 'config', intent_obj['lang'] + '.json'), 'r', encoding = 'utf8')
    file = open(os.path.join(os.getcwd(), 'skills', intent_obj['domain'], intent_obj['skill'], 'config', intent_obj['lang'] + '.json'), 'r', encoding = 'utf8')
    obj = loads(file.read())
    file.close()

@ -92,7 +91,7 @@ def output(type, content = '', core = { }):
    }))

    if (type == 'inter'):
        stdout.flush()
        sys.stdout.flush()

def http(method, url, headers = None):
    """Send HTTP request with the Leon user agent"""
@ -108,7 +107,7 @@ def http(method, url, headers = None):
def config(key):
    """Get a skill configuration value"""

    file = open(path.join(dirname, '../../skills', intent_obj['domain'], intent_obj['skill'], 'src/config.json'), 'r', encoding = 'utf8')
    file = open(os.path.join(os.getcwd(), 'skills', intent_obj['domain'], intent_obj['skill'], 'src/config.json'), 'r', encoding = 'utf8')
    obj = loads(file.read())
    file.close()

@ -117,8 +116,9 @@ def config(key):
def create_dl_dir():
    """Create the downloads folder of a current skill"""

    dl_dir = path.dirname(path.realpath(__file__)) + '/../../downloads/'
    skill_dl_dir = path.join(dl_dir, intent_obj['domain'], intent_obj['skill'])
    dl_dir = os.path.join(os.getcwd(), 'downloads')
    # dl_dir = os.path.dirname(os.path.realpath(__file__)) + '/../../../../downloads/'
    skill_dl_dir = os.path.join(dl_dir, intent_obj['domain'], intent_obj['skill'])

    Path(skill_dl_dir).mkdir(parents = True, exist_ok = True)

@ -129,8 +129,8 @@ def db(db_type = 'tinydb'):
    for a specific skill"""

    if db_type == 'tinydb':
        ext = '.json' if environ.get('LEON_NODE_ENV') != 'testing' else '.spec.json'
        db = TinyDB(path.join(dirname, '../../skills', intent_obj['domain'], intent_obj['skill'], 'memory/db' + ext))
        ext = '.json' if os.environ.get('LEON_NODE_ENV') != 'testing' else '.spec.json'
        db = TinyDB(os.path.join(os.getcwd(), 'skills', intent_obj['domain'], intent_obj['skill'], 'memory/db' + ext))
        return {
            'db': db,
            'query': Query,
@ -142,6 +142,6 @@ def get_table(slug):
    """Get a table from a specific skill"""

    domain, skill, table = slug.split('.')
    ext = '.json' if environ.get('LEON_NODE_ENV') != 'testing' else '.spec.json'
    db = TinyDB(path.join(dirname, '../../skills', domain, skill, 'memory/db' + ext))
    ext = '.json' if os.environ.get('LEON_NODE_ENV') != 'testing' else '.spec.json'
    db = TinyDB(os.path.join(os.getcwd(), 'skills', domain, skill, 'memory/db' + ext))
    return db.table(table)
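The recurring pattern in this file: paths built from dirname (the location of the script) become paths built from os.getcwd() (the Leon root the process is started from), which keeps resolving correctly once the bridge runs as a frozen binary where __file__-relative paths no longer point into the repository. A sketch with a hypothetical skill slug:

    import os

    # 'games' / 'akinator' is a placeholder domain/skill pair.
    skill_config = os.path.join(
        os.getcwd(), 'skills', 'games', 'akinator', 'config', 'en.json'
    )
    print(os.path.isfile(skill_config))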
@ -1,10 +1,11 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-

from os import path
import sys
import os
from json import loads

packagejsonfile = open(path.dirname(path.realpath(__file__)) + '/../../package.json', 'r', encoding = 'utf8')
packagejsonfile = open(os.path.join(os.getcwd(), 'package.json'), 'r', encoding = 'utf8')
packagejson = loads(packagejsonfile.read())
packagejsonfile.close()
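Same idea applied to the core metadata: the bridge now reads package.json from the working directory. A standalone sketch of that read:

    import os
    from json import loads

    with open(os.path.join(os.getcwd(), 'package.json'), 'r', encoding='utf8') as f:
        print(loads(f.read())['version'])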
bridges/python/src/version.py (1, new file)

@ -0,0 +1 @@
__version__ = '1.0.0'
@ -12,6 +12,6 @@ services:
    tty: true
    command: 'npm run dev:server && npm run dev:app'
    volumes:
      - './:/app'
      - '/app/node_modules'
      - './:/home/docker/leon'
      - '/home/docker/leon/node_modules'
    network_mode: 'host'
@ -3,7 +3,7 @@
* You can consider to run this file on a different hardware
*/

const request = require('superagent')
const axios = require('axios')
const record = require('node-record-lpcm16')
const { Detector, Models } = require('@bugsounet/snowboy')
const { io } = require('socket.io-client')
@ -21,15 +21,10 @@ socket.on('connect', () => {
console.log('Connected to the server')
console.log('Waiting for hotword...')
})
;(async () => {
try {
await axios.get(`${url}/api/v1/info`)

request.get(`${url}/api/v1/info`).end((err, res) => {
if (err || !res.ok) {
if (!err.response) {
console.error(`Failed to reach the server: ${err}`)
} else {
console.error(err.response.error.message)
}
} else {
const models = new Models()

models.add({
@ -78,5 +73,11 @@ request.get(`${url}/api/v1/info`).end((err, res) => {
})

mic.pipe(detector)
} catch (e) {
if (!e.response) {
console.error(`Failed to reach the server: ${e}`)
} else {
console.error(e)
}
}
})
})()
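The hotword script above moves from superagent's callback style to axios with async/await. A minimal standalone sketch of that pattern, assuming a Leon server listening on localhost:

// Minimal sketch of the callback-to-async/await migration used above
const axios = require('axios')

const url = 'http://localhost:1337' // assumption for the example

;(async () => {
  try {
    await axios.get(`${url}/api/v1/info`)
    console.log('Server reachable, hotword detection can start')
  } catch (e) {
    if (!e.response) {
      console.error(`Failed to reach the server: ${e}`)
    } else {
      console.error(e)
    }
  }
})()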
package-lock.json (generated, 635 lines): file diff suppressed because it is too large
package.json (27 lines changed)
@ -23,35 +23,41 @@
"scripts": {
"lint": "ts-node scripts/lint.js",
"test": "npm run test:json && npm run test:over-http && npm run test:unit && npm run test:e2e",
"test:unit": "npm run train en && cross-env PIPENV_PIPFILE=bridges/python/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --projects test/unit/unit.jest.json && npm run train",
"test:unit": "npm run train en && cross-env PIPENV_PIPFILE=bridges/python/src/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --projects test/unit/unit.jest.json && npm run train",
"test:e2e": "npm run test:e2e:nlp-modules && npm run test:e2e:modules",
"test:e2e:modules": "ts-node scripts/run-clean-test-dbs.js && npm run train en && cross-env PIPENV_PIPFILE=bridges/python/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --verbose --projects test/e2e/modules/e2e.modules.jest.json && ts-node scripts/run-clean-test-dbs.js && npm run train",
"test:e2e:nlp-modules": "npm run train en && cross-env PIPENV_PIPFILE=bridges/python/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --verbose --setupTestFrameworkScriptFile=./test/paths.setup.js test/e2e/nlp-modules.spec.js && npm run train",
"test:e2e:modules": "ts-node scripts/run-clean-test-dbs.js && npm run train en && cross-env PIPENV_PIPFILE=bridges/python/src/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --verbose --projects test/e2e/modules/e2e.modules.jest.json && ts-node scripts/run-clean-test-dbs.js && npm run train",
"test:e2e:nlp-modules": "npm run train en && cross-env PIPENV_PIPFILE=bridges/python/src/Pipfile LEON_NODE_ENV=testing jest --forceExit --silent --verbose --setupTestFrameworkScriptFile=./test/paths.setup.js test/e2e/nlp-modules.spec.js && npm run train",
"test:json": "jest --silent --projects test/json/json.jest.json",
"test:over-http": "npm run generate:skills-endpoints && npm run train && cross-env PIPENV_PIPFILE=bridges/python/Pipfile LEON_NODE_ENV=testing LEON_HOST=http://localhost LEON_PORT=1338 LEON_HTTP_API_KEY=72aeb5ba324580963114481144385d7179c106fc jest --forceExit --silent --verbose --notify=false --bail --collectCoverage=false test/e2e/over-http.spec.js",
"test:over-http": "npm run generate:skills-endpoints && npm run train && cross-env PIPENV_PIPFILE=bridges/python/src/Pipfile LEON_NODE_ENV=testing LEON_HOST=http://localhost LEON_PORT=1338 LEON_HTTP_API_KEY=72aeb5ba324580963114481144385d7179c106fc jest --forceExit --silent --verbose --notify=false --bail --collectCoverage=false test/e2e/over-http.spec.js",
"test:module": "ts-node scripts/test-module.js",
"setup:offline": "ts-node scripts/setup-offline/setup-offline.js",
"setup:offline-stt": "ts-node scripts/setup-offline/run-setup-stt.js",
"setup:offline-tts": "ts-node scripts/setup-offline/run-setup-tts.js",
"setup:offline-hotword": "ts-node scripts/setup-offline/run-setup-hotword.js",
"setup:python-bridge": "ts-node scripts/setup/setup-python-dev-env.js python-bridge",
"setup:tcp-server": "ts-node scripts/setup/setup-python-dev-env.js tcp-server",
"preinstall": "node scripts/setup/preinstall.js",
"postinstall": "ts-node scripts/setup/setup.js",
"dev:app": "vite --config app/vite.config.js",
"dev:server": "npm run train && npm run generate:skills-endpoints && cross-env LEON_NODE_ENV=development tsc-watch --noClear --onSuccess \"nodemon\"",
"wake": "cross-env LEON_HOST=http://localhost LEON_PORT=1337 node hotword/index.js",
"delete-dist:server": "shx rm -rf ./server/dist",
"clean:python-deps": "shx rm -rf ./bridges/python/.venv && npm run postinstall",
"clean:python-deps": "shx rm -rf ./bridges/python/src/.venv && npm run postinstall",
"prepare": "husky install",
"generate:skills-endpoints": "ts-node scripts/generate/run-generate-skills-endpoints.js",
"generate:http-api-key": "ts-node scripts/generate/run-generate-http-api-key.js",
"build": "npm run build:app && npm run build:server",
"build:app": "cross-env LEON_NODE_ENV=production ts-node scripts/app/run-build-app.js",
"build:server": "npm run delete-dist:server && npm run train && npm run generate:skills-endpoints && tsc && resolve-tspaths && shx rm -rf server/dist/core server/dist/package.json && shx mv -f server/dist/server/src/* server/dist && shx rm -rf server/dist/server && shx mkdir -p server/dist/tmp",
"start:tcp-server": "cross-env PIPENV_PIPFILE=bridges/python/Pipfile pipenv run python bridges/python/tcp_server/main.py",
"build:python-bridge": "ts-node scripts/build-binaries.js python-bridge",
"build:tcp-server": "ts-node scripts/build-binaries.js tcp-server",
"start:tcp-server": "cross-env PIPENV_PIPFILE=tcp_server/src/Pipfile pipenv run python tcp_server/src/main.py",
"start": "cross-env LEON_NODE_ENV=production node ./server/dist/index.js",
"train": "ts-node scripts/train/run-train.js",
"prepare-release": "ts-node scripts/release/prepare-release.js",
"check": "ts-node scripts/run-check.js",
"pre-release:python-bridge": "ts-node scripts/release/pre-release-binaries.js python-bridge",
"pre-release:tcp-server": "ts-node scripts/release/pre-release-binaries.js tcp-server",
"check": "ts-node scripts/check.js",
"docker:build": "docker build -t leon-ai/leon .",
"docker:run": "docker compose up",
"docker:dev": "docker compose --file=docker-compose.dev.yml up",
@ -68,21 +74,24 @@
"@nlpjs/core-loader": "^4.22.7",
"@nlpjs/lang-all": "^4.22.12",
"@nlpjs/nlp": "^4.22.17",
"archiver": "^5.3.0",
"archiver": "^5.3.1",
"async": "^3.2.0",
"axios": "1.1.2",
"cross-env": "^7.0.3",
"dayjs": "^1.11.5",
"dotenv": "^10.0.0",
"execa": "^5.0.0",
"extract-zip": "2.0.1",
"fastify": "^4.5.3",
"fluent-ffmpeg": "^2.1.2",
"googleapis": "^67.1.1",
"ibm-watson": "^6.1.1",
"node-wav": "0.0.2",
"pretty-bytes": "^5.6.0",
"pretty-ms": "^7.0.1",
"socket.io": "^4.5.2",
"socket.io-client": "^4.5.2",
"stt": "^1.4.0",
"superagent": "^8.0.0",
"tree-kill": "^1.2.2"
},
"devDependencies": {
scripts/build-binaries.js (new file, 148 lines)
@ -0,0 +1,148 @@
|
||||
import path from 'node:path'
|
||||
import fs from 'node:fs'
|
||||
|
||||
import { command } from 'execa'
|
||||
import archiver from 'archiver'
|
||||
import prettyBytes from 'pretty-bytes'
|
||||
|
||||
import {
|
||||
PYTHON_BRIDGE_SRC_PATH,
|
||||
TCP_SERVER_SRC_PATH,
|
||||
BINARIES_FOLDER_NAME,
|
||||
PYTHON_BRIDGE_DIST_PATH,
|
||||
TCP_SERVER_DIST_PATH,
|
||||
PYTHON_BRIDGE_BIN_NAME,
|
||||
TCP_SERVER_BIN_NAME
|
||||
} from '@/constants'
|
||||
import { LogHelper } from '@/helpers/log-helper'
|
||||
import { LoaderHelper } from '@/helpers/loader-helper'
|
||||
import { OSHelper, OSTypes } from '@/helpers/os-helper'
|
||||
|
||||
/**
|
||||
* Build binaries for the given OS according to the given build target
|
||||
* 1. Get the correct OS platform and CPU architecture
|
||||
* 2. If Linux, install the required dependencies
|
||||
* 3. Build the given build target
|
||||
* 4. Pack the distribution entities into a ZIP file
|
||||
*/
|
||||
|
||||
const BUILD_TARGETS = new Map()
|
||||
|
||||
BUILD_TARGETS.set('python-bridge', {
|
||||
name: 'Python bridge',
|
||||
pipfilePath: path.join(PYTHON_BRIDGE_SRC_PATH, 'Pipfile'),
|
||||
setupFilePath: path.join(PYTHON_BRIDGE_SRC_PATH, 'setup.py'),
|
||||
distPath: PYTHON_BRIDGE_DIST_PATH,
|
||||
archiveName: `${PYTHON_BRIDGE_BIN_NAME}-${BINARIES_FOLDER_NAME}.zip`,
|
||||
dotVenvPath: path.join(PYTHON_BRIDGE_SRC_PATH, '.venv')
|
||||
})
|
||||
BUILD_TARGETS.set('tcp-server', {
|
||||
name: 'TCP server',
|
||||
pipfilePath: path.join(TCP_SERVER_SRC_PATH, 'Pipfile'),
|
||||
setupFilePath: path.join(TCP_SERVER_SRC_PATH, 'setup.py'),
|
||||
distPath: TCP_SERVER_DIST_PATH,
|
||||
archiveName: `${TCP_SERVER_BIN_NAME}-${BINARIES_FOLDER_NAME}.zip`,
|
||||
dotVenvPath: path.join(TCP_SERVER_SRC_PATH, '.venv')
|
||||
})
|
||||
;(async () => {
|
||||
LoaderHelper.start()
|
||||
|
||||
const { argv } = process
|
||||
const givenBuildTarget = argv[2].toLowerCase()
|
||||
|
||||
if (!BUILD_TARGETS.has(givenBuildTarget)) {
|
||||
LogHelper.error(
|
||||
`Invalid build target: ${givenBuildTarget}. Valid targets are: ${Array.from(
|
||||
BUILD_TARGETS.keys()
|
||||
).join(', ')}`
|
||||
)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const {
|
||||
name: buildTarget,
|
||||
pipfilePath,
|
||||
setupFilePath,
|
||||
distPath,
|
||||
archiveName,
|
||||
dotVenvPath
|
||||
} = BUILD_TARGETS.get(givenBuildTarget)
|
||||
const buildPath = path.join(distPath, BINARIES_FOLDER_NAME)
|
||||
|
||||
const { type: osType } = OSHelper.getInformation()
|
||||
|
||||
/**
|
||||
* Install requirements
|
||||
*/
|
||||
try {
|
||||
if (osType === OSTypes.Linux) {
|
||||
LogHelper.info('Checking whether the "patchelf" utility can be found...')
|
||||
|
||||
await command('patchelf --version', { shell: true })
|
||||
|
||||
LogHelper.success('The "patchelf" utility has been found')
|
||||
}
|
||||
} catch (e) {
|
||||
const installPatchelfCommand = 'sudo apt install patchelf'
|
||||
|
||||
LogHelper.error(
|
||||
`The "patchelf" utility is not installed. Please run the following command: "${installPatchelfCommand}" or install it via a packages manager supported by your Linux distribution such as DNF, YUM, etc. Then try again`
|
||||
)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
/**
|
||||
* Build
|
||||
*/
|
||||
try {
|
||||
LogHelper.info(`Building the ${buildTarget}...`)
|
||||
|
||||
// Required environment variables to set up
|
||||
process.env.PIPENV_PIPFILE = pipfilePath
|
||||
process.env.PIPENV_VENV_IN_PROJECT = true
|
||||
|
||||
await command(
|
||||
`pipenv run python ${setupFilePath} build --build-exe ${buildPath}`,
|
||||
{
|
||||
shell: true,
|
||||
stdio: 'inherit'
|
||||
}
|
||||
)
|
||||
|
||||
LogHelper.success(`The ${buildTarget} has been built`)
|
||||
} catch (e) {
|
||||
LogHelper.error(
|
||||
`An error occurred while building the ${buildTarget}. Try to delete the ${dotVenvPath} folder, run the setup command then build again: ${e}`
|
||||
)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
/**
|
||||
* Pack distribution entities into a ZIP archive
|
||||
*/
|
||||
const archivePath = path.join(distPath, archiveName)
|
||||
LogHelper.info(`Packing to ${archivePath}...`)
|
||||
|
||||
const output = fs.createWriteStream(archivePath)
|
||||
const archive = archiver('zip')
|
||||
|
||||
output.on('close', () => {
|
||||
const size = prettyBytes(archive.pointer())
|
||||
|
||||
LogHelper.info(`Total archive size: ${size}`)
|
||||
LogHelper.success(`${buildTarget} has been packed to ${archivePath}`)
|
||||
process.exit(0)
|
||||
})
|
||||
|
||||
archive.on('error', (err) => {
|
||||
LogHelper.error(
|
||||
`An error occurred while packing the ${buildTarget}: ${err}`
|
||||
)
|
||||
})
|
||||
|
||||
archive.pipe(output)
|
||||
|
||||
archive.directory(buildPath, BINARIES_FOLDER_NAME)
|
||||
|
||||
await archive.finalize()
|
||||
})()
|
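The packing step above relies on archiver. A reduced sketch of that step, assuming the build output already exists and that the per-platform folder name (produced by OSHelper.getBinariesFolderName(), not shown in this diff) looks like linux-x86_64:

// Reduced sketch of the ZIP-packing step from scripts/build-binaries.js
import fs from 'node:fs'
import path from 'node:path'
import archiver from 'archiver'

const distPath = 'bridges/python/dist' // assumption for the example
const folderName = 'linux-x86_64' // assumption: depends on OS and CPU architecture
const buildPath = path.join(distPath, folderName)
const archivePath = path.join(distPath, `leon-python-bridge-${folderName}.zip`)

const output = fs.createWriteStream(archivePath)
const archive = archiver('zip')

output.on('close', () => {
  console.log(`Packed ${archive.pointer()} bytes to ${archivePath}`)
})
archive.on('error', (err) => {
  console.error(`Packing error: ${err}`)
})

archive.pipe(output)
// Put the whole build folder into the archive under its folder name
archive.directory(buildPath, folderName)
await archive.finalize()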
scripts/check.js (485 lines changed)
@ -1,206 +1,276 @@
|
||||
import fs from 'node:fs'
|
||||
import os from 'node:os'
|
||||
import { spawn } from 'node:child_process'
|
||||
|
||||
import dotenv from 'dotenv'
|
||||
import { command } from 'execa'
|
||||
import semver from 'semver'
|
||||
import kill from 'tree-kill'
|
||||
import axios from 'axios'
|
||||
|
||||
import { version } from '@@/package.json'
|
||||
import { LogHelper } from '@/helpers/log-helper'
|
||||
import {
|
||||
PYTHON_BRIDGE_BIN_PATH,
|
||||
TCP_SERVER_BIN_PATH,
|
||||
TCP_SERVER_VERSION,
|
||||
PYTHON_BRIDGE_VERSION
|
||||
} from '@/constants'
|
||||
|
||||
dotenv.config()
|
||||
|
||||
/**
|
||||
* Checking script
|
||||
* Help to figure out what is installed or not
|
||||
* Help to figure out the setup state
|
||||
*/
|
||||
export default () =>
|
||||
new Promise(async (resolve, reject) => {
|
||||
try {
|
||||
const nodeMinRequiredVersion = '10'
|
||||
const npmMinRequiredVersion = '5'
|
||||
const pythonMinRequiredVersion = '3'
|
||||
const flitePath = 'bin/flite/flite'
|
||||
const coquiLanguageModelPath = 'bin/coqui/huge-vocabulary.scorer'
|
||||
const amazonPath = 'core/config/voice/amazon.json'
|
||||
const googleCloudPath = 'core/config/voice/google-cloud.json'
|
||||
const watsonSttPath = 'core/config/voice/watson-stt.json'
|
||||
const watsonTtsPath = 'core/config/voice/watson-tts.json'
|
||||
const globalResolversNlpModelPath =
|
||||
'core/data/models/leon-global-resolvers-model.nlp'
|
||||
const skillsResolversNlpModelPath =
|
||||
'core/data/models/leon-skills-resolvers-model.nlp'
|
||||
const mainNlpModelPath = 'core/data/models/leon-main-model.nlp'
|
||||
const report = {
|
||||
can_run: { title: 'Run', type: 'error', v: true },
|
||||
can_run_skill: { title: 'Run skills', type: 'error', v: true },
|
||||
can_text: { title: 'Reply you by texting', type: 'error', v: true },
|
||||
has_spacy_model: {
|
||||
title: 'spaCy model is installed',
|
||||
type: 'error',
|
||||
v: true
|
||||
},
|
||||
can_amazon_polly_tts: {
|
||||
title: 'Amazon Polly text-to-speech',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_google_cloud_tts: {
|
||||
title: 'Google Cloud text-to-speech',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_watson_tts: {
|
||||
title: 'Watson text-to-speech',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_offline_tts: {
|
||||
title: 'Offline text-to-speech',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_google_cloud_stt: {
|
||||
title: 'Google Cloud speech-to-text',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_watson_stt: {
|
||||
title: 'Watson speech-to-text',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_offline_stt: {
|
||||
title: 'Offline speech-to-text',
|
||||
type: 'warning',
|
||||
v: true
|
||||
}
|
||||
;(async () => {
|
||||
try {
|
||||
const nodeMinRequiredVersion = '16'
|
||||
const npmMinRequiredVersion = '5'
|
||||
const flitePath = 'bin/flite/flite'
|
||||
const coquiLanguageModelPath = 'bin/coqui/huge-vocabulary.scorer'
|
||||
const amazonPath = 'core/config/voice/amazon.json'
|
||||
const googleCloudPath = 'core/config/voice/google-cloud.json'
|
||||
const watsonSttPath = 'core/config/voice/watson-stt.json'
|
||||
const watsonTtsPath = 'core/config/voice/watson-tts.json'
|
||||
const globalResolversNlpModelPath =
|
||||
'core/data/models/leon-global-resolvers-model.nlp'
|
||||
const skillsResolversNlpModelPath =
|
||||
'core/data/models/leon-skills-resolvers-model.nlp'
|
||||
const mainNlpModelPath = 'core/data/models/leon-main-model.nlp'
|
||||
const pastebinData = {
|
||||
leonVersion: null,
|
||||
environment: {
|
||||
osDetails: null,
|
||||
nodeVersion: null,
|
||||
npmVersion: null
|
||||
},
|
||||
nlpModels: {
|
||||
globalResolversModelState: null,
|
||||
skillsResolversModelState: null,
|
||||
mainModelState: null
|
||||
},
|
||||
pythonBridge: {
|
||||
version: null,
|
||||
executionTime: null,
|
||||
command: null,
|
||||
output: null,
|
||||
error: null
|
||||
},
|
||||
tcpServer: {
|
||||
version: null,
|
||||
startTime: null,
|
||||
command: null,
|
||||
output: null,
|
||||
error: null
|
||||
},
|
||||
report: null
|
||||
}
|
||||
const report = {
|
||||
can_run: { title: 'Run', type: 'error', v: true },
|
||||
can_run_skill: { title: 'Run skills', type: 'error', v: true },
|
||||
can_text: { title: 'Reply you by texting', type: 'error', v: true },
|
||||
can_start_tcp_server: {
|
||||
title: 'Start the TCP server',
|
||||
type: 'error',
|
||||
v: true
|
||||
},
|
||||
can_amazon_polly_tts: {
|
||||
title: 'Amazon Polly text-to-speech',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_google_cloud_tts: {
|
||||
title: 'Google Cloud text-to-speech',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_watson_tts: {
|
||||
title: 'Watson text-to-speech',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_offline_tts: {
|
||||
title: 'Offline text-to-speech',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_google_cloud_stt: {
|
||||
title: 'Google Cloud speech-to-text',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_watson_stt: {
|
||||
title: 'Watson speech-to-text',
|
||||
type: 'warning',
|
||||
v: true
|
||||
},
|
||||
can_offline_stt: {
|
||||
title: 'Offline speech-to-text',
|
||||
type: 'warning',
|
||||
v: true
|
||||
}
|
||||
}
|
||||
|
||||
LogHelper.title('Checking')
|
||||
LogHelper.title('Checking')
|
||||
|
||||
/**
|
||||
* Leon version checking
|
||||
*/
|
||||
/**
|
||||
* Leon version checking
|
||||
*/
|
||||
|
||||
LogHelper.info('Leon version')
|
||||
LogHelper.success(`${version}\n`)
|
||||
LogHelper.info('Leon version')
|
||||
LogHelper.success(`${version}\n`)
|
||||
pastebinData.leonVersion = version
|
||||
|
||||
/**
|
||||
* Environment checking
|
||||
*/
|
||||
LogHelper.info('OS')
|
||||
/**
|
||||
* Environment checking
|
||||
*/
|
||||
|
||||
const osInfo = {
|
||||
type: os.type(),
|
||||
platform: os.platform(),
|
||||
arch: os.arch(),
|
||||
cpus: os.cpus().length,
|
||||
release: os.release()
|
||||
}
|
||||
LogHelper.success(`${JSON.stringify(osInfo)}\n`)
|
||||
;(
|
||||
await Promise.all([
|
||||
command('node --version', { shell: true }),
|
||||
command('npm --version', { shell: true }),
|
||||
command('pipenv --version', { shell: true })
|
||||
])
|
||||
).forEach((p) => {
|
||||
LogHelper.info(p.command)
|
||||
LogHelper.info('OS')
|
||||
|
||||
if (
|
||||
p.command.indexOf('node --version') !== -1 &&
|
||||
!semver.satisfies(
|
||||
semver.clean(p.stdout),
|
||||
`>=${nodeMinRequiredVersion}`
|
||||
)
|
||||
) {
|
||||
Object.keys(report).forEach((item) => {
|
||||
if (report[item].type === 'error') report[item].v = false
|
||||
})
|
||||
LogHelper.error(
|
||||
`${p.stdout}\nThe Node.js version must be >=${nodeMinRequiredVersion}. Please install it: https://nodejs.org (or use nvm)\n`
|
||||
)
|
||||
} else if (
|
||||
p.command.indexOf('npm --version') !== -1 &&
|
||||
!semver.satisfies(
|
||||
semver.clean(p.stdout),
|
||||
`>=${npmMinRequiredVersion}`
|
||||
)
|
||||
) {
|
||||
Object.keys(report).forEach((item) => {
|
||||
if (report[item].type === 'error') report[item].v = false
|
||||
})
|
||||
LogHelper.error(
|
||||
`${p.stdout}\nThe npm version must be >=${npmMinRequiredVersion}. Please install it: https://www.npmjs.com/get-npm (or use nvm)\n`
|
||||
)
|
||||
} else {
|
||||
LogHelper.success(`${p.stdout}\n`)
|
||||
}
|
||||
})
|
||||
;(
|
||||
await Promise.all([
|
||||
command('pipenv --where', { shell: true }),
|
||||
command('pipenv run python --version', { shell: true })
|
||||
])
|
||||
).forEach((p) => {
|
||||
LogHelper.info(p.command)
|
||||
const osInfo = {
|
||||
type: os.type(),
|
||||
platform: os.platform(),
|
||||
arch: os.arch(),
|
||||
cpus: os.cpus().length,
|
||||
release: os.release()
|
||||
}
|
||||
LogHelper.success(`${JSON.stringify(osInfo)}\n`)
|
||||
pastebinData.environment.osDetails = osInfo
|
||||
;(
|
||||
await Promise.all([
|
||||
command('node --version', { shell: true }),
|
||||
command('npm --version', { shell: true })
|
||||
])
|
||||
).forEach((p) => {
|
||||
LogHelper.info(p.command)
|
||||
|
||||
if (
|
||||
p.command.indexOf('pipenv run python --version') !== -1 &&
|
||||
!semver.satisfies(
|
||||
p.stdout.split(' ')[1],
|
||||
`>=${pythonMinRequiredVersion}`
|
||||
)
|
||||
) {
|
||||
Object.keys(report).forEach((item) => {
|
||||
if (report[item].type === 'error') report[item].v = false
|
||||
})
|
||||
LogHelper.error(
|
||||
`${p.stdout}\nThe Python version must be >=${pythonMinRequiredVersion}. Please install it: https://www.python.org/downloads\n`
|
||||
)
|
||||
} else {
|
||||
LogHelper.success(`${p.stdout}\n`)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* Skill execution checking
|
||||
*/
|
||||
|
||||
try {
|
||||
LogHelper.time('Skill execution time')
|
||||
const p = await command(
|
||||
'pipenv run python bridges/python/main.py scripts/assets/intent-object.json',
|
||||
{ shell: true }
|
||||
)
|
||||
LogHelper.timeEnd('Skill execution time')
|
||||
LogHelper.info(p.command)
|
||||
LogHelper.success(`${p.stdout}\n`)
|
||||
} catch (e) {
|
||||
LogHelper.info(e.command)
|
||||
report.can_run_skill.v = false
|
||||
LogHelper.error(`${e}\n`)
|
||||
}
|
||||
|
||||
/**
|
||||
* spaCy model checking
|
||||
*/
|
||||
|
||||
try {
|
||||
const p = await command(
|
||||
'pipenv run python -c "import en_core_web_trf"',
|
||||
{ shell: true }
|
||||
)
|
||||
LogHelper.info(p.command)
|
||||
LogHelper.success(`spaCy model installed\n`)
|
||||
} catch (e) {
|
||||
LogHelper.info(e.command)
|
||||
report.has_spacy_model.v = false
|
||||
if (
|
||||
p.command.indexOf('node --version') !== -1 &&
|
||||
!semver.satisfies(semver.clean(p.stdout), `>=${nodeMinRequiredVersion}`)
|
||||
) {
|
||||
Object.keys(report).forEach((item) => {
|
||||
if (report[item].type === 'error') report[item].v = false
|
||||
})
|
||||
LogHelper.error(
|
||||
'No spaCy model is installed. It is recommended to run the following command: "npm run clean:python-deps"\n'
|
||||
`${p.stdout}\nThe Node.js version must be >=${nodeMinRequiredVersion}. Please install it: https://nodejs.org (or use nvm)\n`
|
||||
)
|
||||
} else if (
|
||||
p.command.indexOf('npm --version') !== -1 &&
|
||||
!semver.satisfies(semver.clean(p.stdout), `>=${npmMinRequiredVersion}`)
|
||||
) {
|
||||
Object.keys(report).forEach((item) => {
|
||||
if (report[item].type === 'error') report[item].v = false
|
||||
})
|
||||
LogHelper.error(
|
||||
`${p.stdout}\nThe npm version must be >=${npmMinRequiredVersion}. Please install it: https://www.npmjs.com/get-npm (or use nvm)\n`
|
||||
)
|
||||
} else {
|
||||
LogHelper.success(`${p.stdout}\n`)
|
||||
if (p.command.includes('node --version')) {
|
||||
pastebinData.environment.nodeVersion = p.stdout
|
||||
} else if (p.command.includes('npm --version')) {
|
||||
pastebinData.environment.npmVersion = p.stdout
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* Skill execution checking
|
||||
*/
|
||||
|
||||
LogHelper.success(`Python bridge version: ${PYTHON_BRIDGE_VERSION}`)
|
||||
pastebinData.pythonBridge.version = PYTHON_BRIDGE_VERSION
|
||||
LogHelper.info('Executing a skill...')
|
||||
|
||||
try {
|
||||
const executionStart = Date.now()
|
||||
const p = await command(
|
||||
`${PYTHON_BRIDGE_BIN_PATH} scripts/assets/intent-object.json`,
|
||||
{ shell: true }
|
||||
)
|
||||
const executionEnd = Date.now()
|
||||
const executionTime = executionEnd - executionStart
|
||||
LogHelper.info(p.command)
|
||||
pastebinData.pythonBridge.command = p.command
|
||||
LogHelper.success(p.stdout)
|
||||
pastebinData.pythonBridge.output = p.stdout
|
||||
LogHelper.info(`Skill execution time: ${executionTime}ms\n`)
|
||||
pastebinData.pythonBridge.executionTime = `${executionTime}ms`
|
||||
} catch (e) {
|
||||
LogHelper.info(e.command)
|
||||
report.can_run_skill.v = false
|
||||
LogHelper.error(`${e}\n`)
|
||||
pastebinData.pythonBridge.error = JSON.stringify(e)
|
||||
}
|
||||
|
||||
/**
|
||||
* TCP server startup checking
|
||||
*/
|
||||
|
||||
LogHelper.success(`TCP server version: ${TCP_SERVER_VERSION}`)
|
||||
pastebinData.tcpServer.version = TCP_SERVER_VERSION
|
||||
|
||||
LogHelper.info('Starting the TCP server...')
|
||||
|
||||
const tcpServerCommand = `${TCP_SERVER_BIN_PATH} en`
|
||||
const tcpServerStart = Date.now()
|
||||
const p = spawn(tcpServerCommand, { shell: true })
|
||||
const ignoredWarnings = [
|
||||
'UserWarning: Unable to retrieve source for @torch.jit._overload function'
|
||||
]
|
||||
|
||||
LogHelper.info(tcpServerCommand)
|
||||
pastebinData.tcpServer.command = tcpServerCommand
|
||||
|
||||
if (osInfo.platform === 'darwin') {
|
||||
LogHelper.info(
|
||||
'For the first start, it may take a few minutes to cold start the TCP server on macOS. No worries it is a one-time thing'
|
||||
)
|
||||
}
|
||||
|
||||
let tcpServerOutput = ''
|
||||
|
||||
p.stdout.on('data', (data) => {
|
||||
const newData = data.toString()
|
||||
tcpServerOutput += newData
|
||||
|
||||
if (newData?.toLowerCase().includes('waiting for')) {
|
||||
kill(p.pid)
|
||||
LogHelper.success('The TCP server can successfully start')
|
||||
}
|
||||
})
|
||||
|
||||
p.stderr.on('data', (data) => {
|
||||
const newData = data.toString()
|
||||
|
||||
// Ignore given warnings on stderr output
|
||||
if (!ignoredWarnings.some((w) => newData.includes(w))) {
|
||||
tcpServerOutput += newData
|
||||
report.can_start_tcp_server.v = false
|
||||
pastebinData.tcpServer.error = newData
|
||||
LogHelper.error(`Cannot start the TCP server: ${newData}`)
|
||||
}
|
||||
})
|
||||
|
||||
const timeout = 3 * 60_000
|
||||
// In case it takes too long, force kill
|
||||
setTimeout(() => {
|
||||
kill(p.pid)
|
||||
|
||||
const error = `The TCP server timed out after ${timeout}ms`
|
||||
LogHelper.error(error)
|
||||
pastebinData.tcpServer.error = error
|
||||
report.can_start_tcp_server.v = false
|
||||
}, timeout)
|
||||
|
||||
p.stdout.on('end', async () => {
|
||||
const tcpServerEnd = Date.now()
|
||||
pastebinData.tcpServer.output = tcpServerOutput
|
||||
pastebinData.tcpServer.startTime = `${tcpServerEnd - tcpServerStart}ms`
|
||||
LogHelper.info(
|
||||
`TCP server startup time: ${pastebinData.tcpServer.startTime}\n`
|
||||
)
|
||||
|
||||
/**
|
||||
* Global resolvers NLP model checking
|
||||
@ -212,16 +282,22 @@ export default () =>
|
||||
!fs.existsSync(globalResolversNlpModelPath) ||
|
||||
!Object.keys(fs.readFileSync(globalResolversNlpModelPath)).length
|
||||
) {
|
||||
const state = 'Global resolvers NLP model not found or broken'
|
||||
|
||||
report.can_text.v = false
|
||||
Object.keys(report).forEach((item) => {
|
||||
if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1)
|
||||
report[item].v = false
|
||||
})
|
||||
LogHelper.error(
|
||||
'Global resolvers NLP model not found or broken. Try to generate a new one: "npm run train"\n'
|
||||
`${state}. Try to generate a new one: "npm run train"\n`
|
||||
)
|
||||
pastebinData.nlpModels.globalResolversModelState = state
|
||||
} else {
|
||||
LogHelper.success('Found and valid\n')
|
||||
const state = 'Found and valid'
|
||||
|
||||
LogHelper.success(`${state}\n`)
|
||||
pastebinData.nlpModels.globalResolversModelState = state
|
||||
}
|
||||
|
||||
/**
|
||||
@ -234,16 +310,22 @@ export default () =>
|
||||
!fs.existsSync(skillsResolversNlpModelPath) ||
|
||||
!Object.keys(fs.readFileSync(skillsResolversNlpModelPath)).length
|
||||
) {
|
||||
const state = 'Skills resolvers NLP model not found or broken'
|
||||
|
||||
report.can_text.v = false
|
||||
Object.keys(report).forEach((item) => {
|
||||
if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1)
|
||||
report[item].v = false
|
||||
})
|
||||
LogHelper.error(
|
||||
'Skills resolvers NLP model not found or broken. Try to generate a new one: "npm run train"\n'
|
||||
`${state}. Try to generate a new one: "npm run train"\n`
|
||||
)
|
||||
pastebinData.nlpModels.skillsResolversModelState = state
|
||||
} else {
|
||||
LogHelper.success('Found and valid\n')
|
||||
const state = 'Found and valid'
|
||||
|
||||
LogHelper.success(`${state}\n`)
|
||||
pastebinData.nlpModels.skillsResolversModelState = state
|
||||
}
|
||||
|
||||
/**
|
||||
@ -256,16 +338,22 @@ export default () =>
|
||||
!fs.existsSync(mainNlpModelPath) ||
|
||||
!Object.keys(fs.readFileSync(mainNlpModelPath)).length
|
||||
) {
|
||||
const state = 'Main NLP model not found or broken'
|
||||
|
||||
report.can_text.v = false
|
||||
Object.keys(report).forEach((item) => {
|
||||
if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1)
|
||||
report[item].v = false
|
||||
})
|
||||
LogHelper.error(
|
||||
'Main NLP model not found or broken. Try to generate a new one: "npm run train"\n'
|
||||
`${state}. Try to generate a new one: "npm run train"\n`
|
||||
)
|
||||
pastebinData.nlpModels.mainModelState = state
|
||||
} else {
|
||||
LogHelper.success('Found and valid\n')
|
||||
const state = 'Found and valid'
|
||||
|
||||
LogHelper.success(`${state}\n`)
|
||||
pastebinData.nlpModels.mainModelState = state
|
||||
}
|
||||
|
||||
/**
|
||||
@ -393,7 +481,7 @@ export default () =>
|
||||
report.can_run.v &&
|
||||
report.can_run_skill.v &&
|
||||
report.can_text.v &&
|
||||
report.has_spacy_model.v
|
||||
report.can_start_tcp_server.v
|
||||
) {
|
||||
LogHelper.success('Hooray! Leon can run correctly')
|
||||
LogHelper.info(
|
||||
@ -403,9 +491,26 @@ export default () =>
|
||||
LogHelper.error('Please fix the errors above')
|
||||
}
|
||||
|
||||
resolve()
|
||||
} catch (e) {
|
||||
LogHelper.error(e)
|
||||
reject()
|
||||
}
|
||||
})
|
||||
pastebinData.report = report
|
||||
|
||||
LogHelper.title('REPORT URL')
|
||||
|
||||
LogHelper.info('Sending report...')
|
||||
|
||||
try {
|
||||
const { data } = await axios.post('https://getleon.ai/api/report', {
|
||||
report: pastebinData
|
||||
})
|
||||
const { data: reportData } = data
|
||||
|
||||
LogHelper.success(`Report URL: ${reportData.reportUrl}`)
|
||||
} catch (e) {
|
||||
LogHelper.error(`Failed to send report: ${e}`)
|
||||
}
|
||||
|
||||
process.exit(0)
|
||||
})
|
||||
} catch (e) {
|
||||
LogHelper.error(e)
|
||||
}
|
||||
})()
|
||||
|
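The new check script treats the TCP server's "waiting for" log line as the readiness signal and kills the child process once it shows up, or after a timeout. A reduced sketch, with the binary path assumed for the example:

// Reduced sketch of the TCP server readiness check from scripts/check.js
import { spawn } from 'node:child_process'
import kill from 'tree-kill'

const tcpServerBinPath = 'tcp_server/dist/linux-x86_64/leon-tcp-server' // assumption
const p = spawn(`${tcpServerBinPath} en`, { shell: true })

p.stdout.on('data', (data) => {
  const output = data.toString()

  // The server prints a "waiting for..." line once it is ready to accept connections
  if (output.toLowerCase().includes('waiting for')) {
    console.log('The TCP server can successfully start')
    kill(p.pid)
  }
})

// In case the startup takes too long, force kill
setTimeout(() => kill(p.pid), 3 * 60_000)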
@ -14,7 +14,7 @@ if (fs.existsSync(commitEditMsgFile)) {
|
||||
try {
|
||||
const commitMessage = fs.readFileSync(commitEditMsgFile, 'utf8')
|
||||
const regex =
|
||||
'(build|BREAKING|chore|ci|docs|feat|fix|perf|refactor|style|test)(\\((web app|docker|server|hotword|skill\\/([\\w-]+)))?\\)?: .{1,50}'
|
||||
'(build|BREAKING|chore|ci|docs|feat|fix|perf|refactor|style|test)(\\((web app|docker|server|hotword|tcp server|python bridge|skill\\/([\\w-]+)))?\\)?: .{1,50}'
|
||||
|
||||
if (commitMessage.match(regex) !== null) {
|
||||
LogHelper.success('Commit message validated')
|
||||
|
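The commit hook regex above now accepts the "tcp server" and "python bridge" scopes. A quick sketch checking a sample message against the exact pattern from this diff:

// Quick check of a commit message against the updated scope list
const regex =
  '(build|BREAKING|chore|ci|docs|feat|fix|perf|refactor|style|test)(\\((web app|docker|server|hotword|tcp server|python bridge|skill\\/([\\w-]+)))?\\)?: .{1,50}'

const commitMessage = 'feat(python bridge): package the bridge as a binary' // example message
console.log(commitMessage.match(regex) !== null) // true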
scripts/release/pre-release-binaries.js (new file, 77 lines)
@ -0,0 +1,77 @@
|
||||
import path from 'node:path'
|
||||
|
||||
import { prompt } from 'inquirer'
|
||||
import { command } from 'execa'
|
||||
|
||||
import { PYTHON_BRIDGE_SRC_PATH, TCP_SERVER_SRC_PATH } from '@/constants'
|
||||
import { LogHelper } from '@/helpers/log-helper'
|
||||
import { LoaderHelper } from '@/helpers/loader-helper'
|
||||
|
||||
/**
|
||||
* Pre-release binaries via GitHub Actions
|
||||
* 1. Ask for confirmation whether the binary version has been bumped
|
||||
* 2. Trigger GitHub workflow to pre-release binaries
|
||||
*/
|
||||
|
||||
const BUILD_TARGETS = new Map()
|
||||
|
||||
BUILD_TARGETS.set('python-bridge', {
|
||||
workflowFileName: 'pre-release-python-bridge.yml',
|
||||
setupFilePath: path.join(PYTHON_BRIDGE_SRC_PATH, 'setup.py')
|
||||
})
|
||||
BUILD_TARGETS.set('tcp-server', {
|
||||
workflowFileName: 'pre-release-tcp-server.yml',
|
||||
setupFilePath: path.join(TCP_SERVER_SRC_PATH, 'setup.py')
|
||||
})
|
||||
;(async () => {
|
||||
LoaderHelper.start()
|
||||
|
||||
const { argv } = process
|
||||
const givenReleaseTarget = argv[2].toLowerCase()
|
||||
const givenBranch = argv[3]?.toLowerCase()
|
||||
const { workflowFileName, setupFilePath } =
|
||||
BUILD_TARGETS.get(givenReleaseTarget)
|
||||
|
||||
LoaderHelper.stop()
|
||||
const answer = await prompt({
|
||||
type: 'confirm',
|
||||
name: 'binary.bumped',
|
||||
message: `Have you bumped the version number of the binary from the "${setupFilePath}" file?`,
|
||||
default: false
|
||||
})
|
||||
LoaderHelper.start()
|
||||
|
||||
if (!answer.binary.bumped) {
|
||||
LogHelper.info(
|
||||
'Please bump the version number of the binary from the setup file before continuing'
|
||||
)
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
try {
|
||||
LogHelper.info('Triggering the GitHub workflow...')
|
||||
|
||||
const runWorkflowCommand = !givenBranch
|
||||
? `gh workflow run ${workflowFileName}`
|
||||
: `gh workflow run ${workflowFileName} --ref ${givenBranch}`
|
||||
|
||||
await command(runWorkflowCommand, {
|
||||
shell: true,
|
||||
stdout: 'inherit'
|
||||
})
|
||||
|
||||
LogHelper.success(
|
||||
'GitHub workflow triggered. The pre-release is on its way!'
|
||||
)
|
||||
LogHelper.success(
|
||||
'Once the pre-release is done, go to the GitHub releases to double-check information and hit release'
|
||||
)
|
||||
|
||||
process.exit(0)
|
||||
} catch (e) {
|
||||
LogHelper.error(
|
||||
`An error occurred while triggering the GitHub workflow: ${e}`
|
||||
)
|
||||
process.exit(1)
|
||||
}
|
||||
})()
|
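The pre-release script delegates the actual build to GitHub Actions through the GitHub CLI. A reduced sketch of the trigger, assuming gh is installed and authenticated:

// Reduced sketch of the GitHub workflow trigger used above
import { command } from 'execa'

const workflowFileName = 'pre-release-python-bridge.yml'
const givenBranch = process.argv[2] // optional branch, e.g. "develop"

const runWorkflowCommand = !givenBranch
  ? `gh workflow run ${workflowFileName}`
  : `gh workflow run ${workflowFileName} --ref ${givenBranch}`

await command(runWorkflowCommand, { shell: true, stdout: 'inherit' })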
@ -1,16 +0,0 @@
|
||||
import { LoaderHelper } from '@/helpers/loader-helper'
|
||||
|
||||
import check from './check'
|
||||
|
||||
/**
|
||||
* Execute the checking script
|
||||
*/
|
||||
;(async () => {
|
||||
try {
|
||||
LoaderHelper.start()
|
||||
await check()
|
||||
LoaderHelper.stop()
|
||||
} catch (e) {
|
||||
LoaderHelper.stop()
|
||||
}
|
||||
})()
|
scripts/setup/setup-python-binaries.js (new file, 105 lines)
@ -0,0 +1,105 @@
|
||||
import fs from 'node:fs'
|
||||
import path from 'node:path'
|
||||
import stream from 'node:stream'
|
||||
import readline from 'node:readline'
|
||||
|
||||
import axios from 'axios'
|
||||
import prettyBytes from 'pretty-bytes'
|
||||
import prettyMilliseconds from 'pretty-ms'
|
||||
import extractZip from 'extract-zip'
|
||||
|
||||
import {
|
||||
BINARIES_FOLDER_NAME,
|
||||
GITHUB_URL,
|
||||
PYTHON_BRIDGE_DIST_PATH,
|
||||
TCP_SERVER_DIST_PATH,
|
||||
PYTHON_BRIDGE_BIN_NAME,
|
||||
TCP_SERVER_BIN_NAME,
|
||||
PYTHON_BRIDGE_VERSION,
|
||||
TCP_SERVER_VERSION
|
||||
} from '@/constants'
|
||||
import { LogHelper } from '@/helpers/log-helper'
|
||||
|
||||
/**
|
||||
* Set up Python binaries according to the given setup target
|
||||
* 1. Delete the existing dist binaries if already exist
|
||||
* 2. Download the latest Python binaries from GitHub releases
|
||||
* 3. Extract the downloaded ZIP file to the dist folder
|
||||
*/
|
||||
|
||||
const PYTHON_TARGETS = new Map()
|
||||
|
||||
PYTHON_TARGETS.set('python-bridge', {
|
||||
name: 'Python bridge',
|
||||
distPath: PYTHON_BRIDGE_DIST_PATH,
|
||||
archiveName: `${PYTHON_BRIDGE_BIN_NAME}-${BINARIES_FOLDER_NAME}.zip`,
|
||||
version: PYTHON_BRIDGE_VERSION
|
||||
})
|
||||
PYTHON_TARGETS.set('tcp-server', {
|
||||
name: 'TCP server',
|
||||
distPath: TCP_SERVER_DIST_PATH,
|
||||
archiveName: `${TCP_SERVER_BIN_NAME}-${BINARIES_FOLDER_NAME}.zip`,
|
||||
version: TCP_SERVER_VERSION
|
||||
})
|
||||
|
||||
const setupPythonBinaries = async (key) => {
|
||||
const { name, distPath, archiveName, version } = PYTHON_TARGETS.get(key)
|
||||
const buildPath = path.join(distPath, BINARIES_FOLDER_NAME)
|
||||
const archivePath = path.join(distPath, archiveName)
|
||||
|
||||
await Promise.all([
|
||||
fs.promises.rm(buildPath, { recursive: true, force: true }),
|
||||
fs.promises.rm(archivePath, { recursive: true, force: true })
|
||||
])
|
||||
|
||||
try {
|
||||
LogHelper.info(`Downloading ${name}...`)
|
||||
|
||||
const archiveWriter = fs.createWriteStream(archivePath)
|
||||
const latestReleaseAssetURL = `${GITHUB_URL}/releases/download/${key}_v${version}/${archiveName}`
|
||||
const { data } = await axios.get(latestReleaseAssetURL, {
|
||||
responseType: 'stream',
|
||||
onDownloadProgress: ({ loaded, total, progress, estimated, rate }) => {
|
||||
const percentage = Math.floor(progress * 100)
|
||||
const downloadedSize = prettyBytes(loaded)
|
||||
const totalSize = prettyBytes(total)
|
||||
const estimatedTime = !estimated
|
||||
? 0
|
||||
: prettyMilliseconds(estimated * 1_000, { secondsDecimalDigits: 0 })
|
||||
const downloadRate = !rate ? 0 : prettyBytes(rate)
|
||||
|
||||
readline.clearLine(process.stdout, 0)
|
||||
readline.cursorTo(process.stdout, 0, null)
|
||||
process.stdout.write(
|
||||
`Download progress: ${percentage}% (${downloadedSize}/${totalSize} | ${downloadRate}/s | ${estimatedTime} ETA)`
|
||||
)
|
||||
|
||||
if (percentage === 100) {
|
||||
process.stdout.write('\n')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
data.pipe(archiveWriter)
|
||||
await stream.promises.finished(archiveWriter)
|
||||
|
||||
LogHelper.success(`${name} downloaded`)
|
||||
LogHelper.info(`Extracting ${name}...`)
|
||||
|
||||
const absoluteDistPath = path.resolve(distPath)
|
||||
await extractZip(archivePath, { dir: absoluteDistPath })
|
||||
|
||||
LogHelper.success(`${name} extracted`)
|
||||
|
||||
await fs.promises.rm(archivePath, { recursive: true, force: true })
|
||||
|
||||
LogHelper.success(`${name} ready`)
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to set up ${name}: ${error}`)
|
||||
}
|
||||
}
|
||||
|
||||
export default async () => {
|
||||
await setupPythonBinaries('python-bridge')
|
||||
await setupPythonBinaries('tcp-server')
|
||||
}
|
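The binaries setup script streams the release asset to disk and waits for the write stream to finish before extracting it. A reduced sketch, with the archive name and release tag assumed for the example:

// Reduced sketch of the streamed release download used above;
// the release tag follows the "<target>_v<version>" convention shown in the script
import fs from 'node:fs'
import stream from 'node:stream'
import axios from 'axios'

const archiveName = 'leon-python-bridge-linux-x86_64.zip' // assumption for the example
const assetUrl = `https://github.com/leon-ai/leon/releases/download/python-bridge_v1.0.0/${archiveName}`

const archiveWriter = fs.createWriteStream(archiveName)
const { data } = await axios.get(assetUrl, { responseType: 'stream' })

data.pipe(archiveWriter)
// Resolves once the archive has been fully written to disk
await stream.promises.finished(archiveWriter)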
scripts/setup/setup-python-dev-env.js (new file, 285 lines)
@ -0,0 +1,285 @@
|
||||
import fs from 'node:fs'
|
||||
import path from 'node:path'
|
||||
|
||||
import { command } from 'execa'
|
||||
|
||||
import {
|
||||
EN_SPACY_MODEL_NAME,
|
||||
EN_SPACY_MODEL_VERSION,
|
||||
FR_SPACY_MODEL_NAME,
|
||||
FR_SPACY_MODEL_VERSION,
|
||||
PYTHON_BRIDGE_SRC_PATH,
|
||||
TCP_SERVER_SRC_PATH
|
||||
} from '@/constants'
|
||||
import { LogHelper } from '@/helpers/log-helper'
|
||||
import { LoaderHelper } from '@/helpers/loader-helper'
|
||||
import { CPUArchitectures, OSHelper, OSTypes } from '@/helpers/os-helper'
|
||||
|
||||
/**
|
||||
* Set up development environment according to the given setup target
|
||||
* 1. Verify Python environment
|
||||
* 2. Verify if the targeted development environment is up-to-date
|
||||
* 3. If up-to-date, exit
|
||||
* 4. If not up-to-date, delete the outdated development environment and install the new one
|
||||
* 5. Install spaCy models if the targeted development environment is the TCP server
|
||||
*/
|
||||
|
||||
// Define mirror to download models installation file
|
||||
function getModelInstallationFileUrl(model, mirror = undefined) {
|
||||
const { name, version } = SPACY_MODELS.get(model)
|
||||
const suffix = 'py3-none-any.whl'
|
||||
let urlPrefix = 'https://github.com/explosion/spacy-models/releases/download'
|
||||
|
||||
if (mirror === 'cn') {
|
||||
LogHelper.info(
|
||||
'Using Chinese mirror to download model installation file...'
|
||||
)
|
||||
urlPrefix =
|
||||
'https://download.fastgit.org/explosion/spacy-models/releases/download'
|
||||
}
|
||||
|
||||
return `${urlPrefix}/${name}-${version}/${name}-${version}-${suffix}`
|
||||
}
|
||||
|
||||
const SETUP_TARGETS = new Map()
|
||||
const SPACY_MODELS = new Map()
|
||||
|
||||
SETUP_TARGETS.set('python-bridge', {
|
||||
name: 'Python bridge',
|
||||
pipfilePath: path.join(PYTHON_BRIDGE_SRC_PATH, 'Pipfile'),
|
||||
dotVenvPath: path.join(PYTHON_BRIDGE_SRC_PATH, '.venv'),
|
||||
dotProjectPath: path.join(PYTHON_BRIDGE_SRC_PATH, '.venv', '.project')
|
||||
})
|
||||
SETUP_TARGETS.set('tcp-server', {
|
||||
name: 'TCP server',
|
||||
pipfilePath: path.join(TCP_SERVER_SRC_PATH, 'Pipfile'),
|
||||
dotVenvPath: path.join(TCP_SERVER_SRC_PATH, '.venv'),
|
||||
dotProjectPath: path.join(TCP_SERVER_SRC_PATH, '.venv', '.project')
|
||||
})
|
||||
|
||||
SPACY_MODELS.set('en', {
|
||||
name: EN_SPACY_MODEL_NAME,
|
||||
version: EN_SPACY_MODEL_VERSION
|
||||
})
|
||||
SPACY_MODELS.set('fr', {
|
||||
name: FR_SPACY_MODEL_NAME,
|
||||
version: FR_SPACY_MODEL_VERSION
|
||||
})
|
||||
;(async () => {
|
||||
LoaderHelper.start()
|
||||
|
||||
const { argv } = process
|
||||
const givenSetupTarget = argv[2].toLowerCase()
|
||||
// cn
|
||||
const givenMirror = argv[3]?.toLowerCase()
|
||||
|
||||
if (!SETUP_TARGETS.has(givenSetupTarget)) {
|
||||
LogHelper.error(
|
||||
`Invalid setup target: ${givenSetupTarget}. Valid targets are: ${Array.from(
|
||||
SETUP_TARGETS.keys()
|
||||
).join(', ')}`
|
||||
)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const {
|
||||
name: setupTarget,
|
||||
pipfilePath,
|
||||
dotVenvPath,
|
||||
dotProjectPath
|
||||
} = SETUP_TARGETS.get(givenSetupTarget)
|
||||
|
||||
LogHelper.info('Checking Python environment...')
|
||||
|
||||
/**
|
||||
* Verify Python environment
|
||||
*/
|
||||
|
||||
// Check if the Pipfile exists
|
||||
if (fs.existsSync(pipfilePath)) {
|
||||
LogHelper.success(`${pipfilePath} found`)
|
||||
|
||||
try {
|
||||
// Check if Pipenv is installed
|
||||
const pipenvVersionChild = await command('pipenv --version', {
|
||||
shell: true
|
||||
})
|
||||
let pipenvVersion = String(pipenvVersionChild.stdout)
|
||||
|
||||
if (pipenvVersion.includes('version')) {
|
||||
pipenvVersion = pipenvVersion.split('version')[1].trim()
|
||||
pipenvVersion = `${pipenvVersion} version`
|
||||
}
|
||||
|
||||
LogHelper.success(`Pipenv ${pipenvVersion} found`)
|
||||
} catch (e) {
|
||||
LogHelper.error(
|
||||
`${e}\nPlease install Pipenv: "pip install pipenv" or read the documentation https://docs.pipenv.org`
|
||||
)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install Python packages
|
||||
*/
|
||||
|
||||
LogHelper.info(`Setting up ${setupTarget} development environment...`)
|
||||
|
||||
const pipfileMtime = fs.statSync(pipfilePath).mtime
|
||||
const hasDotVenv = fs.existsSync(dotVenvPath)
|
||||
const { type: osType, cpuArchitecture } = OSHelper.getInformation()
|
||||
const installPythonPackages = async () => {
|
||||
LogHelper.info(`Installing Python packages from ${pipfilePath}.lock...`)
|
||||
|
||||
// Delete .venv directory to reset the development environment
|
||||
if (hasDotVenv) {
|
||||
LogHelper.info(`Deleting ${dotVenvPath}...`)
|
||||
fs.rmSync(dotVenvPath, { recursive: true, force: true })
|
||||
LogHelper.success(`${dotVenvPath} deleted`)
|
||||
}
|
||||
|
||||
try {
|
||||
await command(`pipenv install --verbose --site-packages`, {
|
||||
shell: true,
|
||||
stdio: 'inherit'
|
||||
})
|
||||
|
||||
if (
|
||||
osType === OSTypes.MacOS &&
|
||||
cpuArchitecture === CPUArchitectures.ARM64
|
||||
) {
|
||||
LogHelper.info('macOS ARM64 detected')
|
||||
|
||||
LogHelper.info(
|
||||
'Installing Rust installer as it is needed for the "tokenizers" package for macOS ARM64 architecture...'
|
||||
)
|
||||
await command(`curl https://sh.rustup.rs -sSf | sh -s -- -y`, {
|
||||
shell: true,
|
||||
stdio: 'inherit'
|
||||
})
|
||||
LogHelper.success('Rust installer installed')
|
||||
|
||||
LogHelper.info('Reloading configuration from "$HOME/.cargo/env"...')
|
||||
await command(`source "$HOME/.cargo/env"`, {
|
||||
shell: true,
|
||||
stdio: 'inherit'
|
||||
})
|
||||
LogHelper.success('Configuration reloaded')
|
||||
|
||||
LogHelper.info('Checking Rust compiler version...')
|
||||
await command(`rustc --version`, {
|
||||
shell: true,
|
||||
stdio: 'inherit'
|
||||
})
|
||||
LogHelper.success('Rust compiler OK')
|
||||
}
|
||||
|
||||
LogHelper.success('Python packages installed')
|
||||
} catch (e) {
|
||||
LogHelper.error(`Failed to install Python packages: ${e}`)
|
||||
|
||||
if (osType === OSTypes.Windows) {
|
||||
LogHelper.error(
|
||||
'Please check the error above. It might be related to Microsoft C++ Build Tools. If it is, you can check here: "https://stackoverflow.com/a/64262038/1768162" then restart your machine and retry'
|
||||
)
|
||||
LogHelper.error(
|
||||
'If it is related to some hash mismatch, you can try by installing Pipenv 2022.7.24: pip install pipenv==2022.7.24'
|
||||
)
|
||||
}
|
||||
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify if a fresh development environment installation is necessary
|
||||
*/
|
||||
|
||||
// Required environment variables to set up
|
||||
process.env.PIPENV_PIPFILE = pipfilePath
|
||||
process.env.PIPENV_VENV_IN_PROJECT = true
|
||||
|
||||
if (givenSetupTarget === 'python-bridge') {
|
||||
// As per: https://github.com/marcelotduarte/cx_Freeze/issues/1548
|
||||
process.env.PIP_NO_BINARY = 'cx_Freeze'
|
||||
}
|
||||
|
||||
try {
|
||||
if (!hasDotVenv) {
|
||||
await installPythonPackages()
|
||||
} else {
|
||||
if (fs.existsSync(dotProjectPath)) {
|
||||
const dotProjectMtime = fs.statSync(dotProjectPath).mtime
|
||||
|
||||
// Check if Python deps tree has been modified since the initial setup
|
||||
if (pipfileMtime > dotProjectMtime) {
|
||||
LogHelper.info('The development environment is not up-to-date')
|
||||
await installPythonPackages()
|
||||
} else {
|
||||
LogHelper.success('Python packages are up-to-date')
|
||||
}
|
||||
} else {
|
||||
await installPythonPackages()
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
LogHelper.error(
|
||||
`Failed to set up the ${setupTarget} development environment: ${e}`
|
||||
)
|
||||
} finally {
|
||||
LoaderHelper.stop()
|
||||
}
|
||||
|
||||
if (givenSetupTarget === 'tcp-server') {
|
||||
const installSpacyModels = async () => {
|
||||
try {
|
||||
LogHelper.info('Installing spaCy models...')
|
||||
|
||||
// Install models one by one to avoid network throttling
|
||||
for (const modelLanguage of SPACY_MODELS.keys()) {
|
||||
const modelInstallationFileUrl = getModelInstallationFileUrl(
|
||||
modelLanguage,
|
||||
givenMirror
|
||||
)
|
||||
|
||||
await command(`pipenv run pip install ${modelInstallationFileUrl}`, {
|
||||
shell: true,
|
||||
stdio: 'inherit'
|
||||
})
|
||||
}
|
||||
|
||||
LogHelper.success('spaCy models installed')
|
||||
} catch (e) {
|
||||
LogHelper.error(`Failed to install spaCy models: ${e}`)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
LogHelper.info('Checking whether all spaCy models are installed...')
|
||||
|
||||
try {
|
||||
for (const { name: modelName } of SPACY_MODELS.values()) {
|
||||
const { stderr } = await command(
|
||||
`pipenv run python -c "import ${modelName}"`,
|
||||
{ shell: true }
|
||||
)
|
||||
|
||||
// Check stderr output for Windows as no exception is thrown
|
||||
if (osType === OSTypes.Windows) {
|
||||
if (String(stderr).length > 0) {
|
||||
await installSpacyModels()
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
LogHelper.success('All spaCy models are already installed')
|
||||
} catch (e) {
|
||||
LogHelper.info('Not all spaCy models are installed')
|
||||
await installSpacyModels()
|
||||
}
|
||||
}
|
||||
|
||||
LogHelper.success(`${setupTarget} development environment ready`)
|
||||
})()
|
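The development environment setup skips reinstalling when the Pipfile has not changed since the last install, by comparing its mtime with the .venv/.project marker written by Pipenv. A reduced sketch of that check:

// Reduced sketch of the freshness check used above: reinstall only when the
// Pipfile is newer than the .venv/.project marker created by Pipenv
import fs from 'node:fs'
import path from 'node:path'

const pipfilePath = path.join('bridges', 'python', 'src', 'Pipfile')
const dotProjectPath = path.join('bridges', 'python', 'src', '.venv', '.project')

const pipfileMtime = fs.statSync(pipfilePath).mtime

if (!fs.existsSync(dotProjectPath)) {
  console.log('No previous environment found, a fresh install is needed')
} else if (pipfileMtime > fs.statSync(dotProjectPath).mtime) {
  console.log('The development environment is not up-to-date')
} else {
  console.log('Python packages are up-to-date')
}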
@ -1,109 +0,0 @@
|
||||
import fs from 'node:fs'
|
||||
import path from 'node:path'
|
||||
|
||||
import { command } from 'execa'
|
||||
|
||||
import { LogHelper } from '@/helpers/log-helper'
|
||||
|
||||
/**
|
||||
* Download and setup Leon's Python packages dependencies
|
||||
*/
|
||||
export default () =>
|
||||
new Promise(async (resolve, reject) => {
|
||||
LogHelper.info('Checking Python env...')
|
||||
|
||||
// Check if the Pipfile exists
|
||||
if (fs.existsSync('bridges/python/Pipfile')) {
|
||||
LogHelper.success('bridges/python/Pipfile found')
|
||||
|
||||
try {
|
||||
// Check if Pipenv is installed
|
||||
const pipenvVersionChild = await command('pipenv --version', {
|
||||
shell: true
|
||||
})
|
||||
let pipenvVersion = pipenvVersionChild.stdout
|
||||
|
||||
if (pipenvVersion.indexOf('version') !== -1) {
|
||||
pipenvVersion = pipenvVersion.substr(
|
||||
pipenvVersion.indexOf('version') + 'version '.length
|
||||
)
|
||||
pipenvVersion = `${pipenvVersion} version`
|
||||
}
|
||||
|
||||
LogHelper.success(`Pipenv ${pipenvVersion} found`)
|
||||
} catch (e) {
|
||||
LogHelper.error(
|
||||
`${e}\nPlease install Pipenv: "pip install pipenv" or read the documentation https://docs.pipenv.org`
|
||||
)
|
||||
reject(e)
|
||||
}
|
||||
|
||||
try {
|
||||
const dotVenvPath = path.join(process.cwd(), 'bridges/python/.venv')
|
||||
const pipfilePath = path.join(process.cwd(), 'bridges/python/Pipfile')
|
||||
const pipfileMtime = fs.statSync(pipfilePath).mtime
|
||||
const isDotVenvExist = fs.existsSync(dotVenvPath)
|
||||
const installPythonPackages = async () => {
|
||||
if (isDotVenvExist) {
|
||||
LogHelper.info(`Deleting ${dotVenvPath}...`)
|
||||
fs.rmSync(dotVenvPath, { recursive: true, force: true })
|
||||
LogHelper.success(`${dotVenvPath} deleted`)
|
||||
}
|
||||
|
||||
// Installing Python packages
|
||||
LogHelper.info(
|
||||
'Installing Python packages from bridges/python/Pipfile...'
|
||||
)
|
||||
|
||||
await command('pipenv install --site-packages', { shell: true })
|
||||
LogHelper.success('Python packages installed')
|
||||
|
||||
LogHelper.info('Installing spaCy models...')
|
||||
// Find new spaCy models: https://github.com/explosion/spacy-models/releases
|
||||
await Promise.all([
|
||||
command(
|
||||
'pipenv run spacy download en_core_web_trf-3.4.0 --direct',
|
||||
{ shell: true }
|
||||
),
|
||||
command(
|
||||
'pipenv run spacy download fr_core_news_md-3.4.0 --direct',
|
||||
{ shell: true }
|
||||
)
|
||||
])
|
||||
|
||||
LogHelper.success('spaCy models installed')
|
||||
}
|
||||
|
||||
if (!isDotVenvExist) {
|
||||
await installPythonPackages()
|
||||
} else {
|
||||
const dotProjectPath = path.join(
|
||||
process.cwd(),
|
||||
'bridges/python/.venv/.project'
|
||||
)
|
||||
if (fs.existsSync(dotProjectPath)) {
|
||||
const dotProjectMtime = fs.statSync(dotProjectPath).mtime
|
||||
|
||||
// Check if Python deps tree has been modified since the initial setup
|
||||
if (pipfileMtime > dotProjectMtime) {
|
||||
await installPythonPackages()
|
||||
} else {
|
||||
LogHelper.success('Python packages are up-to-date')
|
||||
}
|
||||
} else {
|
||||
await installPythonPackages()
|
||||
}
|
||||
}
|
||||
|
||||
resolve()
|
||||
} catch (e) {
|
||||
LogHelper.error(`Failed to install the Python packages: ${e}`)
|
||||
reject(e)
|
||||
}
|
||||
} else {
|
||||
LogHelper.error(
|
||||
'bridges/python/Pipfile does not exist. Try to pull the project (git pull)'
|
||||
)
|
||||
reject()
|
||||
}
|
||||
})
|
@ -7,7 +7,7 @@ import generateHttpApiKey from '../generate/generate-http-api-key'
|
||||
import setupDotenv from './setup-dotenv'
|
||||
import setupCore from './setup-core'
|
||||
import setupSkillsConfig from './setup-skills-config'
|
||||
import setupPythonPackages from './setup-python-packages'
|
||||
import setupPythonBinaries from './setup-python-binaries'
|
||||
|
||||
// Do not load ".env" file because it is not created yet
|
||||
|
||||
@ -16,15 +16,11 @@ import setupPythonPackages from './setup-python-packages'
|
||||
*/
|
||||
;(async () => {
|
||||
try {
|
||||
// Required env vars to setup
|
||||
process.env.PIPENV_PIPFILE = 'bridges/python/Pipfile'
|
||||
process.env.PIPENV_VENV_IN_PROJECT = 'true'
|
||||
|
||||
await setupDotenv()
|
||||
LoaderHelper.start()
|
||||
await Promise.all([setupCore(), setupSkillsConfig()])
|
||||
await setupPythonPackages()
|
||||
LoaderHelper.stop()
|
||||
await setupPythonBinaries()
|
||||
await generateHttpApiKey()
|
||||
LoaderHelper.start()
|
||||
await train()
|
||||
|
@ -18,7 +18,7 @@ import { LoaderHelper } from '@/helpers/loader-helper'
|
||||
LoaderHelper.start()
|
||||
await command('npm run train en', { shell: true })
|
||||
const cmd = await command(
|
||||
`cross-env PIPENV_PIPFILE=bridges/python/Pipfile LEON_NODE_ENV=testing jest --silent --config=./test/e2e/modules/e2e.modules.jest.json packages/${pkg}/test/${module}.spec.js && npm run train`,
|
||||
`cross-env PIPENV_PIPFILE=bridges/python/src/Pipfile LEON_NODE_ENV=testing jest --silent --config=./test/e2e/modules/e2e.modules.jest.json packages/${pkg}/test/${module}.spec.js && npm run train`,
|
||||
{ shell: true }
|
||||
)
|
||||
|
||||
|
@ -1,6 +1,10 @@
|
||||
import path from 'node:path'
|
||||
import fs from 'node:fs'
|
||||
|
||||
import dotenv from 'dotenv'
|
||||
|
||||
import type { LongLanguageCode } from '@/helpers/lang-helper'
|
||||
import { OSHelper } from '@/helpers/os-helper'
|
||||
|
||||
dotenv.config()
|
||||
|
||||
@ -8,11 +12,67 @@ const PRODUCTION_ENV = 'production'
|
||||
const DEVELOPMENT_ENV = 'development'
|
||||
const TESTING_ENV = 'testing'
|
||||
|
||||
export const GITHUB_URL = 'https://github.com/leon-ai/leon'
|
||||
|
||||
/**
|
||||
* Binaries / distribution
|
||||
*/
|
||||
export const BINARIES_FOLDER_NAME = OSHelper.getBinariesFolderName()
|
||||
export const PYTHON_BRIDGE_DIST_PATH = path.join('bridges', 'python', 'dist')
|
||||
export const TCP_SERVER_DIST_PATH = path.join('tcp_server', 'dist')
|
||||
|
||||
export const PYTHON_BRIDGE_SRC_PATH = path.join('bridges', 'python', 'src')
|
||||
export const TCP_SERVER_SRC_PATH = path.join('tcp_server', 'src')
|
||||
|
||||
const PYTHON_BRIDGE_VERSION_FILE_PATH = path.join(
|
||||
PYTHON_BRIDGE_SRC_PATH,
|
||||
'version.py'
|
||||
)
|
||||
const TCP_SERVER_VERSION_FILE_PATH = path.join(
|
||||
TCP_SERVER_SRC_PATH,
|
||||
'version.py'
|
||||
)
|
||||
export const [, PYTHON_BRIDGE_VERSION] = fs
|
||||
.readFileSync(PYTHON_BRIDGE_VERSION_FILE_PATH, 'utf8')
|
||||
.split("'")
|
||||
export const [, TCP_SERVER_VERSION] = fs
|
||||
.readFileSync(TCP_SERVER_VERSION_FILE_PATH, 'utf8')
|
||||
.split("'")
|
||||
|
||||
export const PYTHON_BRIDGE_BIN_NAME = 'leon-python-bridge'
|
||||
export const TCP_SERVER_BIN_NAME = 'leon-tcp-server'
|
||||
|
||||
export const TCP_SERVER_BIN_PATH = path.join(
|
||||
TCP_SERVER_DIST_PATH,
|
||||
BINARIES_FOLDER_NAME,
|
||||
TCP_SERVER_BIN_NAME
|
||||
)
|
||||
export const PYTHON_BRIDGE_BIN_PATH = path.join(
|
||||
PYTHON_BRIDGE_DIST_PATH,
|
||||
BINARIES_FOLDER_NAME,
|
||||
PYTHON_BRIDGE_BIN_NAME
|
||||
)
|
||||
|
||||
/**
|
||||
* spaCy models
|
||||
* Find new spaCy models: https://github.com/explosion/spacy-models/releases
|
||||
*/
|
||||
export const EN_SPACY_MODEL_NAME = 'en_core_web_trf'
|
||||
export const EN_SPACY_MODEL_VERSION = '3.4.0'
|
||||
export const FR_SPACY_MODEL_NAME = 'fr_core_news_md'
|
||||
export const FR_SPACY_MODEL_VERSION = '3.4.0'
|
||||
|
||||
/**
|
||||
* Environments
|
||||
*/
|
||||
export const IS_PRODUCTION_ENV = process.env['LEON_NODE_ENV'] === PRODUCTION_ENV
|
||||
export const IS_DEVELOPMENT_ENV =
|
||||
process.env['LEON_NODE_ENV'] === DEVELOPMENT_ENV
|
||||
export const IS_TESTING_ENV = process.env['LEON_NODE_ENV'] === TESTING_ENV
|
||||
|
||||
/**
|
||||
* Leon environment preferences
|
||||
*/
|
||||
export const LANG = process.env['LEON_LANG'] as LongLanguageCode
|
||||
|
||||
export const HOST = process.env['LEON_HOST']
|
||||
|
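With these constants, each binary path is composed from the dist path, the per-platform folder name and the binary name. A sketch of the composition, assuming OSHelper.getBinariesFolderName() returns something like linux-x86_64 (that helper is not shown in this diff):

// Sketch of how the binary paths compose from the constants above
import path from 'node:path'

const BINARIES_FOLDER_NAME = 'linux-x86_64' // assumption for the example
const PYTHON_BRIDGE_DIST_PATH = path.join('bridges', 'python', 'dist')
const PYTHON_BRIDGE_BIN_NAME = 'leon-python-bridge'

const PYTHON_BRIDGE_BIN_PATH = path.join(
  PYTHON_BRIDGE_DIST_PATH,
  BINARIES_FOLDER_NAME,
  PYTHON_BRIDGE_BIN_NAME
)

console.log(PYTHON_BRIDGE_BIN_PATH) // bridges/python/dist/linux-x86_64/leon-python-bridge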
@ -3,7 +3,7 @@ import path from 'node:path'
|
||||
import { spawn } from 'node:child_process'
|
||||
|
||||
import { langs } from '@@/core/langs.json'
|
||||
import { HAS_TTS } from '@/constants'
|
||||
import { HAS_TTS, PYTHON_BRIDGE_BIN_PATH } from '@/constants'
|
||||
import { LangHelper } from '@/helpers/lang-helper'
|
||||
import { LogHelper } from '@/helpers/log-helper'
|
||||
import { SkillDomainHelper } from '@/helpers/skill-domain-helper'
|
||||
@ -199,8 +199,8 @@ class Brain {
|
||||
*
|
||||
* 1. Need to be at the root of the project
|
||||
* 2. Edit: server/src/intent-object.sample.json
|
||||
* 3. Run: PIPENV_PIPFILE=bridges/python/Pipfile pipenv run
|
||||
* python bridges/python/main.py server/src/intent-object.sample.json
|
||||
* 3. Run: PIPENV_PIPFILE=bridges/python/src/Pipfile pipenv run
|
||||
* python bridges/python/src/main.py server/src/intent-object.sample.json
|
||||
*/
|
||||
const slots = {}
|
||||
if (obj.slots) {
|
||||
@ -225,7 +225,7 @@ class Brain {
|
||||
try {
|
||||
fs.writeFileSync(intentObjectPath, JSON.stringify(intentObj))
|
||||
this.process = spawn(
|
||||
`pipenv run python bridges/python/main.py ${intentObjectPath}`,
|
||||
`${PYTHON_BRIDGE_BIN_PATH} ${intentObjectPath}`,
|
||||
{ shell: true }
|
||||
)
|
||||
} catch (e) {
|
||||
|
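The brain now writes the intent object to disk and spawns the packaged Python bridge binary on it instead of going through pipenv. A reduced sketch, with the binary path and the intent object contents being assumptions for the example:

// Reduced sketch of how the brain invokes the packaged Python bridge
import fs from 'node:fs'
import { spawn } from 'node:child_process'

const pythonBridgeBinPath = 'bridges/python/dist/linux-x86_64/leon-python-bridge' // assumption
const intentObjectPath = 'server/src/intent-object.sample.json'
// Hypothetical minimal intent object, for illustration only
const intentObj = { domain: 'leon', skill: 'greeting', slots: {} }

fs.writeFileSync(intentObjectPath, JSON.stringify(intentObj))

const child = spawn(`${pythonBridgeBinPath} ${intentObjectPath}`, { shell: true })
child.stdout.on('data', (data) => console.log(data.toString()))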
@ -6,7 +6,7 @@ import { containerBootstrap } from '@nlpjs/core-loader'
import { Nlp } from '@nlpjs/nlp'
import { BuiltinMicrosoft } from '@nlpjs/builtin-microsoft'
import { LangAll } from '@nlpjs/lang-all'
import request from 'superagent'
import axios from 'axios'
import kill from 'tree-kill'

import { langs } from '@@/core/langs.json'
@ -14,6 +14,7 @@ import { version } from '@@/package.json'
import {
  HAS_LOGGER,
  IS_TESTING_ENV,
  TCP_SERVER_BIN_PATH,
  TCP_SERVER_HOST,
  TCP_SERVER_PORT
} from '@/constants'
@ -44,7 +45,6 @@ const defaultNluResultObj = {
class Nlu {
  constructor(brain) {
    this.brain = brain
    this.request = request
    this.globalResolversNlp = {}
    this.skillsResolversNlp = {}
    this.mainNlp = {}
@ -223,10 +223,9 @@ class Nlu {

    // Recreate a new TCP server process and reconnect the TCP client
    kill(global.tcpServerProcess.pid, () => {
      global.tcpServerProcess = spawn(
        `pipenv run python bridges/python/tcp_server/main.py ${locale}`,
        { shell: true }
      )
      global.tcpServerProcess = spawn(`${TCP_SERVER_BIN_PATH} ${locale}`, {
        shell: true
      })

      global.tcpClient = new TcpClient(TCP_SERVER_HOST, TCP_SERVER_PORT)

@ -243,21 +242,17 @@ class Nlu {
  sendLog(utterance) {
    /* istanbul ignore next */
    if (HAS_LOGGER && !IS_TESTING_ENV) {
      this.request
        .post('https://logger.getleon.ai/v1/expressions')
        .set('X-Origin', 'leon-core')
        .send({
      axios.request({
        method: 'POST',
        url: 'https://logger.getleon.ai/v1/expressions',
        headers: { 'X-Origin': 'leon-core' },
        data: {
          version,
          utterance,
          lang: this.brain.lang,
          classification: this.nluResultObj.classification
        })
        .then(() => {
          /* */
        })
        .catch(() => {
          /* */
        })
        }
      })
    }
  }

@ -555,7 +550,7 @@ class Nlu {
      return resolve(this.switchLanguage(utterance, locale, opts))
    }

    this.sendLog()
    // this.sendLog()

    if (intent === 'None') {
      const fallback = this.fallback(
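The sendLog refactor above replaces a superagent chain with a single axios.request call. Roughly, the mapping looks like the following sketch (the payload values are placeholders; the fire-and-forget error handling mirrors the original intent):

import axios from 'axios'

// Placeholder payload for the sketch
const payload = {
  version: '1.0.0',
  utterance: 'Hello',
  lang: 'en',
  classification: {}
}

axios
  .request({
    method: 'POST',
    url: 'https://logger.getleon.ai/v1/expressions',
    headers: { 'X-Origin': 'leon-core' },
    data: payload
  })
  .catch(() => {
    // Logging failures should never interrupt the NLU flow
  })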
@ -3,11 +3,12 @@ import { EventEmitter } from 'node:events'

import { IS_PRODUCTION_ENV } from '@/constants'
import { LogHelper } from '@/helpers/log-helper'
import { OSHelper, OSTypes } from '@/helpers/os-helper'

// Time interval between each try (in ms)
const INTERVAL = IS_PRODUCTION_ENV ? 3000 : 300
const INTERVAL = IS_PRODUCTION_ENV ? 3000 : 500
// Number of retries to connect to the TCP server
const RETRIES_NB = IS_PRODUCTION_ENV ? 5 : 15
const RETRIES_NB = IS_PRODUCTION_ENV ? 8 : 30

export default class TcpClient {
  constructor(host, port) {
@ -47,6 +48,8 @@ export default class TcpClient {
    if (err.code === 'ECONNREFUSED') {
      this.reconnectCounter += 1

      const { type: osType } = OSHelper.getInformation()

      if (this.reconnectCounter >= RETRIES_NB) {
        LogHelper.error('Failed to connect to the TCP server')
        this.tcpSocket.end()
@ -55,6 +58,14 @@ export default class TcpClient {
      if (this.reconnectCounter >= 1) {
        LogHelper.info('Trying to connect to the TCP server...')

        if (this.reconnectCounter >= 5) {
          if (osType === OSTypes.MacOS) {
            LogHelper.warning(
              'The cold start of the TCP server can take a few more seconds on macOS. It should be a one time thing, no worries'
            )
          }
        }

        setTimeout(() => {
          this.connect()
        }, INTERVAL * this.reconnectCounter)
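Because the delay is INTERVAL multiplied by the current attempt number, the client backs off linearly rather than retrying at a fixed rate. A small sketch of the resulting schedule with the production values above:

// Linear backoff: attempt n waits INTERVAL * n milliseconds
const INTERVAL = 3000
const RETRIES_NB = 8

const delays = Array.from({ length: RETRIES_NB }, (_, i) => INTERVAL * (i + 1))

console.log(delays) // -> [3000, 6000, 9000, 12000, 15000, 18000, 21000, 24000]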
@ -1,34 +1,114 @@
import os from 'node:os'

type OSType = 'windows' | 'macos' | 'linux' | 'unknown'
type OSName = 'Windows' | 'macOS' | 'Linux' | 'Unknown'
export enum OSTypes {
  Windows = 'windows',
  MacOS = 'macos',
  Linux = 'linux',
  Unknown = 'unknown'
}
export enum CPUArchitectures {
  X64 = 'x64',
  ARM64 = 'arm64'
}
enum OSNames {
  Windows = 'Windows',
  MacOS = 'macOS',
  Linux = 'Linux',
  Unknown = 'Unknown'
}
enum BinaryFolderNames {
  Linux64Bit = 'linux-x86_64', // Linux 64-bit (Intel)
  LinuxARM64 = 'linux-aarch64', // Linux 64-bit (ARM)
  MacOS64Bit = 'macosx-x86_64', // Apple 64-bit (Intel)
  MacOSARM64 = 'macosx-arm64', // Apple silicon (64-bit) (ARM - M1)
  Windows64Bit = 'win-amd64', // Windows 64-bit
  Unknown = 'unknown'
}

interface GetInformation {
  type: OSType
  name: OSName
  type: OSTypes
  name: OSNames
  platform: NodeJS.Platform
  cpuArchitecture: CPUArchitectures
}

type PartialInformation = {
  [key in NodeJS.Platform]?: {
    type: OSTypes
    name: OSNames
  }
}

export class OSHelper {
  /**
   * Get information about your OS
   * @example getInformation() // { type: 'linux', name: 'Linux' }
   * N.B. Node.js returns info based on the compiled binary we are running on, not based on our machine hardware
   * @see https://github.com/nodejs/node/blob/main/BUILDING.md#supported-platforms
   * @example getInformation() // { type: 'linux', name: 'Linux', platform: 'linux', cpuArchitecture: 'x64' }
   */
  public static getInformation(): GetInformation {
    let type: OSType = 'unknown'
    let name: OSName = 'Unknown'
    const platform = os.platform()
    const cpuArchitecture = os.arch() as CPUArchitectures

    if (os.type().indexOf('Windows') !== -1) {
      type = 'windows'
      name = 'Windows'
    } else if (os.type() === 'Darwin') {
      type = 'macos'
      name = 'macOS'
    } else if (os.type() === 'Linux') {
      type = 'linux'
      name = 'Linux'
    const information: PartialInformation = {
      linux: {
        type: OSTypes.Linux,
        name: OSNames.Linux
      },
      darwin: {
        type: OSTypes.MacOS,
        name: OSNames.MacOS
      },
      // Node.js returns "win32" for both 32-bit and 64-bit versions of Windows
      win32: {
        type: OSTypes.Windows,
        name: OSNames.Windows
      }
    }

    return { type, name }
    return {
      ...(information[platform] || {
        type: OSTypes.Unknown,
        name: OSNames.Unknown
      }),
      platform,
      cpuArchitecture
    }
  }

  /**
   * Get binaries folder name based on the platform and CPU architecture
   * Comply with the naming convention of Python sysconfig.get_platform()
   * @see https://github.com/python/cpython/blob/main/Lib/sysconfig.py
   * @example getBinariesFolderName() // 'linux-x86_64'
   */
  public static getBinariesFolderName(): BinaryFolderNames {
    const { type, cpuArchitecture } = this.getInformation()

    if (type === OSTypes.Linux) {
      if (cpuArchitecture === CPUArchitectures.X64) {
        return BinaryFolderNames.Linux64Bit
      }

      return BinaryFolderNames.LinuxARM64
    }

    if (type === OSTypes.MacOS) {
      const cpuCores = os.cpus()
      const isM1 = cpuCores[0]?.model.includes('Apple')

      if (isM1 || cpuArchitecture === CPUArchitectures.ARM64) {
        return BinaryFolderNames.MacOSARM64
      }

      return BinaryFolderNames.MacOS64Bit
    }

    if (type === OSTypes.Windows) {
      return BinaryFolderNames.Windows64Bit
    }

    return BinaryFolderNames.Unknown
  }

  /**
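As a usage sketch, the folder name returned above slots directly into the dist paths declared in the constants module; on a 64-bit Intel Linux host the resolved binary path would look like this (values shown are illustrative):

import path from 'node:path'

// Illustrative result of OSHelper.getBinariesFolderName() on Linux x64
const binariesFolderName = 'linux-x86_64'

const tcpServerBinPath = path.join(
  'tcp_server',
  'dist',
  binariesFolderName,
  'leon-tcp-server'
)

console.log(tcpServerBinPath) // -> tcp_server/dist/linux-x86_64/leon-tcp-server (on a POSIX system)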
@ -4,7 +4,8 @@ import {
  TCP_SERVER_HOST,
  TCP_SERVER_PORT,
  IS_DEVELOPMENT_ENV,
  LANG as LEON_LANG
  LANG as LEON_LANG,
  TCP_SERVER_BIN_PATH
} from '@/constants'
import { LangHelper } from '@/helpers/lang-helper'
import TcpClient from '@/core/tcp-client'
@ -13,9 +14,7 @@ import server from '@/core/http-server/server'
process.title = 'leon'

global.tcpServerProcess = spawn(
  `pipenv run python bridges/python/tcp_server/main.py ${LangHelper.getShortCode(
    LEON_LANG
  )}`,
  `${TCP_SERVER_BIN_PATH} ${LangHelper.getShortCode(LEON_LANG)}`,
  {
    shell: true,
    detached: IS_DEVELOPMENT_ENV
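The entry point now spawns the packaged TCP server directly and passes it a short language code. Assuming LangHelper.getShortCode simply truncates a long code such as 'en-US' to 'en' (an assumption for this sketch, not spelled out in the diff), the resulting command line would be:

// Assumed behaviour for the sketch: 'en-US' -> 'en'
const getShortCode = (longCode: string): string => longCode.split('-')[0] ?? longCode

// Illustrative binary path; the real one is TCP_SERVER_BIN_PATH
const tcpServerBinPath = 'tcp_server/dist/linux-x86_64/leon-tcp-server'

const command = `${tcpServerBinPath} ${getShortCode('en-US')}`

console.log(command) // -> tcp_server/dist/linux-x86_64/leon-tcp-server en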
0
tcp_server/dist/.gitkeep
vendored
Normal file
@ -7,15 +7,10 @@ name = "pypi"
python_version = "3.9.10"

[packages]
requests = "==2.21.0"
pytube = "==9.5.0"
tinydb = "==4.7.0"
beautifulsoup4 = "==4.7.1"
setuptools = "*"
wheel = "*"
cx-freeze = "==6.11.1"
spacy = "==3.4.0"
setuptools = "==60.9.3"
wheel = "==0.37.1"
torch = "==1.12.1"
python-dotenv = "==0.19.2"
geonamescache = "==1.3.0"

[dev-packages]
@ -21,7 +21,8 @@ class TCPServer:
        self.tcp_socket.listen()

        while True:
            print('Waiting for connection...')
            # Flush buffered output to make it IPC friendly (readable on stdout)
            print('Waiting for connection...', flush=True)

            # Our TCP server only needs to support one connection
            self.conn, self.addr = self.tcp_socket.accept()
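On the Node.js side of that pipe, the flushed line is what the core process can actually observe on the child's stdout; without flush=True the message may sit in Python's output buffer. A minimal sketch of the consumer side (binary path and readiness string reused from this diff; the watching logic itself is illustrative):

import { spawn } from 'node:child_process'

// Illustrative command; the real one is built from TCP_SERVER_BIN_PATH and the locale
const child = spawn('tcp_server/dist/linux-x86_64/leon-tcp-server en', {
  shell: true
})

child.stdout.on('data', (chunk) => {
  const output = chunk.toString()

  // Arrives promptly only because the Python side flushes its stdout
  if (output.includes('Waiting for connection...')) {
    console.log('TCP server is up and listening')
  }
})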
@ -8,7 +8,7 @@ from dotenv import load_dotenv
import lib.nlp as nlp
from lib.TCPServer import TCPServer

dotenv_path = join(dirname(__file__), '../../../.env')
dotenv_path = join(dirname(__file__), '../../../../../.env')
load_dotenv(dotenv_path)

nlp.load_spacy_model()
38
tcp_server/src/setup.py
Normal file
@ -0,0 +1,38 @@
from cx_Freeze import setup, Executable
import sysconfig

from version import __version__

options = {
    'build_exe': {
        'packages': [
            'spacy',
            'torch',
            'en_core_web_trf',
            'fr_core_news_md'
        ],
        'includes': [
            'srsly.msgpack.util',
            'blis',
            'cymem'
        ]
    }
}

# Include private libraries from the tokenizers package for Linux
if 'linux' in sysconfig.get_platform():
    options['build_exe']['include_files'] = [('tcp_server/src/.venv/lib/python3.9/site-packages/tokenizers.libs', 'lib/tokenizers.libs')]

executables = [
    Executable(
        script='tcp_server/src/main.py',
        target_name='leon-tcp-server'
    )
]

setup(
    name='leon-tcp-server',
    version=__version__,
    executables=executables,
    options=options
)
1
tcp_server/src/version.py
Normal file
@ -0,0 +1 @@
__version__ = '1.0.0'
@ -1,4 +1,4 @@
import superagent from 'superagent'
import axios from 'axios'

import server from '@/core/http-server/server'

@ -17,19 +17,28 @@ const actionSkillUrl = `${urlPrefix}/p/leon/randomnumber/run`

describe('Over HTTP', () => {
  test(`Request query endpoint POST ${queryUrl}`, async () => {
    const { body } = await superagent
      .post(queryUrl)
      .send({ utterance: 'Hello' })
      .set('X-API-Key', process.env.LEON_HTTP_API_KEY)
    const { data } = await axios.post(
      queryUrl,
      {
        utterance: 'Hello'
      },
      {
        headers: {
          'X-API-Key': process.env.LEON_HTTP_API_KEY
        }
      }
    )

    expect(body).toHaveProperty('success', true)
    expect(data).toHaveProperty('success', true)
  })

  test(`Request an action skill: GET ${actionSkillUrl}`, async () => {
    const { body } = await superagent
      .get(actionSkillUrl)
      .set('X-API-Key', process.env.LEON_HTTP_API_KEY)
    const { data } = await axios.get(actionSkillUrl, {
      headers: {
        'X-API-Key': process.env.LEON_HTTP_API_KEY
      }
    })

    expect(body).toHaveProperty('success', true)
    expect(data).toHaveProperty('success', true)
  })
})
@ -19,5 +19,5 @@
  },
  "files": ["server/src/global.d.ts"],
  "include": ["server/src/**/*"],
  "exclude": ["node_modules", "server/dist"]
  "exclude": ["node_modules", "server/dist", "bridges/python", "tcp_server"]
}