Mirror of https://github.com/leon-ai/leon.git (synced 2024-11-28 12:43:35 +03:00)

Merge branch 'develop' into speedtest_package

Commit 0395d27d1d

.github/CONTRIBUTING.md (vendored, 2 lines changed)
@@ -2,7 +2,7 @@
 Thanks a lot for your interest in contributing to Leon! :heart:
 
-**Leon needs open-source to live**, the more modules he has, the more skillful he becomes.
+**Leon needs open source to live**, the more modules he has, the more skillful he becomes.
 
 **Before submitting your contribution**, please take a moment to review this document.
README.md (11 lines changed)

@@ -34,6 +34,7 @@ You can also **text him** and he can also **text you**.
 If you want to, Leon can communicate with you by being **offline to protect your privacy**.
 
 ### Why?
 
 > 1. If you are a developer (or not), you may want to build many things that could help in your daily life.
 > Instead of building a dedicated project for each of those ideas, Leon can help you with his
 > packages/modules (skills) structure.

@@ -41,15 +42,21 @@ If you want to, Leon can communicate with you by being **offline to protect your
 > Therefore there is only one core (to rule them all).
 > 3. Leon uses AI concepts, which is cool.
 > 4. Privacy matters, you can configure Leon to talk with him offline. You can already text with him without any third party services.
-> 5. Open-source is great.
+> 5. Open source is great.
 
 ### What is this repository for?
 
 > This repository contains the following nodes of Leon:
 > - The server
 > - The packages/modules
 > - The web app
 > - The hotword node
+
+### What is Leon able to do?
+
+> Today, the most interesting part is about his core and the way he can scale up. He is pretty young but can easily scale to have new features (packages/modules).
+> You can find what he is able to do by browsing the [packages list](https://github.com/leon-ai/leon/tree/develop/packages).
+
 Sounds good for you? Then let's get started!
 
 ## Getting Started

@@ -119,7 +126,7 @@ To know what is going on, follow [roadmap.getleon.ai](https://roadmap.getleon.ai
 
 If you have an idea for improving Leon, do not hesitate.
 
-**Leon needs open-source to live**, the more modules he has, the more skillful he becomes.
+**Leon needs open source to live**, the more modules he has, the more skillful he becomes.
 
 ## The Story Behind Leon
 
@@ -92,7 +92,7 @@ export default class Chatbot {
 
     container.className = `bubble-container ${who}`
     bubble.className = 'bubble'
-    bubble.textContent = string
+    bubble.innerHTML = string
 
     this.feed.appendChild(container).appendChild(bubble)
 
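The switch from `textContent` to `innerHTML` above goes hand in hand with answers that now embed HTML markup (the new trend answers wrap their results in `<ul>`/`<li>` tags and `<br>` breaks). A minimal sketch, with hypothetical variable names, of why plain-text insertion would no longer render correctly:

```js
// Minimal sketch (hypothetical names), not the project's actual code:
// answers such as the GitHub trends now carry HTML markup, so they have to be
// injected as HTML rather than as plain text.
const answer = 'Here are the 2 latest GitHub trends of today:<br><br><ul><li>#1. repo</li></ul>'
const bubble = document.createElement('p')

bubble.textContent = answer // would show the raw "<br>" and "<ul>" tags as literal text
bubble.innerHTML = answer   // renders the line breaks and the list as intended
```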
@@ -7,6 +7,7 @@ name = "pypi"
 requests = "==2.21.0"
 pytube = "==9.2.2"
 tinydb = "==3.9.0"
+beautifulsoup4 = "==4.7.1"
 
 [dev-packages]
 
bridges/python/Pipfile.lock (generated, 24 lines changed)

@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "ef69fb486898e1db2c2908e9b67e156c99e6a7ddaccad88881a5e8f36edd162e"
+            "sha256": "6b5d87faf7886492cc3d6fdc896041d302523857d81a9e072cbfd627bb204b39"
         },
         "pipfile-spec": 6,
         "requires": {

@@ -16,12 +16,21 @@
         ]
     },
     "default": {
+        "beautifulsoup4": {
+            "hashes": [
+                "sha256:034740f6cb549b4e932ae1ab975581e6103ac8f942200a0e9759065984391858",
+                "sha256:945065979fb8529dd2f37dbb58f00b661bdbcbebf954f93b32fdf5263ef35348",
+                "sha256:ba6d5c59906a85ac23dadfe5c88deaf3e179ef565f4898671253e50a78680718"
+            ],
+            "index": "pypi",
+            "version": "==4.7.1"
+        },
         "certifi": {
             "hashes": [
-                "sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7",
-                "sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033"
+                "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
+                "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
             ],
-            "version": "==2018.11.29"
+            "version": "==2019.3.9"
         },
         "chardet": {
             "hashes": [

@@ -53,6 +62,13 @@
             "index": "pypi",
             "version": "==2.21.0"
         },
+        "soupsieve": {
+            "hashes": [
+                "sha256:afa56bf14907bb09403e5d15fbed6275caa4174d36b975226e3b67a3bb6e2c4b",
+                "sha256:eaed742b48b1f3e2d45ba6f79401b2ed5dc33b2123dfe216adb90d4bfa0ade26"
+            ],
+            "version": "==1.8"
+        },
         "tinydb": {
             "hashes": [
                 "sha256:67b3b302fc86e0139db545d5abd65bf0e1dadaecee63bd1ff3fe2169810d5387",
@@ -6,16 +6,17 @@ from os import path, environ
 from pathlib import Path
 from random import choice
 from sys import argv, stdout
-from re import findall
 from vars import useragent
 from tinydb import TinyDB, Query, operations
 from time import sleep
 import sqlite3
 import requests
+import re
 
 dirname = path.dirname(path.realpath(__file__))
 
 queryobjectpath = argv[1]
+codes = []
 
 serversrc = 'dist' if environ.get('LEON_NODE_ENV') == 'production' else 'src'
 queryobjfile = open(queryobjectpath, 'r', encoding = 'utf8')

@@ -49,12 +50,14 @@ def translate(key, d = { }):
 
     # "Temporize" for the data buffer ouput on the core
     sleep(0.1)
 
     return output
 
 def output(type, code, speech = ''):
     """Communicate with the Core"""
 
+    codes.append(code)
+
     print(dumps({
         'package': queryobj['package'],
         'module': queryobj['module'],

@@ -63,7 +66,7 @@ def output(type, code, speech = ''):
         'entities': queryobj['entities'],
         'output': {
             'type': type,
-            'code': code,
+            'codes': codes,
             'speech': speech,
             'options': config('options')
         }

@@ -111,4 +114,3 @@ def db(dbtype = 'tinydb'):
     if dbtype == 'tinydb':
         db = TinyDB(dirname + '/../../packages/' + queryobj['package'] + '/data/db/' + queryobj['package'] + '.json')
     return { 'db': db, 'query': Query, 'operations': operations }
 
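With the new `codes` list in the Python bridge, every call to `output()` now appends its answer code and the full history is serialized back to the core, so tests can assert on all intermediate codes rather than only the last one. A rough sketch of the payload shape the core receives after this change (field values are illustrative, not taken from a real run):

```js
// Illustrative only: shape of the JSON the bridge prints after the change.
// 'codes' accumulates every code emitted so far ('inter' outputs included)
// and replaces the former single 'code' field.
const payload = {
  package: 'trend',
  module: 'github',
  output: {
    type: 'end',
    codes: ['reaching', 'today'],
    speech: 'Here are the 5 latest GitHub trends of today:...',
    options: {}
  }
}

console.log(payload.output.codes.includes('reaching')) // true
```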
package-lock.json (generated, 2160 lines changed)
File diff suppressed because it is too large.
@@ -1,6 +1,6 @@
 {
   "name": "leon",
-  "version": "1.0.0-beta.1",
+  "version": "1.0.0-beta.2",
   "description": "Server, packages and web app of the Leon personal assistant",
   "author": {
     "name": "Louis Grenard",

@@ -80,7 +80,7 @@
     "cli-spinner": "^0.2.8",
     "dev-ip": "^1.0.1",
     "envify": "^4.1.0",
-    "eslint": "^3.19.0",
+    "eslint": "^5.16.0",
     "eslint-config-airbnb-base": "^11.2.0",
     "eslint-import-resolver-babel-module": "^4.0.0",
     "eslint-plugin-import": "^2.12.0",

@@ -88,7 +88,7 @@
     "git-changelog": "git+https://git@github.com/louistiti/git-changelog.git",
     "husky": "^0.14.3",
     "inquirer": "^5.1.0",
-    "jest": "^24.1.0",
+    "jest": "^24.7.1",
     "jest-canvas-mock": "^2.0.0-alpha.3",
     "jest-extended": "^0.11.1",
     "json": "^9.0.6",
@@ -1,8 +1,8 @@
 {
   "isitdown": [
-    "Is louistiti.fr up?",
-    "Is louistiti.fr down?",
-    "Is louistiti.fr up or down?",
+    "Is getleon.ai up?",
+    "Is mozilla.org down?",
+    "Is mozilla.org up or down?",
     "Is github.com up?",
     "Is github.com down?",
     "Check if github.com is up or down",

@@ -1,8 +1,8 @@
 {
   "isitdown": [
-    "louistiti.fr en ligne ?",
-    "louistiti.fr hors ligne ?",
-    "louistiti.fr en ligne ou hors ligne ?",
+    "Est-ce que getleon.ai est en ligne ?",
+    "Est-ce que mozilla.org est hors ligne ?",
+    "mozilla.org est en ligne ou hors ligne ?",
     "github.com en ligne ?",
     "github.com hors ligne ?",
     "Vérifies si github.com en ligne ou hors ligne",
@@ -8,7 +8,7 @@ describe('checker:isitdown', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.finalOutput.code).toBe('invalid_domain_name')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers(['invalid_domain_name'])
   })
 
   test('detects down domain name', async () => {

@@ -18,8 +18,11 @@ describe('checker:isitdown', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.interOutput.code).toBe('down')
-    expect(global.brain.finalOutput.code).toBe('done')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers([
+      'checking',
+      'down',
+      'done'
+    ])
   })
 
   test('detects up domain name', async () => {

@@ -29,8 +32,11 @@ describe('checker:isitdown', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.interOutput.code).toBe('up')
-    expect(global.brain.finalOutput.code).toBe('done')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers([
+      'checking',
+      'up',
+      'done'
+    ])
   })
 
   test('detects up domain names', async () => {

@@ -40,7 +46,12 @@ describe('checker:isitdown', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.interOutput.code).toBe('up')
-    expect(global.brain.finalOutput.code).toBe('done')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers([
+      'checking',
+      'up',
+      'checking',
+      'up',
+      'done'
+    ])
   })
 })
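These assertions move from comparing a single `finalOutput.code` string to matching the whole `codes` array with the jest-extended matchers `toIncludeSameMembers` (same elements in any order) and `toIncludeAnyMembers` (at least one element in common). A small, hypothetical standalone sketch of how these matchers behave, assuming jest-extended's matchers are registered in the test setup:

```js
// Hypothetical standalone example of the jest-extended matchers used above.
test('codes array matchers', () => {
  const codes = ['checking', 'up', 'done']

  // Passes: same members regardless of order
  expect(codes).toIncludeSameMembers(['done', 'up', 'checking'])

  // Passes: the received list shares at least one member with 'codes' ('done')
  expect(['night', 'too_late', 'default', 'done']).toIncludeAnyMembers(codes)
})
```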
@@ -8,6 +8,6 @@ describe('leon:bye', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.finalOutput.code).toBe('good_bye')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers(['good_bye'])
   })
 })

@@ -16,6 +16,6 @@ describe('leon:greeting', async () => {
       'night',
       'too_late',
       'default'
-    ]).toContain(global.brain.finalOutput.code)
+    ]).toIncludeAnyMembers(global.brain.finalOutput.codes)
   })
 })

@@ -8,6 +8,6 @@ describe('leon:joke', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.finalOutput.code).toBe('jokes')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers(['jokes'])
   })
 })

@@ -8,6 +8,6 @@ describe('leon:meaningoflife', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.finalOutput.code).toBe('meaning_of_life')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers(['meaning_of_life'])
   })
 })

@@ -8,7 +8,7 @@ describe('leon:partnerassistant', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.finalOutput.code).toBe('unknown')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers(['unknown'])
   })
 
   test('talks about the personal assistant Alexa', async () => {

@@ -18,6 +18,6 @@ describe('leon:partnerassistant', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.finalOutput.code).toBe('success')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers(['success'])
   })
 })

@@ -8,7 +8,7 @@ describe('leon:randomnumber', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.finalOutput.code).toBe('success')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers(['success'])
     expect(parseInt(global.brain.finalOutput.speech, 10)).toBeGreaterThanOrEqual(0)
     expect(parseInt(global.brain.finalOutput.speech, 10)).toBeLessThanOrEqual(100)
   })

@@ -8,6 +8,6 @@ describe('leon:welcome', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.finalOutput.code).toBe('welcome')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers(['welcome'])
   })
 })

@@ -8,6 +8,6 @@ describe('leon:whoami', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    expect(global.brain.finalOutput.code).toBe('introduction')
+    expect(global.brain.finalOutput.codes).toIncludeSameMembers(['introduction'])
   })
 })
packages/trend/README.md (new file, 26 lines)

# Trend Package

The trend package contains modules related to trends.

## Modules

### GitHub

Grab the GitHub trends repositories according to several options.

#### Usage

```
(en-US) "What's trending on GitHub?"
(en-US) "Give me the 4 GitHub trends of this week for the JavaScript language"
(en-US) "What's the three GitHub trends of this month?"
...
```

### Product Hunt

WIP...

#### Usage

WIP...
packages/trend/__init__.py (new file, empty)

packages/trend/config/.gitkeep (new file, empty)

packages/trend/config/config.sample.json (new file, 8 lines)

{
  "github": {
    "options": {}
  },
  "producthunt": {
    "options": {}
  }
}
packages/trend/data/.gitkeep (new file, empty)

packages/trend/data/answers/.gitkeep (new file, empty)

packages/trend/data/answers/en.json (new file, 40 lines)

{
  "github": {
    "limit_max": [
      "You've asked for too many GitHub trends, I'll give you 25 trends instead.",
      "%limit% GitHub trends is a lot, let me tell you the 25 trends instead."
    ],
    "reaching": [
      "I'm reaching GitHub, please wait a second...",
      "Let me reach GitHub..."
    ],
    "today": [
      "Here are the %limit% latest GitHub trends of today:<br><br><ul>%result%</ul>"
    ],
    "week": [
      "Here are the %limit% latest GitHub trends of this week:<br><br><ul>%result%</ul>"
    ],
    "month": [
      "Here are the %limit% latest GitHub trends of this month:<br><br><ul>%result%</ul>"
    ],
    "today_with_tech": [
      "Here are the %limit% latest GitHub trends of today for the %tech% technology:<br><br><ul>%result%</ul>"
    ],
    "week_with_tech": [
      "Here are the %limit% latest GitHub trends of this week for the %tech% technology:<br><br><ul>%result%</ul>"
    ],
    "month_with_tech": [
      "Here are the %limit% latest GitHub trends of this month for the %tech% technology:<br><br><ul>%result%</ul>"
    ],
    "unreachable": [
      "GitHub is unreachable for the moment, please retry later.",
      "I'm having difficulties to reach GitHub, please retry later.",
      "GitHub seems to be down, please try again later."
    ],
    "list_element": [
      "<li>#%rank%. <a href=\"%repository_url%\" target=\"_blank\">%repository_name%</a> created by <a href=\"%author_url%\" target=\"_blank\">%author_username%</a> with %stars_nb% new stars.</li>"
    ]
  },
  "producthunt": {
  }
}
packages/trend/data/answers/fr.json (new file, 40 lines)
@ -0,0 +1,40 @@
|
|||||||
|
{
|
||||||
|
"github": {
|
||||||
|
"limit_max": [
|
||||||
|
"Vous demandez beaucoup trop de tendances, laissez moi plutôt vous donner les 25 tendances.",
|
||||||
|
"%limit% tendances GitHub c'est beaucoup, permettez moi de vous donner les 25 tendances à la place."
|
||||||
|
],
|
||||||
|
"reaching": [
|
||||||
|
"Je suis en train d'atteindre GitHub, veuille patienter une seconde...",
|
||||||
|
"Laissez moi atteindre GitHub..."
|
||||||
|
],
|
||||||
|
"today": [
|
||||||
|
"Voici les %limit% dernières tendances GitHub du jour :<br><br><ul>%result%</ul>"
|
||||||
|
],
|
||||||
|
"week": [
|
||||||
|
"Voici les %limit% dernières tendances GitHub de la semaine :<br><br><ul>%result%</ul>"
|
||||||
|
],
|
||||||
|
"month": [
|
||||||
|
"Voici les %limit% dernières tendances GitHub du mois :<br><br><ul>%result%</ul>"
|
||||||
|
],
|
||||||
|
"today_with_tech": [
|
||||||
|
"Voici les %limit% dernières tendances GitHub du jour pour la technologie %tech% :<br><br><ul>%result%</ul>"
|
||||||
|
],
|
||||||
|
"week_with_tech": [
|
||||||
|
"Voici les %limit% dernières tendances GitHub de la semaine pour la technologie %tech% :<br><br><ul>%result%</ul>"
|
||||||
|
],
|
||||||
|
"month_with_tech": [
|
||||||
|
"Voici les %limit% dernières tendances GitHub du mois pour la technologie %tech% :<br><br><ul>%result%</ul>"
|
||||||
|
],
|
||||||
|
"unreachable": [
|
||||||
|
"GitHub est inaccessible pour le moment, merci de réessayer plus tard.",
|
||||||
|
"Je rencontre des difficultés pour atteindre GitHub, merci de réessayer plus tard.",
|
||||||
|
"GitHub semble ne pas fonctionner correctement, veuillez retenter plus tard."
|
||||||
|
],
|
||||||
|
"list_element": [
|
||||||
|
"<li>#%rank%. <a href=\"%repository_url%\" target=\"_blank\">%repository_name%</a> créé par <a href=\"%author_url%\" target=\"_blank\">%author_username%</a> avec %stars_nb% nouvelles étoiles.</li>"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"producthunt": {
|
||||||
|
}
|
||||||
|
}
|
packages/trend/data/db/.gitkeep (new file, empty)

packages/trend/data/expressions/.gitkeep (new file, empty)

packages/trend/data/expressions/en.json (new file, 19 lines)

{
  "github": [
    "What are the trends on GitHub?",
    "Give me the GitHub trends",
    "What's trending on GitHub?",
    "What are the trends on GH?",
    "Give me the GH trends",
    "What's trending on GH?"
  ],
  "producthunt": [
    "What are the trends on Product Hunt?",
    "Give me the Product Hunt trends",
    "What's trending on Product Hunt?",
    "What are the trends on PH?",
    "Give me the PH trends",
    "What's trending on PH?",
    "What's trending on ProductHunt?"
  ]
}
packages/trend/data/expressions/fr.json (new file, 18 lines)
@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"github": [
|
||||||
|
"Quelles sont les tendances sur GitHub ?",
|
||||||
|
"Donne-moi les tendances GitHub",
|
||||||
|
"Qu'est-ce qu'il y a en tendance sur GitHub ?",
|
||||||
|
"Quelles sont les tendances sur GH ?",
|
||||||
|
"Donne-moi les tendances GH",
|
||||||
|
"Qu'est-ce qu'il y a en tendance sur GH ?"
|
||||||
|
],
|
||||||
|
"producthunt": [
|
||||||
|
"Quelles sont les tendances sur Product Hunt ?",
|
||||||
|
"Donne-moi les tendances Product Hunt",
|
||||||
|
"Qu'est-ce qu'il y a en tendance sur Product Hunt ?",
|
||||||
|
"Quelles sont les tendances sur PH ?",
|
||||||
|
"Donne-moi les tendances PH",
|
||||||
|
"Qu'est-ce qu'il y a en tendance sur PH ?"
|
||||||
|
]
|
||||||
|
}
|
packages/trend/github.py (new file, 94 lines)

#!/usr/bin/env python
# -*- coding:utf-8 -*-

import requests
import utils
import packages.trend.github_lang as github_lang
from re import search, escape
from bs4 import BeautifulSoup

def github(string, entities):
    """Grab the GitHub trends"""

    # Number of repositories
    limit = 5

    # Range string
    since = 'daily'

    # Technology slug
    techslug = ''

    # Technology name
    tech = ''

    # Answer key
    answerkey = 'today'

    for item in entities:
        if item['entity'] == 'number':
            limit = item['resolution']['value']
        if item['entity'] == 'daterange':
            if item['resolution']['timex'].find('W') != -1:
                since = 'weekly'
                answerkey = 'week'
            else:
                since = 'monthly'
                answerkey = 'month'

    # Feed the languages list based on the GitHub languages list
    for i, language in enumerate(github_lang.getall()):
        # Find the asked language
        if search(r'\b' + escape(language.lower()) + r'\b', string.lower()):
            answerkey += '_with_tech'
            tech = language
            techslug = language.lower()

    if limit > 25:
        utils.output('inter', 'limit_max', utils.translate('limit_max', {
            'limit': limit
        }))
        limit = 25
    elif limit == 0:
        limit = 5

    utils.output('inter', 'reaching', utils.translate('reaching'))

    try:
        r = utils.http('GET', 'https://github.com/trending/' + techslug + '?since=' + since)
        soup = BeautifulSoup(r.text, features='html.parser')
        elements = soup.select('.repo-list li', limit=limit)
        result = ''

        for i, element in enumerate(elements):
            repository = element.h3.get_text(strip=True).replace(' ', '')
            if (element.img != None):
                author = element.img.get('alt')[1:]
            else:
                author = '?'
            stars = element.select('span.d-inline-block.float-sm-right')[0].get_text(strip=True).split(' ')[0]
            separators = [' ', ',', '.']

            # Replace potential separators number
            for j, separator in enumerate(separators):
                stars = stars.replace(separator, '')

            result += utils.translate('list_element', {
                'rank': i + 1,
                'repository_url': 'https://github.com/' + repository,
                'repository_name': repository,
                'author_url': 'https://github.com/' + author,
                'author_username': author,
                'stars_nb': stars
            })

        return utils.output('end', answerkey, utils.translate(answerkey, {
            'limit': limit,
            'tech': tech,
            'result': result
        }))
    except requests.exceptions.RequestException as e:
        return utils.output('end', 'unreachable', utils.translate('unreachable'))
packages/trend/github_lang.py (new file, 500 lines)
@ -0,0 +1,500 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
|
||||||
|
def getall():
|
||||||
|
return [
|
||||||
|
'1C Enterprise',
|
||||||
|
'ABAP',
|
||||||
|
'ABNF',
|
||||||
|
'ActionScript',
|
||||||
|
'Ada',
|
||||||
|
'Adobe Font Metrics',
|
||||||
|
'Agda',
|
||||||
|
'AGS Script',
|
||||||
|
'Alloy',
|
||||||
|
'Alpine Abuild',
|
||||||
|
'AMPL',
|
||||||
|
'AngelScript',
|
||||||
|
'Ant Build System',
|
||||||
|
'ANTLR',
|
||||||
|
'ApacheConf',
|
||||||
|
'Apex',
|
||||||
|
'API Blueprint',
|
||||||
|
'APL',
|
||||||
|
'Apollo Guidance Computer',
|
||||||
|
'AppleScript',
|
||||||
|
'Arc',
|
||||||
|
'AsciiDoc',
|
||||||
|
'ASN.1',
|
||||||
|
'ASP',
|
||||||
|
'AspectJ',
|
||||||
|
'Assembly',
|
||||||
|
'Asymptote',
|
||||||
|
'ATS',
|
||||||
|
'Augeas',
|
||||||
|
'AutoHotkey',
|
||||||
|
'AutoIt',
|
||||||
|
'Awk',
|
||||||
|
'Ballerina',
|
||||||
|
'Batchfile',
|
||||||
|
'Befunge',
|
||||||
|
'Bison',
|
||||||
|
'BitBake',
|
||||||
|
'Blade',
|
||||||
|
'BlitzBasic',
|
||||||
|
'BlitzMax',
|
||||||
|
'Bluespec',
|
||||||
|
'Boo',
|
||||||
|
'Brainfuck',
|
||||||
|
'Brightscript',
|
||||||
|
'Bro',
|
||||||
|
'C',
|
||||||
|
'C#',
|
||||||
|
'C++',
|
||||||
|
'C-ObjDump',
|
||||||
|
'C2hs Haskell',
|
||||||
|
"Cap'n Proto", 'CartoCSS',
|
||||||
|
'Ceylon',
|
||||||
|
'Chapel',
|
||||||
|
'Charity',
|
||||||
|
'ChucK',
|
||||||
|
'Cirru',
|
||||||
|
'Clarion',
|
||||||
|
'Clean',
|
||||||
|
'Click',
|
||||||
|
'CLIPS',
|
||||||
|
'Clojure',
|
||||||
|
'Closure Templates',
|
||||||
|
'Cloud Firestore Security Rules',
|
||||||
|
'CMake',
|
||||||
|
'COBOL',
|
||||||
|
'CoffeeScript',
|
||||||
|
'ColdFusion',
|
||||||
|
'ColdFusion CFC',
|
||||||
|
'COLLADA',
|
||||||
|
'Common Lisp',
|
||||||
|
'Common Workflow Language',
|
||||||
|
'Component Pascal',
|
||||||
|
'CoNLL-U',
|
||||||
|
'Cool',
|
||||||
|
'Coq',
|
||||||
|
'Cpp-ObjDump',
|
||||||
|
'Creole',
|
||||||
|
'Crystal',
|
||||||
|
'CSON',
|
||||||
|
'Csound',
|
||||||
|
'Csound Document',
|
||||||
|
'Csound Score',
|
||||||
|
'CSS',
|
||||||
|
'CSV',
|
||||||
|
'Cuda',
|
||||||
|
'CWeb',
|
||||||
|
'Cycript',
|
||||||
|
'Cython',
|
||||||
|
'D',
|
||||||
|
'D-ObjDump',
|
||||||
|
'Darcs Patch',
|
||||||
|
'Dart',
|
||||||
|
'DataWeave',
|
||||||
|
'desktop',
|
||||||
|
'Diff',
|
||||||
|
'DIGITAL Command Language',
|
||||||
|
'DM',
|
||||||
|
'DNS Zone',
|
||||||
|
'Dockerfile',
|
||||||
|
'Dogescript',
|
||||||
|
'DTrace',
|
||||||
|
'Dylan',
|
||||||
|
'E',
|
||||||
|
'Eagle',
|
||||||
|
'Easybuild',
|
||||||
|
'EBNF',
|
||||||
|
'eC',
|
||||||
|
'Ecere Projects',
|
||||||
|
'ECL',
|
||||||
|
'ECLiPSe',
|
||||||
|
'Edje Data Collection',
|
||||||
|
'edn',
|
||||||
|
'Eiffel',
|
||||||
|
'EJS',
|
||||||
|
'Elixir',
|
||||||
|
'Elm',
|
||||||
|
'Emacs Lisp',
|
||||||
|
'EmberScript',
|
||||||
|
'EML',
|
||||||
|
'EQ',
|
||||||
|
'Erlang',
|
||||||
|
'F#',
|
||||||
|
'F*',
|
||||||
|
'Factor',
|
||||||
|
'Fancy',
|
||||||
|
'Fantom',
|
||||||
|
'FIGlet Font',
|
||||||
|
'Filebench WML',
|
||||||
|
'Filterscript',
|
||||||
|
'fish',
|
||||||
|
'FLUX',
|
||||||
|
'Formatted',
|
||||||
|
'Forth',
|
||||||
|
'Fortran',
|
||||||
|
'FreeMarker',
|
||||||
|
'Frege',
|
||||||
|
'G-code',
|
||||||
|
'Game Maker Language',
|
||||||
|
'GAMS',
|
||||||
|
'GAP',
|
||||||
|
'GCC Machine Description',
|
||||||
|
'GDB',
|
||||||
|
'GDScript',
|
||||||
|
'Genie',
|
||||||
|
'Genshi',
|
||||||
|
'Gentoo Ebuild',
|
||||||
|
'Gentoo Eclass',
|
||||||
|
'Gerber Image',
|
||||||
|
'Gettext Catalog',
|
||||||
|
'Gherkin',
|
||||||
|
'GLSL',
|
||||||
|
'Glyph',
|
||||||
|
'Glyph Bitmap Distribution Format',
|
||||||
|
'GN',
|
||||||
|
'Gnuplot',
|
||||||
|
'Go',
|
||||||
|
'Golo',
|
||||||
|
'Gosu',
|
||||||
|
'Grace',
|
||||||
|
'Gradle',
|
||||||
|
'Grammatical Framework',
|
||||||
|
'Graph Modeling Language',
|
||||||
|
'GraphQL',
|
||||||
|
'Graphviz (DOT)',
|
||||||
|
'Groovy',
|
||||||
|
'Groovy Server Pages',
|
||||||
|
'Hack',
|
||||||
|
'Haml',
|
||||||
|
'Handlebars',
|
||||||
|
'HAProxy',
|
||||||
|
'Harbour',
|
||||||
|
'Haskell',
|
||||||
|
'Haxe',
|
||||||
|
'HCL',
|
||||||
|
'HiveQL',
|
||||||
|
'HLSL',
|
||||||
|
'HTML',
|
||||||
|
'HTML+Django',
|
||||||
|
'HTML+ECR',
|
||||||
|
'HTML+EEX',
|
||||||
|
'HTML+ERB',
|
||||||
|
'HTML+PHP',
|
||||||
|
'HTML+Razor',
|
||||||
|
'HTTP',
|
||||||
|
'HXML',
|
||||||
|
'Hy',
|
||||||
|
'HyPhy',
|
||||||
|
'IDL',
|
||||||
|
'Idris',
|
||||||
|
'IGOR Pro',
|
||||||
|
'Inform 7',
|
||||||
|
'INI',
|
||||||
|
'Inno Setup',
|
||||||
|
'Io',
|
||||||
|
'Ioke',
|
||||||
|
'IRC log',
|
||||||
|
'Isabelle',
|
||||||
|
'Isabelle ROOT',
|
||||||
|
'J',
|
||||||
|
'Jasmin',
|
||||||
|
'Java',
|
||||||
|
'Java Properties',
|
||||||
|
'Java Server Pages',
|
||||||
|
'JavaScript',
|
||||||
|
'JFlex',
|
||||||
|
'Jison',
|
||||||
|
'Jison Lex',
|
||||||
|
'Jolie',
|
||||||
|
'JSON',
|
||||||
|
'JSON with Comments',
|
||||||
|
'JSON5',
|
||||||
|
'JSONiq',
|
||||||
|
'JSONLD',
|
||||||
|
'Jsonnet',
|
||||||
|
'JSX',
|
||||||
|
'Julia',
|
||||||
|
'Jupyter Notebook',
|
||||||
|
'KiCad Layout',
|
||||||
|
'KiCad Legacy Layout',
|
||||||
|
'KiCad Schematic',
|
||||||
|
'Kit',
|
||||||
|
'Kotlin',
|
||||||
|
'KRL',
|
||||||
|
'LabVIEW',
|
||||||
|
'Lasso',
|
||||||
|
'Latte',
|
||||||
|
'Lean',
|
||||||
|
'Less',
|
||||||
|
'Lex',
|
||||||
|
'LFE',
|
||||||
|
'LilyPond',
|
||||||
|
'Limbo',
|
||||||
|
'Linker Script',
|
||||||
|
'Linux Kernel Module',
|
||||||
|
'Liquid',
|
||||||
|
'Literate Agda',
|
||||||
|
'Literate CoffeeScript',
|
||||||
|
'Literate Haskell',
|
||||||
|
'LiveScript',
|
||||||
|
'LLVM',
|
||||||
|
'Logos',
|
||||||
|
'Logtalk',
|
||||||
|
'LOLCODE',
|
||||||
|
'LookML',
|
||||||
|
'LoomScript',
|
||||||
|
'LSL',
|
||||||
|
'Lua',
|
||||||
|
'M',
|
||||||
|
'M4',
|
||||||
|
'M4Sugar',
|
||||||
|
'Makefile',
|
||||||
|
'Mako',
|
||||||
|
'Markdown',
|
||||||
|
'Marko',
|
||||||
|
'Mask',
|
||||||
|
'Mathematica',
|
||||||
|
'MATLAB',
|
||||||
|
'Maven POM',
|
||||||
|
'Max',
|
||||||
|
'MAXScript',
|
||||||
|
'mcfunction',
|
||||||
|
'MediaWiki',
|
||||||
|
'Mercury',
|
||||||
|
'Meson',
|
||||||
|
'Metal',
|
||||||
|
'MiniD',
|
||||||
|
'Mirah',
|
||||||
|
'Modelica',
|
||||||
|
'Modula-2',
|
||||||
|
'Modula-3',
|
||||||
|
'Module Management System',
|
||||||
|
'Monkey',
|
||||||
|
'Moocode',
|
||||||
|
'MoonScript',
|
||||||
|
'MQL4',
|
||||||
|
'MQL5',
|
||||||
|
'MTML',
|
||||||
|
'MUF',
|
||||||
|
'mupad',
|
||||||
|
'Myghty',
|
||||||
|
'NCL',
|
||||||
|
'Nearley',
|
||||||
|
'Nemerle',
|
||||||
|
'nesC',
|
||||||
|
'NetLinx',
|
||||||
|
'NetLinx+ERB',
|
||||||
|
'NetLogo',
|
||||||
|
'NewLisp',
|
||||||
|
'Nextflow',
|
||||||
|
'Nginx',
|
||||||
|
'Nim',
|
||||||
|
'Ninja',
|
||||||
|
'Nit',
|
||||||
|
'Nix',
|
||||||
|
'NL',
|
||||||
|
'NSIS',
|
||||||
|
'Nu',
|
||||||
|
'NumPy',
|
||||||
|
'ObjDump',
|
||||||
|
'Objective-C',
|
||||||
|
'Objective-C++',
|
||||||
|
'Objective-J',
|
||||||
|
'OCaml',
|
||||||
|
'Omgrofl',
|
||||||
|
'ooc',
|
||||||
|
'Opa',
|
||||||
|
'Opal',
|
||||||
|
'OpenCL',
|
||||||
|
'OpenEdge ABL',
|
||||||
|
'OpenRC runscript',
|
||||||
|
'OpenSCAD',
|
||||||
|
'OpenType Feature File',
|
||||||
|
'Org',
|
||||||
|
'Ox',
|
||||||
|
'Oxygene',
|
||||||
|
'Oz',
|
||||||
|
'P4',
|
||||||
|
'Pan',
|
||||||
|
'Papyrus',
|
||||||
|
'Parrot',
|
||||||
|
'Parrot Assembly',
|
||||||
|
'Parrot Internal Representation',
|
||||||
|
'Pascal',
|
||||||
|
'Pawn',
|
||||||
|
'Pep8',
|
||||||
|
'Perl',
|
||||||
|
'Perl 6',
|
||||||
|
'PHP',
|
||||||
|
'Pic',
|
||||||
|
'Pickle',
|
||||||
|
'PicoLisp',
|
||||||
|
'PigLatin',
|
||||||
|
'Pike',
|
||||||
|
'PLpgSQL',
|
||||||
|
'PLSQL',
|
||||||
|
'Pod',
|
||||||
|
'Pod 6',
|
||||||
|
'PogoScript',
|
||||||
|
'Pony',
|
||||||
|
'PostCSS',
|
||||||
|
'PostScript',
|
||||||
|
'POV-Ray SDL',
|
||||||
|
'PowerBuilder',
|
||||||
|
'PowerShell',
|
||||||
|
'Processing',
|
||||||
|
'Prolog',
|
||||||
|
'Propeller Spin',
|
||||||
|
'Protocol Buffer',
|
||||||
|
'Public Key',
|
||||||
|
'Pug',
|
||||||
|
'Puppet',
|
||||||
|
'Pure Data',
|
||||||
|
'PureBasic',
|
||||||
|
'PureScript',
|
||||||
|
'Python',
|
||||||
|
'Python console',
|
||||||
|
'Python traceback',
|
||||||
|
'q',
|
||||||
|
'QMake',
|
||||||
|
'QML',
|
||||||
|
'Quake',
|
||||||
|
'R',
|
||||||
|
'Racket',
|
||||||
|
'Ragel',
|
||||||
|
'RAML',
|
||||||
|
'Rascal',
|
||||||
|
'Raw token data',
|
||||||
|
'RDoc',
|
||||||
|
'REALbasic',
|
||||||
|
'Reason',
|
||||||
|
'Rebol',
|
||||||
|
'Red',
|
||||||
|
'Redcode',
|
||||||
|
'Regular Expression',
|
||||||
|
"Ren'Py", 'RenderScript',
|
||||||
|
'reStructuredText',
|
||||||
|
'REXX',
|
||||||
|
'RHTML',
|
||||||
|
'Rich Text Format',
|
||||||
|
'Ring',
|
||||||
|
'RMarkdown',
|
||||||
|
'RobotFramework',
|
||||||
|
'Roff',
|
||||||
|
'Rouge',
|
||||||
|
'RPC',
|
||||||
|
'RPM Spec',
|
||||||
|
'Ruby',
|
||||||
|
'RUNOFF',
|
||||||
|
'Rust',
|
||||||
|
'Sage',
|
||||||
|
'SaltStack',
|
||||||
|
'SAS',
|
||||||
|
'Sass',
|
||||||
|
'Scala',
|
||||||
|
'Scaml',
|
||||||
|
'Scheme',
|
||||||
|
'Scilab',
|
||||||
|
'SCSS',
|
||||||
|
'sed',
|
||||||
|
'Self',
|
||||||
|
'ShaderLab',
|
||||||
|
'Shell',
|
||||||
|
'ShellSession',
|
||||||
|
'Shen',
|
||||||
|
'Slash',
|
||||||
|
'Slice',
|
||||||
|
'Slim',
|
||||||
|
'Smali',
|
||||||
|
'Smalltalk',
|
||||||
|
'Smarty',
|
||||||
|
'SMT',
|
||||||
|
'Solidity',
|
||||||
|
'SourcePawn',
|
||||||
|
'SPARQL',
|
||||||
|
'Spline Font Database',
|
||||||
|
'SQF',
|
||||||
|
'SQL',
|
||||||
|
'SQLPL',
|
||||||
|
'Squirrel',
|
||||||
|
'SRecode Template',
|
||||||
|
'Stan',
|
||||||
|
'Standard ML',
|
||||||
|
'Stata',
|
||||||
|
'STON',
|
||||||
|
'Stylus',
|
||||||
|
'SubRip Text',
|
||||||
|
'SugarSS',
|
||||||
|
'SuperCollider',
|
||||||
|
'SVG',
|
||||||
|
'Swift',
|
||||||
|
'SystemVerilog',
|
||||||
|
'Tcl',
|
||||||
|
'Tcsh',
|
||||||
|
'Tea',
|
||||||
|
'Terra',
|
||||||
|
'TeX',
|
||||||
|
'Text',
|
||||||
|
'Textile',
|
||||||
|
'Thrift',
|
||||||
|
'TI Program',
|
||||||
|
'TLA',
|
||||||
|
'TOML',
|
||||||
|
'Turing',
|
||||||
|
'Turtle',
|
||||||
|
'Twig',
|
||||||
|
'TXL',
|
||||||
|
'Type Language',
|
||||||
|
'TypeScript',
|
||||||
|
'Unified Parallel C',
|
||||||
|
'Unity3D Asset',
|
||||||
|
'Unix Assembly',
|
||||||
|
'Uno',
|
||||||
|
'UnrealScript',
|
||||||
|
'UrWeb',
|
||||||
|
'Vala',
|
||||||
|
'VCL',
|
||||||
|
'Verilog',
|
||||||
|
'VHDL',
|
||||||
|
'Vim script',
|
||||||
|
'Visual Basic',
|
||||||
|
'Volt',
|
||||||
|
'Vue',
|
||||||
|
'Wavefront Material',
|
||||||
|
'Wavefront Object',
|
||||||
|
'wdl',
|
||||||
|
'Web Ontology Language',
|
||||||
|
'WebAssembly',
|
||||||
|
'WebIDL',
|
||||||
|
'Windows Registry Entries',
|
||||||
|
'wisp',
|
||||||
|
'World of Warcraft Addon Data',
|
||||||
|
'X BitMap',
|
||||||
|
'X Font Directory Index',
|
||||||
|
'X PixMap',
|
||||||
|
'X10',
|
||||||
|
'xBase',
|
||||||
|
'XC',
|
||||||
|
'XCompose',
|
||||||
|
'XML',
|
||||||
|
'Xojo',
|
||||||
|
'XPages',
|
||||||
|
'XProc',
|
||||||
|
'XQuery',
|
||||||
|
'XS',
|
||||||
|
'XSLT',
|
||||||
|
'Xtend',
|
||||||
|
'Yacc',
|
||||||
|
'YAML',
|
||||||
|
'YANG',
|
||||||
|
'YARA',
|
||||||
|
'YASnippet',
|
||||||
|
'Zephir',
|
||||||
|
'Zig',
|
||||||
|
'Zimpl'
|
||||||
|
]
|
packages/trend/producthunt.py (new file, 10 lines)

#!/usr/bin/env python
# -*- coding:utf-8 -*-

import requests
import utils

def producthunt(string, entities):
    """WIP..."""

    return utils.output('end', 'done')
packages/trend/test/github.spec.js (new file, 119 lines)
@ -0,0 +1,119 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
describe('trend:github', async () => {
|
||||||
|
test('forces limit', async () => {
|
||||||
|
global.nlu.brain.execute = jest.fn()
|
||||||
|
await global.nlu.process('Give me the 30 latest GitHub trends')
|
||||||
|
|
||||||
|
const [obj] = global.nlu.brain.execute.mock.calls
|
||||||
|
await global.brain.execute(obj[0])
|
||||||
|
|
||||||
|
console.log(global.brain.finalOutput)
|
||||||
|
|
||||||
|
expect(global.brain.finalOutput.speech.split('</li>').length - 1).toBe(25)
|
||||||
|
expect(global.brain.finalOutput.codes).toIncludeSameMembers([
|
||||||
|
'limit_max',
|
||||||
|
'reaching',
|
||||||
|
'today'
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('gives the 16 trends', async () => {
|
||||||
|
global.nlu.brain.execute = jest.fn()
|
||||||
|
await global.nlu.process('Give me the 16 latest GitHub trends')
|
||||||
|
|
||||||
|
const [obj] = global.nlu.brain.execute.mock.calls
|
||||||
|
await global.brain.execute(obj[0])
|
||||||
|
|
||||||
|
console.log(global.brain.finalOutput)
|
||||||
|
|
||||||
|
expect(global.brain.finalOutput.speech.split('</li>').length - 1).toBe(16)
|
||||||
|
expect(global.brain.finalOutput.codes).toIncludeSameMembers([
|
||||||
|
'reaching',
|
||||||
|
'today'
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('gives the default number of trends of this week', async () => {
|
||||||
|
global.nlu.brain.execute = jest.fn()
|
||||||
|
await global.nlu.process('Give me the GitHub trends of this week')
|
||||||
|
|
||||||
|
const [obj] = global.nlu.brain.execute.mock.calls
|
||||||
|
await global.brain.execute(obj[0])
|
||||||
|
|
||||||
|
console.log(global.brain.finalOutput)
|
||||||
|
|
||||||
|
expect(global.brain.finalOutput.speech.split('</li>').length - 1).toBe(5)
|
||||||
|
expect(global.brain.finalOutput.codes).toIncludeSameMembers([
|
||||||
|
'reaching',
|
||||||
|
'week'
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('gives the default number of trends of this month', async () => {
|
||||||
|
global.nlu.brain.execute = jest.fn()
|
||||||
|
await global.nlu.process('Give me the GitHub trends of this month')
|
||||||
|
|
||||||
|
const [obj] = global.nlu.brain.execute.mock.calls
|
||||||
|
await global.brain.execute(obj[0])
|
||||||
|
|
||||||
|
console.log(global.brain.finalOutput)
|
||||||
|
|
||||||
|
expect(global.brain.finalOutput.speech.split('</li>').length - 1).toBe(5)
|
||||||
|
expect(global.brain.finalOutput.codes).toIncludeSameMembers([
|
||||||
|
'reaching',
|
||||||
|
'month'
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('gives the 7 trends for the Python language', async () => {
|
||||||
|
global.nlu.brain.execute = jest.fn()
|
||||||
|
await global.nlu.process('Give me the 7 GitHub trends for the Python language')
|
||||||
|
|
||||||
|
const [obj] = global.nlu.brain.execute.mock.calls
|
||||||
|
await global.brain.execute(obj[0])
|
||||||
|
|
||||||
|
console.log(global.brain.finalOutput)
|
||||||
|
|
||||||
|
expect(global.brain.finalOutput.speech.split('</li>').length - 1).toBe(7)
|
||||||
|
expect(global.brain.finalOutput.speech.indexOf('Python')).not.toBe(-1)
|
||||||
|
expect(global.brain.finalOutput.codes).toIncludeSameMembers([
|
||||||
|
'reaching',
|
||||||
|
'today_with_tech'
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('gives the 14 trends of this week for the JavaScript language', async () => {
|
||||||
|
global.nlu.brain.execute = jest.fn()
|
||||||
|
await global.nlu.process('Give me the 14 GitHub trends of this week for the JavaScript language')
|
||||||
|
|
||||||
|
const [obj] = global.nlu.brain.execute.mock.calls
|
||||||
|
await global.brain.execute(obj[0])
|
||||||
|
|
||||||
|
console.log(global.brain.finalOutput)
|
||||||
|
|
||||||
|
expect(global.brain.finalOutput.speech.split('</li>').length - 1).toBe(14)
|
||||||
|
expect(global.brain.finalOutput.speech.indexOf('JavaScript')).not.toBe(-1)
|
||||||
|
expect(global.brain.finalOutput.codes).toIncludeSameMembers([
|
||||||
|
'reaching',
|
||||||
|
'week_with_tech'
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('gives the default number of trends of this month for the CSS language', async () => {
|
||||||
|
global.nlu.brain.execute = jest.fn()
|
||||||
|
await global.nlu.process('Give me the GitHub trends of this month for the CSS language')
|
||||||
|
|
||||||
|
const [obj] = global.nlu.brain.execute.mock.calls
|
||||||
|
await global.brain.execute(obj[0])
|
||||||
|
|
||||||
|
console.log(global.brain.finalOutput)
|
||||||
|
|
||||||
|
expect(global.brain.finalOutput.speech.split('</li>').length - 1).toBe(5)
|
||||||
|
expect(global.brain.finalOutput.speech.indexOf('CSS')).not.toBe(-1)
|
||||||
|
expect(global.brain.finalOutput.codes).toIncludeSameMembers([
|
||||||
|
'reaching',
|
||||||
|
'month_with_tech'
|
||||||
|
])
|
||||||
|
})
|
||||||
|
})
|
packages/trend/version.txt (new file, 1 line)

1.0.0
@@ -8,12 +8,12 @@ describe('videodownloader:youtube', async () => {
     const [obj] = global.nlu.brain.execute.mock.calls
     await global.brain.execute(obj[0])
 
-    await expect(global.brain.interOutput.code).toBe('reaching_playlist')
-    await expect([
+    expect(global.brain.interOutput.codes).toIncludeSameMembers(['reaching_playlist'])
+    expect([
       'settings_error',
       'request_error',
       'nothing_to_download',
       'success'
-    ]).toContain(global.brain.finalOutput.code)
+    ]).toIncludeAnyMembers(global.brain.finalOutput.codes)
   })
 })
@@ -34,7 +34,7 @@ export default () => new Promise(async (resolve, reject) => {
     // Installing Python packages
     log.info('Installing Python packages from bridges/python/Pipfile.lock...')
 
-    await shell('pipenv install')
+    await shell('pipenv install --skip-lock')
     log.success('Python packages installed')
     resolve()
   } catch (e) {
@@ -48,16 +48,19 @@ class Brain {
   /**
    * Make Leon talk
    */
-  talk (speech) {
+  talk (rawSpeech) {
     log.title('Leon')
     log.info('Talking...')
 
-    if (speech !== '') {
+    if (rawSpeech !== '') {
       if (process.env.LEON_TTS === 'true') {
+        // Stripe HTML
+        const speech = rawSpeech.replace(/<(?:.|\n)*?>/gm, '')
+
         this.tts.add(speech)
       }
 
-      this.socket.emit('answer', speech)
+      this.socket.emit('answer', rawSpeech)
     }
   }
 
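Because answers can now carry HTML, `talk()` keeps the raw string for the web client (which renders it via `innerHTML`) but strips the tags before feeding the text to the TTS engine. A standalone illustration of the tag-stripping regex used above (sketch only, not the project's code):

```js
// Standalone illustration of the tag-stripping regex used in talk().
const rawSpeech = 'Here are the 2 latest GitHub trends of today:<br><br><ul><li>#1. repo</li></ul>'
const speech = rawSpeech.replace(/<(?:.|\n)*?>/gm, '')

console.log(speech) // "Here are the 2 latest GitHub trends of today:#1. repo"
```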
@@ -7,7 +7,7 @@ describe('punctuation', () => {
   const rootFolders = [
     'packages'
   ]
-  const punctuations = ['.', ';', ':', '?', '!']
+  const punctuations = ['.', ';', ':', '?', '!', '>']
   const findPunctuation = s => punctuations.includes(s[s.length - 1])
   const findString = (iterable) => {
     const keys = Object.keys(iterable)
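The punctuation test checks that strings under the packages folder end with a terminating character; '>' is presumably added to the allowed set because the new HTML answers end with a closing tag such as `</ul>`. A minimal sketch of the check:

```js
// Minimal sketch of the ending-character check extended with '>'.
const punctuations = ['.', ';', ':', '?', '!', '>']
const findPunctuation = s => punctuations.includes(s[s.length - 1])

console.log(findPunctuation('Here are the %limit% latest GitHub trends of today:<br><br><ul>%result%</ul>')) // true
console.log(findPunctuation('No terminating character here'))                                                 // false
```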