chore(init): available sources
All checks were successful
/ build (map[dockerfile:./services/hasura/Dockerfile name:hasura]) (push) Successful in 47s
/ build (map[dockerfile:./services/web/Dockerfile name:web]) (push) Successful in 1m47s
/ build (map[dockerfile:./services/watchers/Dockerfile name:watchers]) (push) Successful in 2m37s
/ build (map[dockerfile:./services/files/Dockerfile name:files]) (push) Successful in 2m52s
/ build (map[dockerfile:./services/api/Dockerfile name:api]) (push) Successful in 3m2s
/ build (map[dockerfile:./services/app/Dockerfile name:app]) (push) Successful in 31s
/ build (map[dockerfile:./services/tasks/Dockerfile name:tasks]) (push) Successful in 2m44s
/ deploy (push) Successful in 48s

devthejo 2025-04-13 10:46:53 +02:00
commit 16b7e7d6aa
Signed by: devthejo
GPG key ID: C04215C627711F5C
547 changed files with 50638 additions and 0 deletions

64
.aidigestignore Normal file
@@ -0,0 +1,64 @@
# ai-digest
.aidigestignore
# yarn-dev-portal
yarn.lock.*
package.json.*
.yarn-dev-portal
# versioning
CHANGELOG.md
.versionrc.json
# lint
.commitlintrc.json
.eslintrc.js
.editorconfig
.husky
# node
.yarnrc.yml
.yarn
# docker
.dockerignore
docker-compose.build.yaml
# env
.envrc
.env.local
# git
.gitignore
# tmux
.tmux.conf
.tmuxp.full-decoupled.yaml
.tmuxp.yaml
# md
README.md
# ci/cd
.forgejo
# binaries
*.png
*.svg
*.jpg
*.jpeg
*.ico
# generic
*.bak
*.tmp
*.log
# project
tileserver-files
.dev-secrets
.osm-files
osm-files
tests
docs
dockerfile-x

5
.commitlintrc.json Normal file
@@ -0,0 +1,5 @@
{
"extends": [
"@commitlint/config-conventional"
]
}

6
.dev-secrets/jwk.json Normal file
@@ -0,0 +1,6 @@
{
"crv": "Ed25519",
"d": "NuYqgJxxoFye_uIVcu1FYtpzeXhwCBcTP2od3xAQjFs",
"x": "BIoZ8WwlWgplcYSQKUvlzE9dE8jKEOOvH5_Z19N-2zc",
"kty": "OKP"
}

6
.dev-secrets/jwk2.json Normal file
@@ -0,0 +1,6 @@
{
"crv": "Ed25519",
"d": "d3F9wKOVQIWXhDckflJs7KoUvI5mFX9jRCpeEbW4LG8",
"x": "H2MyHfvzAnNtnM6sUOhE32OIPPs-Ix6F13HaZ9fhCrg",
"kty": "OKP"
}
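
These two Ed25519 keys are development-only secrets: the current key and the previous one, matching the `JWK_FILE`/`OLDJWK_FILE` pair mounted by the compose files. A minimal sketch for regenerating one, assuming the `jose` npm package (v4, CommonJS build) is available:

```sh
node -e '
const { generateKeyPair, exportJWK } = require("jose");
(async () => {
  // EdDSA over Ed25519, matching the "crv"/"kty" fields above
  const { privateKey } = await generateKeyPair("EdDSA", { crv: "Ed25519" });
  console.log(JSON.stringify(await exportJWK(privateKey)));
})();
' > .dev-secrets/jwk.json
```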

23
.dockerignore Normal file
@@ -0,0 +1,23 @@
**/*.log
**/*.md
**/.DS_Store
**/docker-compose.build.yaml
**/docker-compose.yaml
**/nodemon.json
**/.git
**/.dockerignore
**/Dockerfile
**/Dockerfile.*
**/*.dockerfile
**/node_modules
**/.eslintcache
**/.npm
/.osm-files
/osm-files
/tileserver-files
services/*/build
services/*/dist
googleServiceAccountKey.json

10
.editorconfig Normal file
@@ -0,0 +1,10 @@
root = true
[*]
end_of_line = lf
insert_final_newline = false
charset = utf-8
[*.js]
indent_style = space
indent_size = 2

50
.env.default Normal file
@@ -0,0 +1,50 @@
# DEV PORTS
API_PORT=4200
WEB_PORT=4203
SERVICE_APP_PORT=4209
FILES_PORT=4292
EXPOSE_API_PORT=4200
EXPOSE_HASURA_PORT=4201
EXPOSE_PG_PORT=4204
EXPOSE_MINO_PORT=4290
EXPOSE_MINIO_CONSOLE_PORT=4291
EXPOSE_FILES_PORT=4292
EXPOSE_REDIS_QD_PORT=4278
EXPOSE_REDIS_HG_PORT=4279
EXPOSE_KEYDB_CG_PORT=4277
EXPOSE_RABBITMQ_NODE_PORT=4272
EXPOSE_RABBITMQ_MANAGEMENT_PORT_PORT=4273
OSRM_CAR_PORT=4261
OSRM_FOOT_PORT=4262
OSRM_BICYCLE_PORT=4263
TILESERVERGL_PORT=4282
NOMINATIM_PORT=4283
HASURA_CONSOLE_PORT=4295
HASURA_CONSOLE_API_PORT=4293
# JWT
CLAIMS_NAMESPACE=https://alertesecours.fr/claims
# APP
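# note: 10.0.2.2 is the Android emulator's alias for the host machine's loopback interface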
APP_OA_FILES_URL=http://10.0.2.2:4292/api/v1/oas
APP_GRAPHQL_URL=http://10.0.2.2:4201/v1/graphql
APP_GRAPHQL_WS_URL=ws://10.0.2.2:4201/v1/graphql
APP_OSRM_CAR_URL=http://10.0.2.2:4261
APP_OSRM_FOOT_URL=http://10.0.2.2:4262
APP_OSRM_BICYCLE_URL=http://10.0.2.2:4263
APP_GEOLOC_SYNC_URL=http://10.0.2.2:4200/api/v1/oas/geoloc/sync
# APP_MAPVIEW_STYLE_URL=https://tiles.alertesecours.fr/styles/basic-preview/style.json
APP_MAPVIEW_STYLE_URL=http://10.0.2.2:4203/app/style.json
# APP_MAPVIEW_STYLE_URL=https://openmaptiles.geo.data.gouv.fr/styles/osm-bright/style.json
# API KEY
WHAT3WORDS_API_KEY=
# GoogleServices
ANDROID_GOOGLE_SERVICES_FILE_PATH=/home/jo/.lab/alertesecours/google-services.json
IOS_GOOGLE_SERVICES_FILE_PATH=/home/jo/.lab/alertesecours/GoogleService-Info.plist

22
.envrc Normal file
@@ -0,0 +1,22 @@
export PATH=$PWD/bin:$PWD/scripts:$PATH
export PROJECT_WORKINGDIR=$PWD
# appli
export JAVA_HOME=${JAVA_HOME:-"/opt/android-studio/jbr"}
export ANDROID_HOME=$HOME/Android/Sdk
export PATH=$PATH:$ANDROID_HOME/emulator
export PATH=$PATH:$ANDROID_HOME/tools
export PATH=$PATH:$ANDROID_HOME/tools/bin
export PATH=$PATH:$ANDROID_HOME/platform-tools
export NODE_OPTIONS"=--openssl-legacy-provider"
export LOCAL_DEV=true
# local pg
export PGHOST=localhost
export PGUSER=dev
export PGPASSWORD=dev
export PGPORT=${EXPOSE_PG_PORT:-4204}
# dotenv
dotenv_if_exists .env.default
dotenv_if_exists .env.local
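
With direnv, this file only takes effect once it has been approved; for example, with the compose db up:

```sh
direnv allow .       # trust this .envrc
psql -c 'select 1'   # uses the PGHOST/PGUSER/PGPASSWORD/PGPORT exported above
```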

111
.eslintrc.js Normal file
@@ -0,0 +1,111 @@
const path = require("path")
const fs = require("fs")
// see https://github.com/import-js/eslint-plugin-import/issues/1174
const packageDirs = ["libs", "services"]
const packageDir = []
for (const dir of packageDirs) {
for (const d of fs
.readdirSync(path.resolve(__dirname, dir))
.filter(
(entry) =>
entry.slice(0, 1) !== "." &&
fs.lstatSync(path.resolve(__dirname, dir, entry)).isDirectory()
)) {
const p = path.resolve(dir, d)
packageDir.push(p)
}
}
module.exports = {
ignorePatterns: ["**/build/*", "modjo", "**/as-back"],
settings: {
"import/resolver": {
alias: true,
},
},
extends: [
"airbnb-base",
"prettier",
"plugin:prettier/recommended",
"plugin:import/recommended",
],
plugins: ["sql-pretty"],
rules: {
"no-undef": [2],
"sql-pretty/format": [1, { tags: ["sql"] }],
"no-shadow": [2, { allow: ["sql", "error"] }],
"node/no-extraneous-require": [0],
"import/no-commonjs": [0],
"import/no-dynamic-require": [0],
"import/no-extraneous-dependencies": [0],
"import/order": [
"error",
{
groups: [
"builtin",
"external",
"internal",
"parent",
"index",
"sibling",
"object",
],
pathGroups: [
{
group: "internal",
pattern: "~/**",
},
{
group: "internal",
pattern: "~**",
},
],
pathGroupsExcludedImportTypes: [],
},
],
"global-require": [0],
"no-restricted-syntax": [0],
"no-async-promise-executor": [0],
"no-nested-ternary": [0],
"no-loop-func": [0],
"no-new": [0],
"func-names": [0],
"no-plusplus": [0],
"no-param-reassign": [0],
"no-continue": [0],
"no-unused-vars": [
2,
{
vars: "all",
args: "after-used",
argsIgnorePattern: "^_",
varsIgnorePattern: "^_",
},
],
"no-console": [0],
"no-throw-literal": [0],
"no-await-in-loop": [0],
"consistent-return": [0],
semi: ["error", "never"],
"prettier/prettier": [
"error",
{
semi: false,
},
],
},
parserOptions: {
ecmaVersion: "latest",
sourceType: "script",
},
// `env` is a top-level eslintrc key, not a parser option
env: {
node: true,
},
globals: {
AggregateError: true,
dbug: true,
},
}

@@ -0,0 +1,105 @@
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
on:
workflow_dispatch:
push:
branches:
- "main"
tags:
- "**"
jobs:
build:
outputs:
tags: ${{ steps.meta.outputs.tags }}
runs-on: ubuntu-latest
container:
image: devthefuture/act-runner:latest
volumes:
- /buildkit-certs:/buildkit-certs
strategy:
matrix:
build:
- name: hasura
dockerfile: ./services/hasura/Dockerfile
- name: api
dockerfile: ./services/api/Dockerfile
- name: files
dockerfile: ./services/files/Dockerfile
- name: tasks
dockerfile: ./services/tasks/Dockerfile
- name: watchers
dockerfile: ./services/watchers/Dockerfile
- name: web
dockerfile: ./services/web/Dockerfile
- name: app
dockerfile: ./services/app/Dockerfile
steps:
- name: ⏬ Checkout code repository
uses: actions/checkout@v4
with:
token: ${{ secrets.M8A_ORG_BOT_REPO_TOKEN }} # Required for private repositories to work consistently, avoiding random errors
- name: 📌 Extract metadata (tags, labels) for Docker
id: meta
uses: https://git.devthefuture.org/devthefuture/docker-metadata-action@v5
with:
images: git.devthefuture.org/${{ github.repository }}/${{ matrix.build.name }}
tags: |
type=semver,pattern={{version}},priority=900
type=semver,pattern=v{{version}},priority=900
type=sha,priority=890
type=ref,event=branch,priority=600
type=ref,event=pr,priority=600
type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }},priority=200
- name: 📦 Build and push Docker image
uses: https://git.devthefuture.org/devthefuture/actions/buildkit@main
with:
context: ${{ matrix.build.context || '.' }}
dockerfile: ${{ matrix.build.dockerfile }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
registry: git.devthefuture.org
registry-username: "org-bot-${{ github.repository_owner }}"
registry-password: ${{ secrets.M8A_ORG_BOT_PACKAGE_TOKEN }}
deploy:
needs: [build]
runs-on: ubuntu-latest
container:
image: devthefuture/act-runner:latest@sha256:f326ce2f586d4f55757b87d3de7baf29715ef6cbc5af7bdf6313bcf7a90e7b3d
steps:
- name: 🎡 Check out the Helm chart repository
uses: actions/checkout@v4
with:
repository: "${{ github.repository_owner }}/appsets"
token: ${{ secrets.M8A_ORG_BOT_REPO_TOKEN }}
ref: "main"
- name: 🚀 Upgrade images tag
uses: https://git.devthefuture.org/devthefuture/actions/uptag@v0.2.3
with:
app: |
---
name: hasura
key: graphql-engine.image.tag
---
name: api
key: modjo-microservice.image.tag
---
name: files
key: modjo-microservice.image.tag
---
name: tasks
key: modjo-microservice.image.tag
---
name: watchers
key: modjo-microservice.image.tag
---
name: app
key: nginx.image.tag
---
env: ${{ startsWith(github.ref, 'refs/tags/') && 'production' || 'staging' }}
meta-tags: ${{ needs.build.outputs.tags }}
commit: "true"
push: "true"

39
.gitignore vendored Normal file
@@ -0,0 +1,39 @@
node_modules
.vscode/notes.txt
yarn-error.log
*.logs
.DS_Store
.env.local
.env.yaml
.eslintcache
.archive
/.osm-files
/osm-files
/tileserver-files
googleServiceAccountKey.json
services/*/build
services/*/dist
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
# !.yarn/cache
yarn.lock.dev
yarn.lock
/package.json.dev
/package.json
.yarn-dev-portal/.links/*
!.yarn-dev-portal/.links/.gitkeep
codebase.md
/bin

7
.husky/commit-msg Executable file
@@ -0,0 +1,7 @@
#!/bin/sh
. "$(dirname $0)/_/husky.sh"
export PATH=$PATH:$HOME/.yarn/bin
yarn commitlint --edit $1

6
.husky/pre-commit Executable file
@@ -0,0 +1,6 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"
export PATH=$PATH:$HOME/.yarn/bin
yarn lint-staged

1
.node-version Normal file
@@ -0,0 +1 @@
20

45
.tmux.conf Normal file
@@ -0,0 +1,45 @@
# sensitivity for vim
set -s escape-time 0
set -g mouse on
setw -g mode-keys vi
# increase history size
set-option -g history-limit 10000
bind-key -T copy-mode-vi v send -X begin-selection
bind -T copy-mode-vi y send-keys -X copy-pipe-and-cancel 'xclip -in -selection clipboard'
# better titles for Rofi switch mode
set-option -g set-titles on
set-option -g set-titles-string "#S - #W"
# map prefix to default Ctrl+b
set-option -g prefix C-b
bind-key C-b send-prefix
# remaps to avoid confirm prompt
# bind-key & kill-window
# bind-key x kill-pane
# bind-key X kill-session
# map C-a to toggle panes sync
bind C-a set-window-option synchronize-panes
# remap pane navigation to Alt + HJKL
bind-key -n C-Left select-pane -L
bind-key -n C-Down select-pane -D
bind-key -n C-Up select-pane -U
bind-key -n C-Right select-pane -R
# remap window navigation to Alt + NP
bind-key -n M-p previous-window
bind-key -n M-n next-window
# remap pane zoom to Alt + Z
bind-key -n M-z resize-pane -Z
# notes:
# use Shift + Mouse to allow text selection

40
.tmuxp.full-decoupled.yaml Normal file
@@ -0,0 +1,40 @@
session_name: helpme-project
# https://github.com/tmux-python/tmuxp/tree/master/examples
# tmux cheatsheet https://gist.github.com/MohamedAlaa/2961058
before_script: |
docker compose up --build -d
windows:
- window_name: dev
layout: tiled
panes:
- shell_command:
- docker compose logs --follow db
- shell_command:
- docker compose logs --follow minio
- shell_command:
- docker compose logs --follow rabbitmq
- shell_command:
- docker compose logs --follow osrm-car osrm-foot osrm-bicycle
- shell_command:
- docker compose logs --follow nominatim
- shell_command:
- docker compose logs --follow tileserver-gl
- shell_command:
- docker compose logs --follow hasura
- shell_command:
- docker compose logs --follow hasura_console
- shell_command:
- docker compose logs --follow api
- shell_command:
- docker compose logs --follow files
- shell_command:
- docker compose logs --follow watchers
- shell_command:
- docker compose logs --follow tasks
- shell_command:
- yarn dev:web

27
.tmuxp.yaml Normal file
@@ -0,0 +1,27 @@
session_name: helpme-project
# https://github.com/tmux-python/tmuxp/tree/master/examples
# tmux cheatsheet https://gist.github.com/MohamedAlaa/2961058
before_script: |
up
windows:
- window_name: dev
layout: tiled
panes:
- shell_command:
- docker compose logs --follow --tail=10 db minio rabbitmq redis-hot-geodata redis-q-dedup keydb-cold-geodata
- shell_command:
# - docker compose logs --follow tileserver-gl osrm-car osrm-foot osrm-bicycle nominatim
- docker compose logs --follow --tail=10 tileserver-gl osrm-car osrm-foot nominatim
- shell_command:
- docker compose logs --follow --tail=10 hasura hasura_console
- shell_command:
- docker compose logs --follow --tail=10 api files
- shell_command:
- docker compose logs --follow --tail=10 watchers tasks
- shell_command:
- yarn dev:web

68
.versionrc.json Normal file
@@ -0,0 +1,68 @@
{
"bumpFiles": [
{
"filename": "package.json",
"type": "json"
},
{
"filename": "alerte-secours/package.json",
"type": "json"
},
{
"filename": "services/api/package.json",
"type": "json"
},
{
"filename": "services/tasks/package.json",
"type": "json"
},
{
"filename": "services/watchers/package.json",
"type": "json"
},
{
"filename": "services/web/package.json",
"type": "json"
},
{
"filename": "services/hasura/version.json",
"type": "json"
}
],
"types": [
{
"type": "feat",
"section": "Features"
},
{
"type": "fix",
"section": "Bug Fixes"
},
{
"type": "chore",
"hidden": true
},
{
"type": "docs",
"hidden": true
},
{
"type": "style",
"hidden": true
},
{
"type": "refactor",
"hidden": true
},
{
"type": "perf",
"hidden": true
},
{
"type": "test",
"hidden": true
}
],
"commitUrlFormat": "https://github.com/mokkapps/changelog-generator-demo/commits/{{hash}}",
"compareUrlFormat": "https://github.com/mokkapps/changelog-generator-demo/compare/{{previousTag}}...{{currentTag}}"
}
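
This is the configuration format consumed by standard-version (or its fork commit-and-tag-version; the release tool itself is not part of this commit, so that consumer is an assumption). A typical release run:

```sh
npx standard-version --dry-run   # preview the bump and CHANGELOG entry
npx standard-version             # bump every file in bumpFiles, commit, and tag
```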

5
.vscode/extensions.json vendored Normal file
@@ -0,0 +1,5 @@
{
"recommendations": [
"dbaeumer.vscode-eslint"
]
}

25
.vscode/settings.json vendored Normal file
@@ -0,0 +1,25 @@
{
"path-autocomplete.pathMappings": {
"~": "${folder}/src"
},
"editor.codeActionsOnSave": {
"source.fixAll.eslint": "explicit"
},
"eslint.validate": [
"javascript"
],
"[javascript]": {
"editor.defaultFormatter": "dbaeumer.vscode-eslint"
},
"eslint.workingDirectories": [{ "mode": "auto" }],
"files.associations": {
".env.{default,local}": "dotenv",
"package.json.*": "json"
},
"javascript.suggest.autoImports": true,
"typescript.suggest.autoImports": true,
"editor.suggestSelection": "first",
"vsintellicode.modify.editor.suggestSelection": "automaticallyOverrodeDefaultValue",
"editor.inlineSuggest.enabled": true
}

1
.yarn-dev-portal/@modjo/amqp Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/amqp

1
.yarn-dev-portal/@modjo/apollo-client Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/apollo-client

1
.yarn-dev-portal/@modjo/config Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/config

1
.yarn-dev-portal/@modjo/core Symbolic link
@@ -0,0 +1 @@
../.links/modjo/packages/core

1
.yarn-dev-portal/@modjo/express Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/express

1
.yarn-dev-portal/@modjo/express-asyncapi Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/express-asyncapi/

1
.yarn-dev-portal/@modjo/express-monitor Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/express-monitor

1
.yarn-dev-portal/@modjo/graphql-pubsub Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/graphql-pubsub

1
.yarn-dev-portal/@modjo/hasura Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/hasura

1
.yarn-dev-portal/@modjo/http-logger Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/http-logger

1
.yarn-dev-portal/@modjo/http-server Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/http-server

1
.yarn-dev-portal/@modjo/ioredis Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/ioredis

1
.yarn-dev-portal/@modjo/lightship Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/lightship

1
.yarn-dev-portal/@modjo/logger Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/logger

1
.yarn-dev-portal/@modjo/microservice-oapi Symbolic link
@@ -0,0 +1 @@
../.links/modjo/microservices/oapi

1
.yarn-dev-portal/@modjo/microservice-watcher Symbolic link
@@ -0,0 +1 @@
../.links/modjo/microservices/watcher

1
.yarn-dev-portal/@modjo/microservice-worker Symbolic link
@@ -0,0 +1 @@
../.links/modjo/microservices/worker

1
.yarn-dev-portal/@modjo/minio Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/minio

1
.yarn-dev-portal/@modjo/oa Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/oa

1
.yarn-dev-portal/@modjo/oa-graphql Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/oa-graphql

1
.yarn-dev-portal/@modjo/postgres Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/postgres

1
.yarn-dev-portal/@modjo/sentry Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/sentry

1
.yarn-dev-portal/@modjo/shutdown-handlers Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/shutdown-handlers

1
.yarn-dev-portal/@modjo/slonik Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/slonik

1
.yarn-dev-portal/@modjo/version Symbolic link
@@ -0,0 +1 @@
../.links/modjo/plugins/version

12
.yarn/plugins/@yarnpkg/plugin-dev-portal.cjs vendored Normal file
@@ -0,0 +1,12 @@
/* eslint-disable */
//prettier-ignore
module.exports = {
name: "@yarnpkg/plugin-dev-portal",
factory: function (require) {
var plugin=(()=>{var B=Object.defineProperty;var U=(n,e,o)=>e in n?B(n,e,{enumerable:!0,configurable:!0,writable:!0,value:o}):n[e]=o;var s=(n=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(n,{get:(e,o)=>(typeof require<"u"?require:e)[o]}):n)(function(n){if(typeof require<"u")return require.apply(this,arguments);throw new Error('Dynamic require of "'+n+'" is not supported')});var a=(n,e)=>()=>(e||n((e={exports:{}}).exports,e),e.exports);var v=(n,e,o)=>(U(n,typeof e!="symbol"?e+"":e,o),o);var $=a((se,P)=>{var g=s("fs");P.exports=function(e){if(g.existsSync(e)){let o=g.lstatSync(e);if(o.isSymbolicLink()){let p=g.realpathSync(e);return g.statSync(p).isDirectory()}return o.isDirectory()}return!1}});var S=a((te,w)=>{w.exports=function(e){process.stdout.write(`[YARN-DEV-PORTAL] ${e}
`)}});var A=a((ce,N)=>{var D=s("path"),O=s("fs"),m=$(),z=S();N.exports=function(e){let o=[];if(!m(e))return o;let p=O.readdirSync(e),y=(r,c)=>{o.push({name:r,path:c}),z(`found package ${r} that will resolve to portal "${c}"`)};for(let r of p){let c=D.join(e,r);if(!r.startsWith("."))if(r.startsWith("@")){let d=O.readdirSync(c);for(let f of d){let i=D.join(c,f);m(i)&&y(`${r}/${f}`,i)}}else m(c)&&y(r,c)}return o}});var E=a((ie,b)=>{var j=s("path"),t=s("fs"),G=s("os"),{execSync:H}=s("child_process"),T=A(),_=process.cwd(),I=".yarn-dev-portal",x=S();b.exports=function(e,{makeLink:o=!0,yarnInstallProcessEnv:p={}}={}){let y=j.join(_,"package.json"),r=JSON.parse(t.readFileSync(y,{encoding:"utf-8"})),c=j.join(G.homedir(),I),d=T(c),f=j.join(_,I);if(d=[...d,...T(f)],e==="dev")for(let i of d)r.resolutions[i.name]=`portal:${i.path}`;else for(let i of Object.keys(r.resolutions))r.resolutions[i].startsWith("portal:")&&delete r.resolutions[i];t.writeFileSync(y,JSON.stringify(r,null,2)),x(`installing ${e}`),H("yarn",{stdio:"inherit",env:{...process.env,INSIDE_YARN_DEV_PORTAL:"true",...p}}),x(`creating ${e} files`),t.copyFileSync("yarn.lock",`yarn.lock.${e}`),t.copyFileSync("package.json",`package.json.${e}`),o&&(x(`linking to ${e} files`),t.existsSync("package.json")&&t.unlinkSync("package.json"),t.symlinkSync(`package.json.${e}`,"package.json"),t.existsSync("yarn.lock")&&t.unlinkSync("yarn.lock"),t.symlinkSync(`yarn.lock.${e}`,"yarn.lock"))}});var L=a((ae,F)=>{F.exports=function(){try{return fs.readlinkSync("yarn.lock").split(".").pop()}catch{return null}}});var q=a((le,J)=>{var R=E(),K=L(),C=S();J.exports=function(e,o={}){if(e||(e=K()||"dev"),!["dev","prod"].includes(e))throw new Error(`Invalid yarn dev-portal env ${e}, allowed values are "dev" or "prod"`);yarnEnvOther=e==="prod"?"dev":"prod",C(`setting env ${e}`),R(yarnEnvOther,{...o,makeLink:!1}),R(e,{...o}),C(`env ${e} ready`)}});var W=a((ye,V)=>{var{BaseCommand:M}=s("@yarnpkg/cli"),{Command:Q,Option:X}=s("clipanion"),Z=q(),h;V.exports=(h=class extends M{args=X.Proxy();async execute(){let[e="dev"]=this.args;Z(e)}},v(h,"paths",[["dev-portal"]]),v(h,"usage",Q.Usage({description:"produce two yarn.lock to seamlessly link to external local libraries you're developing using yarn portal",details:`
upgrade both version, dev and prod, of yarn.lock and package.json, and switch between them
`,examples:[["yarn dev-portal","yarn dev-portal prod","yarn dev-portal dev"]]})),h)});var oe=a((ue,Y)=>{var l=s("fs"),u=s("path"),ee=W(),ne=q(),k=process.cwd();!l.existsSync(u.join(k,"yarn.lock"))&&l.existsSync(u.join(k,"yarn.lock.prod"))&&l.symlinkSync("yarn.lock.prod","yarn.lock");!l.existsSync(u.join(k,"package.json"))&&l.existsSync(u.join(k,"package.json.prod"))&&l.symlinkSync("package.json.prod","package.json");Y.exports={commands:[ee],hooks:{afterAllInstalled:async n=>{process.env.INSIDE_YARN_DEV_PORTAL!=="true"&&(!l.existsSync(u.join(k,"package.json.prod"))||ne(null))}}}});return oe();})();
return plugin;
}
};

19
.yarn/plugins/@yarnpkg/plugin-fetch.cjs vendored Normal file
@@ -0,0 +1,19 @@
/* eslint-disable */
//prettier-ignore
module.exports = {
name: "@yarnpkg/plugin-fetch",
factory: function (require) {
var plugin=(()=>{var le=Object.defineProperty;var pe=(s,e,t)=>e in s?le(s,e,{enumerable:!0,configurable:!0,writable:!0,value:t}):s[e]=t;var c=(s=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(s,{get:(e,t)=>(typeof require<"u"?require:e)[t]}):s)(function(s){if(typeof require<"u")return require.apply(this,arguments);throw new Error('Dynamic require of "'+s+'" is not supported')});var i=(s,e)=>()=>(e||s((e={exports:{}}).exports,e),e.exports);var f=(s,e,t)=>(pe(s,typeof e!="symbol"?e+"":e,t),t);var $=i((Ue,A)=>{"use strict";A.exports=function(e){return e.map(function(t){return t&&typeof t=="object"?t.op.replace(/(.)/g,"\\$1"):/["\s]/.test(t)&&!/'/.test(t)?"'"+t.replace(/(['\\])/g,"\\$1")+"'":/["'\s]/.test(t)?'"'+t.replace(/(["\\$`!])/g,"\\$1")+'"':String(t).replace(/([A-Za-z]:)?([#!"$&'()*,:;<=>?@[\\\]^`{|}])/g,"$1\\$2")}).join(" ")}});var H=i((Ve,v)=>{var C=c("fs"),ue=c("path"),{parseSyml:fe}=c("@yarnpkg/parsers"),ke=["patch","npm","portal","link"],B=s=>{let e=s.trim().split("@");return s.startsWith("@")?e=e.slice(0,2):e=e.slice(0,1),e.join("@")},Y=(s,e)=>{let[t,r]=s.trim().split(`@${e}:`);return{key:t,version:r}},de=s=>{let[,e]=s.split("::")[0].split("#");return e};v.exports=function(){let e=C.readFileSync("yarn.lock","utf8"),t=fe(e),r=Object.keys(t).filter(o=>o.includes("@workspace:")),n=r.map(o=>{let[,l]=t[o].resolution.trim().split("@workspace:");return l==="."?null:l}).filter(Boolean);r.forEach(o=>{let{dependencies:l,dependenciesMeta:u,peerDependencies:b,peerDependenciesMeta:P,resolution:D,bin:ce}=t[o],[ae,w]=D.trim().split("@workspace:"),ie=ue.join(w,"package.json"),y={name:ae,version:"0.0.0",description:"**DON'T COMMIT** Generated file for caching",private:!0,dependencies:l,peerDependencies:b,peerDependenciesMeta:P,bin:ce};if(u){let m={};Object.keys(u).forEach(d=>{m[d]=l[d],delete l[d]}),y.optionalDependencies=m}if(w==="."){n.length>0&&(y.workspaces={packages:n});let m=Object.keys(t),d=new Map;m.forEach(p=>{p.split(",").forEach(k=>{if(k.includes("builtin<compat/"))return;let a=B(k);d.has(a)||d.set(a,[]),d.get(a).push(k)})}),y.resolutions=m.filter(p=>{var a;if(p.includes("@workspace:"))return!1;if(p.includes("@patch:"))return!((a=de(Y(p,"patch").version).match(/(\.\.\/)+/))!=null&&a.length);if(p.includes(", "))return!1;let k=B(p);return d.get(k).length===1}).reduce((p,k)=>(ke.forEach(a=>{if(!k.includes(`@${a}:`))return;let{key:x,version:g}=Y(k,a);switch(a){case"npm":p[x]=g.includes("@")?`${a}:${g}`:g;break;case"patch":k.includes("builtin<compat/")||(p[x]=`${a}:${g.split("::")[0]}`);break;case"portal":case"link":p[x]=`${a}:${g.split("::")[0]}`;break}}),p),{})}C.mkdirSync(w,{recursive:!0}),C.writeFileSync(ie,`${JSON.stringify(y,null,2)}
`)})}});var j=i((Ze,R)=>{var he=H();R.exports=s=>{s.context.stdout.write(`[YARN-FETCH] extracting package.json file(s) from yarn.lock
`),he()}});var K=i((Xe,L)=>{var h=c("fs"),me=c("path"),{execSync:ge}=c("child_process"),{parseSyml:ye}=c("@yarnpkg/parsers"),{BaseCommand:xe}=c("@yarnpkg/cli"),{Command:qe,Option:M}=c("clipanion"),be=$(),Pe=j(),q;L.exports=(q=class extends xe{protectPackageJson=M.Boolean("--protect-package-json");args=M.Proxy();async execute(){let{protectPackageJson:e=process.stdout.isTTY}=this,t=[];if(e){this.context.stdout.write(`[YARN-FETCH] backup possible package.json file(s)
`);let n=h.readFileSync("yarn.lock","utf8"),o=ye(n);t=Object.keys(o).filter(u=>u.includes("@workspace:")).map(u=>{let{resolution:b}=o[u],[,P]=b.trim().split("@workspace:");return me.join(P,"package.json")}),t.forEach(u=>{h.existsSync(u)&&!h.existsSync(`${u}.yarn-plugin-fetch-bak`)&&h.copyFileSync(u,`${u}.yarn-plugin-fetch-bak`)})}Pe(this);let r=`yarn ${be(this.args)}`;this.context.stdout.write(`[YARN-FETCH] ${r}
`);try{ge(r,{stdio:"inherit"})}catch(n){throw n}finally{e&&(this.context.stdout.write(`[YARN-FETCH] restoring possible package.json file(s)
`),t.forEach(n=>{h.existsSync(`${n}.yarn-plugin-fetch-bak`)?h.renameSync(`${n}.yarn-plugin-fetch-bak`,n):h.unlinkSync(n)}))}}},f(q,"paths",[["fetch"]]),f(q,"usage",qe.Usage({description:"fetch dependencies from yarn.lock in Docker build",details:`
expand yarn.lock to package.json file(s) and install dependencies in Docker build.
`,examples:[["yarn fetch --immutable","yarn fetch workspace my-package focus"]]})),q)});var W=i((st,I)=>{var{BaseCommand:we}=c("@yarnpkg/cli"),$e=j(),S;I.exports=(S=class extends we{async execute(){$e(this)}},f(S,"paths",[["fetch-tools","expand-lock"]]),S)});var z=i((rt,_)=>{function Ce(s,e,t){let r=e.split("."),n=s;for(let o of r){if(n[o]===void 0)return t;n=n[o]}return n}function je(s,e,t){let r=e.split("."),n=s;for(let o=0;o<r.length-1;o++){let l=r[o];(!n[l]||typeof n[l]!="object")&&(n[l]={}),n=n[l]}return n[r[r.length-1]]=t,s}function Se(s,e){let t=e.split("."),r=s;for(let n=0;n<t.length-1;n++){let o=t[n];if(!r[o])return!1;r=r[o]}return delete r[t[t.length-1]],!0}_.exports={get:Ce,set:je,unset:Se}});var E=i((ot,U)=>{var G=c("fs"),{get:Ee,set:Fe,unset:Je}=z();U.exports=function(e,t){let r=JSON.parse(G.readFileSync("package.json","utf-8")),n=Ee(r,e);n!==void 0&&(Fe(r,t,n),Je(r,e),G.writeFileSync("package.json",JSON.stringify(r,null,2)))}});var F=i((ct,V)=>{var Ne=E();V.exports=function(){Ne("scripts._postinstall","scripts.postinstall")}});var Q=i((it,Z)=>{var{BaseCommand:Te}=c("@yarnpkg/cli"),Oe=F(),J;Z.exports=(J=class extends Te{async execute(){Oe()}},f(J,"paths",[["fetch-tools","disable-postinstall"]]),J)});var N=i((pt,X)=>{var De=E();X.exports=function(){De("scripts.postinstall","scripts._postinstall")}});var te=i((ft,ee)=>{var{BaseCommand:Ae}=c("@yarnpkg/cli"),Be=N(),T;ee.exports=(T=class extends Ae{async execute(){Be()}},f(T,"paths",[["fetch-tools","disable-postinstall"]]),T)});var re=i((ht,ne)=>{var{execSync:Ye}=c("child_process"),{BaseCommand:ve}=c("@yarnpkg/cli"),{Option:se}=c("clipanion"),He=$(),Re=F(),Me=N(),O;ne.exports=(O=class extends ve{postinstall=se.Boolean("--postinstall");args=se.Proxy();async execute(){this.postinstall||(this.context.stdout.write(`[YARN-FETCH] disable postinstall command in package.json
`),Me());let e=`yarn workspaces focus --production ${He(this.args)}`;this.context.stdout.write(`[YARN-FETCH] ${e}
`),Ye(e,{stdio:"inherit"}),this.postinstall||(this.context.stdout.write(`[YARN-FETCH] re-enable postinstall command in package.json
`),Re())}},f(O,"paths",[["fetch-tools","production"]]),O)});var ze=i((gt,oe)=>{var Le=K(),Ke=W(),Ie=Q(),We=te(),_e=re();oe.exports={commands:[Le,Ke,We,Ie,_e]}});return ze();})();
return plugin;
}
};

934
.yarn/releases/yarn-4.6.0.cjs vendored Executable file

File diff suppressed because one or more lines are too long

14
.yarnrc.yml Normal file
@@ -0,0 +1,14 @@
compressionLevel: mixed
enableGlobalCache: false
nodeLinker: node-modules
plugins:
- path: .yarn/plugins/@yarnpkg/plugin-dev-portal.cjs
spec: "https://codeberg.org/devthefuture/yarn-plugin-dev-portal/raw/branch/master/bundles/@yarnpkg/plugin-dev-portal.js"
- checksum: 240d225dd5bf1e25068497140ced7a3b7658a4c3754c08ea57162c9fe3335d757af0eae55555f96150a3015cdd0337852401f3fae69c1edd05221cb32f038d8c
path: .yarn/plugins/@yarnpkg/plugin-fetch.cjs
spec: "https://codeberg.org/devthefuture/yarn-plugin-fetch/raw/branch/master/bundles/@yarnpkg/plugin-fetch.js"
yarnPath: .yarn/releases/yarn-4.6.0.cjs
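
The fetch plugin's own usage text (visible in the minified bundle above) documents the intended Docker-build flow:

```sh
yarn fetch --immutable                 # expand yarn.lock into stub package.json files, then install
yarn fetch workspace my-package focus  # same, scoped to a single workspace
```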

81
LICENSE.md Normal file
@@ -0,0 +1,81 @@
# DevTheFuture Ethical Use License (DEF License)
**Effective Date:** 2025
**Licensor:** DevTheFuture.org
**License URL:** [https://devthefuture.org/DEF-LICENSE.md](https://devthefuture.org/DEF-LICENSE.md)
**Licensed Software:** Alerte-Secours
* * *
## 1. **Definitions**
* **Software:** The original software provided under this license, including all parts, components, and any modifications or derivatives, as defined by the Licensor.
* **Nonprofit Use:** For the purposes of this Agreement, “Nonprofit Use” is defined in a manner consistent with similar provisions in licenses such as the PolyForm Noncommercial License. It refers to usage that does not primarily aim to generate financial gain, including activities by educational institutions, public organizations, or private individuals where any incidental revenue is purely ancillary to the primary noncommercial purpose.
* **Profit Use:** Any use of the Software that is intended to generate revenue, provide a commercial advantage, or otherwise support a for-profit enterprise. This includes, but is not limited to, integration into commercial products, services, or platforms where the primary goal is financial gain.
* **Personal Data Monetization:** The process by which personal data collected through the operation of the Software is exploited for commercial gain. For clarity, the License expressly prohibits any use of personal data for:
* Marketing, advertising, or political influence campaigns.
* The creation, maintenance, or enhancement of databases or data aggregation services that are not directly tied to the explicitly exposed functionalities of the Software as communicated to the end user.
* **Competitor:** Any individual, organization, or entity that develops, distributes, or monetizes software that directly competes with the Software in its intended field or market.
* * *
## 2. **Grant of License**
1. **Nonprofit Use:**
* The Licensor grants a perpetual, royalty-free, non-exclusive, and non-transferable license to use, modify, and distribute the Software for Nonprofit Use, provided that all users comply with Section 3 and any additional terms specified herein.
2. **Profit Use:**
* Use of the Software for Profit Use requires obtaining a paid license. The Licensor reserves the exclusive right to determine the terms, cost, and approval of any Profit Use license at its sole discretion.
* All requests for a Profit Use license must be submitted in writing, and the Licensor may approve or deny such requests based on factors including the intended use, the nature of the entity, and other relevant considerations.
* * *
## 3. **Restrictions on Personal Data Monetization**
* The Software **must not be used to monetize, sell, or exploit any personal data** collected through its operation.
* Specifically, any personal data obtained through the Software:
* **May not be used for any marketing or advertising purposes.**
* **May not be used to influence public opinion or political processes.**
* **May not be compiled into databases or used for data aggregation unless such use is an explicit and integral feature of the Software, as clearly disclosed to end users.**
* All users and licensees agree to these conditions. Violation of this section shall be considered a material breach of this Agreement.
* * *
## 4. **Ownership and Intellectual Property**
* The Software and all related intellectual property rights remain the exclusive property of the Licensor.
* Users are prohibited from removing or altering any copyright, trademark, or other proprietary notices contained within the Software.
* * *
## 5. **No Warranty**
* The Software is provided “as is,” without any express or implied warranties, including but not limited to warranties of merchantability, fitness for a particular purpose, or non-infringement.
* Under no circumstances shall the Licensor be liable for any claims, damages, or liabilities arising from the use or inability to use the Software.
* * *
## 6. **Termination and Cure Period**
* In the event of a breach of Sections 2, 3, or 8, the Licensor shall notify the Licensee of the breach and provide a cure period of **30 days** during which the Licensee may remedy the breach.
* If the breach is not remedied within this cure period, or if the breach is of a nature that the Licensor determines is irreparable, this license shall automatically terminate without further notice.
* Profit Use licenses may be terminated or revoked by the Licensor in accordance with the specific terms outlined in each Profit Use agreement.
* * *
## 7. **Governing Law and Dispute Resolution**
* This Agreement shall be governed by and construed in accordance with internationally recognized principles of commercial law, as well as any applicable local laws governing the use or distribution of the Software.
* In the event of any disputes, the parties agree to attempt resolution through negotiation before pursuing legal remedies in a competent jurisdiction.
* * *
## 8. **Competitor Restriction**
* **Competitor Limitation:** Any Competitor is prohibited from using, accessing, or distributing the Software in any capacity (whether under Nonprofit or Profit Use) without the explicit, prior written consent of the Licensor.
* **Profit Use Licensing for Competitors:** The Licensor reserves the right to evaluate and either approve or deny any Profit Use license request from a Competitor at its sole discretion, without obligation to justify the decision.
* * *
By using or accessing the Software, you acknowledge that you have read, understood, and agree to be bound by the terms of this DevTheFuture Ethical Use License (DEF License).
* * *

58
README.md Normal file
@@ -0,0 +1,58 @@
# Alerte-Secours - Le Réflexe qui Sauve
## Dev
### Requirements
- docker
- tmux + tmuxp
- direnv
### Getting started
install dependencies
```sh
yarn
```
load the custom tmux conf (optional)
```sh
tmux source-file .tmux.conf
```
### Start services
```sh
tmuxp load .
```
to kill the tmux session
```sh
tmux kill-session -t helpme-project || true
```
### Endpoints
#### services
- api 4200
- files 4292
- hasura 4201
- tasks
- watchers
#### consoles
- [hasura 4295](http://localhost:4295)
- [minio 4291](http://localhost:4291)
- [api 4200](http://0.0.0.0:4200/api/v1/swagger/)
- [api graphql 4200](http://0.0.0.0:4200/api/v1/graphql)
- [files 4292](http://0.0.0.0:4292/api/v1/swagger/)
#### oa url
- /api/v1
- /spec
- /oas
- /swagger
- /graphql
- /status
- /
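
For example, once the stack is up, the api service answers on these generic paths:

```sh
curl http://localhost:4200/api/v1/status
curl http://localhost:4200/api/v1/spec
```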

304
docker-compose.build.yaml Normal file
@@ -0,0 +1,304 @@
services:
db:
image: mdillon/postgis:11
environment:
- POSTGRES_USER=dev
- POSTGRES_PASSWORD=dev
ports:
- "${EXPOSE_PG_PORT:-4204}:5432"
volumes:
- pgdata:/var/lib/postgresql/data
restart: always
minio:
image: bitnami/minio:2022-debian-10
ports:
- "${EXPOSE_MINO_PORT:-4290}:9000"
- "${EXPOSE_MINIO_CONSOLE_PORT:-4291}:9001"
environment:
MINIO_ROOT_USER: ${MINIO_ROOT_USER:-minio-admin}
MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD:-minio-admin}
MINIO_FORCE_NEW_KEYS: "yes"
BITNAMI_DEBUG: "true"
volumes:
- minio_data:/data
minio-setup:
image: minio/mc
depends_on:
- minio
environment:
MINIO_ROOT_USER: ${MINIO_ROOT_USER:-minio-admin}
MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD:-minio-admin}
entrypoint: >
/bin/sh -c "
set -e
mc alias set myminio http://minio:9000 ${MINIO_ROOT_USER:-minio-admin} ${MINIO_ROOT_PASSWORD:-minio-admin};
until mc ls myminio; do
echo 'Waiting for MinIO to be ready...';
sleep 1;
done;
echo 'MinIO is up and running, configuring...';
mc mb myminio/avatar;
mc mb myminio/audio;
mc anonymous set download myminio/avatar;
"
rabbitmq:
image: docker.io/bitnami/rabbitmq:3.9
restart: always
volumes:
- rabbitmq_data:/bitnami
ports:
- ${EXPOSE_RABBITMQ_NODE_PORT}:5672
- ${EXPOSE_RABBITMQ_MANAGEMENT_PORT_PORT}:15672
environment:
- RABBITMQ_USERNAME=dev
- RABBITMQ_PASSWORD=dev
- RABBITMQ_DISK_FREE_ABSOLUTE_LIMIT=2GB
redis-q-dedup:
image: &redisImage bitnami/redis:7.2
# restart: always
ports:
- "${EXPOSE_REDIS_QD_PORT:-4278}:6379"
environment: # https://hub.docker.com/r/bitnami/redis
REDIS_PASSWORD: redis-password
REDIS_PORT: "6379"
REDIS_AOF_ENABLED: "no"
redis-hot-geodata:
image: *redisImage
# restart: always
ports:
- "${EXPOSE_REDIS_HG_PORT:-4279}:6379"
volumes:
- redis_hg_data:/bitnami/redis/data
environment: # https://hub.docker.com/r/bitnami/redis
REDIS_PASSWORD: redis-password
REDIS_PORT: "6379"
REDIS_AOF_ENABLED: "yes"
REDIS_DISABLE_COMMANDS: FLUSHDB,FLUSHALL
keydb-cold-geodata:
image: eqalpha/keydb:x86_64_v6.3.4
# restart: always
ports:
- "${EXPOSE_KEYDB_CG_PORT:-4277}:6379"
volumes:
- keydb_cg_data:/data
command:
- keydb-server
- /etc/keydb/redis.conf
- --requirepass
- keydb-password
- --server-threads
- "2"
- --storage-provider
- flash
- /data/flash
- --maxmemory
- "1G"
- --maxmemory-policy
- allkeys-lfu
hasura:
image: helpme_hasura
build:
context: .
dockerfile: ./services/hasura/Dockerfile
ports:
- ${EXPOSE_HASURA_PORT:-4201}:8080
depends_on:
- db
- api
restart: always
environment:
HASURA_GRAPHQL_INFER_FUNCTION_PERMISSIONS: "false"
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
HASURA_GRAPHQL_DATABASE_URL: postgres://dev:dev@db:5432/dev
HASURA_GRAPHQL_ENABLE_CONSOLE: "true"
HASURA_GRAPHQL_JWT_SECRET: '{"jwk_url": "http://api:${API_PORT:-4200}/api/v1/oas/jwks","header":{"type":"Cookie","name":"bearer"},"claims_namespace":"${CLAIMS_NAMESPACE:-https://hasura.io/jwt/claims}"}'
HASURA_GRAPHQL_UNAUTHORIZED_ROLE: "anonymous"
HASURA_REMOTE_SCHEMA_API_GRAPHQL_ENDPOINT: ${HASURA_REMOTE_SCHEMA_API_GRAPHQL_ENDPOINT:-http://api:4200/api/v1/graphql}
API_PORT: ${API_PORT:-4200}
HASURA_GRAPHQL_ENABLED_APIS: ${HASURA_GRAPHQL_ENABLED_APIS:-graphql,metadata}
hasura_console:
image: helpme_hasura_console
build:
context: .
dockerfile: ./services/hasura/console/Dockerfile
ports:
- ${HASURA_CONSOLE_PORT:-4295}:${HASURA_CONSOLE_PORT:-4295}
- ${HASURA_CONSOLE_API_PORT:-4293}:${HASURA_CONSOLE_API_PORT:-4293}
depends_on:
- db
- api
restart: always
environment:
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
HASURA_CONSOLE_INTERNAL_HOSTNAME: ${HASURA_CONSOLE_INTERNAL_HOSTNAME:-hasura_console}
HASURA_CONSOLE_PORT: ${HASURA_CONSOLE_PORT:-4295}
HASURA_CONSOLE_API_PORT: ${HASURA_CONSOLE_API_PORT:-4293}
HASURA_GRAPHQL_ENDPOINT: http://localhost:${EXPOSE_HASURA_PORT:-4201}
HASURA_INTERNAL_HOSTNAME: ${HASURA_INTERNAL_HOSTNAME:-hasura}
HASURA_EXPOSE_PORT: ${EXPOSE_HASURA_PORT:-4201}
api:
image: helpme_api
build:
context: .
dockerfile: ./services/api/Dockerfile
volumes:
- ./.dev-secrets:/secrets:ro
ports:
- ${EXPOSE_API_PORT:-4200}:${API_PORT:-4200}
restart: always
environment:
PORT: "${API_PORT:-4200}"
JWK_FILE: /secrets/jwk.json
OLDJWK_FILE: /secrets/jwk2.json
DATABASE_URL: ${DATABASE_URL-psql://dev:dev@db/dev}
HASURA_GRAPHQL_URL: ${HASURA_GRAPHQL_URL:-http://hasura:8080/v1/graphql}
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
LOG_REQUESTS: ${LOG_REQUESTS:-false}
CLAIMS_NAMESPACE: ${CLAIMS_NAMESPACE:-https://hasura.io/jwt/claims}
AMQP_URL: ${AMQP_URL-amqp://dev:dev@rabbitmq}
AMQP_HOST: rabbitmq
AMQP_PORT: 5672
EXTERNAL_RINGOVER_CALL_EVENT_WEBHOOK_KEY: ${EXTERNAL_RINGOVER_CALL_EVENT_WEBHOOK_KEY:-}
REDIS_QUEUE_DEDUP_HOST: redis-q-dedup
REDIS_QUEUE_DEDUP_PASSWORD: redis-password
REDIS_HOT_GEODATA_HOST: redis-hot-geodata
REDIS_HOT_GEODATA_PASSWORD: redis-password
depends_on:
- db
- rabbitmq
- redis-hot-geodata
- keydb-cold-geodata
- redis-q-dedup
files:
image: helpme_file
restart: always
build:
context: .
dockerfile: ./services/files/Dockerfile
volumes:
- ./.dev-secrets:/secrets:ro
ports:
- ${EXPOSE_FILES_PORT:-4292}:${FILES_PORT:-4292}
environment:
LOGLEVEL: ${FILES_LOGLEVEL:-debug}
PORT: "${FILES_PORT:-4292}"
JWK_FILE: /secrets/jwk.json
OLDJWK_FILE: /secrets/jwk2.json
DATABASE_URL: ${DATABASE_URL-psql://dev:dev@db/dev}
MINIO_ENDPOINT: minio
MINIO_PORT: 9000
MINIO_ACCESS_KEY: ${MINIO_ROOT_USER:-minio-admin}
MINIO_SECRET_KEY: ${MINIO_ROOT_PASSWORD:-minio-admin}
HASURA_GRAPHQL_URL: ${HASURA_GRAPHQL_URL:-http://hasura:8080/v1/graphql}
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
LOG_REQUESTS: ${LOG_REQUESTS:-false}
CLAIMS_NAMESPACE: ${CLAIMS_NAMESPACE:-https://hasura.io/jwt/claims}
AMQP_URL: ${AMQP_URL-amqp://dev:dev@rabbitmq}
AMQP_HOST: rabbitmq
AMQP_PORT: 5672
SENTRY_DSN: ${SENTRY_DSN_FILES:-""}
SENTRY_ENVIRONMENT: ${SENTRY_ENVIRONMENT:-development}
depends_on:
- minio
- db
- rabbitmq
tasks:
image: helpme_tasks
restart: always
build:
context: .
dockerfile: ./services/tasks/Dockerfile
volumes:
- ./services/tasks/src/googleServiceAccountKey.json:/app/services/tasks/src/googleServiceAccountKey.json
environment:
API_URL: ${API_URL:-http://localhost:${EXPOSE_API_PORT:-4200}/api}
LOGLEVEL: ${TASKS_LOGLEVEL:-debug}
DATABASE_URL: ${DATABASE_URL-psql://dev:dev@db/dev}
AMQP_URL: ${AMQP_URL-amqp://dev:dev@rabbitmq}
AMQP_PORT: 5672
NOMINATIM_URL: ${NOMINATIM_URL:-http://nominatim:8080}
HASURA_GRAPHQL_URL: ${HASURA_GRAPHQL_URL:-http://hasura:8080/v1/graphql}
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
WHAT3WORDS_API_KEY: ${WHAT3WORDS_API_KEY-}
GOOGLE_SERVICE_ACCOUNT_KEY_FILE: ${GOOGLE_SERVICE_ACCOUNT_KEY_FILE:-/app/services/tasks/src/googleServiceAccountKey.json}
SMTP_USER: ""
SMTP_PASS: ""
SMTP_HOST: maildev
SMTP_PORT: 1025
SMTP_FROM: ""
REDIS_QUEUE_DEDUP_HOST: redis-q-dedup
REDIS_QUEUE_DEDUP_PASSWORD: redis-password
REDIS_HOT_GEODATA_HOST: redis-hot-geodata
REDIS_HOT_GEODATA_PASSWORD: redis-password
KEYDB_COLD_GEODATA_HOST: keydb-cold-geodata
KEYDB_COLD_GEODATA_PASSWORD: keydb-password
SENTRY_DSN: ${SENTRY_DSN_TASKS:-""}
SENTRY_ENVIRONMENT: ${SENTRY_ENVIRONMENT:-development}
depends_on:
- db
- rabbitmq
- redis-hot-geodata
- keydb-cold-geodata
- redis-q-dedup
watchers:
image: helpme_watchers
build:
context: .
dockerfile: ./services/watchers/Dockerfile
restart: always
environment:
LOGLEVEL: ${WATCHERS_LOGLEVEL:-debug}
DATABASE_URL: ${DATABASE_URL-psql://dev:dev@db/dev}
AMQP_URL: ${AMQP_URL-amqp://dev:dev@rabbitmq}
AMQP_HOST: rabbitmq
AMQP_PORT: 5672
HASURA_GRAPHQL_URL: ${HASURA_GRAPHQL_URL:-http://hasura:8080/v1/graphql}
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
REDIS_HOT_GEODATA_HOST: redis-hot-geodata
REDIS_HOT_GEODATA_PASSWORD: redis-password
KEYDB_COLD_GEODATA_HOST: keydb-cold-geodata
KEYDB_COLD_GEODATA_PASSWORD: keydb-password
SENTRY_DSN: ${SENTRY_DSN_WATCHERS:-""}
SENTRY_ENVIRONMENT: ${SENTRY_ENVIRONMENT:-development}
depends_on:
- db
- rabbitmq
- redis-hot-geodata
- keydb-cold-geodata
web:
image: helpme_web
restart: always
build:
context: .
dockerfile: ./services/web/Dockerfile
ports:
- ${WEB_PORT:-4203}:8080
volumes:
pgdata:
name: helpme-pgdata
minio_data:
name: helpme-miniodata
rabbitmq_data:
name: helpme-rabbitmq-data
redis_hg_data:
name: helpme-redishg-data
keydb_cg_data:
name: helpme-keydbcg-data

432
docker-compose.yaml Normal file
@@ -0,0 +1,432 @@
services:
db:
image: mdillon/postgis:11
restart: always
environment:
- POSTGRES_USER=dev
- POSTGRES_PASSWORD=dev
ports:
- "${EXPOSE_PG_PORT:-4204}:5432"
volumes:
- pg_data:/var/lib/postgresql/data
minio:
image: bitnami/minio:2022-debian-10
ports:
- "${EXPOSE_MINO_PORT:-4290}:9000"
- "${EXPOSE_MINIO_CONSOLE_PORT:-4291}:9001"
environment:
MINIO_ROOT_USER: ${MINIO_ROOT_USER:-minio-admin}
MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD:-minio-admin}
MINIO_FORCE_NEW_KEYS: "yes"
BITNAMI_DEBUG: "true"
volumes:
- minio_data:/data
minio-setup:
image: minio/mc
depends_on:
- minio
environment:
MINIO_ROOT_USER: ${MINIO_ROOT_USER:-minio-admin}
MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD:-minio-admin}
entrypoint: >
/bin/sh -c "
set -e
mc alias set myminio http://minio:9000 ${MINIO_ROOT_USER:-minio-admin} ${MINIO_ROOT_PASSWORD:-minio-admin};
until mc ls myminio; do
echo 'Waiting for MinIO to be ready...';
sleep 1;
done;
echo 'MinIO is up and running, configuring...';
mc mb myminio/avatar;
mc mb myminio/audio;
mc anonymous set download myminio/avatar;
"
rabbitmq:
image: docker.io/bitnami/rabbitmq:3.9
restart: always
volumes:
- rabbitmq_data:/bitnami
ports:
- ${EXPOSE_RABBITMQ_NODE_PORT}:5672
- ${EXPOSE_RABBITMQ_MANAGEMENT_PORT_PORT}:15672
environment:
- RABBITMQ_USERNAME=dev
- RABBITMQ_PASSWORD=dev
- RABBITMQ_DISK_FREE_ABSOLUTE_LIMIT=2GB
redis-q-dedup:
image: &redisImage bitnami/redis:7.2
# restart: always
ports:
- "${EXPOSE_REDIS_QD_PORT:-4278}:6379"
environment: # https://hub.docker.com/r/bitnami/redis
REDIS_PASSWORD: redis-password
REDIS_PORT: "6379"
REDIS_AOF_ENABLED: "no"
redis-hot-geodata:
image: *redisImage
# restart: always
ports:
- "${EXPOSE_REDIS_HG_PORT:-4279}:6379"
volumes:
- redis_hg_data:/bitnami/redis/data
environment: # https://hub.docker.com/r/bitnami/redis
REDIS_PASSWORD: redis-password
REDIS_PORT: "6379"
REDIS_AOF_ENABLED: "yes"
REDIS_DISABLE_COMMANDS: FLUSHDB,FLUSHALL
keydb-cold-geodata:
image: eqalpha/keydb:x86_64_v6.3.4
# restart: always
ports:
- "${EXPOSE_KEYDB_CG_PORT:-4277}:6379"
volumes:
- keydb_cg_data:/data
command:
- keydb-server
- /etc/keydb/redis.conf
- --requirepass
- keydb-password
- --server-threads
- "2"
- --storage-provider
- flash
- /data/flash
- --maxmemory
- "1G"
- --maxmemory-policy
- allkeys-lfu
hasura:
image: helpme_hasura
restart: always
build:
context: .
dockerfile: ./services/hasura/Dockerfile
volumes:
- ./services/hasura/metadata:/hasura-metadata
- ./services/hasura/migrations:/hasura-migrations
ports:
- ${EXPOSE_HASURA_PORT:-4201}:8080
depends_on:
- db
- api
environment:
HASURA_GRAPHQL_INFER_FUNCTION_PERMISSIONS: "false"
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
HASURA_GRAPHQL_DATABASE_URL: postgres://dev:dev@db:5432/dev
HASURA_GRAPHQL_ENABLE_CONSOLE: "true"
HASURA_GRAPHQL_DEV_MODE: "true"
HASURA_GRAPHQL_ENABLED_LOG_TYPES: startup, http-log, webhook-log, websocket-log, query-log
# HASURA_GRAPHQL_JWT_SECRET: '{"jwk_url": "http://api:${API_PORT:-4200}/api/v1/oas/jwks","header":{"type":"Cookie","name":"bearer"},"claims_namespace":"${CLAIMS_NAMESPACE:-https://alertesecours.fr/claims}"}'
HASURA_GRAPHQL_JWT_SECRET: '{"type":"EdDSA", "jwk_url": "http://api:${API_PORT:-4200}/api/v1/oas/jwks","claims_namespace":"${CLAIMS_NAMESPACE:-https://alertesecours.fr/claims}"}'
HASURA_GRAPHQL_UNAUTHORIZED_ROLE: "anonymous"
HASURA_REMOTE_SCHEMA_API_GRAPHQL_ENDPOINT: ${HASURA_REMOTE_SCHEMA_API_GRAPHQL_ENDPOINT:-http://api:4200/api/v1/graphql}
API_PORT: ${API_PORT:-4200}
hasura_console:
image: helpme_hasura_console
restart: always
build:
context: .
dockerfile: ./services/hasura/console/Dockerfile
volumes:
- ./services/hasura:/hasura
ports:
- ${HASURA_CONSOLE_PORT:-4295}:${HASURA_CONSOLE_PORT:-4295}
- ${HASURA_CONSOLE_API_PORT:-4293}:${HASURA_CONSOLE_API_PORT:-4293}
depends_on:
- db
- api
- hasura
environment:
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
HASURA_CONSOLE_INTERNAL_HOSTNAME: ${HASURA_CONSOLE_INTERNAL_HOSTNAME:-hasura_console}
HASURA_CONSOLE_PORT: ${HASURA_CONSOLE_PORT:-4295}
HASURA_CONSOLE_API_PORT: ${HASURA_CONSOLE_API_PORT:-4293}
HASURA_GRAPHQL_ENDPOINT: http://localhost:${EXPOSE_HASURA_PORT:-4201}
HASURA_INTERNAL_HOSTNAME: ${HASURA_INTERNAL_HOSTNAME:-hasura}
HASURA_EXPOSE_PORT: ${EXPOSE_HASURA_PORT:-4201}
api:
image: helpme_api:prod
restart: always
build:
context: .
dockerfile: ./services/api/Dockerfile.dev
volumes:
- ./.dev-secrets:/secrets:ro
- ./:/app
- /lab/devthefuture/modjo:/app/.yarn-dev-portal/.links/modjo
ports:
- ${EXPOSE_API_PORT:-4200}:${API_PORT:-4200}
environment:
LOGLEVEL: ${API_LOGLEVEL:-debug}
PORT: "${API_PORT:-4200}"
JWK_FILE: /secrets/jwk.json
OLDJWK_FILE: /secrets/jwk2.json
DATABASE_URL: ${DATABASE_URL-psql://dev:dev@db/dev}
HASURA_GRAPHQL_URL: ${HASURA_GRAPHQL_URL:-http://hasura:8080/v1/graphql}
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
NOMINATIM_URL: ${NOMINATIM_URL:-http://nominatim:8080}
WHAT3WORDS_API_KEY: ${WHAT3WORDS_API_KEY-}
LOG_REQUESTS: ${LOG_REQUESTS:-false}
CLAIMS_NAMESPACE: ${CLAIMS_NAMESPACE:-https://hasura.io/jwt/claims}
AMQP_URL: ${AMQP_URL-amqp://dev:dev@rabbitmq}
AMQP_HOST: rabbitmq
AMQP_PORT: 5672
EXTERNAL_RINGOVER_CALL_EVENT_WEBHOOK_KEY: ${EXTERNAL_RINGOVER_CALL_EVENT_WEBHOOK_KEY:-}
REDIS_QUEUE_DEDUP_HOST: redis-q-dedup
REDIS_QUEUE_DEDUP_PASSWORD: redis-password
REDIS_HOT_GEODATA_HOST: redis-hot-geodata
REDIS_HOT_GEODATA_PASSWORD: redis-password
SENTRY_DSN: ${SENTRY_DSN_API:-""}
SENTRY_ENVIRONMENT: ${SENTRY_ENVIRONMENT:-development}
depends_on:
- db
- rabbitmq
- redis-hot-geodata
- keydb-cold-geodata
- redis-q-dedup
files:
image: helpme_file
restart: always
build:
context: .
dockerfile: ./services/files/Dockerfile.dev
volumes:
- ./.dev-secrets:/secrets:ro
- ./:/app
- /lab/devthefuture/modjo:/app/.yarn-dev-portal/.links/modjo
ports:
- ${EXPOSE_FILES_PORT:-4292}:${FILES_PORT:-4292}
environment:
LOGLEVEL: ${FILES_LOGLEVEL:-debug}
PORT: "${FILES_PORT:-4292}"
JWK_FILE: /secrets/jwk.json
OLDJWK_FILE: /secrets/jwk2.json
DATABASE_URL: ${DATABASE_URL-psql://dev:dev@db/dev}
MINIO_ENDPOINT: minio
MINIO_PORT: 9000
MINIO_ACCESS_KEY: ${MINIO_ROOT_USER:-minio-admin}
MINIO_SECRET_KEY: ${MINIO_ROOT_PASSWORD:-minio-admin}
HASURA_GRAPHQL_URL: ${HASURA_GRAPHQL_URL:-http://hasura:8080/v1/graphql}
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
LOG_REQUESTS: ${LOG_REQUESTS:-false}
CLAIMS_NAMESPACE: ${CLAIMS_NAMESPACE:-https://hasura.io/jwt/claims}
AMQP_URL: ${AMQP_URL-amqp://dev:dev@rabbitmq}
AMQP_HOST: rabbitmq
AMQP_PORT: 5672
SENTRY_DSN: ${SENTRY_DSN_FILES:-""}
SENTRY_ENVIRONMENT: ${SENTRY_ENVIRONMENT:-development}
depends_on:
- minio
- db
- rabbitmq
tasks:
image: helpme_tasks
restart: always
build:
context: .
dockerfile: ./services/tasks/Dockerfile.dev
volumes:
- ./:/app
- /lab/devthefuture/modjo:/app/.yarn-dev-portal/.links/modjo
environment:
API_URL: ${API_URL:-http://localhost:${EXPOSE_API_PORT:-4200}/api}
LOGLEVEL: ${TASKS_LOGLEVEL:-debug}
DATABASE_URL: ${DATABASE_URL-psql://dev:dev@db/dev}
AMQP_URL: ${AMQP_URL-amqp://dev:dev@rabbitmq}
AMQP_PORT: 5672
HASURA_GRAPHQL_URL: ${HASURA_GRAPHQL_URL:-http://hasura:8080/v1/graphql}
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
NOMINATIM_URL: ${NOMINATIM_URL:-http://nominatim:8080}
WHAT3WORDS_API_KEY: ${WHAT3WORDS_API_KEY-}
GOOGLE_SERVICE_ACCOUNT_KEY_FILE: ${GOOGLE_SERVICE_ACCOUNT_KEY_FILE:-/app/services/tasks/src/googleServiceAccountKey.json}
SMTP_USER: ""
SMTP_PASS: ""
SMTP_HOST: maildev
SMTP_PORT: 1025
SMTP_FROM: ""
REDIS_QUEUE_DEDUP_HOST: redis-q-dedup
REDIS_QUEUE_DEDUP_PASSWORD: redis-password
REDIS_HOT_GEODATA_HOST: redis-hot-geodata
REDIS_HOT_GEODATA_PASSWORD: redis-password
KEYDB_COLD_GEODATA_HOST: keydb-cold-geodata
KEYDB_COLD_GEODATA_PASSWORD: keydb-password
SENTRY_DSN: ${SENTRY_DSN_TASKS:-""}
SENTRY_ENVIRONMENT: ${SENTRY_ENVIRONMENT:-development}
depends_on:
- db
- rabbitmq
- redis-hot-geodata
- keydb-cold-geodata
- redis-q-dedup
watchers:
image: helpme_watchers
restart: always
build:
context: .
dockerfile: ./services/watchers/Dockerfile.dev
volumes:
- ./:/app
- /lab/devthefuture/modjo:/app/.yarn-dev-portal/.links/modjo
environment:
LOGLEVEL: ${WATCHERS_LOGLEVEL:-debug}
DATABASE_URL: ${DATABASE_URL-psql://dev:dev@db/dev}
AMQP_URL: ${AMQP_URL-amqp://dev:dev@rabbitmq}
AMQP_PORT: 5672
HASURA_GRAPHQL_URL: ${HASURA_GRAPHQL_URL:-http://hasura:8080/v1/graphql}
HASURA_GRAPHQL_ADMIN_SECRET: ${HASURA_GRAPHQL_ADMIN_SECRET:-admin}
REDIS_HOT_GEODATA_HOST: redis-hot-geodata
REDIS_HOT_GEODATA_PASSWORD: redis-password
KEYDB_COLD_GEODATA_HOST: keydb-cold-geodata
KEYDB_COLD_GEODATA_PASSWORD: keydb-password
SENTRY_DSN: ${SENTRY_DSN_WATCHERS:-""}
SENTRY_ENVIRONMENT: ${SENTRY_ENVIRONMENT:-development}
depends_on:
- db
- rabbitmq
- redis-hot-geodata
- keydb-cold-geodata
osrm-car: &osrm
image: osrm/osrm-backend:latest
restart: always
# image: helpme_osrm
# build:
# context: .
# dockerfile: ./services/osrm/Dockerfile
volumes:
# - osrm-data:/data
- ./osm-files:/data
ports:
- ${OSRM_CAR_PORT:-4261}:5000
environment:
ALLOW_CORS: enabled
# UPDATES: enabled
command: osrm-routed --algorithm mld /data/car/switzerland-latest.osrm
osrm-foot:
<<: *osrm
command: osrm-routed --algorithm mld /data/foot/switzerland-latest.osrm
ports:
- ${OSRM_FOOT_PORT:-4262}:5000
# osrm-bicycle:
# image: osrm/osrm-backend:latest
# restart: always
# # image: helpme_osrm
# # build:
# # context: .
# # dockerfile: ./services/osrm/Dockerfile
# volumes:
# # - osrm-data:/data
# - ./osm-files:/data
# ports:
# - ${OSRM_BICYCLE_PORT:-4263}:5000
# environment:
# ALLOW_CORS: enabled
# # UPDATES: enabled
# command: osrm-routed --algorithm mld /data/bicycle/switzerland-latest.osrm
tileserver-gl:
image: maptiler/tileserver-gl:v4.4.3
restart: always
volumes:
- ./tileserver-files:/data
ports:
- ${TILESERVERGL_PORT:-4282}:8080
nominatim:
image: mediagis/nominatim:4.0
restart: always
ports:
- ${NOMINATIM_PORT:-4283}:8080
environment:
# see https://github.com/mediagis/nominatim-docker/tree/master/4.0#configuration for more options
# PBF_URL: https://download.geofabrik.de/europe/switzerland-latest.osm.pbf
PBF_PATH: /data/switzerland-latest.osm.pbf
REPLICATION_URL: https://download.geofabrik.de/europe/switzerland-updates/
NOMINATIM_PASSWORD: very_secure_password
volumes:
- nominatim-data:/var/lib/postgresql/12/main
- ./osm-files:/data
shm_size: 1gb
maildev:
image: maildev/maildev:2.1.0
ports:
- "${MAILDEV_SMTP_PORT:-4225}:1025"
- "${MAILDEV_WEB_PORT:-4226}:1080"
restart: always
# web:
# image: helpme_web
# restart: always
# build:
# context: .
# dockerfile: ./services/web/Dockerfile
# ports:
# - ${WEB_PORT:-4203}:8080
app:
image: helpme_app
restart: always
build:
context: .
dockerfile: ./services/app/Dockerfile
ports:
- ${SERVICE_APP_PORT:-4209}:8080
# networks:
# hostnet:
# external: true
# name: host
volumes:
pg_data:
name: helpme-pgdata
labels:
project: "alertesecours"
minio_data:
name: helpme-miniodata
labels:
project: "alertesecours"
redis_hg_data:
name: helpme-redishg-data
labels:
project: "alertesecours"
keydb_cg_data:
name: helpme-keydbcg-data
labels:
project: "alertesecours"
rabbitmq_data:
name: helpme-rabbitmq-data
labels:
project: "alertesecours"
osm-data:
name: helpme-osm-data
labels:
project: "alertesecours"
# external: true
osm-rendered-tiles:
name: helpme-osm-rendered-tiles
labels:
project: "alertesecours"
# external: true
nominatim-data:
name: helpme-nominatim-data
labels:
project: "alertesecours"
# osrm-data:
# name: helpme-osrm-data
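
A quick smoke test of the JWT wiring above, using the exposed dev ports (`/healthz` is graphql-engine's standard health probe):

```sh
curl -s http://localhost:4200/api/v1/oas/jwks   # the jwk_url hasura is pointed at
curl -s http://localhost:4201/healthz           # hasura health check
```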

11
dockerfile-x/downloader.dockerfile Normal file
@@ -0,0 +1,11 @@
# syntax = devthefuture/dockerfile-x
FROM ./ubuntu.dockerfile
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install -yq --no-install-recommends \
curl \
ca-certificates \
wget \
git \
&& rm -rf /var/lib/apt/lists/*

3
dockerfile-x/nginx.dockerfile Normal file
@@ -0,0 +1,3 @@
FROM nginxinc/nginx-unprivileged:1.25-alpine@sha256:557e9af4afa7a36462e313906fe42fba39c307ae2a72d5323d49963eb2883b45
COPY --chown=nginx:nginx dockerfile-x/nginx /etc/nginx/

74
dockerfile-x/nginx/nginx.conf Normal file
@@ -0,0 +1,74 @@
# see https://www.nginx.com/resources/wiki/start/topics/examples/full/#nginx-conf
worker_processes auto; # determined automatically from the number of cores
error_log /tmp/error.log;
pid /tmp/nginx.pid;
events {
worker_connections 4096; ## Default: 1024
}
http {
server_tokens off;
absolute_redirect off;
access_log /tmp/access.log;
default_type application/octet-stream;
error_log /tmp/error.log;
include /etc/nginx/mime.types;
keepalive_timeout 3000;
sendfile on;
server {
listen 8080;
root /usr/share/nginx/html;
index index.html;
server_name_in_redirect on;
add_header X-Frame-Options "deny";
add_header X-XSS-Protection "1; mode=block";
add_header X-Content-Type-Options "nosniff";
charset utf-8;
gzip on;
gzip_disable "msie6";
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_buffers 16 8k;
gzip_http_version 1.1;
gzip_min_length 256;
gzip_types text/css application/json application/javascript application/x-javascript text/javascript application/vnd.ms-fontobject application/x-font-ttf font/opentype image/svg+xml image/x-icon;
client_max_body_size 10m;
error_page 500 502 503 504 /50x.html;
recursive_error_pages on;
location / {
# always fall back to /index.html, never 404
try_files $uri $uri.html $uri/index.html $uri/ /index.html;
}
location /50x.html {
root /var/lib/nginx/html;
}
location /live {
default_type text/plain;
return 200 'OK';
}
include /etc/nginx/ready_response.conf;
location /ready {
default_type text/plain;
if ($ready_response = 'OK') {
return 200 $ready_response;
}
return 500 'Not Ready';
}
}
}

@@ -0,0 +1,20 @@
#!/usr/bin/env sh
echo "set \$ready_response 'Not Ready';" > /etc/nginx/conf.d/ready_response.conf
nginx -s reload
WAIT_TIME=30
if [ "$1" ]; then
WAIT_TIME="$1"
fi
for i in $(seq 1 $WAIT_TIME); do
# "nginx -s" only accepts stop/quit/reopen/reload, so the connection count is read from
# a stub_status endpoint instead (assumed exposed at /status; hypothetical); the count is
# compared against 1 because the curl probe itself holds one connection
if [ "$(curl -s http://127.0.0.1:8080/status | awk '/Active connections/ {print $3}')" -eq 1 ]; then
exit 0
fi
sleep 1
done
nginx -s stop
exit 0

1
dockerfile-x/nginx/ready_response.conf Normal file
@@ -0,0 +1 @@
set $ready_response 'OK';
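
Flipping that flag by hand reproduces what the drain script above does at shutdown:

```sh
echo "set \$ready_response 'Not Ready';" > /etc/nginx/ready_response.conf
nginx -s reload
curl -i http://localhost:8080/ready   # now returns 500 'Not Ready'
```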

27
dockerfile-x/node.dockerfile Normal file
@@ -0,0 +1,27 @@
# syntax = devthefuture/dockerfile-x
FROM ./downloader.dockerfile AS build-node
# renovate: datasource=node depName=node versioning=node
ARG NODE_VERSION=20.3.0
ARG NODE_PACKAGE=node-v$NODE_VERSION-linux-x64
RUN curl -fsSL https://nodejs.org/dist/v$NODE_VERSION/$NODE_PACKAGE.tar.gz \
| tar -xzC /opt/ \
&& mv /opt/$NODE_PACKAGE /opt/node
FROM ./ubuntu.dockerfile
COPY --from=build-node /opt/node /opt/node
ENV NODE_PATH /opt/node/lib/node_modules
ENV PATH /opt/node/bin:$PATH
RUN npm i -g yarn
WORKDIR /app
USER 1000
# enable yarn immutable dependencies
ENV CI=true


@ -0,0 +1,12 @@
# syntax = devthefuture/dockerfile-x
# renovate: datasource=docker depName=ubuntu versioning=ubuntu
# ARG UBUNTU_VERSION=22.04
ARG UBUNTU_VERSION=22.04@sha256:56887c5194fddd8db7e36ced1c16b3569d89f74c801dc8a5adbf48236fb34564
FROM ubuntu:$UBUNTU_VERSION
RUN groupadd -g 1000 ubuntu && useradd -rm -d /home/ubuntu -s /bin/bash -g ubuntu -G sudo -u 1000 ubuntu
ENV HOME=/home/ubuntu
RUN chmod 0777 /home/ubuntu
RUN mkdir /app && chown 1000:1000 /app


@ -0,0 +1,70 @@
# Graphql Conventions:
## Global rules:
All names are camelCase
## Hasura tables:
### spec:
- match SQL methods
### Custom Root Fields:
- Select -> selectMany${Table}
- Select by PK -> selectOne${Table}
- Select Aggregate -> selectAgg${Table}
- Select Stream -> selectStream${Table}
- Insert/Upsert -> insertMany${Table}
- Insert/Upsert One -> insertOne${Table}
- Update -> updateMany${Table}
- Update Many -> updateBatch${Table}
- Update One -> updateOne${Table}
- Delete -> deleteMany${Table}
- Delete One -> deleteOne${Table}
### Relations naming:
- one -> one${Table}
- many -> many${Table}
- one named link -> one${Table}As${LinkName}
- many named link -> many${Table}As${LinkName}
- one by foreign key -> one${Table}By${ForeignKey}
- many by foreign key -> many${Table}By${ForeignKey}
### Enum tables naming:
- enum_${Type=ColumnName}
- enum_${Table}_${Type=ColumnName}
columns:
- value (primary)
- label (optional)
### Columns:
GraphQL field name: camelCase(${column})
### Computed Columns:
- PostgreSQL function name: computed_${table_name}__${column_name}
## API Remote Schema:
### spec:
- avoid collisions with hasura
- match HTTP methods
- match REST paths
### Root Fields:
- ${DataName}.post.js -> POST /${DataName} -> addOne${DataName}
- ${DataName}/{id}.get.js -> GET /${DataName}/{id} -> getOne${DataName}
- ${DataName}/{id}.sub.js -> GET /${DataName}/{id} -> ${DataName}
- ${DataName}/{id}.put.js -> PUT /${DataName}/{id} -> setOne${DataName}
- ${DataName}/{id}.delete.js -> DELETE /${DataName}/{id} -> delOne${DataName}
- ${DataName}/index.post.js -> POST /${DataName}/ -> addMany${DataName}
- ${DataName}/index.get.js -> GET /${DataName}/ -> getMany${DataName}
- ${DataName}/index.sub.js -> GET /${DataName}/ -> subMany${DataName}
- ${DataName}/index.put.js -> PUT /${DataName}/ -> setMany${DataName}
- ${DataName}/index.delete.js -> DELETE /${DataName}/ -> delMany${DataName}
- ${ActionName}.patch.js -> PATCH /${ActionName} -> do${ActionName}
### Async Api:
- ${DataName}.chan/index.sub.js -> WS /${ChannelName} -> subMany${ChannelName}
- ${DataName}.chan/index.pub.js -> WS /${ChannelName} -> pubMany${ChannelName}
- ${DataName}.chan/{id}.sub.js -> WS /${ChannelName} -> subOne${ChannelName}
- ${DataName}.chan/{id}.pub.js -> WS /${ChannelName} -> pubOne${ChannelName}
- ${ChannelName}.chan/${OperationName}.sub.js -> WS /${ChannelName} -> ${OperationName}${ChannelName}
- ${ChannelName}.chan/${OperationName}.pub.js -> WS /${ChannelName} -> ${OperationName}${ChannelName}
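As an illustration of how these names surface in client code, a minimal sketch (the `message` table, its columns, and the `sender` link are hypothetical):
```javascript
const { gql } = require("graphql-tag")

// hypothetical "message" table, named per the conventions above
const SELECT_MANY_MESSAGE = gql`
  query selectManyMessage($limit: Int!) {
    selectManyMessage(limit: $limit) {
      id
      content
      # one named link: one<Table>As<LinkName>
      oneUserAsSender {
        id
      }
    }
  }
`

module.exports = { SELECT_MANY_MESSAGE }
```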


@ -0,0 +1,8 @@
```sh
sudo apt-get install build-essential g++
sudo apt install openjdk-8-jre openjdk-8-jdk
sudo update-alternatives --config javac
sudo update-alternatives --config java
npm install -g turtle-cli
turtle build:android
```


@ -0,0 +1,4 @@
# Change PNG opacity from the command line
```sh
convert hand-yellow-64.png -alpha set -background none -channel A -evaluate multiply 0.85 +channel hand-yellow-64.png
```

7
docs/dev/ci.md Normal file

@ -0,0 +1,7 @@
# CI
## Release
```sh
yarn release
```


@ -0,0 +1,8 @@
| Index Type | Best For | Supported Operations | Comments |
|------------|----------------------------------|-----------------------------|------------------------------------------------|
| B-Tree | Equality and range queries | =, &lt;, &lt;=, &gt;, &gt;= | Default index type |
| Hash | Equality queries | = | Less commonly used, fewer operations supported |
| GIN | Full-text search, arrays, JSONB | Various | Optimized for composite data types |
| GiST | Geometric data, complex searches | Various | Versatile, supports many types of searches |
| SP-GiST | Dynamic, partitioned data | Various | Suitable for k-nearest neighbors, quadtrees |
| BRIN | Large, naturally clustered data | Various | Efficient for large tables, time-series data |
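For instance, with the `postgres` client used elsewhere in this repo (table and column names are hypothetical), creating the default B-Tree index next to a GIN index looks like this:
```javascript
const postgres = require("postgres")

const sql = postgres() // connection settings come from PG* env vars

async function createIndexes() {
  // B-Tree (the default) for equality and range queries on a timestamp
  await sql`CREATE INDEX IF NOT EXISTS message_created_at_idx
    ON "message" (created_at)`
  // GIN for containment queries on a JSONB column
  await sql`CREATE INDEX IF NOT EXISTS message_payload_idx
    ON "message" USING GIN (payload)`
  await sql.end()
}

createIndexes()
```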


@ -0,0 +1,54 @@
# create init-migration from dev db
## remove all other migrations
```sh
rm -rf ./services/hasura/migrations/default/*
```
## dump
### Option 1: hasura dump api
```sh
curl -X POST http://localhost:4201/v1alpha1/pg_dump -H "Content-Type: application/json" -H "X-Hasura-Role: admin" -H "X-Hasura-Admin-Secret: admin" -d '{
"opts": ["-O", "-x", "--schema-only", "--schema", "public"],
"clean_output": true,
"source": "default"
}' --output init_migration.sql
export PGUSER=dev
export PGPASSWORD=dev
export PGDATABASE=dev
export PGHOST=localhost
export PGPORT=4204
pg_dump --data-only --inserts -t 'public.enum_*' | sed -n '/^INSERT INTO /p' >> init_migration.sql
pg_dump --data-only --inserts -t 'public.external_public_config' | sed -n '/^INSERT INTO /p' >> init_migration.sql
```
### Option 2: old-school dump (doesn't seem to work when applying the migration, or perhaps that was only the old enum_* part)
```sh
export PGUSER=dev
export PGPASSWORD=dev
export PGDATABASE=dev
export PGHOST=localhost
export PGPORT=4204
pg_dump -n public -s > init_migration.sql
pg_dump --data-only -t 'public.enum_*' >> init_migration.sql
pg_dump --data-only -t 'public.external_public_config' >> init_migration.sql
```
## save the dump as migration
```sh
migration_timestamp="$(date '+%s')000"
migration_dir="./services/hasura/migrations/default/${migration_timestamp}_init"
mkdir "$migration_dir"
touch "$migration_dir/down.sql"
mv init_migration.sql "$migration_dir/up.sql"
```
## up hasura
```sh
cd "services/hasura"
hasura migrate apply --version $migration_timestamp --skip-execution --endpoint http://localhost:4201 --admin-secret admin
```

7
docs/dev/dependencies.md Normal file

@ -0,0 +1,7 @@
# Dependencies
## Add dependency to a package
```sh
yarn env-run
yarn workspace @as/web add react react-dom
yarn workspace @as/web add eslint --dev
```


@ -0,0 +1,11 @@
# Init
Install dependencies:
```sh
yarn
```
Re-initialize (force a fresh install):
```sh
yarn --force
```

15
docs/dev/osrm.md Normal file

@ -0,0 +1,15 @@
```sh
docker run --rm -t -v "${PWD}/osm-files:/data" osrm/osrm-backend osrm-extract -p /opt/car.lua /data/car/switzerland-latest.osm.pbf
docker run --rm -t -v "${PWD}/osm-files:/data" osrm/osrm-backend osrm-extract -p /opt/foot.lua /data/foot/switzerland-latest.osm.pbf
docker run --rm -t -v "${PWD}/osm-files:/data" osrm/osrm-backend osrm-extract -p /opt/bicycle.lua /data/bicycle/switzerland-latest.osm.pbf
docker run --rm -t -v "${PWD}/osm-files:/data" osrm/osrm-backend osrm-partition /data/car/switzerland-latest.osrm
docker run --rm -t -v "${PWD}/osm-files:/data" osrm/osrm-backend osrm-partition /data/foot/switzerland-latest.osrm
docker run --rm -t -v "${PWD}/osm-files:/data" osrm/osrm-backend osrm-partition /data/bicycle/switzerland-latest.osrm
docker run --rm -t -v "${PWD}/osm-files:/data" osrm/osrm-backend osrm-customize /data/car/switzerland-latest.osrm
docker run --rm -t -v "${PWD}/osm-files:/data" osrm/osrm-backend osrm-customize /data/foot/switzerland-latest.osrm
docker run --rm -t -v "${PWD}/osm-files:/data" osrm/osrm-backend osrm-customize /data/bicycle/switzerland-latest.osrm
```
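Once a server is up, routes come back over OSRM's HTTP route API; a minimal sketch, assuming the car profile is exposed on localhost:4261 as in the dev setup (coordinates are arbitrary):
```javascript
const { default: axios } = require("axios")

// OSRM route API: /route/v1/{profile}/{lon},{lat};{lon},{lat}
async function fastestRoute() {
  const url =
    "http://localhost:4261/route/v1/driving/8.5417,47.3769;7.4474,46.9480"
  const { data } = await axios.get(url, { params: { overview: "false" } })
  console.log(`${data.routes[0].duration}s, ${data.routes[0].distance}m`)
}

fastestRoute()
```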

83
docs/dev/web/structure.md Normal file

@ -0,0 +1,83 @@
from <https://github.com/alan2207/bulletproof-react/blob/master/docs/project-structure.md>
# Project Structure
Most of the code lives in the `src` folder and looks like this:
```
src
|
+-- assets # assets folder can contain all the static data such as images, fonts, etc.
|
+-- components # shared components used across the entire application
|
+-- config # all the global configuration, env variables etc. get exported from here and used in the app
|
+-- context # all of the global contexts
|
+-- features # feature based modules
|
+-- hooks # shared hooks used across the entire application
|
+-- lib # re-exporting different libraries preconfigured for the application
|
+-- routes # routes configuration
|
+-- test # test utilities and mock server
|
+-- types # base types used across the application
|
+-- utils # shared utility functions
```
To scale the application in the easiest and most maintainable way, keep most of the code inside the `features` folder; every `feature` folder should contain domain-specific code for one feature. This keeps functionality scoped to a feature rather than mixed in with shared code, and is much easier to maintain than a flat folder structure with many files.
A feature could have the following structure:
```
src/features/awesome-feature
|
+-- api # exported API request declarations related to the feature
|
+-- components # components scoped to the feature, not used anywhere else
|
+-- hooks # hooks scoped to the feature, not used anywhere else
|
+-- routes # route components for the given feature
|
+-- types # typescript types for the given feature
|
+-- utils # utility functions used only by the feature
|
+-- index.ts # entry point for the feature, it should serve as the public API of the given feature and exports everything that should be used outside the feature
```
A feature folder could also contain other features (if they are used only within the parent feature) or be kept separate; it's a matter of preference.
Everything from a feature should be exported from the `index.ts` file which behaves as the public API of the feature.
You should import from other features only through their public entry point:
`import { AwesomeComponent } from "@/features/awesome-feature"`
and not
`import { AwesomeComponent } from "@/features/awesome-feature/components/AwesomeComponent"`
This can also be enforced in the ESLint configuration, disallowing the latter import with the following rule:
```js
{
  rules: {
    'no-restricted-imports': [
      'error',
      {
        patterns: ['@/features/*/*'],
      },
    ],
  },
  // ...rest of the configuration
}
```
This was inspired by how [NX](https://nx.dev/) handles libraries that are isolated but available to be used by the other modules. Think of a feature as a library or a module that is self-contained but can expose different parts to other features via its entry point.
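To make the entry-point idea concrete, a hypothetical feature's public API can be just a few re-exports (file and symbol names are illustrative):
```js
// src/features/awesome-feature/index.js — the feature's public API
export { AwesomeComponent } from "./components/AwesomeComponent"
export { useAwesome } from "./hooks/useAwesome"
export { awesomeApi } from "./api"
```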

8
docs/geo/geo-redis.md Normal file

@ -0,0 +1,8 @@
```redis
GEOPOS key member
```
e.g.:
```redis
GEOPOS device 218
```
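The same lookup from Node via ioredis (connection defaults and the member id follow the example above):
```javascript
const Redis = require("ioredis")

const redis = new Redis() // defaults to 127.0.0.1:6379

async function main() {
  // index a device position, then read it back
  await redis.geoadd("device", 2.3522, 48.8566, "218")
  const [[lon, lat]] = await redis.geopos("device", "218")
  console.log(lon, lat)
}

main().finally(() => redis.quit())
```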

203
docs/modjo-framework.md Normal file

@ -0,0 +1,203 @@
# Modjo Framework Documentation
## Overview
Modjo is a Rapid Application Development (RAD) framework for Node.js that emphasizes:
- Full Inversion of Control (IoC) through dependency injection
- CQRS (Command Query Responsibility Segregation) architecture
- Pure NodeJS implementation without transpilation
- Microservices-oriented design
- Plugin-based extensibility
## Key Features & Core Concepts
### 1. Dependency Injection System
- Uses composition root design pattern
- Avoids prop drilling through context management
- Implements async thread-based context using `nctx`
- Supports hierarchical dependency trees
### 2. Plugin Architecture
- Modular design with official, contrib, and local plugins
- Plugin discovery system with multiple lookup paths:
- Local project plugins (`src/plugins`)
- Official plugins (`@modjo/*`)
- Community plugins (`modjo-plugins-*`)
- Plugin inheritance and composition
### 3. Microservices Support
- Built-in microservices architecture support:
- App service
- Watcher service
- Worker service
- CQRS implementation for scalability
- Event-driven architecture capabilities
### 4. Context Management
- Async context tracking
- Thread-local storage implementation
- Hierarchical context inheritance
- Context isolation between services
## API Reference
### Core Module
#### `modjo(dependency)`
Main framework entry point.
Parameters:
- `dependency`: Object | Function | Array
- Configuration object for the application
- Can include plugins, dependencies, and lifecycle hooks
Example:
```javascript
const modjo = require("@modjo/core")
modjo({
plugins: {
config: {
context: (ctx) => {
ctx.set("customConfig", myConfig)
}
}
},
dependencies: {
database: "postgres",
cache: "ioredis"
}
})
```
### Dependency Configuration
#### Structure
```javascript
{
pluginName: string, // Name of the plugin to use
key: string, // Unique identifier for the dependency
create: Function, // Factory function
build: Function, // Build-time setup
ready: Function, // Runtime initialization
dependencies: Object, // Nested dependencies
context: Function, // Context setup function
params: Array|Object // Parameters for create/build
}
```
### Plugin System
#### `getPlugin(name)`
Loads a plugin by name.
Parameters:
- `name`: string - Plugin identifier
Returns:
- Plugin module or throws if not found
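A usage sketch (assuming `getPlugin` is exported from `@modjo/core`; the plugin name is illustrative):
```javascript
// assumption: getPlugin is exposed by @modjo/core
const { getPlugin } = require("@modjo/core")

// resolution order: src/plugins, then @modjo/*, then modjo-plugins-*
const postgresPlugin = getPlugin("postgres")
```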
## Usage & Best Practices
### 1. Dependency Configuration
```javascript
// Recommended structure
{
plugins: {
// Core plugins
config: {
context: (ctx) => {
// Configure context
}
}
},
dependencies: {
// Service dependencies
database: "postgres",
cache: {
pluginName: "ioredis",
context: (ctx) => {
// Redis-specific configuration
}
}
}
}
```
### 2. Context Usage
```javascript
const ctx = require("~/ctx")
// Setting context values
ctx.set("key", value)
// Getting context values
const value = ctx.get("key")
// Context inheritance
const childCtx = nctx.create(Symbol("child"))
childCtx.fallback(parentCtx)
```
### 3. Plugin Development
```javascript
// plugin/my-plugin/index.js
module.exports = {
create: async () => {
// Plugin initialization
return instance
},
build: async () => {
// Build-time setup
},
ready: async (instance) => {
// Runtime initialization
}
}
```
## Advanced Topics
### 1. CQRS Implementation
- Separate command and query responsibilities
- Event sourcing support
- Message queue integration
- Eventual consistency handling
### 2. Performance Optimization
- No transpilation overhead
- Efficient dependency resolution
- Lazy loading of plugins
- Context-based caching
### 3. Scaling Strategies
- Microservices decomposition
- Worker process management
- Queue-based task distribution
- State management across services
## Common Errors & Troubleshooting
### 1. Plugin Loading Issues
```
Error: required plugin not found: "plugin-name"
```
- Check plugin installation
- Verify plugin naming convention
- Check plugin path in project structure
### 2. Dependency Resolution
```
Error: Circular dependency detected
```
- Review dependency tree
- Break circular references
- Use context for shared state
### 3. Context Errors
```
Error: Context value not found: "key"
```
- Ensure context is properly initialized
- Check context hierarchy
- Verify context provider setup

5
docs/paradigm/cqrs.md Normal file

@ -0,0 +1,5 @@
# CQRS
## cloud/micro-service = eventual consistency
Solution: idempotency of tasks, request handlers, etc. (a minimal sketch follows below).
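A minimal sketch of that idea using a Redis dedup key (the key naming and `processMessage` are hypothetical):
```javascript
const Redis = require("ioredis")

const redis = new Redis()

// hypothetical stand-in for the real side-effecting work
async function processMessage(message) {
  console.log("processing", message.id)
}

// claim a per-message dedup key before doing any work, so redelivered
// messages become no-ops and the handler stays idempotent
async function handleTask(message) {
  const claimed = await redis.set(
    `task:dedup:${message.id}`,
    "1",
    "EX", 3600, // let the claim expire after an hour
    "NX" // only set when not already claimed
  )
  if (claimed !== "OK") {
    return // duplicate delivery: already processed or in progress
  }
  await processMessage(message)
}
```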


@ -0,0 +1,586 @@
# Alerte Secours - Modjo Implementation Guide
## Project Overview
Alerte Secours is a microservices-based application that leverages the Modjo framework for:
- API service management
- Database integration (PostgreSQL)
- Message queue handling (AMQP)
- Redis-based caching and queue deduplication
- OpenAPI and GraphQL integration
- Monitoring and error tracking (Sentry)
## Integration Details
### API Service Structure
The API service follows a well-organized structure that implements OpenAPI specifications and REST endpoints:
```
services/api/src/
├── api/
│ └── v1/
│ ├── formats/ # Custom format definitions
│ ├── operations/ # API endpoint implementations
│ │ ├── alert/ # Alert-related operations
│ │ ├── auth/ # Authentication operations
│ │ ├── external/ # External service integrations
│ │ ├── geoloc/ # Geolocation operations
│ │ ├── info/ # Information endpoints
│ │ └── user/ # User management
│ ├── security/ # Security implementations
│ ├── services/ # Shared services
│ ├── spec-openapi/ # OpenAPI specifications
│ └── validators/ # Input validation
└── tasks/ # Background task definitions
```
#### Key Implementation Patterns
1. **Operation Structure**
Each API operation follows a consistent pattern:
```javascript
const { ctx } = require("@modjo/core")

module.exports = function () {
// Dependency injection through context
const sql = ctx.require("postgres")
const redis = ctx.require("redisHotGeodata")
// Operation implementation
async function doOperation(req) {
// Implementation
}
return [doOperation]
}
```
2. **Authentication Flow**
```javascript
// Example from auth/login/token.patch.js
async function doAuthLoginToken(req) {
const { authTokenJwt } = req.body
// JWT-based authentication with Hasura claims
const hasuraClaim = {
"x-hasura-default-role": defaultRole,
"x-hasura-allowed-roles": roles,
"x-hasura-user-id": userId.toString()
}
// Token generation with expiration
const jwtData = {
[claimsNamespace]: hasuraClaim,
exp: Math.round(new Date(Date.now() + jwtExpirationInHours * 3600000) / 1000)
}
return { userBearerJwt: await signJwt(jwtData) }
}
```
3. **API Schema Generation**
The project implements automatic OpenAPI and GraphQL schema generation based on the API service structure:
#### File Structure Convention
```
services/api/src/api/v1/
├── operations/ # API endpoints
│ ├── alert/
│ │ ├── send-alert.patch.js # Implementation
│ │ └── send-alert.patch.spec.yaml # OpenAPI spec
│ └── auth/
│ ├── login/
│ │ ├── token.patch.js
│ │ └── token.patch.spec.yaml
├── formats/ # Custom format definitions
├── security/ # Security schemes
├── services/ # Shared services
└── validators/ # Input validation
```
#### OpenAPI Generation
- The Modjo OA plugin automatically compiles the API specification from the directory structure
- Each endpoint implementation (.js) has an adjacent specification file (.yaml)
- The plugin merges all specs into a single OpenAPI document
- Swagger UI is automatically generated and served at `/swagger`
#### GraphQL Schema Generation
The project automatically generates GraphQL schemas through two mechanisms:
1. **Hasura Table Operations**
Following strict naming conventions for database operations:
```graphql
# Automatically generated from database tables
selectMany${Table} # Select operations
selectOne${Table} # Select by primary key
insertOne${Table} # Insert single record
updateMany${Table} # Bulk updates
deleteOne${Table} # Delete by primary key
```
2. **API Remote Schema**
```graphql
# Generated from REST endpoints
${DataName}.post.js -> addOne${DataName}
${DataName}.get.js -> getOne${DataName}
${DataName}.put.js -> setOne${DataName}
${DataName}.delete.js -> delOne${DataName}
```
3. **Real-time Subscriptions**
```graphql
# WebSocket channels
${DataName}.chan/index.sub.js -> subMany${ChannelName}
${DataName}.chan/{id}.sub.js -> subOne${ChannelName}
```
4. **OpenAPI to GraphQL Conversion**
The Modjo OA-GraphQL plugin automatically converts OpenAPI specifications to GraphQL schemas:
```javascript
// OpenAPI to GraphQL conversion configuration
const { schema } = await createGraphQLSchema(apiSpec, {
createSubscriptionsFromCallbacks: true,
baseUrl: `http://${host}:${port}/api/v1/oas`,
operationIdFieldNames: true,
fillEmptyResponses: true,
simpleEnumValues: true,
headers: (_method, _operationPath, _title, _resolverParams) => ({
...omit(req.headers, omitHeaders),
"x-origin": "GraphQL",
connection: "close",
})
})
```
Key features of the conversion:
- Automatic type generation from OpenAPI schemas
- REST endpoints mapped to GraphQL queries/mutations
- WebSocket callbacks converted to subscriptions
- Preserved security context and headers
- GraphQL Playground integration
5. **Relations and Types**
```graphql
# Auto-generated relationship fields
oneTableAsLinkName # One-to-one named relations
manyTableAsForeignKey # One-to-many by foreign key
# Auto-generated types from OpenAPI schemas
type Alert {
id: ID!
level: AlertLevel!
location: Point!
# Fields from OpenAPI schema
}
input AlertSendAlertInput {
callEmergency: Boolean
notifyAround: Boolean
# Fields from OpenAPI request body
}
```
#### Implementation Example
1. **REST Endpoint Implementation**
```javascript
// alert/send-alert.patch.js
const { ctx } = require("@modjo/core")

module.exports = function () {
const sql = ctx.require("postgres")
async function doAlertSendAlert(req) {
// Implementation
}
return [doAlertSendAlert]
}
```
2. **OpenAPI Specification**
```yaml
# alert/send-alert.patch.spec.yaml
x-security:
- auth: ["user"]
requestBody:
required: true
content:
application/json:
schema:
type: object
properties:
callEmergency:
type: boolean
notifyAround:
type: boolean
notifyRelatives:
type: boolean
uuid:
type: string
format: uuid
level:
enum: [red, yellow, green]
subject:
type: string
location:
type: object
properties:
type:
enum: [Point]
coordinates:
type: array
format: location
items:
type: number
required:
- level
- uuid
responses:
200:
content:
application/json:
schema:
type: object
properties:
alertId:
type: integer
accessCode:
type: string
code:
type: string
```
The OpenAPI specification is automatically generated from these YAML files and exposed through:
```javascript
// spec-openapi/index.js
const apiSpec = {
openapi: "3.0.3",
info: {
title: "HelpMe Project API",
version: "1.0.0"
},
components: {
schemas: {},
},
paths: {},
"x-security-sets": {
auth: ["bearerAuth", "cookieAuth"]
}
}
```
### Core Service Structure
The project implements Modjo's microservices architecture across several services:
```
services/
├── api/ # Main API service
├── app/ # Frontend application
├── backoffice/ # Admin interface
├── files/ # File handling service
├── hasura/ # GraphQL engine
├── tasks/ # Background task processing
├── watchers/ # Event monitoring
└── web/ # Web interface
```
### Modjo Configuration
The main API service configuration demonstrates core Modjo integration:
```javascript
modjo({
plugins: {
config: {
context: (ctx) => {
ctx.set("customConfig", customConfig)
}
},
oa: {
pluginName: "oa",
dependencies: {
sentry: {},
postgres: {},
amqp: {},
redisQueueDedup: {
pluginName: "ioredis",
// Redis configuration
},
redisHotGeodata: {
pluginName: "ioredis",
// Geodata cache configuration
}
}
}
},
dependencies: {
oapi: {
pluginName: "microservice-oapi",
dependencies: {
oaGraphql: {
pluginName: "oa-graphql"
},
hasura: {
pluginName: "hasura"
}
}
}
}
})
```
### Frontend Integration
The React Native frontend application (`/lab/alerte-secours/as-app/src`) implements a comprehensive GraphQL integration:
```
src/
├── gql/ # GraphQL core definitions
│ ├── mutations/ # Mutation operations
│ └── queries/ # Query operations
├── scenes/ # Feature-specific components with GraphQL operations
├── hooks/ # Custom Apollo hooks
└── containers/ # Reusable GraphQL-aware components
```
#### Key GraphQL Patterns
1. **Real-time Subscriptions**
```javascript
// Alert subscription example
export const ALERTING_SUBSCRIPTION = gql`
subscription alertingSubscription($cursor: bigint!) {
alerting(where: { id: { _gt: $cursor } }) {
id
alert_id
user_id
device_id
}
}
`;
```
2. **Authentication Mutations**
```javascript
// Login mutation
export const LOGIN_USER_TOKEN_MUTATION = gql`
mutation loginUserToken($authTokenJwt: String!) {
userBearerJwt: doAuthLoginToken(authTokenJwt: $authTokenJwt) {
userBearerJwt
}
}
`;
```
3. **Alert Operations**
```javascript
// Send alert mutation
export const SEND_ALERT_MUTATION = gql`
mutation sendAlert($alertSendAlertInput: AlertSendAlertInput!) {
alertId: doAlertSendAlert(alertSendAlertInput: $alertSendAlertInput) {
alertId
code
accessCode
}
}
`;
```
4. **Custom Apollo Hooks**
```javascript
// Error handling hook
function useMutationWithError(...args) {
const [action, ...res] = useMutation(...args);
return [withError(action), ...res];
}
```
5. **Real-time Message System**
```javascript
// Message subscription
export const SELECT_STREAM_MESSAGES_SUBSCRIPTION = gql`
subscription selectStreamMessageSubscription($cursor: Int) {
message(where: { id: { _gt: $cursor } }) {
id
content
created_at
user_id
}
}
`;
```
#### Integration Features
1. **Real-time Updates**
- WebSocket-based subscriptions for alerts
- Live message streaming
- Location updates
2. **Authentication Flow**
- JWT-based authentication
- Token refresh handling
- Role-based access control
3. **Geolocation Features**
- Real-time location tracking
- Geocoding integration
- What3Words integration
4. **Offline Support**
- Apollo cache management
- Optimistic updates
- Error handling and retry logic
## Code Examples
### 1. Alert Operation Implementation
```javascript
// Alert sending implementation
async function doAlertSendAlert(req) {
const { deviceId, userId } = reqCtx.get("session")
// Database transaction
await sql.begin(async () => {
// Insert alert record
const [{ id }] = await sql`
INSERT INTO "alert" (...)
VALUES (...)
RETURNING id
`
// Create alerting record
await sql`
INSERT INTO "alerting" (...)
VALUES (...)
`
})
// Parallel task execution
await async.parallel([
// Redis geolocation indexing
async () => redis.geoadd("alert", longitude, latitude, alertId),
// Background tasks
async () => notifyAround && addTask(tasks.GEOCODE_ALERT, {...}),
async () => notifyRelatives && addTask(tasks.RELATIVE_ALERT, {...})
])
}
```
### 2. Redis Integration
The project uses Redis for two distinct purposes:
1. Queue Deduplication:
```javascript
redisQueueDedup: {
pluginName: "ioredis",
context: (ctx) => {
ctx.set("config", {
redis: {
host: process.env.REDIS_QUEUE_DEDUP_HOST,
port: process.env.REDIS_QUEUE_DEDUP_PORT || "6379",
username: process.env.REDIS_QUEUE_DEDUP_USERNAME || "default",
password: process.env.REDIS_QUEUE_DEDUP_PASSWORD,
db: process.env.REDIS_QUEUE_DEDUP_DB || "0"
}
})
}
}
```
2. Hot Geodata Caching:
```javascript
redisHotGeodata: {
pluginName: "ioredis",
context: (ctx) => {
ctx.set("config", {
redis: {
host: process.env.REDIS_HOT_GEODATA_HOST,
port: process.env.REDIS_HOT_GEODATA_PORT || "6379",
username: process.env.REDIS_HOT_GEODATA_USERNAME || "default",
password: process.env.REDIS_HOT_GEODATA_PASSWORD,
db: process.env.REDIS_HOT_GEODATA_DB || "0"
}
})
}
}
```
### 2. API Integration
The project uses Modjo's OpenAPI (oa) plugin with GraphQL support:
```javascript
oapi: {
pluginName: "microservice-oapi",
dependencies: {
oaGraphql: {
pluginName: "oa-graphql"
},
hasura: {
pluginName: "hasura"
}
}
}
```
## Specific Customizations
### 1. Error Tracking Configuration
Custom Sentry configuration for error tracking:
```javascript
global.modjoSentryConfig = {
package: require("../package.json"),
options: {}
}
require("common/sentry/instrument")
```
### 2. CQRS Implementation
The project implements CQRS with eventual consistency:
- Tasks and requests are made idempotent
- Event-driven architecture using AMQP
- Separate services for commands and queries
- Eventual consistency handled through message queues
## Common Issues & Troubleshooting
### 1. Redis Connection Issues
Problem: Redis connection failures
Solution:
- Verify environment variables are properly set
- Check Redis server availability
- Confirm network connectivity between services
- Validate authentication credentials
### 2. GraphQL Integration
Problem: GraphQL schema synchronization issues
Solution:
- Ensure Hasura metadata is up to date
- Verify PostgreSQL connection
- Check GraphQL endpoint configuration
- Review schema changes in version control
### 3. Message Queue Deduplication
Problem: Duplicate message processing
Solution:
- Verify Redis deduplication configuration
- Check message IDs are unique
- Ensure proper queue configuration
- Monitor Redis memory usage
### 4. Microservices Communication
Problem: Inter-service communication failures
Solution:
- Check service discovery configuration
- Verify network connectivity
- Ensure proper authentication between services
- Monitor service health endpoints


@ -0,0 +1 @@
<mxfile host="Electron" modified="2021-09-18T04:58:45.014Z" agent="5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) draw.io/14.6.13 Chrome/89.0.4389.128 Electron/12.0.7 Safari/537.36" etag="VOx330-yWAjKHvVesvTI" version="14.6.13" type="device"><diagram id="C5RBs43oDa-KdzZeNtuy" name="Page-1">7VpZd5s6EP41Pid5cA6LDfjRzto2aZubrXnqkUEGJTJyQV7Ir7+SEbu8JXjJvc2Dg0ZCAs03882MaOinw9llAEbeDXEgbmiKM2voZw1NMw2F/XJBFAtUU9diiRsgR8gywR16g0IobnTHyIFhYSAlBFM0Kgpt4vvQpgUZCAIyLQ4bEFxcdQRcWBHc2QBXpU/IoV4stTQzk19B5HrJyqrRiXuGIBks3iT0gEOmOZF+3tBPA0JofDWcnULMNy/Zl6cv0RO+fjUuv96Gf8BD79v998dmPNnFJrekrxBAn9Y7tdDlBOCx2C/xrjRKNjAgY9+BfBKlofc8OsTsUmWXL5DSSCgcjClhIhJQj7jEB/iakJEYNyA+FcNU3oa+0+WKZe0+JvZrLLpAGIs1WEuMt1grpAF5TXXHJ0gVwQdj0Ie4B+xXd/6gpwSTgHX5xId8KoeBQbxL9nDnmbS35t4KHYRkHNhwyThdQBwELhTzPc9mxm0Y6lj7dvfSGcJHzfGTcfz5cjgVmruEZAhpELEBAcSAokkRzEDYhJuOy/TOLoTqN4CBXoHBKUZ8QzQDDJkee34/5P8SI+U2zHcDBhMYLEEM19bUQxTejcB826bMyRRRlEcH24Cei0EYCt2uUP1mqmNPSuFs6WYnvYnTEz6vmXiAaeZA1ETm5ZxHS/m4fmbo9p/WWwQ7g5+TiES92c33m2arop+I+dTyvq8Ge9GWpdYqseqctuAM0V/89pO2aD3nes5miQXzRpSZ8yb2PmAdiRU3NN1pQ8tppWDI9VhaXzcMDpkAOBytub6O6SimWb95G1XzlirMqNu8xa0/CeJWmSBVt5QTtaVZbVP8FnCr6SU8xo8t5ihBMn2otVB6/vJwfXXx2G1GP77ePU6jh17Xb1oVlPpknyBVcxDNALsIpOzdf+Ububt4M7tt3kruC9me0gTegnTmsiK+lxtACfIDy4a2LYN832q32ooc8hAY/bk5SKG3XRuQjjP3yWjGQkZTPLYYW3FMvd+Uba/PGg39ospiHhn2x+FqBitwEqezCzBEmAPkCuIJpMgGEp4DGLk+a9jsoRiHSsmOLYl8l7WMrHU/N4Jma4v8p7eK/NeW0J8ioT9rW/RXDUoP1otsGNqudh4SSzeZqUOJ33AAtAZSv2HYFuwP6qdDc81oV9lXtCuFk1oNpwre4GVKDye2UlYALqGtjKme830LaGtDoH42EC6jhANJucwKBodjytYlcz7SDMxepddnyZXh8ivgOD982GUw/eIjeh8j9T+Wd7VLvGNKeEffUtolfYNqQPu9GtD+rY/klL3SuISqlROd/RWzFaHF96ZEyRAyGISwkN9U0qZ0LbG0bpYQFPucOhIl6WZ1Krh6riGd/2TQqxlYSX16hygySygqp9v1oWhZUFWo3ca04aBJAUrGnzEvUveYH6ZNkXh0ec09zj3S/oRsknnYY82niqVN/kB+yNx5OX06ylrHe2OlJKHCcEB3EJasT2tmidaMNauJRg20JgVONfzdb51md8XEWior7wlcl50B1Ba4Ztza6ZjvcYNsZ0GUGzDiriuUeMkFPNpRStiMJ1zbxb7DM8oTvP8HmtcojW8fsfKCTe0J/3oA3HXFu1plXHQuc81joSJgKhVATinIBrgrOobIcWJswhC9gf58Po4KYZds8nav0T5L9VzhpPRYXdzcSDcsD4nFNrTEx2gds7OtUKuuCGnxsWZcBC4ebs7LPsqRT/g/OBuhADrHf4vD5cNR88CKw9UkfRzyoHbvZbx8Ea/eunFa9VPzNb+0saDmdyhFvGUHprv/bmIBsZRi9gp8FxDLxhFUq11YR10WQtXFWvsJj955wLrSNLYB61oOT9fFuVRHygdh/SGXWg1rstp0uTDtEF6UviYu8muoSm+LtcqlZXWXteWlIXLpEIq9Nxogtl2l4ERwmg14vYU1OIY8/gtG6DC3vPwVVRqb76Lusdim9lUwEwo8Oq6aEB83HjmAwkphjd/1GzmfvqomyUzWQ9zaVTV1i2U11sw+841ZOPtYWj//Fw==</diagram></mxfile>


@ -0,0 +1 @@
<mxfile host="Electron" modified="2021-08-15T12:35:12.647Z" agent="5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) draw.io/14.6.13 Chrome/89.0.4389.128 Electron/12.0.7 Safari/537.36" etag="uRxsNaGn7MeyV7v3Pj5x" version="14.6.13" type="device"><diagram id="C5RBs43oDa-KdzZeNtuy" name="Page-1">7ZbbjtowEIafJhK9YJVDOezlQll62K5abdtVryqTDImL48k6zgb69LWJTRJCoVSoUqVyEezff3yab0Zxgmm6nguSJe8xAub4brR2gleO74+9QD21sKkEbxgMKiUWNDJaLTzQH2BE16gFjSBvGSUikzRriyFyDqFsaUQILNu2JbL2qhmJoSM8hIR11UcaycScyx/V+mugcWJX9obX1UhKrNmcJE9IhGVDCmZOMBWIsmql6ykwfXn2Xh7fbB7Z3Wo4f/sxfyKfJ+8+3X/pV5PdnvPK7ggCuLzs1H419TNhhbkvc1a5sRcosOAR6ElcJ5gkMmWq6anmd5ByYwJOColKQiETjJETdoeYGd8SuTQ2T/eBRzc6sKq/YBiuKumWMmbWUD3jH6teLgWudrHTE+wCoc2MLIBNSLiKtxudIkOhhjhy0FNFCgZzlnpzs1qd/ObdmhjkWIgQjvhMuuh1G/yZiMwBU5BiowwCGJH0uQ0pMazHO9/u1Q9I1f581+SlIvDK9ceDkfeyehqcTZYO9Gg9bKNq55dExCDNlDUyKipk07Bl2pAf3URr1WAPwLPsqlGtb3uN66qlLdNn8B10+CahpMgdvZdbhjHl/YzkeYkicvwhUwhMFkK1Yt3KMFd/N92B/hYZ+5rbE/BUUAHRi8PW7TqOP9VgpISyXpmA3sIzCLqk29f0WJYoaL/xIl2AOGBxe5ppjc5usV+mqk6TMqESHjKy5bVU1b2dvs20VBGexEwdyCTViZwjjMZctRks5dkppE4kYX00OezosA1M33ONUNaV3DMYuUmzilvUDmVUg7jzgRp3gLrH/yXzWLxPlkITbfcqUL9WwG0U/7RYWgsulzm0St3p4jTaI6iq/J2aealCdd3h6ivkHbBOx+afRu/CYNlP1r9I0WiPouBSFKlu/aVZ2evv9WD2Ew==</diagram></mxfile>

File diff suppressed because one or more lines are too long

13
jsconfig.json Normal file

@ -0,0 +1,13 @@
{
"compilerOptions": {
"target": "ES6",
"module": "commonjs",
"allowSyntheticDefaultImports": true,
"baseUrl": ".",
"paths": {
"~/*": ["./services/*"],
"@modjo/*": ["./node_modules/@modjo/*"]
}
},
"exclude": ["node_modules", "**/node_modules/*"]
}


@ -0,0 +1,47 @@
const { default: axios } = require("axios")
const qs = require("qs")
const { ctx } = require("@modjo/core")
// see https://nominatim.org/release-docs/latest/api/Reverse/
module.exports = async function nominatimReverse(coords, options = {}) {
const config = ctx.get("config.project")
const { nominatimUrl } = config
const logger = ctx.require("logger")
const [lon, lat] = coords
const search = qs.stringify({
format: "json", // see https://nominatim.org/release-docs/latest/api/Output/
zoom: 18,
...options,
lat,
lon,
})
let data
const url = `${nominatimUrl}/reverse?${search}`
try {
const res = await axios.request({
url,
method: "get",
headers: {
"accept-language": "fr", // RFC1766 ISO639
},
})
if (res.status !== 200) {
logger.error(
{ res, url },
"nominatim server did not answer with a HTTP code 200"
)
}
data = res.data
} catch (e) {
if (e.response?.data)
logger.error(
{ responseData: e.response.data, error: e },
"nominatim reverse failed"
)
else logger.error({ url, error: e }, "nominatim reverse failed")
}
return data
}


@ -0,0 +1,32 @@
const { ConvertTo3waClient } = require("@what3words/api")
const { ctx } = require("@modjo/core")
// see https://developer.what3words.com/public-api
const what3wordsConfig = {
host: "https://api.what3words.com",
apiVersion: "v3",
}
module.exports = async function what3words(coords, options = {}) {
const config = ctx.get("config.project")
const { what3wordsApiKey } = config
const w3wClient = ConvertTo3waClient.init(what3wordsApiKey, what3wordsConfig)
const [lng, lat] = coords
let data
try {
data = await w3wClient.run({
coordinates: { lat, lng },
language: "fr",
format: "json",
...options,
})
// data = res.data
} catch (e) {
console.error(e)
}
return data
}

24
libs/common/jwks/jwks.js Normal file

@ -0,0 +1,24 @@
const fs = require("fs-extra")
const fnv = require("fnv-plus")
module.exports = async function createJwks(env) {
const { JWK_FILE, OLDJWK_FILE } = env
const loadJwkFile = async (file) => {
const rawJwk = await fs.readFile(file, { encoding: "utf-8" })
const jwk = JSON.parse(rawJwk)
if (!jwk.kid) {
// derive a stable key id from the raw JWK contents when none is provided
jwk.kid = fnv.hash(rawJwk, 128).hex()
}
return jwk
}
const keys = []
keys.push(await loadJwkFile(JWK_FILE))
if (OLDJWK_FILE) {
keys.push(await loadJwkFile(OLDJWK_FILE))
}
return keys
}


@ -0,0 +1,79 @@
const postgres = require("postgres")
const PG_FOREIGN_KEY_VIOLATION = "23503"
const PG_UNIQUE_VIOLATION = "23505"
const PG_CHECK_VIOLATION = "23514"
const PG_NOT_NULL_VIOLATION = "23502"
const PG_RESTRICT_VIOLATION = "23001"
const PG_INVALID_TEXT_REPRESENTATION = "22P02"
const PG_NUMERIC_VALUE_OUT_OF_RANGE = "22003"
const PG_STRING_DATA_RIGHT_TRUNCATION = "22001"
const PG_DIVISION_BY_ZERO = "22012"
const PG_INVALID_PARAMETER_VALUE = "22023"
const PG_INTEGRITY_CONSTRAINT_VIOLATION = "23000"
const PG_INVALID_CURSOR_STATE = "24000"
const PG_TRANSACTION_ROLLBACK = "40000"
const PG_SERIALIZATION_FAILURE = "40001"
const PG_DEADLOCK_DETECTED = "40P01"
const PG_SYNTAX_ERROR = "42601"
const PG_INSUFFICIENT_PRIVILEGE = "42501"
const PG_INVALID_CATALOG_NAME = "3D000"
const PG_INVALID_SCHEMA_NAME = "3F000"
const PG_INVALID_COLUMN_REFERENCE = "42703"
const PG_DUPLICATE_COLUMN = "42701"
const PG_DUPLICATE_DATABASE = "42P04"
const PG_DUPLICATE_OBJECT = "42710"
const PG_DUPLICATE_TABLE = "42P07"
const PG_DATA_EXCEPTION = "22000"
async function ignoreError(sqlQuery, errorCodes = []) {
if (!Array.isArray(errorCodes)) {
errorCodes = [errorCodes]
}
try {
const result = await sqlQuery
return result
} catch (error) {
if (
error instanceof postgres.PostgresError &&
(errorCodes.includes(error.code) || errorCodes.includes("*"))
) {
console.log("ignoring error", error)
} else {
throw error
}
}
}
async function ignoreForeignKeyViolation(sqlQuery) {
return ignoreError(sqlQuery, [PG_FOREIGN_KEY_VIOLATION])
}
module.exports = {
ignoreError,
ignoreForeignKeyViolation,
PG_FOREIGN_KEY_VIOLATION,
PG_UNIQUE_VIOLATION,
PG_CHECK_VIOLATION,
PG_NOT_NULL_VIOLATION,
PG_RESTRICT_VIOLATION,
PG_INVALID_TEXT_REPRESENTATION,
PG_NUMERIC_VALUE_OUT_OF_RANGE,
PG_STRING_DATA_RIGHT_TRUNCATION,
PG_DIVISION_BY_ZERO,
PG_INVALID_PARAMETER_VALUE,
PG_INTEGRITY_CONSTRAINT_VIOLATION,
PG_INVALID_CURSOR_STATE,
PG_TRANSACTION_ROLLBACK,
PG_SERIALIZATION_FAILURE,
PG_DEADLOCK_DETECTED,
PG_SYNTAX_ERROR,
PG_INSUFFICIENT_PRIVILEGE,
PG_INVALID_CATALOG_NAME,
PG_INVALID_SCHEMA_NAME,
PG_INVALID_COLUMN_REFERENCE,
PG_DUPLICATE_COLUMN,
PG_DUPLICATE_DATABASE,
PG_DUPLICATE_OBJECT,
PG_DUPLICATE_TABLE,
PG_DATA_EXCEPTION,
}
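A usage sketch for these helpers (the require path and the `delivery` table are hypothetical): ignoring a unique-key violation makes a redelivered insert idempotent.
```javascript
const postgres = require("postgres")
// hypothetical path to the module above
const { ignoreError, PG_UNIQUE_VIOLATION } = require("./pg-errors")

const sql = postgres()

// a redelivered message would insert the same row; ignoring the
// unique violation keeps the handler idempotent
async function recordDelivery(uuid) {
  await ignoreError(
    sql`INSERT INTO "delivery" (uuid) VALUES (${uuid})`,
    PG_UNIQUE_VIOLATION
  )
}
```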


@ -0,0 +1,47 @@
const { ctx } = require("@modjo/core")
const { PhoneNumberUtil } = require("google-libphonenumber")
function parseFullNumber(n) {
const phoneUtil = PhoneNumberUtil.getInstance()
const num = phoneUtil.parse(n)
if (!num.hasNationalNumber()) {
throw new Error("invalid number")
}
const nationalNumber = num.getNationalNumberOrDefault().toString()
const code = num.getCountryCodeOrDefault()
const countryCode = phoneUtil.getRegionCodeForCountryCode(code)
return {
countryCode,
code: code.toString(),
nationalNumber,
}
}
function parseFullPhoneNumber(fullPhoneNumber) {
let phoneNumberObject
try {
phoneNumberObject = parseFullNumber(fullPhoneNumber)
} catch (err) {
const logger = ctx.require("logger")
logger.debug({ error: err }, "unable to parse phone number")
logger.error({ error: err }, "Error parsing phone number")
throw err
}
const { countryCode, nationalNumber } = phoneNumberObject
return { countryCode, nationalNumber }
}
function isFullPhoneNumber(fullPhoneNumber) {
try {
parseFullNumber(fullPhoneNumber)
return true
} catch (err) {
return false
}
}
module.exports = {
parseFullNumber,
parseFullPhoneNumber,
isFullPhoneNumber,
}
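For reference, a hedged usage sketch (the module path is hypothetical):
```javascript
const { isFullPhoneNumber, parseFullPhoneNumber } = require("./phone")

isFullPhoneNumber("+33612345678") // true
parseFullPhoneNumber("+33612345678")
// -> { countryCode: "FR", nationalNumber: "612345678" }
```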


@ -0,0 +1,23 @@
module.exports = function () {
function validate(str) {
const parts = str.split(".")
if (parts.length !== 3) {
return false
}
return parts.every((part) => {
try {
const decoded = Buffer.from(part, "base64url").toString("utf-8")
return decoded.length > 0
} catch {
return false
}
})
}
return {
type: "string",
validate,
}
}


@ -0,0 +1,14 @@
function validateEmail(email) {
// Regular expression to validate the email format
const regex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/
// Test the email against the regular expression
return regex.test(email)
}
module.exports = function () {
return {
type: "string",
validate: validateEmail,
}
}


@ -0,0 +1,6 @@
module.exports = function ({ validators: { isLatitude } }) {
return {
type: "number",
validate: isLatitude,
}
}


@ -0,0 +1,6 @@
module.exports = function ({ validators: { isLocation } }) {
return {
type: "string",
validate: isLocation,
}
}


@ -0,0 +1,6 @@
module.exports = function ({ validators: { isLongitude } }) {
return {
type: "number",
validate: isLongitude,
}
}


@ -0,0 +1,6 @@
module.exports = function ({ validators: { isInteger } }) {
return {
type: "number",
validate: isInteger,
}
}


@ -0,0 +1,8 @@
const { validate } = require("uuid")
module.exports = function () {
return {
type: "string",
validate,
}
}


@ -0,0 +1,9 @@
module.exports = function ({ services: { auth } }) {
const headerPrefix = "Bearer "
const headerPrefixLength = headerPrefix.length
return async function bearerAuth(req, scopes, _schema) {
const header = req.headers.authorization
// guard against a missing or malformed header instead of throwing on .slice
if (!header || !header.startsWith(headerPrefix)) {
return false
}
return auth(header.slice(headerPrefixLength), scopes)
}
}


@ -0,0 +1,11 @@
const { ctx } = require("@modjo/core")
module.exports = function ({ services: { auth } }) {
const config = ctx.require("config.project")
const { bearerCookieName } = config
return async function cookieAuth(req, scopes, _schema) {
const jwtoken = req.cookies[bearerCookieName]
return auth(jwtoken, scopes)
}
}


@ -0,0 +1,44 @@
const { jwtVerify } = require("jose")
const getHasuraClaimsFromJWT = require("@modjo/hasura/utils/jwt/get-hasura-claims-from-jwt")
const { ctx } = require("@modjo/core")
const { reqCtx } = require("@modjo/express/ctx")
module.exports = function () {
const castIntVars = ["deviceId", "userId"]
function sessionVarsFromClaims(claims) {
const session = { ...claims }
for (const castIntVar of castIntVars) {
session[castIntVar] = parseInt(session[castIntVar], 10)
}
return session
}
const config = ctx.require("config.project")
const { claimsNamespace, JWKSet } = config
function isScopeAllowed(session, scopes) {
const { allowedRoles } = session
return scopes.some((scope) => allowedRoles.includes(scope))
}
return async function auth(jwt, scopes) {
try {
if (!jwt || !(await jwtVerify(jwt, JWKSet))) {
return false
}
} catch (err) {
const logger = ctx.require("logger")
logger.error({ error: err }, "jwtVerify failed")
return false
}
const claims = getHasuraClaimsFromJWT(jwt, claimsNamespace)
const session = sessionVarsFromClaims(claims)
if (!isScopeAllowed(session, scopes)) {
return false
}
reqCtx.set("session", session)
return true
}
}


@ -0,0 +1,32 @@
const { ctx } = require("@modjo/core")
const { reqCtx } = require("@modjo/express/ctx")
const { RateLimiterMemory, RateLimiterRes } = require("rate-limiter-flexible")
module.exports = () => {
const logger = ctx.require("logger")
return (options = {}) => {
const rateLimiter = new RateLimiterMemory({
...options,
})
return async (req, res, next) => {
const { ip } = req
const { userId } = reqCtx.get("session")
const key = `${ip}.${userId}`
try {
await rateLimiter.consume(key)
next()
} catch (error) {
if (!(error instanceof RateLimiterRes)) {
throw error
}
logger.error(
{ ip, userId, key },
"rate-limiter-flexible : Too Many Requests"
)
res.status(429).send("Too Many Requests")
}
}
}
}


@ -0,0 +1,10 @@
module.exports = function () {
const rolesByLevel = {
admin: 1,
user: 100,
}
return function sortRolesByLevel(roles) {
return roles.sort((a, b) => rolesByLevel[a] - rolesByLevel[b])
}
}


@ -0,0 +1,7 @@
type: object
additionalProperties: true
properties:
message:
type: string
code:
type: integer


@ -0,0 +1,9 @@
type: object
additionalProperties: true
properties:
message:
type: string
code:
type: integer
minimum: 400
maximum: 599


@ -0,0 +1,3 @@
type: http
scheme: bearer
bearerFormat: JWT

Some files were not shown because too many files have changed in this diff