generated from avior/template-web-astro
chore: up to date to latest standards
This commit is contained in:
parent e322b0dfdf
commit 82190b2696
30
.github/workflows/tests.yml
vendored
@ -23,7 +23,25 @@ jobs:
|
||||
|
||||
- name: run Astro and TypeScript checks
|
||||
run: bun run check
|
||||
run:
|
||||
|
||||
lint:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup BunJS
|
||||
uses: oven-sh/setup-bun@v2
|
||||
|
||||
- name: Install dependencies
|
||||
run: bun install --frozen-lockfile
|
||||
|
||||
- name: Lint code
|
||||
run: bun run lint
|
||||
|
||||
unit-tests:
|
||||
name: Unit tests
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
@ -41,8 +59,8 @@ jobs:
|
||||
# run: npm run install:test
|
||||
|
||||
- name: Test
|
||||
# run : npm run test
|
||||
run: bun run test:unit
|
||||
|
||||
secrets-scanner:
|
||||
name: Secrets Scanning
|
||||
runs-on: ubuntu-latest
|
||||
@ -72,13 +90,18 @@ jobs:
|
||||
- name: Setup BunJS
|
||||
uses: oven-sh/setup-bun@v2
|
||||
|
||||
- name: Install project dependencies
|
||||
- name: Install dependencies
|
||||
run: bun install --frozen-lockfile
|
||||
|
||||
- name: Run coverage
|
||||
run: bun run test:unit
|
||||
continue-on-error: true # continue on error as we still want reporting to be done
|
||||
|
||||
# TODO: re-enable after sonar builtins are fixed
|
||||
# - name: Run Linter
|
||||
# run: npm run lint -- -f json -o eslint-report.json
|
||||
# continue-on-error: true # continue on error as we still want reporting to be done
|
||||
|
||||
- name: Scan repository
|
||||
uses: Aviortheking/sonarqube-action@v5.1.0
|
||||
with:
|
||||
@ -87,3 +110,4 @@ jobs:
|
||||
projectKey: ${{ env.REPO_NAME }}
|
||||
args: >
|
||||
-Dsonar.javascript.lcov.reportPaths=./coverage/lcov.info
|
||||
# -Dsonar.eslint.reportPaths=eslint-report.json
|
||||
|
4
.gitignore
vendored
@ -20,8 +20,6 @@ pnpm-debug.log*
|
||||
# macOS-specific files
|
||||
.DS_Store
|
||||
|
||||
slicers/*
|
||||
|
||||
# Coverage
|
||||
coverage/
|
||||
|
||||
@ -29,3 +27,5 @@ coverage/
|
||||
/playwright/
|
||||
|
||||
/src/route.ts
|
||||
/src/models/migrations/list.ts
|
||||
/src/config/api-routes.d.ts
|
||||
|
@ -1,7 +1,11 @@
|
||||
import svelte from '@astrojs/svelte'
|
||||
import tailwind from "@astrojs/tailwind"
|
||||
import tailwind from '@astrojs/tailwind'
|
||||
import { defineConfig } from 'astro/config'
|
||||
import routing from './hooks/routing'
|
||||
import test from './hooks/test'
|
||||
import typesafeApi from './hooks/typesafe-api'
|
||||
import version from './hooks/version'
|
||||
import commantorHook from './node_modules/commantor/integrations/astro'
|
||||
|
||||
// const faviconHook = {
|
||||
// name: 'Favicon',
|
||||
@ -34,7 +38,9 @@ export default defineConfig({
|
||||
output: 'static',
|
||||
|
||||
// Add TailwindCSS
|
||||
integrations: [svelte(), tailwind(), routing()],
|
||||
integrations: [typesafeApi({
|
||||
output: 'src/config/api-routes.d.ts'
|
||||
}), test, svelte(), tailwind(), routing(), version(), commantorHook()],
|
||||
|
||||
// prefetch links
|
||||
prefetch: {
|
||||
|
16
cmd.ts
Normal file
@ -0,0 +1,16 @@
|
||||
#!/usr/bin/env bun
|
||||
/**
|
||||
* Initialize Commantor for terminal usage
|
||||
*
|
||||
* Simply run `./cmd.ts` to check it out!
|
||||
*/
|
||||
import commantor from 'commantor'
|
||||
|
||||
void commantor({
|
||||
path: './src/commands',
|
||||
hooks: {
|
||||
// load migrations into the app on setup
|
||||
'astro:config:setup': 'migrations:load',
|
||||
'astro:build:start': 'migrations:load'
|
||||
}
|
||||
})
|
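As a usage sketch, `./cmd.ts migrations:current` prints the current database version and `./cmd.ts migrations:migrate` migrates it to the latest version; both commands are defined under src/commands later in this commit.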
341
eslint.config.mjs
Normal file
@ -0,0 +1,341 @@
|
||||
import eslintPluginAstro from 'eslint-plugin-astro'
|
||||
import js from '@eslint/js'
|
||||
import jsxa11y from 'eslint-plugin-jsx-a11y'
|
||||
import tseslint from 'typescript-eslint'
|
||||
import stylistic from '@stylistic/eslint-plugin'
|
||||
import globals from 'globals'
|
||||
import customPlugin from './hooks/eslint-plugin/index.js'
|
||||
|
||||
export default [
|
||||
// eslint needs to have ignore placed here... WHY THE F*CK
|
||||
{
|
||||
ignores: [
|
||||
'node_modules/',
|
||||
'out/',
|
||||
'*.js',
|
||||
'__tests__/',
|
||||
'src/route.ts',
|
||||
'dist/',
|
||||
'.astro/',
|
||||
'.diaz/'
|
||||
]
|
||||
},
|
||||
|
||||
js.configs.recommended,
|
||||
...tseslint.configs.strictTypeChecked,
|
||||
...tseslint.configs.stylisticTypeChecked,
|
||||
// the Astro Plugin is doing too much shit and takes too much time
|
||||
...eslintPluginAstro.configs.recommended,
|
||||
// ...eslintPluginAstro.configs['jsx-a11y-strict'],
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
ecmaFeatures: {
|
||||
jsx: true,
|
||||
},
|
||||
// old option currently used by Astro
|
||||
project: true,
|
||||
// future option that will be used after the plugin migrate
|
||||
// projectService: {
|
||||
// allowDefaultProject: ['*.astro', '*.js'],
|
||||
// },
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
createDefaultProgram: false
|
||||
},
|
||||
globals: {
|
||||
...globals.node,
|
||||
...globals.browser,
|
||||
...globals.nodeBuiltin
|
||||
}
|
||||
},
|
||||
plugins: {
|
||||
'@stylistic': stylistic,
|
||||
'jsx-a11y': jsxa11y,
|
||||
'custom-plugin': customPlugin
|
||||
},
|
||||
|
||||
rules: {
|
||||
'custom-plugin/use-logger': [
|
||||
'warn'
|
||||
],
|
||||
'custom-plugin/prefer-text-content': [
|
||||
'warn'
|
||||
],
|
||||
'@stylistic/arrow-parens': [
|
||||
'error',
|
||||
'always'
|
||||
],
|
||||
'@stylistic/brace-style': ['error'],
|
||||
'@stylistic/eol-last': 'error',
|
||||
'@stylistic/indent': [
|
||||
'error',
|
||||
'tab',
|
||||
{
|
||||
SwitchCase: 1
|
||||
}
|
||||
],
|
||||
'@stylistic/linebreak-style': [
|
||||
'error',
|
||||
'unix'
|
||||
],
|
||||
'@stylistic/max-len': [
|
||||
'warn',
|
||||
{
|
||||
code: 256
|
||||
}
|
||||
],
|
||||
'@stylistic/member-delimiter-style': [
|
||||
'error',
|
||||
{
|
||||
multiline: {
|
||||
delimiter: 'none',
|
||||
requireLast: true
|
||||
},
|
||||
singleline: {
|
||||
delimiter: 'comma',
|
||||
requireLast: false
|
||||
}
|
||||
}
|
||||
],
|
||||
'@stylistic/new-parens': 'error',
|
||||
'@stylistic/no-extra-parens': 'off',
|
||||
'@stylistic/no-extra-semi': 'error',
|
||||
'@stylistic/no-multiple-empty-lines': 'error',
|
||||
'@stylistic/no-trailing-spaces': 'error',
|
||||
'@stylistic/quote-props': [
|
||||
'error',
|
||||
'consistent-as-needed'
|
||||
],
|
||||
'@stylistic/quotes': [
|
||||
'error',
|
||||
'single',
|
||||
{
|
||||
avoidEscape: true
|
||||
}
|
||||
],
|
||||
'@stylistic/semi': [
|
||||
'error',
|
||||
'never'
|
||||
],
|
||||
'@stylistic/space-before-function-paren': [
|
||||
'error',
|
||||
{
|
||||
anonymous: 'never',
|
||||
asyncArrow: 'always',
|
||||
named: 'never'
|
||||
}
|
||||
],
|
||||
'@stylistic/spaced-comment': [
|
||||
'error',
|
||||
'always',
|
||||
{
|
||||
block: {
|
||||
exceptions: [
|
||||
'*'
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
'@stylistic/type-annotation-spacing': 'error',
|
||||
'no-promise-executor-return': 'off',
|
||||
|
||||
|
||||
'@typescript-eslint/adjacent-overload-signatures': 'error',
|
||||
'@typescript-eslint/array-type': [
|
||||
'error',
|
||||
{
|
||||
default: 'generic'
|
||||
}
|
||||
],
|
||||
'@typescript-eslint/consistent-type-assertions': 'error',
|
||||
'@typescript-eslint/consistent-type-definitions': 'error',
|
||||
'@typescript-eslint/explicit-member-accessibility': [
|
||||
'error',
|
||||
{
|
||||
accessibility: 'explicit'
|
||||
}
|
||||
],
|
||||
'@typescript-eslint/member-ordering': 'error',
|
||||
'@typescript-eslint/no-deprecated': 'warn',
|
||||
'@typescript-eslint/no-empty-function': 'error',
|
||||
'@typescript-eslint/no-empty-interface': 'error',
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'@typescript-eslint/no-extra-parens': 'off',
|
||||
'@typescript-eslint/no-extraneous-class': [
|
||||
'warn',
|
||||
{
|
||||
allowStaticOnly: true
|
||||
}
|
||||
],
|
||||
'@typescript-eslint/no-misused-new': 'error',
|
||||
// TODO: re-enable after checking why it crashes
|
||||
'@typescript-eslint/no-misused-promises': 'off',
|
||||
'@typescript-eslint/no-namespace': 'error',
|
||||
'@typescript-eslint/no-non-null-assertion': [
|
||||
'warn'
|
||||
],
|
||||
'@typescript-eslint/no-parameter-properties': 'off',
|
||||
'@typescript-eslint/no-shadow': 'error',
|
||||
'@typescript-eslint/no-unnecessary-type-arguments': 'warn',
|
||||
'@typescript-eslint/no-unused-expressions': [
|
||||
'error',
|
||||
{
|
||||
allowTernary: true
|
||||
}
|
||||
],
|
||||
'@typescript-eslint/no-unused-vars': [
|
||||
'error',
|
||||
{
|
||||
args: 'all',
|
||||
argsIgnorePattern: '^_',
|
||||
caughtErrors: 'all',
|
||||
caughtErrorsIgnorePattern: '^_',
|
||||
destructuredArrayIgnorePattern: '^_',
|
||||
varsIgnorePattern: '^_',
|
||||
ignoreRestSiblings: true
|
||||
}
|
||||
],
|
||||
'@typescript-eslint/prefer-for-of': 'error',
|
||||
'@typescript-eslint/prefer-function-type': 'error',
|
||||
'@typescript-eslint/prefer-namespace-keyword': 'error',
|
||||
'@typescript-eslint/require-await': 'off',
|
||||
'@typescript-eslint/restrict-template-expressions': [
|
||||
'error',
|
||||
{
|
||||
allowNumber: true
|
||||
}
|
||||
],
|
||||
'@typescript-eslint/triple-slash-reference': 'error',
|
||||
'@typescript-eslint/unified-signatures': 'error',
|
||||
|
||||
|
||||
'arrow-body-style': 'error',
|
||||
'complexity': [
|
||||
'warn',
|
||||
10
|
||||
],
|
||||
'constructor-super': 'error',
|
||||
'curly': 'error',
|
||||
'dot-notation': 'error',
|
||||
'eqeqeq': [
|
||||
'error',
|
||||
'smart'
|
||||
],
|
||||
'for-direction': 'error',
|
||||
'getter-return': 'error',
|
||||
'guard-for-in': 'error',
|
||||
'id-blacklist': [
|
||||
'error',
|
||||
'any',
|
||||
'Number',
|
||||
'number',
|
||||
'String',
|
||||
'string',
|
||||
'Boolean',
|
||||
'boolean',
|
||||
'Undefined'
|
||||
],
|
||||
'id-length': [
|
||||
'warn',
|
||||
{
|
||||
exceptions: [
|
||||
'_'
|
||||
]
|
||||
}
|
||||
],
|
||||
'id-match': 'error',
|
||||
'max-classes-per-file': [
|
||||
'error',
|
||||
1
|
||||
],
|
||||
'max-depth': [
|
||||
'warn',
|
||||
2
|
||||
],
|
||||
'no-async-promise-executor': 'error',
|
||||
'no-bitwise': 'error',
|
||||
'no-caller': 'error',
|
||||
'no-compare-neg-zero': 'error',
|
||||
'no-cond-assign': 'error',
|
||||
'no-console': 'off',
|
||||
'no-constant-condition': 'error',
|
||||
'no-control-regex': 'warn',
|
||||
'no-debugger': 'error',
|
||||
'no-delete-var': 'error',
|
||||
'no-dupe-args': 'error',
|
||||
'no-dupe-else-if': 'error',
|
||||
'no-dupe-keys': 'error',
|
||||
'no-duplicate-case': 'error',
|
||||
'no-empty': [
|
||||
'error',
|
||||
{
|
||||
allowEmptyCatch: true
|
||||
}
|
||||
],
|
||||
'no-empty-character-class': 'error',
|
||||
'no-eval': 'error',
|
||||
'no-ex-assign': 'error',
|
||||
'no-extra-boolean-cast': 'error',
|
||||
'no-fallthrough': 'off',
|
||||
'no-func-assign': 'error',
|
||||
'no-import-assign': 'error',
|
||||
'no-inner-declarations': 'error',
|
||||
'no-invalid-regexp': 'error',
|
||||
'no-irregular-whitespace': 'error',
|
||||
'no-label-var': 'error',
|
||||
'no-loss-of-precision': 'error',
|
||||
'no-misleading-character-class': 'error',
|
||||
'no-multi-assign': 'error',
|
||||
'no-new-wrappers': 'error',
|
||||
'no-obj-calls': 'error',
|
||||
'no-promise-executor-return': 'error',
|
||||
'no-prototype-builtins': 'error',
|
||||
'no-regex-spaces': 'error',
|
||||
'no-setter-return': 'error',
|
||||
'no-shadow': [
|
||||
'error',
|
||||
{
|
||||
builtinGlobals: false,
|
||||
hoist: 'all'
|
||||
}
|
||||
],
|
||||
'no-shadow-restricted-names': 'error',
|
||||
'no-sparse-arrays': 'error',
|
||||
'no-template-curly-in-string': 'warn',
|
||||
'no-throw-literal': 'error',
|
||||
'no-undef': 'error',
|
||||
'no-undef-init': 'error',
|
||||
'no-underscore-dangle': 'off',
|
||||
'no-unexpected-multiline': 'error',
|
||||
'no-unreachable': 'warn',
|
||||
'no-unreachable-loop': 'warn',
|
||||
'no-unsafe-finally': 'error',
|
||||
'no-unsafe-negation': 'error',
|
||||
'no-unsafe-optional-chaining': 'error',
|
||||
'no-unused-expressions': [
|
||||
'error',
|
||||
{
|
||||
allowTernary: true
|
||||
}
|
||||
],
|
||||
'no-unused-labels': 'error',
|
||||
'no-unused-vars': 'off',
|
||||
'no-var': 'error',
|
||||
'object-shorthand': [
|
||||
'warn',
|
||||
'methods'
|
||||
],
|
||||
'one-var': [
|
||||
'error',
|
||||
'never'
|
||||
],
|
||||
'prefer-arrow-callback': 'warn',
|
||||
'prefer-const': 'error',
|
||||
'prefer-rest-params': 'warn',
|
||||
'radix': 'error',
|
||||
'require-atomic-updates': 'warn',
|
||||
'use-isnan': 'error',
|
||||
'valid-typeof': 'warn'
|
||||
}
|
||||
}
|
||||
]
|
66
hooks/eslint-plugin/index.js
Normal file
@ -0,0 +1,66 @@
|
||||
export default {
|
||||
rules: {
|
||||
'use-logger': {
|
||||
meta: {
|
||||
type: 'problem',
|
||||
docs: {
|
||||
description: 'Disallow use of console.log',
|
||||
category: 'Best Practices',
|
||||
recommended: true
|
||||
},
|
||||
messages: {
|
||||
noConsoleLog: "Using 'console.log' is not recommended for usage, use instead `Logger.info()` from `libs/Logger`."
|
||||
},
|
||||
schema: [] // No options for this rule
|
||||
},
|
||||
create(context) {
|
||||
return {
|
||||
MemberExpression(node) {
|
||||
if (
|
||||
node.object.name === 'console' &&
|
||||
node.property.name === 'log'
|
||||
) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'noConsoleLog'
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'prefer-text-content': {
|
||||
meta: {
|
||||
type: 'suggestion',
|
||||
docs: {
|
||||
description: 'Prefer textContent over innerText',
|
||||
category: 'Best Practices',
|
||||
recommended: false,
|
||||
},
|
||||
fixable: 'code',
|
||||
schema: [],
|
||||
messages: {
|
||||
useTextContent: "Use 'textContent' instead of 'innerText'.",
|
||||
},
|
||||
},
|
||||
create(context) {
|
||||
return {
|
||||
MemberExpression(node) {
|
||||
if (
|
||||
node.property &&
|
||||
node.property.name === 'innerText'
|
||||
) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'useTextContent',
|
||||
fix(fixer) {
|
||||
return fixer.replaceText(node.property, 'textContent')
|
||||
}
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
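A minimal sketch of what the two custom rules above report (the variable names are hypothetical):

// custom-plugin/use-logger: flags any `console.log` member expression
console.log('debug') // reported: use Logger.info() from libs/Logger instead

// custom-plugin/prefer-text-content: flags `innerText` and auto-fixes it to `textContent`
element.innerText = 'hello' // fixed to: element.textContent = 'hello'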
25
hooks/test.ts
Normal file
@ -0,0 +1,25 @@
|
||||
import type { AstroIntegration, AstroIntegrationLogger } from 'astro'
|
||||
|
||||
const setup = (step: keyof AstroIntegration['hooks']) => (ctx: { logger: AstroIntegrationLogger }) => {
|
||||
ctx.logger.info(step)
|
||||
}
|
||||
|
||||
const integration: AstroIntegration = {
|
||||
name: 'hooks',
|
||||
hooks: {
|
||||
'astro:config:setup': setup('astro:config:setup'),
|
||||
'astro:config:done': setup('astro:config:done'),
|
||||
'astro:server:setup': setup('astro:server:setup'),
|
||||
'astro:server:start': setup('astro:server:start'),
|
||||
'astro:server:done': setup('astro:server:done'),
|
||||
'astro:build:start': setup('astro:build:start'),
|
||||
'astro:build:setup': setup('astro:build:setup'),
|
||||
'astro:build:generated': setup('astro:build:generated'),
|
||||
'astro:build:ssr': setup('astro:build:ssr'),
|
||||
'astro:build:done': setup('astro:build:done'),
|
||||
'astro:route:setup': setup('astro:route:setup'),
|
||||
'astro:route:resolved': setup('astro:route:resolved')
|
||||
}
|
||||
}
|
||||
|
||||
export default integration
|
117
hooks/typesafe-api.ts
Normal file
@ -0,0 +1,117 @@
|
||||
import { objectMap, objectRemap } from '@dzeio/object-util'
|
||||
import type { AstroIntegration } from 'astro'
|
||||
import fs from 'fs/promises'
|
||||
|
||||
interface Config {
|
||||
output?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* get every file recursively in a specific directory
|
||||
*
|
||||
* @param path the path to search
|
||||
* @returns the list of files found recursively in the directory
|
||||
*/
|
||||
async function getFiles(path: string): Promise<Array<string>> {
|
||||
path = decodeURI(path)
|
||||
try {
|
||||
const dir = await fs.readdir(path)
|
||||
let files: Array<string> = []
|
||||
for (const file of dir) {
|
||||
if (file.startsWith('_')) {
|
||||
continue
|
||||
}
|
||||
const filePath = path + '/' + file
|
||||
if ((await fs.stat(filePath)).isDirectory()) {
|
||||
files = files.concat(await getFiles(filePath))
|
||||
} else if (file.endsWith('.ts')) {
|
||||
files.push(filePath)
|
||||
}
|
||||
}
|
||||
return files
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* format the path back to a URL usable by the app
|
||||
*
|
||||
* @param path the path to format
|
||||
* @returns the path formatted as a URL
|
||||
*/
|
||||
function formatPath(basePath: string, path: string): string {
|
||||
// remove the base path
|
||||
path = path.replace(decodeURI(basePath), '')
|
||||
|
||||
// handle the `/` endpoint
|
||||
if (path === '') {
|
||||
path = '/'
|
||||
}
|
||||
|
||||
return path
|
||||
}
|
||||
|
||||
async function run(config?: Config) {
|
||||
|
||||
// get the files list
|
||||
const files = await getFiles('./src/pages/api').then((ev) => ev.map((it) => formatPath('./src/pages/api', it)))
|
||||
|
||||
const methods = ['GET', 'POST', 'DELETE']
|
||||
|
||||
let content = ''
|
||||
const items: Record<string, Record<string, string>> = {}
|
||||
for (const entry of files) {
|
||||
const file = await fs.readFile('./src/pages/api' + entry, 'utf-8')
|
||||
const availableMethods: Array<string> = []
|
||||
for (const method of methods) {
|
||||
if (file.includes(method)) {
|
||||
availableMethods.push(method)
|
||||
}
|
||||
}
|
||||
if (availableMethods.length === 0) {
|
||||
continue
|
||||
}
|
||||
const prefix = entry.replace(/[/.[\]-]/g, '')
|
||||
content += `import type { ${availableMethods.map((it) => `${it} as ${prefix}${it}`).join((', '))} } from './pages/api${entry}'\n`
|
||||
|
||||
|
||||
let path = entry
|
||||
// remove the extension
|
||||
const lastDot = path.lastIndexOf('.')
|
||||
path = path.slice(0, lastDot)
|
||||
|
||||
// remove the index from the element
|
||||
if (path.endsWith('/index')) {
|
||||
path = path.replace('/index', '')
|
||||
}
|
||||
|
||||
items[path] = {
|
||||
...objectRemap(availableMethods, (value) => ({ key: value as string, value: `${prefix}${value as string}` }))
|
||||
}
|
||||
}
|
||||
|
||||
content += `\ninterface APIRoutes {
|
||||
${objectMap(items, (record, key) => `\t'${key}': {
|
||||
${objectMap(record, (value, method) => ` ${method}: typeof ${value}`).join('\n')}
|
||||
}`).join('\n')}
|
||||
}`
|
||||
|
||||
content += '\n\nexport default APIRoutes\n'
|
||||
|
||||
await fs.writeFile(config?.output ?? './src/api-routes.d.ts', content)
|
||||
}
|
||||
|
||||
/**
|
||||
* launch the integration
|
||||
* @returns the routing integration
|
||||
*/
|
||||
const integration = (config?: Config): AstroIntegration => ({
|
||||
name: 'routing',
|
||||
hooks: {
|
||||
'astro:build:setup': () => run(config),
|
||||
'astro:server:setup': () => run(config)
|
||||
}
|
||||
})
|
||||
|
||||
export default integration
|
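As a sketch of the output, assuming a single endpoint file src/pages/api/hello.ts exporting a GET handler, the hook above would generate roughly:

import type { GET as hellotsGET } from './pages/api/hello.ts'

interface APIRoutes {
	'/hello': {
		GET: typeof hellotsGET
	}
}

export default APIRoutes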
45
hooks/version.ts
Normal file
@ -0,0 +1,45 @@
|
||||
import type { AstroIntegration } from 'astro'
|
||||
import { exec as execSync } from 'node:child_process'
|
||||
import { promisify } from 'node:util'
|
||||
|
||||
const exec = promisify(execSync)
|
||||
|
||||
/**
|
||||
* launch the integration
|
||||
* @returns the routing integration
|
||||
*/
|
||||
const integration: () => AstroIntegration = () => ({
|
||||
name: 'version',
|
||||
hooks: {
|
||||
'astro:config:setup': async (setup) => {
|
||||
try {
|
||||
const commit = (await exec('git rev-parse HEAD')).stdout
|
||||
const branch = (await exec('git rev-parse --abbrev-ref HEAD')).stdout
|
||||
const tag = (await exec('git tag -l --points-at HEAD')).stdout
|
||||
const envs: Record<string, string> = {}
|
||||
if (commit) {
|
||||
envs['import.meta.env.GIT_COMMIT'] = JSON.stringify(commit.slice(0, 8).trim())
|
||||
}
|
||||
if (branch) {
|
||||
envs['import.meta.env.GIT_BRANCH'] = JSON.stringify(branch.trim())
|
||||
}
|
||||
if (tag) {
|
||||
envs['import.meta.env.GIT_TAG'] = JSON.stringify(tag.trim())
|
||||
}
|
||||
|
||||
setup.updateConfig({
|
||||
vite: {
|
||||
define: {
|
||||
...envs
|
||||
}
|
||||
}
|
||||
})
|
||||
} catch (error: any) {
|
||||
setup.logger.warn(error.toString())
|
||||
setup.logger.warn('could not setup GIT envs')
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
export default integration
|
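A small sketch of reading the values injected above from application code (the fallback is an assumption; the names match the `envs` keys set by the hook):

const commit: string = import.meta.env.GIT_COMMIT ?? 'dev'
const branch: string | undefined = import.meta.env.GIT_BRANCH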
48
package.json
@ -17,26 +17,36 @@
|
||||
"install:test": "playwright install --with-deps"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tauri-apps/api": "^2",
|
||||
"@tauri-apps/plugin-shell": "^2"
|
||||
"@dzeio/schema": "^0.4.3",
|
||||
"@lucide/astro": "^0.503.0",
|
||||
"@sentry/node": "^9.13.0",
|
||||
"@tauri-apps/api": "^2.5.0",
|
||||
"@tauri-apps/plugin-shell": "^2.2.1",
|
||||
"commantor": "^0.5.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@astrojs/check": "^0",
|
||||
"@astrojs/svelte": "^7",
|
||||
"@astrojs/tailwind": "^6",
|
||||
"@dzeio/logger": "^3",
|
||||
"@dzeio/object-util": "^1",
|
||||
"@dzeio/url-manager": "^1",
|
||||
"@playwright/test": "^1",
|
||||
"@tauri-apps/cli": "^2",
|
||||
"@types/node": "^22",
|
||||
"@vitest/coverage-v8": "^3",
|
||||
"astro": "^5",
|
||||
"lucide-astro": "^0",
|
||||
"sharp": "^0",
|
||||
"simple-icons-astro": "^14",
|
||||
"tailwindcss": "^3",
|
||||
"typescript": "^5",
|
||||
"vitest": "^3"
|
||||
"@astrojs/check": "^0.9.4",
|
||||
"@astrojs/svelte": "^7.0.10",
|
||||
"@astrojs/tailwind": "^6.0.2",
|
||||
"@dzeio/logger": "^3.2.1",
|
||||
"@dzeio/object-util": "^1.9.1",
|
||||
"@dzeio/url-manager": "^1.1.2",
|
||||
"@eslint/js": "^9.25.1",
|
||||
"@playwright/test": "^1.52.0",
|
||||
"@stylistic/eslint-plugin": "^4.2.0",
|
||||
"@tauri-apps/cli": "^2.5.0",
|
||||
"@types/node": "^22.14.1",
|
||||
"@vitest/coverage-v8": "^3.1.2",
|
||||
"astro": "^5.7.4",
|
||||
"eslint": "^9.25.1",
|
||||
"eslint-plugin-astro": "^1.3.1",
|
||||
"eslint-plugin-jsx-a11y": "^6.10.2",
|
||||
"globals": "^16.0.0",
|
||||
"sharp": "^0.34.1",
|
||||
"simple-icons-astro": "^14.12.3",
|
||||
"tailwindcss": "^3.4.17",
|
||||
"typescript": "^5.8.3",
|
||||
"typescript-eslint": "^8.31.0",
|
||||
"vitest": "^3.1.2"
|
||||
}
|
||||
}
|
||||
|
22
src/commands/migrations/current.ts
Normal file
@ -0,0 +1,22 @@
|
||||
import type { Command } from 'commantor'
|
||||
import Logger from 'config/logger'
|
||||
import DaoFactory from 'models/dao-factory'
|
||||
|
||||
const command: Command = {
|
||||
name: 'migrations:current',
|
||||
description: 'Get the current version of the database',
|
||||
async run() {
|
||||
const client = await DaoFactory.client()
|
||||
await client.connect()
|
||||
const ver = await client.getVersion()
|
||||
if (ver < 0) {
|
||||
Logger.info('no database :(')
|
||||
} else {
|
||||
Logger.info(`Current database version: ${new Date(ver)}`)
|
||||
}
|
||||
return {
|
||||
code: 0
|
||||
}
|
||||
},
|
||||
}
|
||||
export default command
|
40
src/commands/migrations/load.ts
Normal file
@ -0,0 +1,40 @@
|
||||
import type { Command } from 'commantor'
|
||||
import Logger from 'config/logger'
|
||||
import fs from 'node:fs/promises'
|
||||
import path from 'node:path/posix'
|
||||
|
||||
const command: Command = {
|
||||
name: 'migrations:load',
|
||||
description: 'Load the list of migrations into the app',
|
||||
async run() {
|
||||
const source = './src/models/migrations'
|
||||
const target = './src/models/migrations/list.ts'
|
||||
const targetFolder = './src/models/migrations'
|
||||
const relative = path.relative(source, targetFolder)
|
||||
const migrations = (await fs.readdir(source))
|
||||
.filter((it) => !it.endsWith('.d.ts') && it.startsWith('Migration'))
|
||||
.map((it) => ({
|
||||
path: './' + relative + it,
|
||||
name: it.replace(/-/g, '').replace('.ts', '')
|
||||
}))
|
||||
.sort((a, b) => a.name > b.name ? 1 : -1)
|
||||
const importStr = (it: { path: string, name: string }) => `import ${it.name} from '${it.path}'`
|
||||
const file = `${migrations.map(importStr).join('\n')}
|
||||
|
||||
/**
|
||||
* DO NOT EDIT MANUALLY
|
||||
* Auto generated file from the command "./cmd.ts migrations:load"
|
||||
*/
|
||||
|
||||
export default [
|
||||
${migrations.map((it) => ` ${it.name}`).join(',\n')}
|
||||
]
|
||||
`
|
||||
await fs.writeFile(target, file)
|
||||
Logger.info('loaded migrations into', target)
|
||||
return {
|
||||
code: 0
|
||||
}
|
||||
},
|
||||
}
|
||||
export default command
|
20
src/commands/migrations/migrate.ts
Normal file
@ -0,0 +1,20 @@
|
||||
import type { Command } from 'commantor'
|
||||
import Logger from 'config/logger'
|
||||
import DaoFactory from 'models/dao-factory'
|
||||
|
||||
const command: Command = {
|
||||
name: 'migrations:migrate',
|
||||
description: 'Migrate the database to the latest version',
|
||||
async run() {
|
||||
const client = await DaoFactory.client()
|
||||
Logger.info('connecting...')
|
||||
await client.connect()
|
||||
Logger.info('migrating...')
|
||||
await client.migrateToLatest()
|
||||
Logger.info('migrations should be ok :D')
|
||||
return {
|
||||
code: 0
|
||||
}
|
||||
},
|
||||
}
|
||||
export default command
|
@ -1,12 +1,9 @@
|
||||
---
|
||||
import { getImage } from 'astro:assets'
|
||||
import AstroUtils from '../../libs/AstroUtils'
|
||||
import { objectOmit } from '@dzeio/object-util'
|
||||
import type { ImageMetadata } from 'astro'
|
||||
|
||||
const formats = [
|
||||
'avif',
|
||||
'webp'
|
||||
]
|
||||
const formats = ['avif', 'webp']
|
||||
|
||||
export interface Props extends Omit<astroHTML.JSX.ImgHTMLAttributes, 'src'> {
|
||||
src: ImageMetadata | string
|
||||
@ -17,18 +14,13 @@ export interface Props extends Omit<astroHTML.JSX.ImgHTMLAttributes, 'src'> {
|
||||
|
||||
type PictureResult = {
|
||||
format: 'new'
|
||||
formats: Array<{format: string, img: Awaited<ReturnType<typeof getImage>>}>
|
||||
formats: Array<{ format: string, img: Awaited<ReturnType<typeof getImage>> }>
|
||||
src: Awaited<ReturnType<typeof getImage>>
|
||||
} | {
|
||||
format: 'raw'
|
||||
src: string
|
||||
}
|
||||
|
||||
interface Result {
|
||||
light: PictureResult
|
||||
dark?: PictureResult | undefined
|
||||
}
|
||||
|
||||
async function resolvePicture(image: ImageMetadata | string): Promise<PictureResult> {
|
||||
const ext = typeof image === 'string' ? image.substring(image.lastIndexOf('.')) : image.format
|
||||
if (ext === 'svg') {
|
||||
@ -38,14 +30,25 @@ async function resolvePicture(image: ImageMetadata | string): Promise<PictureRes
|
||||
}
|
||||
}
|
||||
|
||||
const imageFormats: Array<{format: string, img: Awaited<ReturnType<typeof getImage>>}> = await Promise.all(
|
||||
formats.map(async (it) => ({
|
||||
img: await getImage({src: Astro.props.src, format: it, width: Astro.props.width, height: Astro.props.height}),
|
||||
format: it
|
||||
}))
|
||||
)
|
||||
const imageFormats: Array<{ format: string, img: Awaited<ReturnType<typeof getImage>> }> =
|
||||
await Promise.all(
|
||||
formats.map(async (it) => ({
|
||||
img: await getImage({
|
||||
src: Astro.props.src,
|
||||
format: it,
|
||||
width: Astro.props.width,
|
||||
height: Astro.props.height
|
||||
}),
|
||||
format: it
|
||||
}))
|
||||
)
|
||||
|
||||
const orig = await getImage({src: Astro.props.src, format: ext, width: Astro.props.width, height: Astro.props.height})
|
||||
const orig = await getImage({
|
||||
src: Astro.props.src,
|
||||
format: ext,
|
||||
width: Astro.props.width,
|
||||
height: Astro.props.height
|
||||
})
|
||||
|
||||
return {
|
||||
format: 'new',
|
||||
@ -54,35 +57,32 @@ async function resolvePicture(image: ImageMetadata | string): Promise<PictureRes
|
||||
}
|
||||
}
|
||||
|
||||
const res = await AstroUtils.wrap<Result>(async () => {
|
||||
return {
|
||||
light: await resolvePicture(Astro.props.src),
|
||||
dark: Astro.props.srcDark ? await resolvePicture(Astro.props.srcDark) : undefined
|
||||
}
|
||||
})
|
||||
const res = {
|
||||
light: await resolvePicture(Astro.props.src),
|
||||
dark: Astro.props.srcDark ? await resolvePicture(Astro.props.srcDark) : undefined
|
||||
}
|
||||
|
||||
const props = objectOmit(Astro.props, 'src', 'srcDark', 'class')
|
||||
|
||||
---
|
||||
|
||||
{res.light.format === 'new' && (
|
||||
{res.light.format === 'new' ? (
|
||||
<picture {...props} {...res.light.src.attributes} class:list={[res.light.src.attributes.class, Astro.props.class, {'dark:hidden': res.dark}]}>
|
||||
{res.light.formats.map((it) => (
|
||||
<source srcset={it.img.src} type={`image/${it.format}`} />
|
||||
))}
|
||||
<img src={res.light.src.src} />
|
||||
</picture>
|
||||
) || (
|
||||
) : (
|
||||
<img {...props} class:list={[Astro.props.class, {'dark:hidden': res.dark}]} src={res.light.src as string} />
|
||||
)}
|
||||
|
||||
{res.dark && res.dark.format === 'new' && (
|
||||
{res.dark && res.dark.format === 'new' ? (
|
||||
<picture {...props} {...res.dark.src.attributes} class:list={[res.dark.src.attributes.class, Astro.props.class, 'hidden', 'dark:block']}>
|
||||
{res.dark.formats.map((it) => (
|
||||
<source srcset={it.img.src} type={`image/${it.format}`} />
|
||||
))}
|
||||
<img src={res.dark.src.src} />
|
||||
</picture>
|
||||
) || (res.dark && (
|
||||
) : (res.dark && (
|
||||
<img {...props} class:list={[Astro.props.class, 'hidden', 'dark:block']} src={res.dark.src as string} />
|
||||
))}
|
||||
|
3
src/config/README.md
Normal file
@ -0,0 +1,3 @@
|
||||
# Config folder
|
||||
|
||||
Holds the config of the different libs (envs, models, etc.)
|
61
src/config/envs.d.ts
vendored
Normal file
@ -0,0 +1,61 @@
|
||||
export type environmentVariables =
|
||||
| 'NODE_ENV'
|
||||
| 'APP_URL'
|
||||
| 'LOG_LEVEL'
|
||||
|
||||
| 'CASSANDRA_CONTACT_POINT'
|
||||
| 'CASSANDRA_DATABASE'
|
||||
| 'CASSANDRA_AUTH_METHOD'
|
||||
| 'CASSANDRA_USERNAME'
|
||||
| 'CASSANDRA_PASSWORD'
|
||||
| 'CASSANDRA_AUTHORIZATION_ID'
|
||||
| 'CASSANDRA_LOCAL_DATA_CENTER'
|
||||
| 'CASSANDRA_SKIP_CREATE_KEYSPACE'
|
||||
|
||||
| 'POSTGRES_HOST'
|
||||
| 'POSTGRES_USERNAME'
|
||||
| 'POSTGRES_PASSWORD'
|
||||
| 'POSTGRES_PORT'
|
||||
| 'POSTGRES_DATABASE'
|
||||
| 'POSTGRES_CONNECTION_TIMEOUT'
|
||||
| 'POSTGRES_IDLE_TIMEOUT'
|
||||
| 'POSTGRES_MAX_CLIENT'
|
||||
|
||||
| 'LDAP_URL'
|
||||
| 'LDAP_USER_DN_SUFFIX'
|
||||
| 'LDAP_ADMIN_USERNAME'
|
||||
| 'LDAP_ADMIN_PASSWORD'
|
||||
| 'LDAP_ADMIN_DN'
|
||||
|
||||
| 'EMAIL_DEBUG'
|
||||
|
||||
| 'SMTP_HOST'
|
||||
| 'SMTP_PORT'
|
||||
| 'SMTP_SECURE'
|
||||
| 'SMTP_USER'
|
||||
| 'SMTP_PASS'
|
||||
| 'SMTP_FROM'
|
||||
|
||||
| 'IMAP_HOST'
|
||||
| 'IMAP_PORT'
|
||||
| 'IMAP_SECURE'
|
||||
| 'IMAP_USER'
|
||||
| 'IMAP_PASS'
|
||||
| 'IMAP_INBOX'
|
||||
|
||||
| 'CREDO_MAIL_USER'
|
||||
| 'CREDO_MAIL_PASS'
|
||||
| 'GTI_MAIL_USER'
|
||||
| 'GTI_MAIL_PASS'
|
||||
|
||||
| 'RESOURCES_FOLDER'
|
||||
|
||||
| 'BUILD_DATE'
|
||||
| 'GIT_COMMIT'
|
||||
| 'GIT_BRANCH'
|
||||
| 'GIT_TAG'
|
||||
|
||||
| 'SENTRY_DSN'
|
||||
| 'SENTRY_DEBUG'
|
||||
|
||||
| 'ENABLE_DEVTOOLS'
|
3
src/config/logger.ts
Normal file
@ -0,0 +1,3 @@
|
||||
import ConsoleLogger from 'libs/psr/log/console-logger'
|
||||
|
||||
export default new ConsoleLogger()
|
9
src/config/models.ts
Normal file
@ -0,0 +1,9 @@
|
||||
import type { DaoConfig } from 'models/dao-factory'
|
||||
import migrations from 'models/migrations/list'
|
||||
|
||||
export default {
|
||||
mainClient: null,
|
||||
migrations: migrations,
|
||||
models: {
|
||||
}
|
||||
} satisfies DaoConfig
|
3
src/init.ts
Normal file
@ -0,0 +1,3 @@
|
||||
export default () => {
|
||||
/* function run on first start of the app */
|
||||
}
|
@ -1,5 +0,0 @@
|
||||
export default class AstroUtils {
|
||||
public static async wrap<T = void>(fn: () => T | Promise<T>) {
|
||||
return await fn()
|
||||
}
|
||||
}
|
@ -1,53 +0,0 @@
|
||||
type Fn<T extends HTMLElement> = (el: Component<T>) => void | Promise<void>
|
||||
|
||||
/**
|
||||
* Component client side initialisation class
|
||||
*/
|
||||
export default class Component<T extends HTMLElement> {
|
||||
private constructor(
|
||||
public element: T
|
||||
) {}
|
||||
|
||||
public handled(value: boolean): this
|
||||
public handled(): boolean
|
||||
public handled(value?: boolean): this | boolean {
|
||||
if (typeof value === 'undefined') {
|
||||
return typeof this.element.dataset.handled === 'string'
|
||||
}
|
||||
this.element.dataset.handled = ''
|
||||
return this
|
||||
}
|
||||
|
||||
public init(fn: (el: Component<T>) => void | Promise<void>) {
|
||||
if (this.handled()) {
|
||||
return
|
||||
}
|
||||
fn(this)
|
||||
this.handled(true)
|
||||
}
|
||||
|
||||
public child<El extends HTMLElement>(query: string, fn: Fn<El>) {
|
||||
this.element.querySelectorAll<El>(query).forEach((it) => {
|
||||
const cp = new Component(it)
|
||||
cp.init(fn)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* start handling an element
|
||||
* @param query the query to get the element
|
||||
* @param fn the function that is run ONCE per elements
|
||||
*/
|
||||
public static handle<T extends HTMLElement>(query: string, fn: (el: T) => void | Promise<void>) {
|
||||
document.querySelectorAll<T>(query).forEach((it) => {
|
||||
const cp = new Component(it)
|
||||
cp.init((it) => fn(it.element))
|
||||
})
|
||||
document.addEventListener('astro:page-load', () => {
|
||||
document.querySelectorAll<T>(query).forEach((it) => {
|
||||
const cp = new Component(it)
|
||||
cp.init((it) => fn(it.element))
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
91
src/libs/api.ts
Normal file
@ -0,0 +1,91 @@
|
||||
import type { APIRoute } from 'astro'
|
||||
import type APIRoutes from 'config/api-routes'
|
||||
import { formatRoute } from 'route'
|
||||
import StatusCode from './http/status-code'
|
||||
import ResponseBuilder from './response-builder'
|
||||
|
||||
// TypeScript is dumb so we need to add this `THANKS_TS` interface to make the different runtime typings work
|
||||
interface THANKS_TS<Output = any, Input = never> {
|
||||
_input?: Input
|
||||
_output?: Output
|
||||
}
|
||||
|
||||
// create the overload of APIRoute to add type safety
|
||||
export type TypeSafeAPIRoute<Output = any, Input = never> = APIRoute & THANKS_TS<Output, Input>
|
||||
|
||||
// extract both the input object and output object
|
||||
export type OutputParam<T> = T extends THANKS_TS<infer U, any> ? U : any
|
||||
export type InputParam<T> = T extends THANKS_TS<any, infer U> ? U : any
|
||||
|
||||
// type TestO = OutputParam<TypeSafeAPIRoute<string, number>
|
||||
// type TestI = InputParam<TypeSafeAPIRoute<string, number>
|
||||
// type TestN1 = InputParam<APIRoute>
|
||||
// type TestN2 = InputParam<APIRoute>
|
||||
|
||||
// extract the URL parameters
|
||||
type ExtractParameters<T> = T extends `${string}[${infer Param}]${string}`
|
||||
? Param | ExtractParameters<Exclude<T, T>>
|
||||
: never
|
||||
|
||||
/**
|
||||
* make a type safe API request
|
||||
*
|
||||
* @param path the path to request (excluding the `/api` part)
|
||||
* @param opts the options to use
|
||||
* @param opts.params the parameters to input to the original url
|
||||
* @param opts.method the method to request
|
||||
* @param opts.body the body of the request, it can be a raw object to be sent as JSON or a `FormData` object
|
||||
* @returns the response from the API endpoint
|
||||
* @throws an error if statusCode is above or equal to 400
|
||||
*/
|
||||
export default async function api<R extends keyof APIRoutes, M extends keyof APIRoutes[R] = keyof APIRoutes[R]>(
|
||||
path: R,
|
||||
opts?: {
|
||||
params?: Record<ExtractParameters<R>, string | number>
|
||||
method?: M
|
||||
body?: InputParam<APIRoutes[R][M]>
|
||||
}
|
||||
): Promise<OutputParam<APIRoutes[R][M]>> {
|
||||
const { params, method = 'GET', body } = opts ?? {}
|
||||
|
||||
// fetch the remote
|
||||
const res = await fetch('/api' + formatRoute(path, params), {
|
||||
method: method as string,
|
||||
body: typeof body === 'undefined' || body instanceof FormData ? body ?? null : JSON.stringify(body)
|
||||
})
|
||||
|
||||
// throw if an error occurs
|
||||
if (res.status >= 400) {
|
||||
throw new Error(`${path as string} status code invalid :(`)
|
||||
}
|
||||
|
||||
return res.json() as OutputParam<APIRoutes[R][M]>
|
||||
}
|
||||
|
||||
type PromiseLike<T> = Promise<T> | T
|
||||
|
||||
/**
|
||||
* make a type safe API Route
|
||||
* @param baseFunction the original APIRoute function with the additional possibility of returning a plain object
|
||||
* @returns the enhanced `APIRoute` function
|
||||
*/
|
||||
export function makeAPI<Output = any, Input = never>(
|
||||
baseFunction: (...params: Parameters<APIRoute>) => PromiseLike<Response | { body: Output, status: number } | Output>
|
||||
): TypeSafeAPIRoute<Output, Input> {
|
||||
return async (ctx) => {
|
||||
const res = await baseFunction(ctx)
|
||||
if (res instanceof Response) {
|
||||
return res
|
||||
}
|
||||
if (typeof res === 'object' && res !== null && 'body' in res && 'status' in res && Object.keys(res).length === 2) {
|
||||
return new ResponseBuilder()
|
||||
.body(res.body as object)
|
||||
.status(res.status)
|
||||
.build()
|
||||
}
|
||||
return new ResponseBuilder()
|
||||
.body(res as object)
|
||||
.status(StatusCode.OK)
|
||||
.build()
|
||||
}
|
||||
}
|
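A usage sketch of the two helpers above (the `/hello` endpoint and its `{ message }` payload are hypothetical):

// src/pages/api/hello.ts — declare the endpoint with makeAPI
import { makeAPI } from 'libs/api'
export const GET = makeAPI<{ message: string }>(() => ({ message: 'hello' }))

// client side — path, method and response type are checked against the generated APIRoutes
import api from 'libs/api'
const res = await api('/hello', { method: 'GET' })
// res is typed as { message: string }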
43
src/libs/env.ts
Normal file
@ -0,0 +1,43 @@
|
||||
import { getSecret } from 'astro:env/server'
|
||||
import type { environmentVariables } from 'config/envs'
|
||||
|
||||
/**
|
||||
* Get the environment variable
|
||||
*
|
||||
* @param key the env variable key
|
||||
* @param defaultValue a default value if applicable
|
||||
* @returns the environment value or undefined if not found
|
||||
*/
|
||||
export function getEnv(key: environmentVariables, defaultValue: string): string
|
||||
export function getEnv(key: environmentVariables, defaultValue?: string): string | undefined
|
||||
export function getEnv(key: environmentVariables, defaultValue?: string): string | undefined {
|
||||
// get the env variable through Astro > NodeJS > input
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
|
||||
const res = getSecret(key) ?? import.meta.env[key] ?? process.env[key] ?? defaultValue
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
|
||||
return res ?? undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* get the environment variable and throws if not found
|
||||
*
|
||||
* @throws {Error} if the env variable is not found
|
||||
* @param key the env variable key
|
||||
* @param defaultValue a default value if applicable
|
||||
* @returns the environment value
|
||||
*/
|
||||
export function requireEnv(key: environmentVariables, defaultValue?: string): string {
|
||||
// get the env variable through Astro > NodeJS > input
|
||||
const res = getEnv(key, defaultValue)
|
||||
|
||||
// throw if env variable is not set
|
||||
if (!res) {
|
||||
throw new Error(`MissingEnvError: the env ${key} is not set!`)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
export function envExists(key: environmentVariables): boolean {
|
||||
return !!getEnv(key)
|
||||
}
|
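A short usage sketch (the variable names come from config/envs.d.ts):

import { envExists, getEnv, requireEnv } from 'libs/env'

const level = getEnv('LOG_LEVEL', '4') // falls back to '4' when unset
const appUrl = requireEnv('APP_URL') // throws MissingEnvError when unset
if (envExists('SENTRY_DSN')) { /* enable Sentry */ }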
5
src/libs/psr/README.md
Normal file
@ -0,0 +1,5 @@
|
||||
# PSR Standards
|
||||
|
||||
PSR is a set of standards for the PHP language that can also be applied to JavaScript.
|
||||
|
||||
[www.php-fig.org](https://www.php-fig.org/)
|
30
src/libs/psr/log/color.ts
Normal file
@ -0,0 +1,30 @@
|
||||
// https://stackoverflow.com/a/41407246/7335674
|
||||
export enum Color {
|
||||
Reset = '\x1b[0m',
|
||||
Bright = '\x1b[1m',
|
||||
Dim = '\x1b[2m',
|
||||
Underscore = '\x1b[4m',
|
||||
Blink = '\x1b[5m',
|
||||
Reverse = '\x1b[7m',
|
||||
Hidden = '\x1b[8m',
|
||||
|
||||
Black = '\x1b[30m',
|
||||
Red = '\x1b[31m',
|
||||
Green = '\x1b[32m',
|
||||
Yellow = '\x1b[33m',
|
||||
Blue = '\x1b[34m',
|
||||
Magenta = '\x1b[35m',
|
||||
Cyan = '\x1b[36m',
|
||||
White = '\x1b[37m',
|
||||
Gray = '\x1b[90m',
|
||||
|
||||
BgBlack = '\x1b[40m',
|
||||
BgRed = '\x1b[41m',
|
||||
BgGreen = '\x1b[42m',
|
||||
BgYellow = '\x1b[43m',
|
||||
BgBlue = '\x1b[44m',
|
||||
BgMagenta = '\x1b[45m',
|
||||
BgCyan = '\x1b[46m',
|
||||
BgWhite = '\x1b[47m',
|
||||
BgGray = '\x1b[100m'
|
||||
}
|
23
src/libs/psr/log/console-logger.ts
Normal file
@ -0,0 +1,23 @@
|
||||
import ConsoleReplacer from './console-replacer'
|
||||
import LogLevel from './log-level'
|
||||
import LoggerAbstract from './logger-abstract'
|
||||
|
||||
export default class ConsoleLogger extends LoggerAbstract {
|
||||
private readonly console: Console
|
||||
|
||||
public constructor(
|
||||
obj: Console = console
|
||||
) {
|
||||
super()
|
||||
this.console = obj
|
||||
while (this.console instanceof ConsoleReplacer) {
|
||||
this.console = this.console.original
|
||||
}
|
||||
}
|
||||
|
||||
public override log(level: LogLevel, message?: any, ...optionalParams: Array<any>): void {
|
||||
if (this.canLog(level)) {
|
||||
this.console.log(this.processLog(level, message, optionalParams))
|
||||
}
|
||||
}
|
||||
}
|
119
src/libs/psr/log/console-replacer.ts
Normal file
@ -0,0 +1,119 @@
|
||||
import type { Console } from 'node:console'
|
||||
import type LoggerInterface from './logger-interface'
|
||||
|
||||
/**
|
||||
* Special class that is meant to replace `global.console` so that logs made through the console are handled by the app logger
|
||||
*/
|
||||
export default class ConsoleReplacer implements Console {
|
||||
public readonly original: Console
|
||||
|
||||
/** @ts-expect-error normal behavior */
|
||||
public Console: console.ConsoleConstructor = null
|
||||
|
||||
public constructor(
|
||||
private readonly logger: LoggerInterface
|
||||
) {
|
||||
let original = console
|
||||
while (original instanceof ConsoleReplacer) {
|
||||
original = original.original
|
||||
}
|
||||
this.original = original
|
||||
}
|
||||
|
||||
public assert(_condition?: boolean, ..._data: Array<any>): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public clear(): void {/** normal */ }
|
||||
|
||||
public count(_label?: string): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public countReset(_label?: string): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public debug(...data: Array<any>): void {
|
||||
this.logger.debug(this.parseData(data))
|
||||
}
|
||||
public dir(_item?: any, _options?: any): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public dirxml(..._data: Array<any>): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public error(...data: Array<any>): void {
|
||||
this.logger.error(this.parseData(data))
|
||||
}
|
||||
public group(..._data: Array<any>): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public groupCollapsed(..._data: Array<any>): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public groupEnd(): void
|
||||
public groupEnd(): void
|
||||
public groupEnd(): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public info(...data: Array<any>): void {
|
||||
this.logger.info(this.parseData(data))
|
||||
}
|
||||
public log(...data: Array<any>): void {
|
||||
this.logger.info(this.parseData(data))
|
||||
}
|
||||
public table(_tabularData?: any, _properties?: Array<string>): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public time(_label?: string): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public timeEnd(label?: string): void
|
||||
public timeEnd(label?: string): void
|
||||
public timeEnd(_label?: unknown): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public timeLog(_label?: string, ..._data: Array<any>): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public timeStamp(_label?: string): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public trace(...data: Array<any>): void {
|
||||
const trace: { stack?: string } = {}
|
||||
// normal behavior, this.trace is used as the base so as to not include its call in the trace
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
Error.captureStackTrace(trace, this.trace)
|
||||
this.logger.debug(`${this.parseData(data)} ${trace.stack}`)
|
||||
}
|
||||
public warn(...data: Array<any>): void {
|
||||
this.logger.warning(this.parseData(data))
|
||||
}
|
||||
public profile(_label?: string): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
public profileEnd(_label?: string): void {
|
||||
throw new Error('Method not implemented.')
|
||||
}
|
||||
|
||||
private parseData(data: Array<any>): string {
|
||||
return data.map((it) => {
|
||||
if (it === null) {
|
||||
return 'null'
|
||||
}
|
||||
if (typeof it === 'string') {
|
||||
return it
|
||||
}
|
||||
let strVersion = '[object Object]'
|
||||
if (it instanceof Error) {
|
||||
strVersion = it.toString()
|
||||
}
|
||||
if (typeof it === 'object' && 'toString' in it) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
|
||||
strVersion = it.toString()
|
||||
}
|
||||
if (strVersion !== '[object Object]') {
|
||||
return strVersion
|
||||
}
|
||||
return JSON.stringify(it)
|
||||
}).join(' ')
|
||||
}
|
||||
}
|
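A sketch of how this replacer could be wired up (where the assignment happens is an assumption, e.g. in src/init.ts):

import Logger from 'config/logger'
import ConsoleReplacer from 'libs/psr/log/console-replacer'

// from here on, stray console.log/info/warn/error calls are routed through the PSR logger
globalThis.console = new ConsoleReplacer(Logger)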
16
src/libs/psr/log/file-system-logger.ts
Normal file
@ -0,0 +1,16 @@
|
||||
import fs from 'fs'
|
||||
import { getEnv } from 'libs/env'
|
||||
import LogLevel from './log-level'
|
||||
import LoggerAbstract from './logger-abstract'
|
||||
|
||||
export default class FileSystemLogger extends LoggerAbstract {
|
||||
|
||||
public log(level: LogLevel, message?: any, ...optionalParams: Array<any>): void {
|
||||
const now = new Date()
|
||||
|
||||
// store the logs inside the var/log folder
|
||||
fs.mkdirSync('var/log', { recursive: true })
|
||||
const filename = getEnv('NODE_ENV') === 'production' ? `prod-${now.toDateString()}.log` : 'dev.log'
|
||||
fs.appendFileSync(`var/log/${filename}`, this.processLog(level, message, optionalParams) + '\n')
|
||||
}
|
||||
}
|
12
src/libs/psr/log/log-level.ts
Normal file
@ -0,0 +1,12 @@
|
||||
enum LogLevel {
|
||||
EMERGENCY = 'emergency',
|
||||
ALERT = 'alert',
|
||||
CRITICAL = 'critical',
|
||||
ERROR = 'error',
|
||||
WARNING = 'warning',
|
||||
NOTICE = 'notice',
|
||||
INFO = 'info',
|
||||
DEBUG = 'debug'
|
||||
}
|
||||
|
||||
export default LogLevel
|
121
src/libs/psr/log/logger-abstract.ts
Normal file
@ -0,0 +1,121 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-argument */
|
||||
import { getEnv } from 'libs/env'
|
||||
import { Color } from './color'
|
||||
import LogLevel from './log-level'
|
||||
import type LoggerInterface from './logger-interface'
|
||||
|
||||
const order: Array<LogLevel> = [
|
||||
LogLevel.DEBUG,
|
||||
LogLevel.INFO,
|
||||
LogLevel.NOTICE,
|
||||
LogLevel.WARNING,
|
||||
LogLevel.ERROR,
|
||||
LogLevel.CRITICAL,
|
||||
LogLevel.ALERT,
|
||||
LogLevel.EMERGENCY,
|
||||
]
|
||||
|
||||
|
||||
export default abstract class LoggerAbstract implements LoggerInterface {
|
||||
public emergency(message?: any, ...optionalParams: Array<any>): void {
|
||||
this.log(LogLevel.EMERGENCY, message, ...optionalParams)
|
||||
}
|
||||
|
||||
public alert(message?: any, ...optionalParams: Array<any>): void {
|
||||
this.log(LogLevel.ALERT, message, ...optionalParams)
|
||||
}
|
||||
|
||||
public critical(message?: any, ...optionalParams: Array<any>): void {
|
||||
this.log(LogLevel.CRITICAL, message, ...optionalParams)
|
||||
}
|
||||
|
||||
public error(message?: any, ...optionalParams: Array<any>): void {
|
||||
this.log(LogLevel.ERROR, message, ...optionalParams)
|
||||
}
|
||||
|
||||
public warning(message?: any, ...optionalParams: Array<any>): void {
|
||||
this.log(LogLevel.WARNING, message, ...optionalParams)
|
||||
}
|
||||
|
||||
public notice(message?: any, ...optionalParams: Array<any>): void {
|
||||
this.log(LogLevel.NOTICE, message, ...optionalParams)
|
||||
}
|
||||
|
||||
public info(message?: any, ...optionalParams: Array<any>): void {
|
||||
this.log(LogLevel.INFO, message, ...optionalParams)
|
||||
}
|
||||
|
||||
public debug(message?: any, ...optionalParams: Array<any>): void {
|
||||
this.log(LogLevel.DEBUG, message, ...optionalParams)
|
||||
}
|
||||
|
||||
/**
|
||||
* process the log line into a standardized one
|
||||
*
|
||||
* note: it by itself does not add the final `\n`
|
||||
* @param level the log level
|
||||
* @param message the message to send
|
||||
* @param context the message context
|
||||
* @param colors should the message have colors?
|
||||
* @returns the processed message
|
||||
*/
|
||||
protected processLog(level: LogLevel, message?: any, optionalParams: Array<any> = [], colors = false) {
|
||||
const now = new Date()
|
||||
let final = this.stringify(message)
|
||||
|
||||
for (const item of optionalParams) {
|
||||
final += ` ${this.stringify(item)}`
|
||||
}
|
||||
|
||||
// if (context) {
|
||||
// const clone = objectClone(context, { deep: false })
|
||||
// objectLoop(context, (value, key) => {
|
||||
// try {
|
||||
// final = final.replace(new RegExp(`{${key}}`, 'g'), this.stringify(value))
|
||||
// // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
|
||||
// delete clone[key]
|
||||
// } catch {
|
||||
// final += `${value}`
|
||||
// }
|
||||
// })
|
||||
// if (objectSize(clone) > 0) {
|
||||
// final += ` (${JSON.stringify(context)})`
|
||||
// }
|
||||
// }
|
||||
|
||||
let prefix = `[${now.toISOString()}] ${level}:`
|
||||
if (colors) {
|
||||
const levelColor = level === LogLevel.ERROR ? Color.Red : level === LogLevel.WARNING ? Color.Yellow : Color.Cyan
|
||||
prefix = `${Color.Reset}${Color.Dim}[${Color.Yellow}${now.toISOString()}${Color.Reset}${Color.Dim}] ${levelColor}${level.padStart(9, ' ')}${Color.Reset}:${Color.Green}${Color.Bright}`
|
||||
}
|
||||
|
||||
return `${prefix} ${this.prefixLines(final, prefix)}`
|
||||
}
|
||||
|
||||
protected canLog(level: LogLevel): boolean {
|
||||
let logLevel = Math.max(0, Math.min(parseInt(getEnv('LOG_LEVEL', '4'), 10), order.length - 1))
|
||||
if (isNaN(logLevel)) {
|
||||
logLevel = 0
|
||||
}
|
||||
const index = order.indexOf(level)
|
||||
return index >= logLevel
|
||||
}
|
||||
|
||||
private prefixLines(text: string, prefix: string): string {
|
||||
return text.split('\n').join('\n' + prefix + ' ')
|
||||
}
|
||||
|
||||
private stringify(content: any): string {
|
||||
if (typeof content === 'string') {
|
||||
return content
|
||||
}
|
||||
|
||||
if (content instanceof Error) {
|
||||
return `${content.name}: ${content.message}\n${content.stack}`
|
||||
}
|
||||
|
||||
return JSON.stringify(content)
|
||||
}
|
||||
|
||||
public abstract log(level: LogLevel, message?: any, ...optionalParams: Array<any>): void
|
||||
}
|
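A small worked example of the level filtering in `canLog` above: `LOG_LEVEL` is an index into the `order` array, and a line is emitted only when its level sits at or above that index.

// LOG_LEVEL=0 → everything from debug upwards is logged
// LOG_LEVEL=4 (the default) → only error, critical, alert and emergency
// LOG_LEVEL=7 → only emergency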
112
src/libs/psr/log/logger-interface.ts
Normal file
@ -0,0 +1,112 @@
|
||||
import type LogLevel from './log-level'
|
||||
|
||||
/**
|
||||
* Describes a logger instance.
|
||||
*
|
||||
* The message MUST be a string or object implementing __toString().
|
||||
*
|
||||
* The message MAY contain placeholders in the form: {foo} where foo
|
||||
* will be replaced by the context data in key "foo".
|
||||
*
|
||||
* The context array can contain arbitrary data, the only assumption that
|
||||
* can be made by implementors is that if an Exception instance is given
|
||||
* to produce a stack trace, it MUST be in a key named "exception".
|
||||
*
|
||||
* See https://github.com/php-fig/fig-standards/blob/master/accepted/PSR-3-logger-interface.md
|
||||
* for the full interface specification.
|
||||
*/
|
||||
export default interface LoggerInterface {
|
||||
/**
|
||||
* System is unusable.
|
||||
*
|
||||
* @param string $message
|
||||
* @param array $context
|
||||
* @return void
|
||||
*/
|
||||
emergency(message?: any, ...optionalParams: Array<any>): void
|
||||
|
||||
/**
|
||||
* Action must be taken immediately.
|
||||
*
|
||||
* Example: Entire website down, database unavailable, etc. This should
|
||||
* trigger the SMS alerts and wake you up.
|
||||
*
|
||||
* @param string $message
|
||||
* @param array $context
|
||||
* @return void
|
||||
*/
|
||||
alert(message?: any, ...optionalParams: Array<any>): void
|
||||
|
||||
/**
|
||||
* Critical conditions.
|
||||
*
|
||||
* Example: Application component unavailable, unexpected exception.
|
||||
*
|
||||
* @param string $message
|
||||
* @param array $context
|
||||
* @return void
|
||||
*/
|
||||
critical(message?: any, ...optionalParams: Array<any>): void
|
||||
|
||||
/**
|
||||
* Runtime errors that do not require immediate action but should typically
|
||||
* be logged and monitored.
|
||||
*
|
||||
* @param string $message
|
||||
* @param array $context
|
||||
* @return void
|
||||
*/
|
||||
error(message?: any, ...optionalParams: Array<any>): void
|
||||
|
||||
/**
|
||||
* Exceptional occurrences that are not errors.
|
||||
*
|
||||
* Example: Use of deprecated APIs, poor use of an API, undesirable things
|
||||
* that are not necessarily wrong.
|
||||
*
|
||||
* @param string $message
|
||||
* @param array $context
|
||||
* @return void
|
||||
*/
|
||||
warning(message?: any, ...optionalParams: Array<any>): void
|
||||
|
||||
/**
|
||||
* Normal but significant events.
|
||||
*
|
||||
* @param string $message
|
||||
* @param array $context
|
||||
* @return void
|
||||
*/
|
||||
notice(message?: any, ...optionalParams: Array<any>): void
|
||||
|
||||
/**
|
||||
* Interesting events.
|
||||
*
|
||||
* Example: User logs in, SQL logs.
|
||||
*
|
||||
* @param string $message
|
||||
* @param array $context
|
||||
* @return void
|
||||
*/
|
||||
info(message?: any, ...optionalParams: Array<any>): void
|
||||
|
||||
/**
|
||||
* Detailed debug information.
|
||||
*
|
||||
* @param string $message
|
||||
* @param array $context
|
||||
* @return void
|
||||
*/
|
||||
debug(message?: any, ...optionalParams: Array<any>): void
|
||||
|
||||
/**
|
||||
* Logs with an arbitrary level.
|
||||
*
|
||||
* @param mixed $level
|
||||
* @param string $message
|
||||
* @param array $context
|
||||
* @return void
|
||||
*/
|
||||
|
||||
log(level: LogLevel, message?: any, ...optionalParams: Array<any>): void
|
||||
}
|
22
src/libs/psr/log/multi-logger.ts
Normal file
@ -0,0 +1,22 @@
|
||||
import LogLevel from './log-level'
|
||||
import LoggerAbstract from './logger-abstract'
|
||||
import type LoggerInterface from './logger-interface'
|
||||
|
||||
export default class MultiLogger extends LoggerAbstract {
|
||||
|
||||
private readonly loggers: Array<LoggerInterface>
|
||||
|
||||
public constructor(
|
||||
...loggers: Array<LoggerInterface>
|
||||
) {
|
||||
super()
|
||||
this.loggers = loggers
|
||||
}
|
||||
|
||||
public log(level: LogLevel, message?: any, ...optionalParams: Array<any>): void {
|
||||
for (const logger of this.loggers) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
|
||||
logger.log(level, message, ...optionalParams)
|
||||
}
|
||||
}
|
||||
}
|
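A usage sketch combining the loggers above (the class name follows the MultiLogger fix; wiring it into config/logger.ts is an assumption):

import ConsoleLogger from 'libs/psr/log/console-logger'
import FileSystemLogger from 'libs/psr/log/file-system-logger'
import MultiLogger from 'libs/psr/log/multi-logger'

// fan every log line out to both the terminal and var/log/
export default new MultiLogger(new ConsoleLogger(), new FileSystemLogger())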
@ -1,12 +1,12 @@
|
||||
import ResponseBuilder from '../ResponseBuilder'
|
||||
import ResponseBuilder from '../response-builder'
|
||||
|
||||
/**
|
||||
* Add headers:
|
||||
* Content-Type: application/problem+json
|
||||
*
|
||||
* following https://www.rfc-editor.org/rfc/rfc7807.html
|
||||
* following https://www.rfc-editor.org/rfc/rfc9457.html
|
||||
*/
|
||||
export default interface RFC7807 {
|
||||
export default interface RFC9457 {
|
||||
/**
|
||||
* A URI reference [RFC3986] that identifies the
|
||||
* problem type.
|
||||
@ -58,7 +58,7 @@ export default interface RFC7807 {
|
||||
* @param error the error (base items are type, status, title, details and instance)
|
||||
* @returns
|
||||
*/
|
||||
export function buildRFC7807(error: RFC7807 & Record<string, any>, response: ResponseBuilder = new ResponseBuilder()): Response {
|
||||
export function buildRFC9457(error: RFC9457 & Record<string, any>, response: ResponseBuilder = new ResponseBuilder()): Response {
|
||||
response.addHeader('Content-Type', 'application/problem+json')
|
||||
.body(JSON.stringify(error))
|
||||
.status(error.status ?? 500)
|
41
src/middleware/database.ts
Normal file
41
src/middleware/database.ts
Normal file
@ -0,0 +1,41 @@
|
||||
import { defineMiddleware } from 'astro/middleware'
|
||||
import config from 'config/models'
|
||||
import { buildRFC9457 } from 'libs/rfc/rfc9457'
|
||||
import route from 'route'
|
||||
|
||||
// `context` and `next` are automatically typed
|
||||
export default defineMiddleware(async (ctx, next) => {
|
||||
const url = ctx.url
|
||||
const client = config.mainClient
|
||||
if (!client) {
|
||||
return next()
|
||||
}
|
||||
const c = await client.get()
|
||||
await c.connect()
|
||||
let isMigrated = await c.isMigrated()
|
||||
|
||||
if (!isMigrated) {
|
||||
await c.migrateToLatest()
|
||||
isMigrated = await c.isMigrated()
|
||||
}
|
||||
|
||||
// check if the client is ready
|
||||
if (!isMigrated && url.pathname !== '/setup') {
|
||||
if (url.pathname.startsWith('/api')) {
|
||||
// don't redirect but indicate it using an RFC 9457 problem response
|
||||
return buildRFC9457({
|
||||
title: 'Server is starting up',
|
||||
status: 503,
|
||||
details: 'Server is starting, please wait a bit'
|
||||
})
|
||||
}
|
||||
// redirect user to a specific page
|
||||
return ctx.redirect(route('/setup', { goto: url.pathname + url.search }))
|
||||
}
|
||||
|
||||
if (isMigrated && url.pathname === '/setup') {
|
||||
return ctx.redirect(url.searchParams.get('goto') ?? route('/'))
|
||||
}
|
||||
|
||||
return next()
|
||||
})
|
15
src/middleware/events.ts
Normal file
15
src/middleware/events.ts
Normal file
@ -0,0 +1,15 @@
|
||||
import { defineMiddleware } from 'astro/middleware'
|
||||
import init from 'init'
|
||||
|
||||
let initialized = false
|
||||
|
||||
/**
|
||||
* Sets up the event management system
|
||||
*/
|
||||
export default defineMiddleware((_, next) => {
|
||||
if (!initialized) {
|
||||
init()
|
||||
initialized = true
|
||||
}
|
||||
return next()
|
||||
})
|
7
src/middleware/index.ts
Normal file
7
src/middleware/index.ts
Normal file
@ -0,0 +1,7 @@
|
||||
import { sequence } from 'astro/middleware'
|
||||
|
||||
import database from './database'
|
||||
import events from './events'
|
||||
import logger from './logger'
|
||||
|
||||
export const onRequest = sequence(logger, events, database)
|
69
src/middleware/logger.ts
Normal file
69
src/middleware/logger.ts
Normal file
@ -0,0 +1,69 @@
|
||||
import * as Sentry from '@sentry/node'
|
||||
import { defineMiddleware } from 'astro/middleware'
|
||||
import Logger from 'config/logger'
|
||||
import { getEnv, requireEnv } from 'libs/env'
|
||||
import ResponseBuilder from 'libs/response-builder'
|
||||
|
||||
const dsn = getEnv('SENTRY_DSN')
|
||||
if (dsn) {
|
||||
Sentry.init({
|
||||
dsn: dsn,
|
||||
debug: getEnv('SENTRY_DEBUG') === 'true',
|
||||
environment: requireEnv('NODE_ENV'),
|
||||
serverName: requireEnv('APP_URL'),
|
||||
includeLocalVariables: true,
|
||||
release: getEnv('GIT_TAG') ?? requireEnv('GIT_COMMIT')
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple middleware that handles request logging and processing errors
|
||||
*/
|
||||
export default defineMiddleware(async ({ request, url }, next) => {
|
||||
const now = new Date()
|
||||
// Date of request User-Agent 32 first chars request Method
|
||||
let prefix = `\x1b[22m ${request.headers.get('user-agent')?.slice(0, 32).padEnd(32)} ${request.method.padEnd(7)}`
|
||||
|
||||
const fullURL = url.toString()
|
||||
const path = fullURL.slice(fullURL.indexOf(url.pathname, fullURL.indexOf(url.host)))
|
||||
|
||||
// Log start of request
|
||||
if (!import.meta.env.PROD) {
|
||||
// time of request
|
||||
prefix = ''
|
||||
// HTTP Status Code path of request Time to run request
|
||||
Logger.info(`${prefix} ${''.padStart(5, ' ')} ${path}`)
|
||||
} else {
|
||||
// HTTP Status Code Time to run request path of request
|
||||
Logger.info(`${prefix} ${''.padStart(5, ' ')} ${''.padStart(7, ' ')} ${path}`)
|
||||
}
|
||||
|
||||
|
||||
// Handle if the request die
|
||||
try {
|
||||
const res = await next()
|
||||
|
||||
if (import.meta.env.PROD) {
|
||||
// HTTP Status time to execute path of request
|
||||
Logger.info(`${prefix} \x1b[34m[${'status' in res ? res.status : '???'}]\x1b[0m \x1b[2m${(new Date().getTime() - now.getTime()).toFixed(0).padStart(5, ' ')}ms\x1b[22m ${path}`)
|
||||
}
|
||||
|
||||
return res
|
||||
} catch (err) {
|
||||
if (import.meta.env.PROD) {
|
||||
// time to execute path of request
|
||||
Logger.info(`${prefix} \x1b[34m[500]\x1b[0m \x1b[2m${(new Date().getTime() - now.getTime()).toFixed(0).padStart(5, ' ')}ms\x1b[22m ${path}`)
|
||||
}
|
||||
|
||||
// log the error server-side
|
||||
Logger.critical(err)
|
||||
|
||||
// send the error to Glitchtip
|
||||
Sentry.captureException(err)
|
||||
|
||||
return new ResponseBuilder()
|
||||
.status(500)
|
||||
.body('An error occurred while processing your request')
|
||||
.build()
|
||||
}
|
||||
})
|
@ -1,115 +0,0 @@
|
||||
/**
|
||||
* the Dao is the object that connect the Database or source to the application layer
|
||||
*
|
||||
* you MUST call it through the `DaoFactory` file
|
||||
*/
|
||||
export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
|
||||
/**
|
||||
* insert a new object into the source
|
||||
*
|
||||
* @param obj the object to create
|
||||
* @returns the object with it's id filled if create or null otherwise
|
||||
*/
|
||||
abstract create(obj: Omit<Object, 'id' | 'created' | 'updated'>): Promise<Object | null>
|
||||
|
||||
/**
|
||||
* insert a new object into the source
|
||||
*
|
||||
* @param obj the object to create
|
||||
* @returns the object with it's id filled if create or null otherwise
|
||||
*/
|
||||
public insert: Dao<Object>['create'] = (obj: Parameters<Dao<Object>['create']>[0]) => this.create(obj)
|
||||
|
||||
/**
|
||||
* find the list of objects having elements from the query
|
||||
*
|
||||
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
|
||||
* @returns an array containing the list of elements that match with the query
|
||||
*/
|
||||
abstract findAll(query?: Partial<Object>): Promise<Array<Object>>
|
||||
|
||||
/**
|
||||
* find the list of objects having elements from the query
|
||||
*
|
||||
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
|
||||
* @returns an array containing the list of elements that match with the query
|
||||
*/
|
||||
public find: Dao<Object>['findAll'] = (query: Parameters<Dao<Object>['findAll']>[0]) => this.findAll(query)
|
||||
|
||||
/**
|
||||
* find an object by it's id
|
||||
*
|
||||
* (shortcut to findOne({id: id}))
|
||||
*
|
||||
* @param id the id of the object
|
||||
* @returns
|
||||
*/
|
||||
public findById(id: Object['id']): Promise<Object | null> {
|
||||
return this.findOne({id: id} as Partial<Object>)
|
||||
}
|
||||
|
||||
/**
|
||||
* find an object by it's id
|
||||
*
|
||||
* (shortcut to findOne({id: id}))
|
||||
*
|
||||
* @param id the id of the object
|
||||
* @returns
|
||||
*/
|
||||
public get(id: Object['id']) {
|
||||
return this.findById(id)
|
||||
}
|
||||
|
||||
/**
|
||||
* find the first element that match `query`
|
||||
*
|
||||
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
|
||||
* @returns the first element matching with the query or null otherwise
|
||||
*/
|
||||
public async findOne(query?: Partial<Object>): Promise<Object | null> {
|
||||
return (await this.findAll(query))[0] ?? null
|
||||
}
|
||||
|
||||
/**
|
||||
* update the remote reference of the object
|
||||
*
|
||||
* note: it will not try to insert an item (use `upsert` to handle this)
|
||||
*
|
||||
* @param obj the object to update
|
||||
* @returns an object if it was able to update or null otherwise
|
||||
*/
|
||||
abstract update(obj: Object): Promise<Object | null>
|
||||
|
||||
/**
|
||||
* change some elements from the object and return the object updated
|
||||
* @param id the id of the object
|
||||
* @param changegs the change to make
|
||||
*/
|
||||
public async patch(id: string, changes: Partial<Object>): Promise<Object | null> {
|
||||
const query = await this.findById(id)
|
||||
if (!query) {
|
||||
return null
|
||||
}
|
||||
return await this.update({...query, ...changes})
|
||||
}
|
||||
/**
|
||||
* update the remote reference of the object or create it if not found
|
||||
* @param obj the object to update/insert
|
||||
* @returns the object is updated/inserted or null otherwise
|
||||
*/
|
||||
public async upsert(object: Object | Omit<Object, 'id' | 'created' | 'updated'>): Promise<Object | null> {
|
||||
if ('id' in object) {
|
||||
return this.update(object)
|
||||
}
|
||||
return this.insert(object)
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete the object
|
||||
* @param obj the object to delete
|
||||
*
|
||||
* @returns if the object was deleted or not (if object is not in db it will return true)
|
||||
*/
|
||||
abstract delete(obj: Object): Promise<boolean>
|
||||
}
|
@ -1,54 +0,0 @@
|
||||
/**
|
||||
* TODO:
|
||||
* Add to `DaoItem` your model name
|
||||
* Add to the function `initDao` the Dao
|
||||
*/
|
||||
|
||||
/**
|
||||
* the different Daos that can be initialized
|
||||
*
|
||||
* Touch this interface to define which key is linked to which Dao
|
||||
*/
|
||||
interface DaoItem {
|
||||
}
|
||||
|
||||
/**
|
||||
* Class to get any DAO
|
||||
*/
|
||||
export default class DaoFactory {
|
||||
/**
|
||||
* reference of the different Daos for a correct singleton implementation
|
||||
*/
|
||||
private static daos: Partial<DaoItem> = {}
|
||||
|
||||
/**
|
||||
* Get a a dao by its key
|
||||
*
|
||||
* it will throw an error if no Dao exists linked to the item key
|
||||
*
|
||||
* @param key the dao key to get
|
||||
* @returns the Dao you want as a singleton
|
||||
*/
|
||||
public static get<Key extends keyof DaoItem>(key: Key): DaoItem[Key] {
|
||||
if (!(key in this.daos)) {
|
||||
const dao = this.initDao(key)
|
||||
if (!dao) {
|
||||
throw new Error(`${key} has no valid Dao`)
|
||||
}
|
||||
this.daos[key] = dao as DaoItem[Key]
|
||||
}
|
||||
return this.daos[key] as DaoItem[Key]
|
||||
}
|
||||
|
||||
/**
|
||||
* init a dao by its key, it does not care if it exists or not
|
||||
*
|
||||
* @param item the element to init
|
||||
* @returns a new initialized dao or undefined if no dao is linked
|
||||
*/
|
||||
private static initDao(item: keyof DaoItem): any | undefined {
|
||||
switch (item) {
|
||||
default: return undefined
|
||||
}
|
||||
}
|
||||
}
|
243
src/models/adapters/adapter-utils.ts
Normal file
243
src/models/adapters/adapter-utils.ts
Normal file
@ -0,0 +1,243 @@
|
||||
import { objectFind, objectLoop } from '@dzeio/object-util'
|
||||
import { Sort, type Query, type QueryList, type QueryValues } from 'models/query'
|
||||
|
||||
export declare type AllowedValues = string | number | bigint | boolean | null | undefined
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
export function filter<T extends Record<string, unknown>>(query: Query<T>, results: Array<T>, options?: { debug?: boolean }): { filtered: Array<T>, unpaginatedLength: number } {
|
||||
if (options?.debug) {
|
||||
console.log('Query', query)
|
||||
}
|
||||
|
||||
// filter
|
||||
let filtered = results.filter((it) => {
|
||||
const res = objectLoop(query, (value, key) => {
|
||||
if (key === '$or') {
|
||||
for (const sub of value as Array<QueryList<T>>) {
|
||||
const final = filterEntry(sub, it)
|
||||
// eslint-disable-next-line max-depth
|
||||
if (final) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
if ((key as string).startsWith('$')) {
|
||||
return true
|
||||
}
|
||||
return filterEntry(query, it)
|
||||
})
|
||||
// console.log(it, res)
|
||||
return res
|
||||
})
|
||||
if (options?.debug) {
|
||||
console.log('postFilters', filtered)
|
||||
}
|
||||
|
||||
// sort
|
||||
if (query.$sort) {
|
||||
// temp until better solution is found
|
||||
const first = objectFind(query.$sort, () => true)
|
||||
filtered = filtered.sort((objA, objB) => {
|
||||
const a = objA[first!.key]
|
||||
const b = objB[first!.key]
|
||||
const ascend = first?.value !== Sort.DESC
|
||||
if (typeof a === 'number' && typeof b === 'number') {
|
||||
if (ascend) {
|
||||
return a - b
|
||||
} else {
|
||||
return b - a
|
||||
}
|
||||
}
|
||||
if (a instanceof Date && b instanceof Date) {
|
||||
if (ascend) {
|
||||
return a.getTime() - b.getTime()
|
||||
} else {
|
||||
return b.getTime() - a.getTime()
|
||||
}
|
||||
}
|
||||
if (typeof a === 'string' && typeof b === 'string') {
|
||||
if (ascend) {
|
||||
return a.localeCompare(b)
|
||||
} else {
|
||||
return b.localeCompare(a)
|
||||
}
|
||||
|
||||
}
|
||||
if (ascend) {
|
||||
return a > b ? 1 : -1
|
||||
}
|
||||
return a > b ? -1 : 1
|
||||
})
|
||||
}
|
||||
if (options?.debug) {
|
||||
console.log('postSort', filtered)
|
||||
}
|
||||
|
||||
// length of the query assuming a single page
|
||||
const unpaginatedLength = filtered.length
|
||||
// limit
|
||||
if (query.$offset || query.$limit) {
|
||||
const offset = query.$offset ?? 0
|
||||
filtered = filtered.slice(offset, offset + (query.$limit ?? Infinity))
|
||||
}
|
||||
if (options?.debug) {
|
||||
console.log('postLimit', filtered)
|
||||
}
|
||||
|
||||
return { filtered, unpaginatedLength }
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param query the query of the entry
|
||||
* @param item the implementation of the item
|
||||
* @returns if it should be kept or not
|
||||
*/
|
||||
export function filterEntry<T extends Record<string, unknown>>(query: QueryList<T>, item: T): boolean {
|
||||
|
||||
const res = objectLoop(query as any, (queryValue, key: keyof typeof query) => {
|
||||
/**
|
||||
* TODO: handle $keys
|
||||
*/
|
||||
if ((key as string).startsWith('$')) {
|
||||
return true
|
||||
}
|
||||
|
||||
return filterValue(item[key], queryValue)
|
||||
})
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
/**
|
||||
* indicate if a value should be kept by an ENTIRE query
|
||||
*
|
||||
* @param value the value to filter
|
||||
* @param query the full query
|
||||
* @returns if the query should keep the value or not
|
||||
*/
|
||||
function filterValue<T extends AllowedValues>(value: any, query: QueryValues<T>) {
|
||||
if (typeof query !== 'object' || query === null || query instanceof RegExp || Array.isArray(query)) {
|
||||
return filterItem(value, query)
|
||||
}
|
||||
|
||||
// loop through each keys of the query
|
||||
// eslint-disable-next-line arrow-body-style
|
||||
return objectLoop(query, (querySubValue: any, queryKey: any) => {
|
||||
return filterItem(value, { [queryKey]: querySubValue } as QueryValues<T>)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param value the value to check
|
||||
* @param query a SINGLE query to check against
|
||||
* @returns if the value should be kept or not
|
||||
*/
|
||||
// eslint-disable-next-line complexity
|
||||
function filterItem(value: any, query: QueryValues<AllowedValues>): boolean {
|
||||
// ignore checks using undefined (to check nullability, you MUST use `null` instead)
|
||||
if (typeof query === 'undefined') {
|
||||
return true
|
||||
}
|
||||
/**
|
||||
* check if the value is null
|
||||
*/
|
||||
if (query === null) {
|
||||
return typeof value === 'undefined' || value === null
|
||||
}
|
||||
|
||||
if (query instanceof RegExp) {
|
||||
return query.test(typeof value === 'string' ? value : value.toString())
|
||||
}
|
||||
|
||||
/**
|
||||
* the item has no value to compare the query against
|
||||
*/
|
||||
if (value === null || typeof value === 'undefined') {
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* strict value check by default
|
||||
*/
|
||||
if (!(typeof query === 'object')) {
|
||||
return query === value
|
||||
}
|
||||
|
||||
if ('$in' in query && Array.isArray(value)) {
|
||||
return value.includes(query.$in)
|
||||
}
|
||||
|
||||
/**
|
||||
* Array checking and $in
|
||||
*/
|
||||
if (Array.isArray(query) || '$in' in query) {
|
||||
const arr = Array.isArray(query) ? query : query.$in
|
||||
return arr.includes(value)
|
||||
}
|
||||
|
||||
if ('$inc' in query) {
|
||||
return (value.toString() as string).toLowerCase().includes(query.$inc!.toString().toLowerCase())
|
||||
}
|
||||
|
||||
if ('$eq' in query) {
|
||||
return query.$eq === value
|
||||
}
|
||||
|
||||
/**
|
||||
* comparison operators for numbers and dates
|
||||
*/
|
||||
if ('$gt' in query) {
|
||||
value = value instanceof Date ? value.getTime() : value
|
||||
const comparedValue = query.$gt instanceof Date ? query.$gt.getTime() : query.$gt
|
||||
return typeof value === 'number' && typeof comparedValue === 'number' && value > comparedValue
|
||||
}
|
||||
|
||||
if ('$lt' in query) {
|
||||
value = value instanceof Date ? value.getTime() : value
|
||||
const comparedValue = query.$lt instanceof Date ? query.$lt.getTime() : query.$lt
|
||||
return typeof value === 'number' && typeof comparedValue === 'number' && value < comparedValue
|
||||
}
|
||||
|
||||
if ('$gte' in query) {
|
||||
value = value instanceof Date ? value.getTime() : value
|
||||
const comparedValue = query.$gte instanceof Date ? query.$gte.getTime() : query.$gte
|
||||
return typeof value === 'number' && typeof comparedValue === 'number' && value >= comparedValue
|
||||
}
|
||||
|
||||
if ('$lte' in query) {
|
||||
value = value instanceof Date ? value.getTime() : value
|
||||
const comparedValue = query.$lte instanceof Date ? query.$lte.getTime() : query.$lte
|
||||
return typeof value === 'number' && typeof comparedValue === 'number' && value <= comparedValue
|
||||
}
|
||||
|
||||
if ('$len' in query && Array.isArray(value)) {
|
||||
return value.length === query.$len
|
||||
}
|
||||
|
||||
/**
|
||||
* Logical Operators
|
||||
*/
|
||||
if ('$or' in query && Array.isArray(query.$or)) {
|
||||
return !!query.$or.find((it) => filterValue(value, it as QueryValues<any>))
|
||||
}
|
||||
if ('$and' in query && Array.isArray(query.$and)) {
|
||||
return !query.$and.find((it) => !filterValue(value, it as QueryValues<any>))
|
||||
}
|
||||
|
||||
if ('$not' in query) {
|
||||
return !filterValue(value, query.$not as QueryValues<any>)
|
||||
}
|
||||
|
||||
if ('$nor' in query && Array.isArray(query.$nor)) {
|
||||
return !query.$nor.find((it) => filterValue(value, it as QueryValues<any>))
|
||||
}
|
||||
|
||||
if ('$nand' in query && Array.isArray(query.$nand)) {
|
||||
return !!query.$nand.find((it) => !filterValue(value, it as QueryValues<any>))
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
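A small sketch of how the in-memory `filter` helper above is meant to be driven (illustrative only; the import paths assume the `baseUrl: src` configuration):

import { filter } from 'models/adapters/adapter-utils'
import { Sort } from 'models/query'

const rows = [
    { id: 1, name: 'alice', age: 31 },
    { id: 2, name: 'bob', age: 25 },
    { id: 3, name: 'carol', age: 40 }
]

// keep people aged 30 or more, sorted by age, at most 10 results
const { filtered, unpaginatedLength } = filter(
    { age: { $gte: 30 }, $sort: { age: Sort.DESC }, $limit: 10 },
    rows
)
// filtered keeps carol and alice; bob is filtered out, unpaginatedLength is 2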
82
src/models/adapters/index.ts
Normal file
82
src/models/adapters/index.ts
Normal file
@ -0,0 +1,82 @@
|
||||
import type { SchemaInfer, SchemaObject } from '@dzeio/schema'
|
||||
import type { Query } from 'models/query'
|
||||
|
||||
type OneOrMany<T> = T | Array<T>
|
||||
|
||||
export interface DBPull<M extends SchemaObject> {
|
||||
/**
|
||||
* total number of rows that are valid with the specified query
|
||||
*/
|
||||
rows: number
|
||||
/**
|
||||
* total number of rows in the table
|
||||
*/
|
||||
rowsTotal: number
|
||||
|
||||
/**
|
||||
* current page number
|
||||
*/
|
||||
page: number
|
||||
|
||||
/**
|
||||
* total amount of pages
|
||||
*/
|
||||
pageTotal: number
|
||||
|
||||
/**
|
||||
* the data fetched
|
||||
*/
|
||||
data: Array<SchemaInfer<M>>
|
||||
}
|
||||
|
||||
/**
|
||||
* the Dao is the object that connects the database or source to the application layer
|
||||
*
|
||||
* you MUST call it through the `DaoFactory` file
|
||||
*/
|
||||
export default interface DaoAdapter<M extends SchemaObject> {
|
||||
/**
|
||||
* create a new object in the remote source
|
||||
*
|
||||
* @param obj the object to create
|
||||
*/
|
||||
create?(obj: Partial<SchemaInfer<M>>): Promise<SchemaInfer<M> | null>
|
||||
create?(obj: Array<Partial<SchemaInfer<M>>>): Promise<Array<SchemaInfer<M> | null>>
|
||||
create?(obj: OneOrMany<Partial<SchemaInfer<M>>>): Promise<OneOrMany<SchemaInfer<M> | null>>
|
||||
|
||||
/**
|
||||
* read from the remote source
|
||||
*
|
||||
* @param query the query to filter/sort results
|
||||
*/
|
||||
read?(query?: Query<SchemaInfer<M>>): Promise<DBPull<M>>
|
||||
|
||||
/**
|
||||
* update an object to the remote source
|
||||
*
|
||||
* @param obj the object to update
|
||||
*/
|
||||
update?(obj: Partial<SchemaInfer<M>>): Promise<SchemaInfer<M> | null>
|
||||
|
||||
/**
|
||||
* depending if the object already exists or not
|
||||
* it will update an existing object or create a new one
|
||||
*
|
||||
* @param obj the object to insert/update
|
||||
*/
|
||||
upsert?(obj: Partial<SchemaInfer<M>>): Promise<SchemaInfer<M> | null>
|
||||
|
||||
/**
|
||||
* update an object to the remote source
|
||||
*
|
||||
* @param id (DEPRECATED) the ID of the object
|
||||
* @param obj the object to patch (MUST include ids, and changes)
|
||||
*/
|
||||
patch?(id: string, obj: Partial<SchemaInfer<M>>): Promise<SchemaInfer<M> | null>
|
||||
|
||||
/**
|
||||
* delete an object from the source
|
||||
* @param obj the object ot delete (it must at least include the id(s))
|
||||
*/
|
||||
delete?(obj: Partial<SchemaInfer<M>>): Promise<boolean>
|
||||
}
|
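To make the contract concrete, here is a hypothetical read-only in-memory adapter (a sketch, not part of this commit); it reuses the `filter` helper from `adapter-utils`:

import type { SchemaInfer, SchemaObject } from '@dzeio/schema'
import type DaoAdapter from 'models/adapters'
import type { DBPull } from 'models/adapters'
import { filter } from 'models/adapters/adapter-utils'
import type { Query } from 'models/query'

export default class MemoryAdapter<M extends SchemaObject> implements DaoAdapter<M> {
    public constructor(private readonly items: Array<SchemaInfer<M>>) {}

    // only `read` is implemented; every other method of the interface is optional
    public async read(query?: Query<SchemaInfer<M>>): Promise<DBPull<M>> {
        const { filtered } = filter(query ?? {}, this.items)
        return {
            rows: filtered.length,
            rowsTotal: this.items.length,
            page: 1,
            pageTotal: 1,
            data: filtered
        }
    }
}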
382
src/models/adapters/postgres-adapter.ts
Normal file
382
src/models/adapters/postgres-adapter.ts
Normal file
@ -0,0 +1,382 @@
|
||||
/* eslint-disable @typescript-eslint/no-non-null-assertion */
|
||||
import { objectFind, objectKeys, objectLoop, objectMap, objectRemap, objectSize, objectValues } from '@dzeio/object-util'
|
||||
|
||||
import type { SchemaInfer, SchemaItem, SchemaObject } from '@dzeio/schema'
|
||||
import logger from 'config/logger'
|
||||
import PostgresClient from 'models/clients/postgres-client'
|
||||
import { Sort, type Query } from 'models/query'
|
||||
import crypto from 'node:crypto'
|
||||
import type DaoAdapter from '.'
|
||||
import type { DBPull } from '.'
|
||||
import { filter } from './adapter-utils'
|
||||
|
||||
type OneOrMany<T> = T | Array<T>
|
||||
|
||||
const specialKeywords = ['user', 'end']
|
||||
type DBValue = string | number | boolean | Date
|
||||
|
||||
export default class PostgresAdapter<M extends SchemaObject> implements DaoAdapter<M> {
|
||||
private readonly id: Array<string> = []
|
||||
|
||||
public constructor(
|
||||
/**
|
||||
* the schema used by the adapter
|
||||
*/
|
||||
public readonly schema: M,
|
||||
/**
|
||||
* the table name
|
||||
*/
|
||||
public readonly table: string,
|
||||
|
||||
/**
|
||||
* additional options to make the adapter work
|
||||
*/
|
||||
private readonly options?: {
|
||||
/**
|
||||
* log the requests made to Postgres
|
||||
*/
|
||||
debug?: boolean
|
||||
}
|
||||
) {
|
||||
objectLoop(this.schema.model as Record<string, SchemaItem>, (tmp, key) => {
|
||||
if (tmp.attributes.includes('db:auto')) {
|
||||
this.id.push(key)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// TODO: make it clearer what it does
|
||||
public async create(obj: Partial<SchemaInfer<M>>): Promise<SchemaInfer<M> | null>
|
||||
public async create(obj: Array<Partial<SchemaInfer<M>>>): Promise<Array<SchemaInfer<M> | null>>
|
||||
public async create(original: OneOrMany<Partial<SchemaInfer<M>>>): Promise<OneOrMany<SchemaInfer<M> | null>> {
|
||||
// Convert to an array of elements
|
||||
const obj = Array.isArray(original) ? original : [original]
|
||||
if (!obj.length) {
|
||||
return []
|
||||
}
|
||||
|
||||
// handle automated values
|
||||
objectLoop(this.schema.model as Record<string, SchemaItem>, (item, key) => {
|
||||
if (item.attributes.includes('db:created') || item.attributes.includes('db:updated')) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj.forEach((it) => it[key] = new Date())
|
||||
} else if (item.attributes.includes('db:auto')) {
|
||||
obj.forEach((objet) => {
|
||||
if (objet[key]) {
|
||||
return
|
||||
}
|
||||
|
||||
if (item.isOfType('')) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
objet[key] = crypto.randomBytes(16).toString('hex')
|
||||
} else if (item.isOfType(123)) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
objet[key] = crypto.randomBytes(16).readUint32BE()
|
||||
} else {
|
||||
throw new Error('cannot generate an ID: the column type does not support auto-generation')
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// parse the data with the Model
|
||||
const clone = obj.map((o) => {
|
||||
const { object, errors } = this.schema.parse(o)
|
||||
if (errors) {
|
||||
console.error(errors)
|
||||
throw new Error(`Invalid data given to create the final object: ${JSON.stringify(errors)}`)
|
||||
}
|
||||
return object
|
||||
})
|
||||
|
||||
// prepare the database query
|
||||
const keys = objectKeys(clone.reduce((a, it) => ({ ...a, ...it }), {}))
|
||||
.map((it) => {
|
||||
if (typeof it === 'string' && specialKeywords.includes(it)) { // handle the special keyword
|
||||
return `"${it}"`
|
||||
}
|
||||
return it
|
||||
})
|
||||
const keysStr = keys.join(', ')
|
||||
const valuesStr = clone.map((_, i) => {
|
||||
const keyValuesStr = keys.map((_1, idx) => `$${i * keys.length + idx + 1}`).join(', ')
|
||||
return `(${keyValuesStr})`
|
||||
})
|
||||
const req = `INSERT INTO ${this.table} (${keysStr}) VALUES ${valuesStr.join(', ')};`
|
||||
const client = await PostgresClient.get()
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
|
||||
const params = clone.flatMap((o: any) => keys.map((key) => this.valueToDB(key, o[key])))
|
||||
|
||||
if (this.options?.debug) {
|
||||
logger.debug(req, params)
|
||||
}
|
||||
|
||||
// send to the database
|
||||
try {
|
||||
await client.execute(req, params)
|
||||
} catch (e) {
|
||||
logger.debug(e, req, params)
|
||||
return null
|
||||
}
|
||||
const transform = (o: SchemaObject) => this.schema.parse(o).object ?? null
|
||||
// @ts-expect-error conditionnal output based on original input
|
||||
return (Array.isArray(original) ? clone.map(transform) : transform(clone[0]))
|
||||
}
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
public async read(query?: Query<SchemaInfer<M>>): Promise<DBPull<M>> {
|
||||
// prepare the request to the database based on the query parameters
|
||||
const req: Array<string> = ['SELECT', '*', 'FROM', this.table]
|
||||
const params: Array<DBValue> = []
|
||||
if (query) {
|
||||
const keyValues: Record<string, string | number | boolean | Date> = {}
|
||||
objectLoop(query, (value, key) => {
|
||||
if (typeof value === 'object' || String(key).startsWith('$')) {
|
||||
return
|
||||
}
|
||||
keyValues[String(key)] = this.valueToDB(key, value as any)
|
||||
})
|
||||
if (objectSize(keyValues)) {
|
||||
req.push('WHERE')
|
||||
objectLoop(keyValues, (value, key, index) => {
|
||||
params.push(value)
|
||||
req.push(`"${key}"=$${index + 1}`)
|
||||
if (index < objectSize(keyValues) - 1) {
|
||||
req.push('AND')
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const client = await PostgresClient.get()
|
||||
|
||||
if (this.options?.debug) {
|
||||
logger.debug(req)
|
||||
}
|
||||
|
||||
// read from the database
|
||||
let res: Array<Record<string, any>>
|
||||
try {
|
||||
res = await client.execute(req.join(' '), params)
|
||||
} catch (error) {
|
||||
console.error('error running request')
|
||||
console.error(req)
|
||||
throw error
|
||||
}
|
||||
if (res.length === 0) {
|
||||
return {
|
||||
rows: 0,
|
||||
pageTotal: 0,
|
||||
page: 1,
|
||||
rowsTotal: 0,
|
||||
data: []
|
||||
}
|
||||
}
|
||||
|
||||
if (this.options?.debug) {
|
||||
logger.debug('preEdits', res)
|
||||
}
|
||||
|
||||
// post-process the data from the database
|
||||
const raw = res
|
||||
.map((obj) => {
|
||||
// remap to use system value instead of db values
|
||||
obj = objectRemap(this.schema.model as Record<string, SchemaItem>, (_, key) => ({
|
||||
key,
|
||||
value: this.dbToValue(key as any, (obj as Record<string, unknown>)[key])
|
||||
}))
|
||||
|
||||
// validate the schema
|
||||
const tmp = this.schema.parse(obj)
|
||||
if (tmp.valid) {
|
||||
return tmp.object
|
||||
}
|
||||
logger.debug(tmp.errors)
|
||||
return null
|
||||
})
|
||||
.filter((it): it is SchemaInfer<M> => !!it)
|
||||
|
||||
// temporary change of behaviour to use the new and better query system
|
||||
if ((!query?.$sort) && objectFind(this.schema.model as Record<string, SchemaItem>, (_, key) => key === 'created')) {
|
||||
// temp fix for the sorting algorithm
|
||||
if (!query) {
|
||||
// @ts-expect-error normal currently
|
||||
query = { $sort: { created: Sort.DESC } }
|
||||
} else {
|
||||
query.$sort = { created: Sort.DESC }
|
||||
}
|
||||
}
|
||||
let dataset = raw
|
||||
|
||||
|
||||
if (this.options?.debug) {
|
||||
logger.debug('preFilters', dataset)
|
||||
}
|
||||
|
||||
if (query) {
|
||||
dataset = filter(query, dataset, this.options).filtered
|
||||
}
|
||||
return {
|
||||
rows: dataset.length,
|
||||
rowsTotal: res.length,
|
||||
page: 1,
|
||||
pageTotal: 1,
|
||||
// page: page,
|
||||
// pageTotal: pageLimit ? res.rowLength / pageLimit : 1,
|
||||
data: dataset
|
||||
}
|
||||
}
|
||||
|
||||
public async update(obj: SchemaInfer<M>): Promise<SchemaInfer<M> | null> {
|
||||
return this.patch(obj)
|
||||
}
|
||||
|
||||
public async patch(id: Partial<SchemaInfer<M>>): Promise<SchemaInfer<M> | null>
|
||||
public async patch(id: string, obj: Partial<SchemaInfer<M>>): Promise<SchemaInfer<M> | null>
|
||||
// eslint-disable-next-line complexity
|
||||
public async patch(id: string | Partial<SchemaInfer<M>>, obj?: Partial<SchemaInfer<M>>): Promise<SchemaInfer<M> | null> {
|
||||
if (!obj) {
|
||||
if (typeof id === 'string') {
|
||||
return null
|
||||
}
|
||||
obj = { ...id }
|
||||
}
|
||||
|
||||
// const tmp = this.schema.validate(obj)
|
||||
// // if (tmp.error) {
|
||||
// // throw new Error(`obj invalid can\'t patch ${JSON.stringify(tmp.error)}`)
|
||||
// // }
|
||||
|
||||
// obj = tmp.object
|
||||
|
||||
// update the updated time
|
||||
objectLoop(this.schema.model as Record<string, SchemaItem>, (item, key) => {
|
||||
if (item.attributes.includes('db:updated')) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = new Date()
|
||||
}
|
||||
})
|
||||
|
||||
// build the request parts
|
||||
const parts: Array<string> = ['UPDATE', this.table, 'SET']
|
||||
const params: Array<any> = []
|
||||
|
||||
// remove ids
|
||||
for (const tmp of this.id) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
|
||||
delete obj[tmp]
|
||||
}
|
||||
|
||||
// map the items to update
|
||||
const keys = objectMap(obj as object, (_, key: string, idx) => {
|
||||
if (specialKeywords.includes(key)) {
|
||||
return `"${key}"=$${idx + 1}`
|
||||
}
|
||||
|
||||
return `${key}=$${idx + 1}`
|
||||
})
|
||||
parts.push(keys.join(', '))
|
||||
params.push(...objectValues(obj as Record<string, unknown>))
|
||||
|
||||
// filter by the ids
|
||||
parts.push('WHERE')
|
||||
const read: Partial<any> = {}
|
||||
for (let idx = 0; idx < this.id.length; idx++) {
|
||||
const key = this.id[idx]!
|
||||
|
||||
if (idx > 0) {
|
||||
parts.push('AND')
|
||||
}
|
||||
parts.push(`${key}=$${params.length + 1}`)
|
||||
const value = obj[key] ?? (typeof id === 'string' ? id : id[key]) as SchemaInfer<M>[keyof M]
|
||||
read[key] = this.valueToDB(key, value)
|
||||
if (!value) {
|
||||
throw new Error(`Missing id (${key})`)
|
||||
}
|
||||
params.push(value)
|
||||
}
|
||||
|
||||
const req = parts.join(' ')
|
||||
const client = await PostgresClient.get()
|
||||
|
||||
if (this.options?.debug) {
|
||||
logger.debug(req, params)
|
||||
}
|
||||
|
||||
try {
|
||||
const res = await client.execute(req, params)
|
||||
// logger.debug(res, req)
|
||||
if (this.options?.debug) {
|
||||
logger.debug('post patch result', res, req)
|
||||
}
|
||||
return (await this.read(read)).data[0] ?? null
|
||||
} catch (e) {
|
||||
logger.debug(e, req, params)
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
public async delete(obj: Partial<SchemaInfer<M>>): Promise<boolean> {
|
||||
const parts = ['DELETE', 'FROM', this.table, 'WHERE']
|
||||
|
||||
objectLoop(obj as SchemaInfer<M>, (value, key: string, idx) => {
|
||||
if (idx > 0) {
|
||||
parts.push('AND')
|
||||
}
|
||||
|
||||
if (specialKeywords.includes(key)) {
|
||||
key = `"${key}"`
|
||||
}
|
||||
switch (typeof value) {
|
||||
case 'string':
|
||||
parts.push(`${key} = '${value}'`)
|
||||
break
|
||||
|
||||
default:
|
||||
parts.push(`${key} = ${value as any}`)
|
||||
break
|
||||
}
|
||||
})
|
||||
|
||||
const client = await PostgresClient.get()
|
||||
|
||||
try {
|
||||
await client.execute(parts.join(' '))
|
||||
} catch (e) {
|
||||
console.error(e, parts)
|
||||
throw e
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
private valueToDB(key: keyof SchemaInfer<M>, value: unknown): DBValue {
|
||||
const item: SchemaItem<unknown> = (this.schema.model as Record<typeof key, SchemaItem>)[key]
|
||||
|
||||
if (item.isOfType({})) {
|
||||
return JSON.stringify(value)
|
||||
}
|
||||
|
||||
return value as DBValue
|
||||
}
|
||||
|
||||
private dbToValue<Key extends keyof SchemaInfer<M>>(key: Key, value: unknown): SchemaInfer<M>[Key] {
|
||||
const item: SchemaItem<unknown> = (this.schema.model as Record<Key, SchemaItem>)[key]
|
||||
|
||||
if (value instanceof Date) {
|
||||
// transform the Date back into a UTC one
|
||||
const localeDateString = value.toISOString()
|
||||
return new Date(localeDateString) as SchemaInfer<M>[Key]
|
||||
}
|
||||
|
||||
if (item.isOfType(543) && typeof value === 'string') {
|
||||
return parseFloat(value) as SchemaInfer<M>[Key]
|
||||
}
|
||||
|
||||
if (item.isOfType({}) && typeof value === 'string') {
|
||||
return JSON.parse(value) as SchemaInfer<M>[Key]
|
||||
}
|
||||
|
||||
return value as SchemaInfer<M>[Key]
|
||||
}
|
||||
}
|
98
src/models/clients/index.ts
Normal file
98
src/models/clients/index.ts
Normal file
@ -0,0 +1,98 @@
|
||||
import config from 'config/models'
|
||||
import type MigrationObj from 'models/migrations'
|
||||
|
||||
export enum ConnectionStatus {
|
||||
DISCONNECTED,
|
||||
MIGRATING,
|
||||
READY
|
||||
}
|
||||
|
||||
export interface ClientStatic<C extends Client = Client> {
|
||||
get(): Promise<C>
|
||||
}
|
||||
|
||||
export default abstract class Client {
|
||||
|
||||
public status: ConnectionStatus = ConnectionStatus.DISCONNECTED
|
||||
|
||||
/**
|
||||
* whether a migration run is currently in progress
|
||||
*/
|
||||
public isMigrating = false
|
||||
|
||||
/**
|
||||
* Migrate the database to the latest version
|
||||
*/
|
||||
public async migrateToLatest() {
|
||||
if (await this.isMigrated() || this.isMigrating) {
|
||||
return
|
||||
}
|
||||
this.isMigrating = true
|
||||
|
||||
const migrations = this.getLocalMigrations()
|
||||
const remotes = await this.loadMigrations()
|
||||
|
||||
const missingFromRemote = migrations.filter((it) => !remotes.includes(it.date))
|
||||
for (const migration of missingFromRemote) {
|
||||
await migration.up(this)
|
||||
await this.setMigrated(migration)
|
||||
}
|
||||
|
||||
this.isMigrating = false
|
||||
}
|
||||
|
||||
public getLocalMigrations(): ReadonlyArray<MigrationObj> {
|
||||
return config.migrations as ReadonlyArray<MigrationObj>
|
||||
}
|
||||
|
||||
/**
|
||||
* wait until every migration is done or has failed
|
||||
*/
|
||||
public async waitForMigrations(): Promise<void> {
|
||||
// skip if already migrated
|
||||
if (await this.isMigrated()) {
|
||||
return
|
||||
}
|
||||
|
||||
// start migrations if not started
|
||||
if (!this.isMigrating) {
|
||||
await this.migrateToLatest()
|
||||
}
|
||||
|
||||
// wait for migrations to end
|
||||
while (!await this.isMigrated()) {
|
||||
await new Promise((res) => {
|
||||
setTimeout(res, 100)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
public async isMigrated(): Promise<boolean> {
|
||||
const migrations = this.getLocalMigrations()
|
||||
const remotes = await this.loadMigrations()
|
||||
|
||||
// TODO: compare migrations to remotes
|
||||
return migrations.length === remotes.length
|
||||
}
|
||||
|
||||
/**
|
||||
* load migrations that are in the system
|
||||
*/
|
||||
public abstract loadMigrations(): Promise<Array<number>>
|
||||
|
||||
/**
|
||||
* record that the given migration has been applied
|
||||
*/
|
||||
public abstract setMigrated(migration: MigrationObj): Promise<void>
|
||||
|
||||
public abstract execute(query: string, params?: Array<unknown> | object, ...options: Array<any>): Promise<Array<Record<string, unknown>>>
|
||||
|
||||
public abstract connect(): Promise<void>
|
||||
|
||||
/**
|
||||
* indicate if the client is ready for new requests (not if migrations are done or not)
|
||||
*/
|
||||
public abstract isReady(): Promise<boolean>
|
||||
}
|
108
src/models/clients/postgres-client.ts
Normal file
108
src/models/clients/postgres-client.ts
Normal file
@ -0,0 +1,108 @@
|
||||
import Logger from 'config/logger'
|
||||
import { getEnv, requireEnv } from 'libs/env'
|
||||
import type Migration from 'models/migrations'
|
||||
import pg from 'pg'
|
||||
import Client from '.'
|
||||
const Postgres = pg.Pool
|
||||
|
||||
function parseIntUndefined(input: string | undefined) {
|
||||
if (typeof input === 'string') {
|
||||
return Number.parseInt(input, 10)
|
||||
}
|
||||
return input
|
||||
}
|
||||
|
||||
interface Request {
|
||||
query: string
|
||||
params: Array<unknown> | object | undefined
|
||||
}
|
||||
|
||||
export default class PostgresClient extends Client {
|
||||
private static instance: PostgresClient = new PostgresClient()
|
||||
private pool?: pg.Pool | null
|
||||
private lastRequest: Request | undefined
|
||||
|
||||
/**
|
||||
* get the shared Postgres client instance
|
||||
*/
|
||||
public static async get() {
|
||||
return PostgresClient.instance
|
||||
}
|
||||
|
||||
public override async loadMigrations(): Promise<Array<number>> {
|
||||
try {
|
||||
const res = (await this.execute('SELECT migration FROM migrations'))
|
||||
.map((it) => typeof it.migration === 'number' ? it.migration : Number.parseInt(it.migration as string, 10))
|
||||
return res
|
||||
} catch (err) {
|
||||
// table does not exists
|
||||
await this.execute('CREATE TABLE IF NOT EXISTS migrations (migration INT PRIMARY KEY, created TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP );')
|
||||
Logger.alert('Migrations table does not exist')
|
||||
Logger.alert(err)
|
||||
return []
|
||||
}
|
||||
}
|
||||
public override async setMigrated(version: Migration): Promise<void> {
|
||||
await this.execute('INSERT INTO migrations (migration) VALUES ($1);', [version.date.toString()])
|
||||
}
|
||||
public override async execute(query: string, params?: Array<unknown>, ..._options: Array<unknown>): Promise<Array<Record<string, unknown>>> {
|
||||
if (!this.pool || !await this.isReady()) {
|
||||
throw new Error('not connected')
|
||||
}
|
||||
this.lastRequest = { query, params }
|
||||
|
||||
const client = await this.pool.connect()
|
||||
try {
|
||||
// Logger.debug('db', query, params)
|
||||
const res = await client.query<Record<string, unknown>>(query, params)
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
|
||||
if (!res.rows) {
|
||||
throw new Error('Strangely, Postgres does not return the `rows` parameter :(')
|
||||
}
|
||||
|
||||
return res.rows
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
throw new Error('Error connecting to Postgres')
|
||||
} finally {
|
||||
client.release()
|
||||
}
|
||||
}
|
||||
|
||||
public override async connect(): Promise<void> {
|
||||
if (this.pool) {
|
||||
return
|
||||
}
|
||||
Logger.info('starting Postgres client')
|
||||
|
||||
this.pool = new Postgres({
|
||||
host: requireEnv('POSTGRES_HOST'),
|
||||
user: requireEnv('POSTGRES_USERNAME'),
|
||||
password: requireEnv('POSTGRES_PASSWORD'),
|
||||
port: Number.parseInt(getEnv('POSTGRES_PORT', '5432'), 10),
|
||||
database: requireEnv('POSTGRES_DATABASE', 'projectmanager'),
|
||||
connectionTimeoutMillis: parseIntUndefined(getEnv('POSTGRES_CONNECTION_TIMEOUT')),
|
||||
idleTimeoutMillis: parseIntUndefined(getEnv('POSTGRES_IDLE_TIMEOUT')),
|
||||
max: parseIntUndefined(getEnv('POSTGRES_MAX_CLIENT')),
|
||||
// debug(connection, query, parameters, paramTypes) {
|
||||
// console.log(`${query}, ${parameters}`);
|
||||
// },
|
||||
})
|
||||
.on('error', (err) => {
|
||||
console.error('AN ERROR OCCURED INSIDE POSTGRESQL', err)
|
||||
console.error('THINGS MIGHT NOT WORK UNTIL RESTARTED')
|
||||
console.error('RESTARTING THE POSTGRESQL CLIENT TO TRY AGAIN.')
|
||||
if (this.lastRequest) {
|
||||
console.error('LAST REQUEST:', this.lastRequest.query, this.lastRequest.params)
|
||||
}
|
||||
this.pool = null
|
||||
void this.connect()
|
||||
})
|
||||
Logger.info('Connected to postgres')
|
||||
}
|
||||
|
||||
public override async isReady(): Promise<boolean> {
|
||||
return !!this.pool
|
||||
}
|
||||
}
|
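A hedged sketch of the startup sequence these clients are built for (connect, then bring the schema up to date); not part of the commit itself:

import PostgresClient from 'models/clients/postgres-client'

const client = await PostgresClient.get()
await client.connect()

// apply every local migration the database does not know about yet,
// then block until the schema is fully migrated
await client.migrateToLatest()
await client.waitForMigrations()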
71
src/models/dao-factory.ts
Normal file
71
src/models/dao-factory.ts
Normal file
@ -0,0 +1,71 @@
|
||||
import { objectPick, objectRemap } from '@dzeio/object-util'
|
||||
import config from 'config/models'
|
||||
import type Client from 'models/clients'
|
||||
import type Dao from 'models/dao'
|
||||
import type Migration from 'models/migrations'
|
||||
|
||||
export interface DaoConfig {
|
||||
/**
|
||||
* the main client used to handle migrations
|
||||
*/
|
||||
mainClient: Client | null
|
||||
/**
|
||||
* list the migrations of the app
|
||||
*/
|
||||
migrations: Array<Migration> | null
|
||||
/**
|
||||
* indicate the list of models used through the app
|
||||
*/
|
||||
models: Record<string, Dao>
|
||||
}
|
||||
|
||||
/**
|
||||
* Class to get any DAO
|
||||
*/
|
||||
|
||||
type Remapped<T> = {
|
||||
// @ts-expect-error it works so idc
|
||||
[K in keyof T as `${K}Dao`]: T[K]
|
||||
}
|
||||
|
||||
// biome-ignore lint/complexity/noStaticOnlyClass: <explanation>
|
||||
export default class DaoFactory {
|
||||
/**
|
||||
* get the total list of daos available
|
||||
* @returns the list of daos available
|
||||
*/
|
||||
public static getAll(): Record<string, Dao> {
|
||||
return config.models
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a dao by its key
|
||||
*
|
||||
* it will throw an error if no Dao exists linked to the item key
|
||||
*
|
||||
* @param key the dao key to get
|
||||
* @returns the Dao you want as a singleton
|
||||
*/
|
||||
public static get<Key extends keyof typeof config['models']>(...key: [Key]): typeof config['models'][Key]
|
||||
public static get<Key extends keyof typeof config['models']>(...key: Array<Key>): Remapped<Pick<typeof config['models'], Key>>
|
||||
public static get<Key extends keyof typeof config['models']>(...key: Array<Key>): Remapped<Pick<typeof config['models'], Key>> {
|
||||
if (key.length === 1) {
|
||||
// @ts-expect-error normal behavior
|
||||
return config.models[key[0]]
|
||||
}
|
||||
const t = objectPick(config.models, ...key)
|
||||
return objectRemap(t, (v, k) => ({
|
||||
// @ts-expect-error normal behavior
|
||||
key: `${k}Dao`,
|
||||
value: v
|
||||
}))
|
||||
}
|
||||
|
||||
/**
|
||||
* get the main client linked to migrations
|
||||
* @returns the main client
|
||||
*/
|
||||
public static async client(): ReturnType<(typeof config.mainClient)['get']> {
|
||||
return config.mainClient.get()
|
||||
}
|
||||
}
|
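Illustrative only, with hypothetical `project` and `user` models registered in `config/models`:

import DaoFactory from 'models/dao-factory'

// a single key returns that Dao directly
const projectDao = DaoFactory.get('project')

// several keys return an object whose properties are suffixed with `Dao`
const { projectDao: projects, userDao: users } = DaoFactory.get('project', 'user')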
171
src/models/dao.ts
Normal file
171
src/models/dao.ts
Normal file
@ -0,0 +1,171 @@
|
||||
import type { SchemaInfer, SchemaObject } from '@dzeio/schema'
|
||||
import Logger from 'config/logger'
|
||||
import type DaoAdapter from './adapters'
|
||||
|
||||
/**
|
||||
* the Dao is the object that connects the database or source to the application layer
|
||||
*
|
||||
* you MUST call it through the `DaoFactory` file
|
||||
*/
|
||||
export default class Dao<S extends SchemaObject = SchemaObject> {
|
||||
|
||||
public constructor(
|
||||
public readonly schema: S,
|
||||
public readonly adapter: DaoAdapter<S>
|
||||
) { }
|
||||
|
||||
/**
|
||||
* insert a new object into the source
|
||||
*
|
||||
* @param obj the object to create
|
||||
* @returns the object with its id filled if created or null otherwise
|
||||
*/
|
||||
public async create(...params: Parameters<NonNullable<DaoAdapter<S>['create']>>): ReturnType<NonNullable<DaoAdapter<S>['create']>> {
|
||||
if (!this.adapter.create) {
|
||||
throw new Error('the Adapter does not allow you to create elements')
|
||||
}
|
||||
return this.adapter.create(...params)
|
||||
}
|
||||
|
||||
/**
|
||||
* insert a new object into the source
|
||||
*
|
||||
* @param obj the object to create
|
||||
* @returns the object with its id filled if created or null otherwise
|
||||
*/
|
||||
public async insert(...params: Parameters<NonNullable<DaoAdapter<S>['create']>>): ReturnType<NonNullable<DaoAdapter<S>['create']>> {
|
||||
return this.create(...params)
|
||||
}
|
||||
|
||||
/**
|
||||
* find the list of objects having elements from the query
|
||||
*
|
||||
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
|
||||
* @returns an array containing the list of elements that match with the query
|
||||
*/
|
||||
|
||||
public async findAll(...params: Parameters<NonNullable<DaoAdapter<S>['read']>>): ReturnType<NonNullable<DaoAdapter<S>['read']>> {
|
||||
if (!this.adapter.read) {
|
||||
throw new Error('the Adapter does not allow you to read from the remote source')
|
||||
}
|
||||
|
||||
const req = this.adapter.read(...params)
|
||||
try {
|
||||
return await Promise.race([req, new Promise<void>((_, rej) => {
|
||||
setTimeout(() => {
|
||||
rej(new Error('timeout running request'))
|
||||
}, 3000)
|
||||
})]) as Awaited<typeof req>
|
||||
} catch (err) {
|
||||
Logger.error('DAO error:', err)
|
||||
return {
|
||||
rows: 0,
|
||||
rowsTotal: 0,
|
||||
page: 1,
|
||||
pageTotal: 0,
|
||||
data: []
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* find the list of objects having elements from the query
|
||||
*
|
||||
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
|
||||
* @returns an array containing the list of elements that match with the query
|
||||
*/
|
||||
public async find(...params: Parameters<NonNullable<DaoAdapter<S>['read']>>): ReturnType<NonNullable<DaoAdapter<S>['read']>> {
|
||||
return this.findAll(...params)
|
||||
}
|
||||
|
||||
/**
|
||||
* find an object by its id
|
||||
*
|
||||
* (shortcut to findOne({id: id}))
|
||||
*
|
||||
* @param id the id of the object
|
||||
* @returns
|
||||
*/
|
||||
public findById(id: any) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
|
||||
return this.findOne({ id: id })
|
||||
}
|
||||
|
||||
/**
|
||||
* find an object by its id
|
||||
*
|
||||
* (shortcut to `findOne({ id: id })`)
|
||||
*
|
||||
* @param id the id of the object
|
||||
* @returns
|
||||
*/
|
||||
public get(id: any) {
|
||||
return this.findById(id)
|
||||
}
|
||||
|
||||
/**
|
||||
* find the first element that match `query`
|
||||
*
|
||||
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
|
||||
* @returns the first element matching with the query or null otherwise
|
||||
*/
|
||||
public async findOne(query?: Parameters<this['findAll']>[0]): Promise<SchemaInfer<S> | null> {
|
||||
return (await this.findAll(query)).data[0] ?? null
|
||||
}
|
||||
|
||||
/**
|
||||
* update the remote reference of the object
|
||||
*
|
||||
* note: it will not try to insert an item (use `upsert` to handle this)
|
||||
*
|
||||
* @param obj the object to update
|
||||
* @returns an object if it was able to update or null otherwise
|
||||
*/
|
||||
public async update(...params: Parameters<NonNullable<DaoAdapter<S>['update']>>): ReturnType<NonNullable<DaoAdapter<S>['update']>> {
|
||||
if (!this.adapter.update) {
|
||||
throw new Error('the Adapter does not allow you to update to the remote source')
|
||||
}
|
||||
return this.adapter.update(...params)
|
||||
}
|
||||
|
||||
/**
|
||||
* change some elements from the object and return the object updated
|
||||
* @param id the id of the object
|
||||
* @param changes the change to make
|
||||
*/
|
||||
public async patch(...params: Parameters<NonNullable<DaoAdapter<S>['patch']>>): ReturnType<NonNullable<DaoAdapter<S>['patch']>> {
|
||||
if (!this.adapter.patch) {
|
||||
const query = await this.findById(params[0])
|
||||
if (!query) {
|
||||
return null
|
||||
}
|
||||
return await this.update({ ...query, ...params[1] })
|
||||
}
|
||||
return await this.adapter.patch(...params)
|
||||
}
|
||||
|
||||
/**
|
||||
* update the remote reference of the object or create it if not found
|
||||
* @param obj the object to update/insert
|
||||
* @returns the object is updated/inserted or null otherwise
|
||||
*/
|
||||
public async upsert(...params: Parameters<NonNullable<DaoAdapter<S>['upsert']>>): ReturnType<NonNullable<DaoAdapter<S>['upsert']>> {
|
||||
if (!this.adapter.upsert) {
|
||||
throw new Error('the Adapter does not allow you to upsert to the remote source')
|
||||
}
|
||||
return this.adapter.upsert(...params)
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete the object
|
||||
* @param obj the object or ID to delete
|
||||
*
|
||||
* @returns if the object was deleted or not (if object is not in db it will return true)
|
||||
*/
|
||||
public async delete(...params: Parameters<NonNullable<DaoAdapter<S>['delete']>>): ReturnType<NonNullable<DaoAdapter<S>['delete']>> {
|
||||
if (!this.adapter.delete) {
|
||||
throw new Error('the Adapter does not allow you to delete on the remote source')
|
||||
}
|
||||
return this.adapter.delete(...params)
|
||||
}
|
||||
}
|
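A small usage sketch of the wrapper above; the `archived` and `id` fields are hypothetical and only serve the example:

import type Dao from 'models/dao'

async function archiveFirstTwenty(projects: Dao<any>): Promise<void> {
    // fetch up to 20 non-archived entries, then patch each of them
    const page = await projects.find({ archived: false, $limit: 20 })
    for (const project of page.data) {
        await projects.patch(project.id as string, { archived: true })
    }
}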
22
src/models/migrations/example.ts
Normal file
22
src/models/migrations/example.ts
Normal file
@ -0,0 +1,22 @@
|
||||
import type Client from 'models/clients'
|
||||
import type Migration from '.'
|
||||
|
||||
/**
|
||||
* A system migration
|
||||
* the file then needs to be imported into the `models/config.ts` file
|
||||
*/
|
||||
export default {
|
||||
/** SET THE DATE IN ISO FORMAT HERE */
|
||||
date: Date.UTC(2024, 3, 26, 11, 55, 28),
|
||||
async up(client: Client): Promise<boolean> {
|
||||
const requests: Array<string> = [
|
||||
|
||||
]
|
||||
|
||||
for (const request of requests) {
|
||||
await client.execute(request)
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
} as Migration
|
9
src/models/migrations/index.d.ts
vendored
Normal file
9
src/models/migrations/index.d.ts
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
import type Client from 'models/clients'
|
||||
|
||||
export default interface Migration {
|
||||
/**
|
||||
* timestamp in UTC
|
||||
*/
|
||||
date: number
|
||||
up(client: Client): Promise<boolean>
|
||||
}
|
149
src/models/query.ts
Normal file
149
src/models/query.ts
Normal file
@ -0,0 +1,149 @@
|
||||
interface QueryRootFilters<Obj extends Record<string, unknown>> {
|
||||
/**
|
||||
* one of the results should be true to be true
|
||||
*/
|
||||
$or?: Array<QueryList<Obj>>
|
||||
/**
|
||||
* every result should be false to be true
|
||||
*/
|
||||
$nor?: Array<QueryList<Obj>>
|
||||
/**
|
||||
* (default) make sure every sub query returns true
|
||||
*/
|
||||
$and?: Array<QueryList<Obj>>
|
||||
/**
|
||||
* at least one result must be false
|
||||
*/
|
||||
$nand?: Array<QueryList<Obj>>
|
||||
/**
|
||||
* invert the result from the following query
|
||||
*/
|
||||
$not?: QueryList<Obj>
|
||||
/**
|
||||
* define a precise offset of the data you fetched
|
||||
*/
|
||||
$offset?: number
|
||||
/**
|
||||
* limit the number of elements returned from the dataset
|
||||
*/
|
||||
$limit?: number
|
||||
/**
|
||||
* sort the data the way you want, with each key being prioritized in order
|
||||
*
|
||||
* ex:
|
||||
* {a: Sort.DESC, b: Sort.ASC}
|
||||
*
|
||||
* will sort first by a and if equal will sort by b
|
||||
*/
|
||||
$sort?: SortInterface<Obj>
|
||||
}
|
||||
|
||||
/**
|
||||
* Logical operators that can be used to filter data
|
||||
*/
|
||||
export type QueryLogicalOperator<Value> = {
|
||||
/**
|
||||
* one of the results should be true to be true
|
||||
*/
|
||||
$or: Array<QueryValues<Value>>
|
||||
} | {
|
||||
/**
|
||||
* every result should be false to be true
|
||||
*/
|
||||
$nor: Array<QueryValues<Value>>
|
||||
} | {
|
||||
/**
|
||||
* at least one result must be false
|
||||
*/
|
||||
$nand: Array<QueryValues<Value>>
|
||||
} | {
|
||||
/**
|
||||
* (default) make sure every sub query returns true
|
||||
*/
|
||||
$and: Array<QueryValues<Value>>
|
||||
} | {
|
||||
/**
|
||||
* invert the result from the following query
|
||||
*/
|
||||
$not: QueryValues<Value>
|
||||
}
|
||||
|
||||
/**
|
||||
* different comparison operators that can be used to filter data
|
||||
*/
|
||||
export type QueryComparisonOperator<Value> = {
|
||||
/**
|
||||
* the remote source value must be strictly equal to the proposed value
|
||||
*/
|
||||
$eq: Value | null
|
||||
} | {
|
||||
/**
|
||||
* the remote source value must be greater than the proposed value
|
||||
*/
|
||||
$gt: number | Date
|
||||
} | {
|
||||
/**
|
||||
* the remote source value must be less than the proposed value
|
||||
*/
|
||||
$lt: number | Date
|
||||
} | {
|
||||
/**
|
||||
* the remote source value must be greater than or equal to the proposed value
|
||||
*/
|
||||
$gte: number | Date
|
||||
} | {
|
||||
/**
|
||||
* the remote source value must be less than or equal to the proposed value
|
||||
*/
|
||||
$lte: number | Date
|
||||
} | {
|
||||
/**
|
||||
* the remote source value must be one of the proposed values
|
||||
*/
|
||||
$in: Array<Value>
|
||||
} | {
|
||||
/**
|
||||
* (strings only) the remote source value must contain the proposed value
|
||||
*/
|
||||
$inc: Value | null
|
||||
}
|
||||
|
||||
export type QueryList<Obj extends Record<string, unknown>> = {
|
||||
[Key in keyof Obj]?: QueryValues<Obj[Key]>
|
||||
}
|
||||
|
||||
/**
|
||||
* Different values the element can take
|
||||
* if null it will check if it is NULL on the remote
|
||||
* if array it will check oneOf
|
||||
* if RegExp it will check if the regexp matches
|
||||
*/
|
||||
export type QueryValues<Value> = Value |
|
||||
null |
|
||||
Array<Value> |
|
||||
RegExp |
|
||||
QueryComparisonOperator<Value> |
|
||||
QueryLogicalOperator<Value>
|
||||
|
||||
/**
|
||||
* The query element that allows you to query different elements
|
||||
*/
|
||||
export type Query<Obj extends Record<string, unknown>> = QueryList<Obj> & QueryRootFilters<Obj>
|
||||
|
||||
/**
|
||||
* sorting interface with priority
|
||||
*/
|
||||
export type SortInterface<Obj extends Record<string, unknown>> = {
|
||||
[Key in keyof Obj]?: Sort
|
||||
}
|
||||
|
||||
export enum Sort {
|
||||
/**
|
||||
* Sort the values from the lowest to the largest
|
||||
*/
|
||||
ASC,
|
||||
/**
|
||||
* Sort the values form the largest to the lowest
|
||||
*/
|
||||
DESC
|
||||
}
|
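A quick illustration of how these query types compose, using a hypothetical `Task` shape (not part of the commit):

import { Sort, type Query } from 'models/query'

type Task = {
    title: string
    done: boolean
    created: Date
}

// tasks whose title contains "report" OR that are already done,
// newest first, 10 per page starting at the third page
const query: Query<Task> = {
    $or: [
        { title: { $inc: 'report' } },
        { done: true }
    ],
    $sort: { created: Sort.DESC },
    $limit: 10,
    $offset: 20
}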
3
src/services/README.md
Normal file
3
src/services/README.md
Normal file
@ -0,0 +1,3 @@
|
||||
# Services folder
|
||||
|
||||
handle business logic
|
@ -1,4 +1,4 @@
|
||||
const defaultTheme = require('tailwindcss/defaultTheme')
|
||||
/* eslint-disable @typescript-eslint/no-require-imports */
|
||||
const colors = require('tailwindcss/colors')
|
||||
|
||||
/** @type {import('tailwindcss').Config} */
|
||||
@ -6,12 +6,12 @@ module.exports = {
|
||||
content: ['./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}'],
|
||||
theme: {
|
||||
fontFamily: {
|
||||
'sans': ['-apple-system', 'BlinkMacSystemFont', 'Segoe UI', 'Helvetica', 'Arial', 'sans-serif', 'Apple Color Emoji', 'Segoe UI Emoji'],
|
||||
sans: ['-apple-system', 'BlinkMacSystemFont', 'Segoe UI', 'Helvetica', 'Arial', 'sans-serif', 'Apple Color Emoji', 'Segoe UI Emoji'],
|
||||
},
|
||||
extend: {
|
||||
colors: {
|
||||
// primary color used by the project, easily swappable
|
||||
primary: colors.amber,
|
||||
'primary': colors.amber,
|
||||
'gtk-neutral': { // https://blog.gtk.org/files/2019/01/color-palette.png
|
||||
100: '#F6F5F4',
|
||||
200: '#DEDDDA',
|
||||
|
@ -1,8 +1,22 @@
|
||||
{
|
||||
"extends": "./node_modules/astro/tsconfigs/strictest.json",
|
||||
"exclude": ["cypress"],
|
||||
"include": [
|
||||
".astro/types.d.ts",
|
||||
"**/*",
|
||||
"src/config/types.d.ts"
|
||||
],
|
||||
"exclude": [
|
||||
"dist",
|
||||
"cypress"
|
||||
],
|
||||
"compilerOptions": {
|
||||
"baseUrl": "src",
|
||||
"allowJs": true
|
||||
"experimentalDecorators": true,
|
||||
"allowJs": true,
|
||||
// hide an issue with typescript
|
||||
"noUnusedLocals": false
|
||||
},
|
||||
"ts-node": {
|
||||
"esm": true
|
||||
}
|
||||
}
|
||||
|
@ -1,13 +1,10 @@
|
||||
/// <reference types="vitest" />
|
||||
import { getViteConfig } from 'astro/config'
|
||||
// import { configDefaults } from 'vitest/config'
|
||||
|
||||
export default getViteConfig({
|
||||
test: {
|
||||
include: [
|
||||
'./tests/**.ts'
|
||||
'./tests/**/*.ts'
|
||||
]
|
||||
/* for example, use global to avoid globals imports (describe, test, expect): */
|
||||
// globals: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
})
|
||||
|