feat: Filemagedon
Signed-off-by: Avior <git@avior.me>
This commit is contained in:
parent
3e91597dca
commit
bc97d9106b
9
console.ts
Normal file
9
console.ts
Normal file
@ -0,0 +1,9 @@
|
||||
/**
|
||||
 * This file is used as a shortcut to `src/commands/index.ts`
|
||||
*
|
||||
* It allows you to run commands that will change things in the codebase
|
||||
*
|
||||
* to start, run `bun console.ts`
|
||||
*/
|
||||
|
||||
import 'commands/index'
|
@ -13,7 +13,8 @@
|
||||
"test:unit": "vitest --coverage --run",
|
||||
"test:e2e": "playwright test",
|
||||
"install:test": "playwright install --with-deps",
|
||||
"lint": "biome check ."
|
||||
"lint": "biome check .",
|
||||
"command": "bun run src/commands/index.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@astrojs/node": "^8.0.0",
|
||||
|
21
src/commands/Migrations/current.ts
Normal file
21
src/commands/Migrations/current.ts
Normal file
@ -0,0 +1,21 @@
|
||||
import type { Command } from 'commands'
|
||||
import DaoFactory from 'models/DaoFactory'
|
||||
|
||||
const command: Command = {
|
||||
name: 'migrations:current',
|
||||
description: 'Get the current version of the database',
|
||||
async run() {
|
||||
const client = await DaoFactory.client()
|
||||
await client.connect()
|
||||
const ver = await client.getVersion()
|
||||
if (ver < 0) {
|
||||
console.log('no database :(')
|
||||
} else {
|
||||
console.log(`Current database version: ${new Date(ver)}`)
|
||||
}
|
||||
return {
|
||||
code: 0
|
||||
}
|
||||
},
|
||||
}
|
||||
export default command
|
19
src/commands/Migrations/migrate.ts
Normal file
19
src/commands/Migrations/migrate.ts
Normal file
@ -0,0 +1,19 @@
|
||||
import type { Command } from 'commands'
|
||||
import DaoFactory from 'models/DaoFactory'
|
||||
|
||||
const command: Command = {
|
||||
name: 'migrations:migrate',
|
||||
description: 'Migrate the database to the latest version',
|
||||
async run() {
|
||||
const client = await DaoFactory.client()
|
||||
console.log('connecting...')
|
||||
await client.connect()
|
||||
console.log('migrating...')
|
||||
await client.migrateToLatest()
|
||||
console.log('migrations should be ok :D')
|
||||
return {
|
||||
code: 0
|
||||
}
|
||||
},
|
||||
}
|
||||
export default command
|
78
src/commands/index.ts
Normal file
78
src/commands/index.ts
Normal file
@ -0,0 +1,78 @@
|
||||
import fs from "node:fs/promises"
|
||||
|
||||
interface Context {
|
||||
args: Array<string>
|
||||
commands: Array<Command>
|
||||
command: string
|
||||
}
|
||||
|
||||
interface Response {
|
||||
code: number
|
||||
}
|
||||
|
||||
export interface Command {
|
||||
run(input: Context): Promise<Response> | Response
|
||||
name: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
const builtinCommands: Array<Command> = [
|
||||
{
|
||||
name: "help",
|
||||
run({ commands }) {
|
||||
console.table(
|
||||
commands.map((command) => ({
|
||||
name: command.name,
|
||||
description: command.description ?? "no description",
|
||||
})),
|
||||
);
|
||||
return {
|
||||
code: 0,
|
||||
};
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
async function createContext(): Promise<Context> {
|
||||
const ctx = {
|
||||
args: process.argv.slice(3),
|
||||
commands: await getCommands(),
|
||||
command: process.argv[2] ?? "help",
|
||||
};
|
||||
return ctx
|
||||
}
|
||||
|
||||
async function listfiles(folder: string): Promise<Array<string>> {
|
||||
const files = await fs.readdir(folder)
|
||||
const res: Array<string> = [];
|
||||
for (const file of files) {
|
||||
const path = `${folder}/${file}`
|
||||
if ((await fs.stat(path)).isDirectory()) {
|
||||
res.push(...(await listfiles(path)))
|
||||
} else {
|
||||
res.push(path)
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
async function getCommands(): Promise<Array<Command>> {
|
||||
const files = (await listfiles(__dirname))
|
||||
.filter((it) => it !== `${__dirname}/index.ts`)
|
||||
.map((it) => import(it).then((it) => it.default))
|
||||
return builtinCommands.concat(await Promise.all(files))
|
||||
}
|
||||
|
||||
;(async () => {
|
||||
const context = await createContext()
|
||||
for (const command of context.commands) {
|
||||
if (command.name === context.command) {
|
||||
const res = await command.run(context)
|
||||
process.exit(res.code)
|
||||
}
|
||||
}
|
||||
|
||||
console.log(
|
||||
`command "${context.command}" not found, please use "help" to get the list of commands`,
|
||||
)
|
||||
})()
|
@ -1,7 +1,6 @@
|
||||
---
|
||||
import { getImage } from 'astro:assets'
|
||||
import { objectOmit } from '@dzeio/object-util'
|
||||
import AstroUtils from '../../libs/AstroUtils'
|
||||
|
||||
const formats = ['avif', 'webp']
|
||||
|
||||
@ -62,12 +61,10 @@ async function resolvePicture(image: ImageMetadata | string): Promise<PictureRes
|
||||
}
|
||||
}
|
||||
|
||||
const res = await AstroUtils.wrap<Result>(async () => {
|
||||
return {
|
||||
const res = {
|
||||
light: await resolvePicture(Astro.props.src),
|
||||
dark: Astro.props.srcDark ? await resolvePicture(Astro.props.srcDark) : undefined
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const props = objectOmit(Astro.props, 'src', 'srcDark', 'class')
|
||||
---
|
||||
|
@ -1,5 +0,0 @@
|
||||
export default class AstroUtils {
|
||||
public static async wrap<T = void>(fn: () => T | Promise<T>) {
|
||||
return await fn()
|
||||
}
|
||||
}
|
249
src/libs/Emails/Email.ts
Normal file
249
src/libs/Emails/Email.ts
Normal file
@ -0,0 +1,249 @@
|
||||
import type { ImapMessageAttributes } from 'imap'
|
||||
import Imap from 'imap'
|
||||
|
||||
/** One address parsed out of a header like `Display Name <addr@host>`. */
interface EmailAddress {
	// display name, when the header contained one
	name?: string
	// the bare email address
	address: string
}

/** Parsed subset of the message headers returned by `getHeaders`. */
interface Headers {
	to: Array<EmailAddress>
	from: EmailAddress
	date: Date
	subject: string
}

/**
 * A single message inside an already-opened IMAP mailbox.
 *
 * Instances are lightweight handles: every accessor issues a fresh
 * `imap.fetch` against the message number given at construction.
 */
export default class Email {
	public constructor(
		private imap: Imap,
		private mailbox: Imap.Box,
		public readonly id: number
	) {}

	/**
	 * Fetch the message body structure.
	 * NOTE(review): resolves with `attrs.struct`; the cast reflects what
	 * node-imap returns at runtime rather than its published typings.
	 */
	public async getAttributes(): Promise<ImapMessageAttributes | Array<ImapMessageAttributes>> {
		return new Promise<ImapMessageAttributes | Array<ImapMessageAttributes>>((res) => {
			this.imap.fetch(this.id, {
				bodies: [],
				struct: true
			}).on('message', (msg) => msg.on('attributes', (attrs) => {
				res(attrs.struct as Array<ImapMessageAttributes>)
			}))
		})
	}


	/** Fetch the IMAP flags (e.g. `\Seen`) currently set on this message. */
	public async getFlags(): Promise<Array<string>> {

		return new Promise<Array<string>>((res) => {
			this.imap.fetch(this.id, {
				bodies: [],
				struct: false
			}).on('message', (msg) => msg.on('attributes', (attrs) => {
				res(attrs.flags)
			}))
		})
	}

	/** Get the decoded text/plain body, or '' when the message has none. */
	public async getText() {
		const part = await this.getTextPart()
		if (!part) {
			return ''
		}
		// decodeEmail never touches `this`, so passing it unbound is safe
		return this.fetchPart(part.partID).then(this.decodeEmail)
	}

	/** whether the message currently carries the `\Seen` flag */
	public async isSeen() {
		return this.getFlags().then((arr) => arr.includes('\\Seen'))
	}

	/**
	 * Set or clear the seen state; resolves with the requested state.
	 * NOTE(review): the "mark unseen" branch calls `addKeywords('SEEN')`,
	 * which adds rather than removes — confirm node-imap's `delFlags` is
	 * what was intended for clearing the flag.
	 */
	public async setIsSeen(isSeen: boolean): Promise<boolean> {
		console.log(this.mailbox.flags, this.mailbox.permFlags, '\\\\Seen')
		console.log('isSeen', isSeen, await this.isSeen())
		if (await this.isSeen() === isSeen) {
			return isSeen
		}
		if (isSeen) {
			return new Promise<boolean>((res, rej) => {
				this.imap.addFlags(this.id, 'SEEN', (err) => {
					if (err) {
						rej(err)
						return
					}
					res(isSeen)
				})
			})
		}

		return new Promise<boolean>((res, rej) => {
			this.imap.addKeywords(this.id, 'SEEN', (err) => {
				if (err) {
					rej(err)
					return
				}
				res(isSeen)
			})
		})
	}

	/** Fetch and parse the message headers (subject / to / from / date). */
	public async getHeaders(): Promise<Headers> {
		const req = this.imap.fetch(this.id, {
			bodies: ['HEADER'],
			struct: false
		})
		return new Promise<Headers>((res) => {
			req.on('message', (msg) => msg.on('body', (stream) => {
				let bfr = ''
				stream
					.on('data', (chunk) => bfr += chunk.toString('utf8'))
					.once('end', () => {
						const tmp = Imap.parseHeader(bfr)
						// split "Display Name <addr@host>" into its two parts;
						// falls back to the raw string when either half is missing
						function t(email: string): EmailAddress {
							if (email.includes('<')) {
								const [name, addr] = email.split('<', 2)
								if (!addr || !name) {
									return {
										address: email
									}
								}
								return {
									name,
									address: addr.slice(0, addr.indexOf('>'))
								}
							}
							return {
								address: email
							}
						}
						// console.log(tmp)
						res({
							subject: tmp.subject?.[0] as string,
							to: tmp.to?.map(t) ?? [],
							from: t(tmp.from?.[0] ?? ''),
							date: new Date(tmp.date?.[0] ?? '')
						})
					})
			}))
		})
	}

	/**
	 * List the attachment parts of the message (each holds the attachment ID).
	 */
	public async getAttachments(): Promise<Array<any>> {
		return this.getAttachmentParts()
	}

	/**
	 * Download one attachment part (as returned by `getAttachments`).
	 * NOTE(review): the payload is always decoded as base64 regardless of
	 * `attachment.encoding` — confirm other encodings never reach this path.
	 */
	public async downloadAttachment(attachment: any): Promise<{filename: string, data: Buffer}> {
		const req = this.imap.fetch(this.id, {
			bodies: [attachment.partID],
			struct: true
		})
		return new Promise((res) => {
			req.on('message', (msg) => {
				const filename = attachment.params.name
				const encoding = attachment.encoding
				console.log(filename, encoding, msg)
				let buffer = new Uint8Array(0)
				msg.on('body', (stream) => {
					stream.on('data', (chunk: Buffer) => {
						// merge into one common buffer
						const len = buffer.length + chunk.length
						const merged = new Uint8Array(len)
						merged.set(buffer)
						merged.set(chunk, buffer.length)
						buffer = merged
					}).once('end', () => {
						// decode the accumulated base64 text into raw bytes
						res({filename, data: Buffer.from(
							Buffer
								.from(buffer)
								.toString('ascii'),
							'base64'
						)})
					})
				}).once('end', () => {
					console.log('ended')
				}).once('error', (err) => {
					console.log(err)
				})
			}).once('error', (err) => {
				console.log(err)
			})
		})
	}

	// BufferEncoding is a global Node type; eslint does not seem to recognize it
	// eslint-disable-next-line no-undef
	private fetchPart(partID: string, encoding: BufferEncoding = 'utf8'): Promise<string> {
		const req = this.imap.fetch(this.id, {
			bodies: partID,
			struct: true
		})
		return new Promise((res) => {
			req.on('message', (msg) => {
				msg.on('body', (strm) => {
					let bfr = ''
					strm.on('data', (chunk: Buffer) => bfr += chunk.toString(encoding))
						.once('end', () => res(bfr))
				})
			})
		})
	}

	/** Depth-first search of the body structure for the first text/plain part. */
	private async getTextPart(attrs?: ImapMessageAttributes | Array<ImapMessageAttributes>): Promise<{partID: string} | null> {
		if (!attrs) {
			attrs = await this.getAttributes()
		}
		// @ts-expect-error IMAP does not return the correct type
		for (const item of attrs) {
			if (Array.isArray(item)) {
				return this.getTextPart(item)
			} else if (item.type === 'text' && item.subtype === 'plain') {
				return item
			}
		}
		return null
	}

	/** Collect every part whose disposition is `inline` or `attachment`. */
	private async getAttachmentParts(attrs?: ImapMessageAttributes | Array<ImapMessageAttributes>): Promise<Array<{partID: string}>> {
		if (!attrs) {
			attrs = await this.getAttributes()
		}
		const attachments = []
		for (const item of attrs) {
			if (Array.isArray(item)) {
				attachments.push(...await this.getAttachmentParts(item))
			} else if (item.disposition && ['inline', 'attachment'].indexOf(item.disposition.type.toLowerCase()) > -1) {
				attachments.push(item)
			}

		}
		return attachments
	}

	/**
	 * Decode a quoted-printable-style body: soft line breaks (`=\n`) are
	 * removed, then `=XX` hex escapes are decoded by rewriting them as `%XX`
	 * URI escapes and running them through `decodeURIComponent`.
	 */
	private decodeEmail(data: string): string {
		// normalise end-of-line signals
		data = data.replace(/(\r\n|\n|\r)/g, '\n')

		// replace equals sign at end-of-line with nothing
		data = data.replace(/=\n/g, '')

		// encoded text might contain percent signs
		// decode each section separately
		const bits = data.split('%')
		for (let idx = 0; idx < bits.length; idx++) {
			let char = bits[idx]
			if (!char) {
				continue
			}

			// replace equals sign with percent sign
			char = char.replace(/=/g, '%')

			// decode the section
			bits[idx] = decodeURIComponent(char)
		}

		// join the sections back together
		return bits.join('%')
	}
}
|
234
src/libs/Emails/EmailServer.ts
Normal file
234
src/libs/Emails/EmailServer.ts
Normal file
@ -0,0 +1,234 @@
|
||||
import { objectFind } from '@dzeio/object-util'
|
||||
import Imap from 'imap'
|
||||
import { getEnv, requireEnv } from 'libs/Env'
|
||||
import nodemailer from 'nodemailer'
|
||||
import type Mail from 'nodemailer/lib/mailer'
|
||||
import Email from './Email'
|
||||
import htmlEmail from './HTML'
|
||||
|
||||
/**
 * Combined IMAP (read) + SMTP (send) handle for one mailbox.
 *
 * Configuration is resolved from the constructor's `initConfig` first and
 * environment variables second. Instances are cached per username in
 * `EmailServer.instances` (see also the per-type cache in `getInstance`).
 */
export default class EmailServer {
	// instance cache; keyed by username in the constructor,
	// NOTE(review): getInstance additionally stores the same instance under
	// the `type` key — confirm the double registration is intended
	private static instances: Record<string, EmailServer> = {}

	// when true, IMAP/SMTP traffic is logged and sendEmail does NOT send
	public debug: boolean

	// the currently opened IMAP mailbox (set by connect())
	private currentBox: Imap.Box | null = null

	private imap: Imap

	private smtp: nodemailer.Transporter

	// resolved connection settings (passwords intentionally excluded)
	private readonly config: {
		imap: {
			host: string
			port: number
			tls: boolean
			username: string
		}
		smtp: {
			host: string
			port: number
			secure: boolean
			username: string
		}
	}

	private constructor(
		private username: string,
		password: string,
		initConfig?: {
			auth?: {
				username: string
				password: string
			}
			imap?: {
				host: string
				port?: number
				tls?: boolean
				username?: string
				password?: string
			}
			smtp?: {
				host: string
				port?: number
				secure?: boolean
				username?: string
				password?: string
			},
			debug?: boolean
		}
	) {
		this.debug = initConfig?.debug ?? getEnv('EMAIL_DEBUG', 'false') === 'true'

		// precedence: explicit initConfig value, then env var, then shared auth/username
		this.config = {
			imap: {
				host: initConfig?.imap?.host ?? requireEnv('IMAP_HOST'),
				port: initConfig?.imap?.port ?? Number.parseInt(getEnv('IMAP_PORT', '993'), 10),
				tls: initConfig?.imap?.tls ?? !!getEnv('IMAP_SECURE'),
				username: initConfig?.imap?.username ?? initConfig?.auth?.username ?? username
			},
			smtp: {
				host: initConfig?.smtp?.host ?? requireEnv('SMTP_HOST'),
				port: initConfig?.smtp?.port ?? Number.parseInt(getEnv('SMTP_PORT', '465'), 10),
				secure: initConfig?.smtp?.secure ?? !!getEnv('SMTP_SECURE'),
				username: initConfig?.smtp?.username ?? initConfig?.auth?.username ?? username
			}
		}

		EmailServer.instances[username] = this

		this.imap = new Imap({
			user: this.config.imap.username,
			password: initConfig?.imap?.password ?? initConfig?.auth?.password ?? password,
			host: this.config.imap.host,
			port: this.config.imap.port,
			tls: this.config.imap.tls,
			debug: (info: string) => {
				if (this.debug) {
					console.log('IMAP[DEBUG]:', info)
				}
			}
		})

		// biome-ignore lint/suspicious/noExplicitAny: library return `any`
		const smtpLogger = (level: string) => (...params: Array<any>) => {
			if (this.debug) {
				console.log(`SMTP[${level}]:`, ...params)
			}
		}

		this.smtp = nodemailer.createTransport({
			host: this.config.smtp.host,
			port: this.config.smtp.port,
			secure: this.config.smtp.secure,
			auth: {
				user: this.config.smtp.username,
				pass: initConfig?.smtp?.password ?? initConfig?.auth?.password ?? password
			},
			logger: {
				level: (level: string) => {
					if (this.debug) {
						console.log('SMTP[]:', level)
					}
				},
				trace: smtpLogger('TRACE'),
				debug: smtpLogger('DEBUG'),
				info: smtpLogger('INFO'),
				warn: smtpLogger('WARN'),
				error: smtpLogger('ERROR'),
				fatal: smtpLogger('FATAL'),
			}
		})
	}

	/**
	 * Get (or lazily create and connect) the shared instance for one of the
	 * two preconfigured accounts; credentials come from env vars.
	 */
	public static async getInstance(type: 'credo' | 'gti') {
		const items = {
			credo: {
				username: requireEnv('CREDO_MAIL_USER'),
				password: requireEnv('CREDO_MAIL_PASS'),
			},
			gti: {
				username: requireEnv('GTI_MAIL_USER'),
				password: requireEnv('GTI_MAIL_PASS'),
			}
		} as const
		const correct = items[type]
		const tmp = objectFind(EmailServer.instances, (_, key) => key === type)
		let instance = tmp?.value ?? undefined

		if (!instance) {
			instance = new EmailServer(correct.username, correct.password)
			EmailServer.instances[type] = instance
			await instance.connect()
		}

		return instance
	}

	/** Drop this instance from the cache and close the IMAP connection. */
	public destroy() {
		delete EmailServer.instances[this.username]
		this.imap.end()
	}

	/** List every email in the currently open box (as lazy Email handles). */
	public listEmails(): Promise<Array<Email>> {
		return new Promise((res, rej) => {
			this.imap.search(['ALL'], (err, uids) => {
				if (err) {
					rej(err)
					return
				}
				res(uids.map((uid) => this.getEmail(uid)))
			})
		})
	}

	/** Build an Email handle for `id`; requires connect() to have opened a box. */
	public getEmail(id: number) {
		if (!this.currentBox) {
			throw new Error('can\'t fetch a mail while out of a box')
		}
		return new Email(this.imap, this.currentBox, id)
	}

	/**
	 *
	 * @param content the email text content
	 * @param recipient the email recipient (who to send it to)
	 * @param subject the email subject
	 * @param footer the email footer
	 */
	// eslint-disable-next-line complexity
	public async sendEmail(content: string, recipient: string | Array<string>, subject: string, footer?: {path?: string, id?: string}, options: Mail.Options = {}) {
		if (typeof recipient !== 'string' && recipient.length === 0) {
			if (this.debug) {
				console.log('Email canceled, no recipient')
			}
			return
		}
		const domain = requireEnv('APP_URL')
		// footer text is in French (conversation id + optional site link)
		const footerTxt = `\nIdentifiant de conversation: {{${footer?.id}}}

${footer?.path ? `Votre lien vers le site internet: https://${domain}${footer?.path}` : '' }`

		// recipients go in `bcc` so they cannot see each other
		const req: Mail.Options = Object.assign({
			bcc: recipient,
			from: getEnv('SMTP_FROM') ?? this.username,
			subject,
			text: content + (footer ? footerTxt : ''),
			html: footer ? htmlEmail(content, footer?.id, footer?.path ? `${domain}${footer?.path}` : undefined) : undefined
		}, options)

		// in debug mode the email is logged INSTEAD of being sent
		if (this.debug) {
			console.log('------------------- SEND EMAIL DEBUG -------------------')
			console.log(req)
			console.log('------------------- SEND EMAIL DEBUG -------------------')
		} else {
			const res = await this.smtp.sendMail(req)
			// NOTE(review): unreachable — this branch requires !this.debug above
			if (this.debug) {
				console.log('------------------- SENT EMAIL DEBUG -------------------')
				console.log(res)
				console.log('------------------- SENT EMAIL DEBUG -------------------')
			}
		}
	}

	/** Connect to IMAP and open the inbox; resolves once queries can be made. */
	private connect() {
		console.log('Starting connection to IMAP')
		return new Promise<void>((res, rej) => {
			this.imap.once('ready', () => {
				console.log('connection to IMAP ready, opening box')
				// opened read-only (second argument `true`)
				this.imap.openBox(requireEnv('IMAP_INBOX', 'INBOX'), true, (err, box) => {
					if (err) {
						rej(err)
						return
					}
					this.currentBox = box
					console.log('inbox open, ready for queries!')
					res()
				})
			}).once('error', (err: Error) => {
				console.log('An error occured while connecting to the IMAP server', this.config.imap)
				rej(err)
			})
			this.imap.connect()
		})
	}
}
|
74
src/libs/Emails/SMTP.ts
Normal file
74
src/libs/Emails/SMTP.ts
Normal file
@ -0,0 +1,74 @@
|
||||
import nodemailer from 'nodemailer'
|
||||
import type NodeMailer from 'nodemailer/lib/mailer'
|
||||
|
||||
/**
|
||||
* Environment variables used
|
||||
* EMAIL_USERNAME
|
||||
* EMAIL_PASSWORD
|
||||
* EMAIL_HOST
|
||||
* EMAIL_FROM
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
export default class Mailer<Templates extends Record<string, Record<string, any>> = {}> {
|
||||
public constructor(
|
||||
private fetcher: (template: keyof Templates, ext: 'html' | 'txt') => Promise<{
|
||||
content: string
|
||||
title: string
|
||||
} | null>, private settings: {
|
||||
username: string
|
||||
password: string
|
||||
host: string
|
||||
from: string
|
||||
secure?: boolean
|
||||
tls?: boolean
|
||||
}) {}
|
||||
|
||||
/**
|
||||
* Send the Email
|
||||
* @param template the Template to use
|
||||
* @param to the destination Email
|
||||
* @param vars variables of the email, don't give subject as it is added inside the function
|
||||
* @param options Email options
|
||||
*/
|
||||
public async send<T extends keyof Templates>(template: T, to: string | Array<string>, vars: Templates[T], options?: Omit<NodeMailer.Options, 'to' | 'from' | 'subject' | 'html' | 'text'>) {
|
||||
const mailer = nodemailer.createTransport({
|
||||
host: this.settings.host,
|
||||
auth: {
|
||||
user: this.settings.username,
|
||||
pass: this.settings.password
|
||||
},
|
||||
logger: true,
|
||||
secure: true
|
||||
})
|
||||
const { title } = await this.fetcher(template, 'txt') ?? { title: '' }
|
||||
await mailer.sendMail(Object.assign(options ?? {}, {
|
||||
to,
|
||||
from: this.settings.from,
|
||||
subject: title,
|
||||
html: await this.html(template, { ...vars, subject: title }),
|
||||
text: await this.text(template, { ...vars, subject: title })
|
||||
}))
|
||||
}
|
||||
|
||||
public html<T extends keyof Templates>(template: T, vars: Templates[T]) {
|
||||
return this.generateTemplate(template, vars, 'html')
|
||||
}
|
||||
|
||||
public text<T extends keyof Templates>(template: T, vars: Templates[T]) {
|
||||
return this.generateTemplate(template, vars, 'txt')
|
||||
}
|
||||
|
||||
private async generateTemplate<T extends keyof Templates>(template: T, _vars: Templates[T], ext: 'html' | 'txt') {
|
||||
try {
|
||||
const txt = await this.fetcher(template, ext)
|
||||
if (!txt) {
|
||||
console.warn(`Error, Template not found (${template as string} - ${ext})`)
|
||||
return undefined
|
||||
}
|
||||
return txt.content
|
||||
} catch {
|
||||
console.warn(`Error, Template not found (${template as string} - ${ext})`)
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
}
|
114
src/libs/FilesFormats/CSV.ts
Normal file
114
src/libs/FilesFormats/CSV.ts
Normal file
@ -0,0 +1,114 @@
|
||||
import { objectMap, objectSort, objectValues } from '@dzeio/object-util'
|
||||
import assert from "node:assert/strict"
|
||||
|
||||
export interface CSVOptions {
|
||||
lineSeparator?: string
|
||||
columnSeparator?: string
|
||||
/**
|
||||
* if set, it will skip trying to parse a number
|
||||
*/
|
||||
skipParsing?: boolean
|
||||
}
|
||||
|
||||
// biome-ignore lint/complexity/noStaticOnlyClass: <explanation>
|
||||
// biome-ignore lint/style/useNamingConvention: <explanation>
|
||||
export default class CSV {
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
public static parse(data: string, options?: CSVOptions): Array<Record<string, string | number>> {
|
||||
assert(typeof data === 'string', `Data is not a string (${typeof data})`)
|
||||
const lineSeparator = options?.lineSeparator ?? '\n'
|
||||
const colSeparator = options?.columnSeparator ?? ','
|
||||
|
||||
let headers: Array<string> | null = null
|
||||
|
||||
const res: Array<Record<string, string | number>> = []
|
||||
|
||||
let values: Array<string> = []
|
||||
|
||||
let previousSplit = 0
|
||||
let quoteCount = 0
|
||||
for (let idx = 0; idx < data.length; idx++) {
|
||||
const char = data[idx]
|
||||
if (char === '"') {
|
||||
quoteCount++
|
||||
}
|
||||
if ((char === colSeparator || char === lineSeparator) && quoteCount % 2 === 0) {
|
||||
let text = data.slice(previousSplit, idx)
|
||||
if (text.startsWith('"') && text.endsWith('"')) {
|
||||
text = text.slice(1, text.length - 1)
|
||||
}
|
||||
values.push(text)
|
||||
previousSplit = idx + 1
|
||||
}
|
||||
if (char === lineSeparator && quoteCount % 2 === 0) {
|
||||
if (!headers) {
|
||||
headers = values
|
||||
values = []
|
||||
continue
|
||||
}
|
||||
const lineFinal: Record<string, string | number> = {}
|
||||
let filled = false // filled make sure to skip empty lines
|
||||
// eslint-disable-next-line max-depth
|
||||
for (let idx2 = 0; idx2 < values.length; idx2++) {
|
||||
let value: string | number = values[idx2]!
|
||||
if (value.length === 0) {
|
||||
continue
|
||||
}
|
||||
// eslint-disable-next-line max-depth
|
||||
if (!options?.skipParsing && /^-?(\d|\.|E)+$/g.test(value as string)) {
|
||||
value = Number.parseFloat(value as string)
|
||||
}
|
||||
const key = headers[idx2]!
|
||||
lineFinal[key] = value
|
||||
filled = true
|
||||
}
|
||||
if (filled) {
|
||||
res.push(lineFinal)
|
||||
}
|
||||
values = []
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
public static stringify(headers: Array<string>, data: Array<Record<string, unknown>>, options?: CSVOptions) {
|
||||
const ls = options?.lineSeparator ?? '\n'
|
||||
|
||||
// encode headers
|
||||
let body = CSV.encodeLine(headers, options) + ls
|
||||
|
||||
// encode data body
|
||||
for (const entry of data) {
|
||||
body += CSV.encodeLine(objectValues(objectSort(entry, headers)), options) + ls
|
||||
}
|
||||
return body
|
||||
}
|
||||
|
||||
private static encodeLine(line: Array<unknown>, options?: CSVOptions): string {
|
||||
const ls = options?.lineSeparator ?? '\n'
|
||||
const cs = options?.columnSeparator ?? ','
|
||||
|
||||
return objectMap(line, (it) => {
|
||||
if (typeof it !== 'string') {
|
||||
return it
|
||||
}
|
||||
if (it.includes('"') || it.includes(ls)|| it.includes(cs)) {
|
||||
return `"${it}"`
|
||||
}
|
||||
return it
|
||||
//column separator
|
||||
}).join(cs)
|
||||
}
|
||||
|
||||
public static getHeaders(data: string, options?: {
|
||||
lineSeparator?: string
|
||||
columnSeparator?: string
|
||||
}) {
|
||||
const ls = options?.lineSeparator ?? '\n'
|
||||
const cs = options?.columnSeparator ?? ','
|
||||
|
||||
//line separator et column separator
|
||||
return data.split(ls)[0]?.split(cs)!
|
||||
}
|
||||
}
|
93
src/libs/FilesFormats/XLSX.ts
Normal file
93
src/libs/FilesFormats/XLSX.ts
Normal file
@ -0,0 +1,93 @@
|
||||
import { objectRemap } from '@dzeio/object-util'
|
||||
import AdmZip from 'adm-zip'
|
||||
import CSV from './CSV'
|
||||
import XML, { type Tag } from './XML'
|
||||
|
||||
export default class XLSX {
|
||||
/**
|
||||
*
|
||||
* @param xlsx the xlsx data as a buffer
|
||||
* @returns a Record with each sheets and the raw CSV linked to it
|
||||
*/
|
||||
public static async parse(xlsx: ArrayBuffer): Promise<Record<string, string>> {
|
||||
const zip = new AdmZip(Buffer.from(xlsx))
|
||||
const shared = await XML.parse(zip.readAsText('xl/sharedStrings.xml'))
|
||||
const workbook = zip.readAsText('xl/workbook.xml')
|
||||
const relations = zip.readAsText('xl/_rels/workbook.xml.rels')
|
||||
const sheetsRelations = await XML.parse(relations)
|
||||
const sheetsList = XML.findChild(await XML.parse(workbook), 'sheets')!.childs?.map((it) => ({
|
||||
name: XML.getAttribute(it as Tag, 'name'),
|
||||
id: XML.getAttribute(it as Tag, 'r:id'),
|
||||
path: '',
|
||||
data: ''
|
||||
}))!
|
||||
for (const sheetItem of sheetsList) {
|
||||
const rels = (sheetsRelations.childs as Array<Tag>)
|
||||
const rel = rels.find((it) => XML.getAttribute(it, 'Id') === sheetItem.id)
|
||||
sheetItem.path = XML.getAttribute(rel!, 'Target')!
|
||||
}
|
||||
|
||||
await Promise.all(sheetsList.map(async (it) => {
|
||||
it.data = await this.parseWorkSheet(shared, zip.readAsText(`xl/${it.path}`))
|
||||
return it
|
||||
}))
|
||||
|
||||
return objectRemap(sheetsList, (v) => ({
|
||||
key: v.name,
|
||||
value: v.data
|
||||
}))
|
||||
}
|
||||
public static async parseWorkSheet(refs: Tag, data: string): Promise<string> {
|
||||
const json = await XML.parse(data)
|
||||
const sheetData = XML.findChild(json, 'sheetData')!
|
||||
let headers: Array<string> = []
|
||||
const res: Array<Record<string, any>> = []
|
||||
let headerDone = false
|
||||
for (const row of sheetData.childs ?? []) {
|
||||
const line: Array<string> = []
|
||||
const id = XML.getAttribute((row as Tag), 'r')
|
||||
for (const col of (row as Tag).childs ?? []) {
|
||||
if (!(col as Tag).childs) {
|
||||
continue
|
||||
}
|
||||
const type = XML.getAttribute(col as Tag, 't')
|
||||
const colIdx = XML.getAttribute(col as Tag, 'r')
|
||||
const idx = colIdx!.charCodeAt(0) - 65 // TODO: handle more than 26 cols
|
||||
const value = XML.findChild(col as Tag, 'v')?.childs?.[0]
|
||||
if (!value || typeof value !== 'string') {
|
||||
continue
|
||||
}
|
||||
// const value = ((col as Tag).childs![0] as Tag).childs![0] as string
|
||||
if (type === 's') {
|
||||
line[idx] = this.getRef(refs, value)
|
||||
} else {
|
||||
line[idx] = value
|
||||
}
|
||||
}
|
||||
|
||||
if (!headerDone) {
|
||||
headers = line
|
||||
} else {
|
||||
res[parseInt(id!, 10) - 1] = objectRemap(line, (v, idx: number) => {
|
||||
return {
|
||||
key: headers[idx] as string,
|
||||
value: v
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
headerDone = true
|
||||
}
|
||||
|
||||
return CSV.stringify(headers, res)
|
||||
}
|
||||
|
||||
private static getRef(refs: Tag, id: string | number): string {
|
||||
if (typeof id === 'string') {
|
||||
id = parseInt(id, 10)
|
||||
}
|
||||
|
||||
return ((refs.childs![id] as Tag)!.childs![0] as Tag)!.childs![0] as string
|
||||
}
|
||||
}
|
262
src/libs/FilesFormats/XML.ts
Normal file
262
src/libs/FilesFormats/XML.ts
Normal file
@ -0,0 +1,262 @@
|
||||
import { objectMap } from '@dzeio/object-util'
|
||||
import xml2js from 'xml2js'
|
||||
|
||||
/** One attribute on an XML tag. */
export interface Attribute {
	// attribute name as written in the tag
	key: string
	// attribute value (always a string in XML)
	value: string
}

export interface Tag {

	/**
	 * the name of the tag
	 */
	name: string

	/**
	 * The tag attributes
	 */
	attrs?: Array<Attribute | undefined>

	/**
	 * The tag childs
	 */
	childs?: Array<Tag | string | undefined>
}

/**
 * xml2js tag interface not published for our options
 */
interface xml2jsTag {
	/**
	 * the name of the tag (it is a private value because of our integration, but we still want to get it)
	 */
	// @ts-ignore see above
	// #name: string
	/**
	 * the attributes record
	 */
	$?: Record<string, string>

	/**
	 * the possible text (only when #name === __text__)
	 */
	_?: string

	/**
	 * the tag Childs
	 */
	$$?: Array<xml2jsTag>
}

export interface XMLOptions {

	/**
	 * if set (min: `true`)
	 * it will render the content in a user readable form
	 * (should not break compatibility (ODT excluded))
	 */
	pretty?: boolean | {

		/**
		 * the whitespace character(s)
		 * default: '\t'
		 */
		whitespaceCharacter?: string

		/**
		 * the base number of whitespace character to use
		 * default: 0
		 */
		baseCount?: number
	}
}
|
||||
|
||||
/**
|
||||
* XML, the XML parser/stringifier that keep everything in order !
|
||||
*/
|
||||
export default class XML {
|
||||
|
||||
/**
|
||||
* Parse XML content to xml Tags
|
||||
* @param str the XML string
|
||||
*/
|
||||
public static async parse(str: string): Promise<Tag> {
|
||||
const xml: xml2jsTag = await xml2js.parseStringPromise(str, {
|
||||
charsAsChildren: true,
|
||||
explicitChildren: true,
|
||||
explicitRoot: false,
|
||||
|
||||
preserveChildrenOrder: true
|
||||
})
|
||||
return this.convert(xml)
|
||||
}
|
||||
|
||||
public static getAttribute(tag: Tag, key: string): string | undefined {
|
||||
if (!tag.attrs || tag.attrs.length === 0) {
|
||||
return undefined
|
||||
}
|
||||
return (tag.attrs.find((it) => it?.key === key))?.value
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform the Parsed XML back to XML
|
||||
* @param tag the root tag
|
||||
* @param options the options used
|
||||
*/
|
||||
public static async stringify(tag: Tag, options?: XMLOptions): Promise<string> {
|
||||
return this.stringifySync(tag, options)
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform the Parsed XML back to XML
|
||||
* @param tag the root tag
|
||||
* @param options the options used
|
||||
*/
|
||||
public static stringifySync(tag: Tag, options?: XMLOptions): string {
|
||||
const pretty = !!options?.pretty
|
||||
const baseCount = (typeof options?.pretty === 'object' && options?.pretty?.baseCount) || 0
|
||||
const whitespaceCharacter = (typeof options?.pretty === 'object' && options?.pretty?.whitespaceCharacter) || '\t'
|
||||
const hasChilds = Array.isArray(tag.childs) && tag.childs.length > 0
|
||||
let base = options?.pretty ? ''.padEnd(baseCount, whitespaceCharacter) : ''
|
||||
|
||||
if (!tag.name) {
|
||||
throw new Error('Tag name MUST be set')
|
||||
} else if (tag.name.includes(' ')) {
|
||||
throw new Error(`The tag name MUST not include spaces (${tag.name})`)
|
||||
}
|
||||
|
||||
// start of tag
|
||||
base += `<${tag.name}`
|
||||
|
||||
// add every attributes
|
||||
if (tag.attrs) {
|
||||
for (const attr of tag.attrs) {
|
||||
if (typeof attr === 'undefined') {
|
||||
continue
|
||||
}
|
||||
base += ` ${attr.key}`
|
||||
if (typeof attr.value === 'string') {
|
||||
base += `="${this.escape(attr.value)}"`
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// end of tag
|
||||
base += hasChilds ? '>' : '/>'
|
||||
|
||||
if (options?.pretty) {
|
||||
base += '\n'
|
||||
}
|
||||
|
||||
// process childs
|
||||
if (hasChilds) {
|
||||
for (const subTag of tag.childs!) {
|
||||
if (typeof subTag === 'undefined') {
|
||||
continue
|
||||
}
|
||||
if (typeof subTag === 'string') {
|
||||
if (pretty) {
|
||||
base += ''.padEnd(baseCount + 1, whitespaceCharacter)
|
||||
}
|
||||
base += this.escape(subTag)
|
||||
if (pretty) {
|
||||
base += '\n'
|
||||
}
|
||||
} else {
|
||||
base += this.stringifySync(subTag, pretty ? { pretty: { baseCount: baseCount + 1, whitespaceCharacter } } : undefined)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// end of tag
|
||||
if (hasChilds) {
|
||||
if (pretty) {
|
||||
base += ''.padEnd(baseCount, whitespaceCharacter)
|
||||
}
|
||||
base += `</${tag.name}>`
|
||||
if (pretty) {
|
||||
base += '\n'
|
||||
}
|
||||
}
|
||||
return base
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param it the element to validate
|
||||
* @returns {boolean} if {it} is of type {@link Tag}
|
||||
*/
|
||||
public static isTag(it: any): it is Tag {
|
||||
if (typeof it === 'object') {
|
||||
return 'name' in it
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
public static findChild(tag: Tag, name: string): Tag | null {
|
||||
if (tag.name === name) {
|
||||
return tag
|
||||
} else if (tag.childs) {
|
||||
for (const child of tag.childs) {
|
||||
if (typeof child !== 'object') {
|
||||
continue
|
||||
}
|
||||
const found = this.findChild(child, name)
|
||||
if (found) {
|
||||
return found
|
||||
}
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a xml2js tag to a XML.Tag
|
||||
* @param {xml2jsTag} tag the xml2js tag
|
||||
* @returns {Tag} the XML Tag
|
||||
*/
|
||||
private static convert(tag: xml2jsTag): Tag {
|
||||
const baseTag: Tag = {
|
||||
name: tag['#name']
|
||||
}
|
||||
|
||||
// convert XML2JS attributes to our attribut format
|
||||
// (Allowing users to keep order and to add items not only at the bottom)
|
||||
if (tag.$) {
|
||||
baseTag.attrs = objectMap(tag.$, (v, k) => ({ key: k, value: v }))
|
||||
}
|
||||
|
||||
// convert childs
|
||||
if (tag.$$) {
|
||||
baseTag.childs = tag.$$
|
||||
.map((subTag) => {
|
||||
// if child is a string
|
||||
if (subTag['#name'] === '__text__') {
|
||||
return subTag._
|
||||
}
|
||||
|
||||
// convert child
|
||||
return this.convert(subTag)
|
||||
})
|
||||
// filter empty items
|
||||
.filter((v) => !!v)
|
||||
}
|
||||
return baseTag
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape every XML characters
|
||||
* @param str the base string
|
||||
* @returns {string} the escaped string
|
||||
*/
|
||||
private static escape(str: string): string {
|
||||
return str
|
||||
.replace(/&/gu, '&')
|
||||
.replace(/"/gu, '"')
|
||||
.replace(/'/gu, ''')
|
||||
.replace(/</gu, '<')
|
||||
.replace(/>/gu, '>')
|
||||
}
|
||||
|
||||
}
|
159
src/libs/Form.ts
Normal file
159
src/libs/Form.ts
Normal file
@ -0,0 +1,159 @@
|
||||
import { objectFind, objectLoop } from '@dzeio/object-util'
|
||||
import type { Model, ModelInfer, ValidationError, ValidationResult } from './Schema'
|
||||
import Schema from './Schema'
|
||||
|
||||
/**
|
||||
* Handle most of the form validation and error reporting
|
||||
*
|
||||
* create a new one by using {@link Form.create}
|
||||
*
|
||||
* note: This library is made to work with {@link Model}
|
||||
*/
|
||||
/**
 * Handle most of the form validation and error reporting
 *
 * create a new one by using {@link Form.create}
 *
 * note: This library is made to work with {@link Model}
 */
export default class Form<M extends Model> {

	// validation result of the submitted data (only set after a POST body was parsed)
	private data?: ValidationResult<ModelInfer<M>>
	// raw submitted form data (only set after a POST body was parsed)
	private formData?: FormData
	// schema built from the model, used to validate submissions
	private schema: Schema

	// error message not tied to a specific field
	private globalError?: string
	// per-field error messages, keyed by the model's top-level field name
	private errors: Partial<Record<keyof M, string>> = {}
	// values used to pre-fill inputs when no submitted value exists
	private defaultValues: Partial<Record<keyof M, any>> = {}

	/**
	 * Create a ready to use form
	 * @param model the model the form should respect
	 * @param request the request element
	 * @returns the Form object
	 */
	public static async create<M extends Model>(model: M, request: Request) {
		const fm = new Form(model, request)
		await fm.init()
		return fm
	}

	private constructor(public readonly model: M, private readonly request: Request) {
		this.schema = new Schema(model)
	}

	/**
	 * you should not use this function by itself, it is called by {@link Form.create}
	 *
	 * On POST requests: parses the body as form data, validates it against the
	 * schema and fills {@link Form.errors} / {@link Form.globalError}.
	 * Non-POST requests leave the form in the "not submitted" state.
	 */
	public async init() {
		try {
			if (this.request.method === 'POST') {
				if (!(this.request.headers.get('Content-Type') ?? '').startsWith('multipart/form-data')) {
					console.warn('form\'s content-type is not multipart/form-data')
				}
				this.formData = await this.request.formData()
				this.data = this.schema.validateFormData(this.formData) as any
				if (this.data?.error) {
					for (const error of this.data.error) {
						if (error.field) {
							const field = error.field
							// nested field errors ("parent.child") are attached to the top-level key
							if (field.includes('.')) {
								this.errors[field.slice(0, field.indexOf('.')) as keyof M] = error.message
							} else {
								this.errors[error.field as keyof M] = error.message
							}
						} else {
							// errors without a field are reported globally
							this.globalError = error.message
						}
					}
				}
			}
		} catch {}
		// NOTE(review): the empty catch swallows body-parsing failures —
		// presumably intentional (the form then renders as "not submitted"); confirm.
	}

	/**
	 * set the default value used for {@link Form.attributes} when the field
	 * was not submitted
	 * @param name the field name
	 * @param value the default value
	 */
	public defaultValue(name: keyof M, value: any) {
		this.defaultValues[name] = value
		return this
	}

	/**
	 * set multiple default values at once (see {@link Form.defaultValue})
	 * @param obj a record of field name -> default value
	 */
	public defaultObject(obj: Record<string, any>) {
		objectLoop(obj, (value, key) => {
			this.defaultValue(key, value)
		})
		return this
	}

	/**
	 * indicate if the form is valid or not
	 * @returns if the form submitted is valid or not
	 */
	public isValid(): boolean {
		// not a POST or the body could not be parsed -> not submitted -> not valid
		if (this.request.method !== 'POST' || !this.data) {
			return false
		}
		if (this.data.error) {
			return false
		}
		return true
	}

	/**
	 * manually attach an error to the form
	 * @param message the error message
	 * @param key (optionnal) the specific key to apply the error to
	 */
	public setError(message: string, key?: keyof M) {
		if (key) {
			this.errors[key] = message
		} else {
			this.globalError = message
		}
	}

	/**
	 * get the error of a specific field, or the global error when no key is given
	 * @param key the field to read, omit for the global error
	 */
	public getError(key?: keyof M): string | undefined {
		if (!key) {
			return this.globalError
		}
		return this.errors[key]
	}

	/**
	 * indicate if a specific field (or the form globally) has an error
	 */
	public hasError(key?: keyof M): boolean {
		return !!this.getError(key)
	}

	/**
	 * get the first available error: the global one, then any field error
	 * (field errors are formatted as `key: message`)
	 */
	public getAnyError(): string | undefined {
		if (this.globalError) {
			return this.globalError
		}
		const other = objectFind(this.errors, (value) => !!value)
		if (other) {
			return `${other.key.toString()}: ${other.value}`
		}
		return undefined
	}

	/**
	 * indicate if the form has any error at all
	 */
	public hasAnyError(): boolean {
		return !!this.getAnyError()
	}

	/**
	 * shorthand for {@link Form.attributes}
	 */
	public attrs(key: keyof M) {
		return this.attributes(key)
	}

	/**
	 * build the HTML attributes (`name`, `value`) for an input bound to {@link key}
	 *
	 * the submitted value wins over the default value; fields flagged with the
	 * `form:password` attribute never echo a value back
	 * @param key the model field
	 * @returns a record of attributes to spread on the input element
	 */
	public attributes(key: keyof M): Record<string, any> {
		const schema = this.model[key]
		if (!schema) {
			return {}
		}
		const attrs: Record<string, any> = {
			name: key
		}
		if (!schema.attributes.includes('form:password')) {
			const value: any = this.formData?.get(key as string) as string ?? this.defaultValues[key]
			if (value instanceof Date) {
				// format as yyyy-MM-dd for <input type="date">
				attrs.value = `${value.getFullYear().toString().padStart(4, '0')}-${(value.getMonth() + 1).toString().padStart(2, '0')}-${value.getDate().toString().padStart(2, '0')}`
			} else {
				attrs.value = value
			}
		}

		return attrs
	}

	/**
	 * get the validated/parsed submission
	 *
	 * MUST only be called after {@link Form.isValid} returned `true`
	 * (the non-null assertions throw otherwise)
	 */
	public getData(): ModelInfer<M> {
		return this.data!.object!
	}
}
|
97
src/libs/S3.ts
Normal file
97
src/libs/S3.ts
Normal file
@ -0,0 +1,97 @@
|
||||
import type { APIRoute } from 'astro'

import fs from 'node:fs/promises'
import nodePath from 'node:path'

import ResponseBuilder from './ResponseBuilder'
|
||||
|
||||
|
||||
/**
|
||||
* Easily setup an S3 system right in your own API
|
||||
*
|
||||
* ex: (create a `[...path].ts` file and put this inside)
|
||||
* ```
|
||||
* import S3 from 'libs/S3'
|
||||
*
|
||||
* // root path of the storage
|
||||
* const s3 = new S3('./.data')
|
||||
*
|
||||
* export const GET = s3.GET
|
||||
* export const PUT = s3.PUT
|
||||
* export const DELETE = s3.DELETE
|
||||
* ```
|
||||
*/
|
||||
export default class S3 {
|
||||
public constructor(
|
||||
private readonly rootPath: string
|
||||
) {}
|
||||
|
||||
public async getFile(path: string): Promise<Buffer> {
|
||||
return fs.readFile(this.getFullPath(path))
|
||||
}
|
||||
|
||||
public async putFile(path: string, data: Buffer, overwrite = false): Promise<boolean> {
|
||||
const fullPath = this.getFullPath(path)
|
||||
if (!overwrite && await this.exists(fullPath)) {
|
||||
return false
|
||||
}
|
||||
|
||||
const folder = fullPath.slice(0, fullPath.lastIndexOf('/'))
|
||||
await fs.mkdir(folder, { recursive: true })
|
||||
|
||||
await fs.writeFile(this.getFullPath(path), data)
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
public async deleteFile(path: string): Promise<boolean> {
|
||||
await fs.rm(this.getFullPath(path))
|
||||
return true
|
||||
}
|
||||
|
||||
public GET: APIRoute = async (ctx) => {
|
||||
const path = ctx.params.path!
|
||||
const file = await this.getFile(path)
|
||||
|
||||
return new ResponseBuilder()
|
||||
.body(file)
|
||||
.addHeader('Content-Disposition', `attachment; filename="${path}"`)
|
||||
.status(200)
|
||||
.build()
|
||||
}
|
||||
|
||||
public PUT: APIRoute = async (ctx) => {
|
||||
const path = ctx.params.path!
|
||||
const data = await ctx.request.arrayBuffer()
|
||||
const bfr = Buffer.from(data)
|
||||
const ok = await this.putFile(path, bfr)
|
||||
|
||||
return new ResponseBuilder()
|
||||
.body({
|
||||
path: path,
|
||||
size: bfr.byteLength
|
||||
})
|
||||
.status(ok ? 201 : 400)
|
||||
.build()
|
||||
}
|
||||
|
||||
public DELETE: APIRoute = async (ctx) => {
|
||||
const path = ctx.params.path!
|
||||
|
||||
await this.deleteFile(path)
|
||||
|
||||
return new ResponseBuilder()
|
||||
.status(200)
|
||||
.build()
|
||||
}
|
||||
|
||||
private async exists(path: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.stat(path)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
private getFullPath(path: string): string {
|
||||
return this.rootPath + '/' + path
|
||||
}
|
||||
}
|
23
src/libs/Schema/Items/DzeioLiteral.ts
Normal file
23
src/libs/Schema/Items/DzeioLiteral.ts
Normal file
@ -0,0 +1,23 @@
|
||||
import SchemaItem, { type JSONSchemaItem } from '../SchemaItem'
|
||||
|
||||
export default class DzeioLiteral<T> extends SchemaItem<T> {
|
||||
public constructor(private readonly value: T) {
|
||||
super()
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return input === value
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
public override isOfType(input: unknown): input is T {
|
||||
return typeof input === typeof this.value
|
||||
}
|
||||
|
||||
public override toJSON(): JSONSchemaItem {
|
||||
return {
|
||||
type: 'literal',
|
||||
params: [this.value as string]
|
||||
}
|
||||
}
|
||||
}
|
93
src/libs/Schema/Items/SchemaArray.ts
Normal file
93
src/libs/Schema/Items/SchemaArray.ts
Normal file
@ -0,0 +1,93 @@
|
||||
import type { ValidationError, ValidationResult } from '..'
|
||||
import SchemaItem from '../SchemaItem'
|
||||
|
||||
export default class SchemaArray<A> extends SchemaItem<Array<A>> {
|
||||
|
||||
public constructor(
|
||||
private readonly values: SchemaItem<A>
|
||||
) {
|
||||
super()
|
||||
}
|
||||
|
||||
public override parse(input: unknown): A[] | unknown {
|
||||
// let master handle the first pass is desired
|
||||
input = super.parse(input)
|
||||
|
||||
if (!Array.isArray(input)) {
|
||||
return input
|
||||
}
|
||||
|
||||
const clone = []
|
||||
for (const item of input) {
|
||||
clone.push(this.values.parse(item))
|
||||
}
|
||||
|
||||
return clone
|
||||
}
|
||||
|
||||
public override validate(input: A[], fast = false): ValidationResult<A[]> {
|
||||
const tmp = super.validate(input, fast)
|
||||
if (tmp.error) {
|
||||
return tmp
|
||||
}
|
||||
const clone: Array<A> = []
|
||||
const errs: Array<ValidationError> = []
|
||||
for (let idx = 0; idx < tmp.object.length; idx++) {
|
||||
const item = tmp.object[idx];
|
||||
const res = this.values.validate(item as A)
|
||||
if (res.error) {
|
||||
const errors = res.error.map((it) => ({
|
||||
message: it.message,
|
||||
field: it.field ? `${idx}.${it.field}` : idx.toString()
|
||||
}))
|
||||
if (fast) {
|
||||
return {
|
||||
error: errors
|
||||
}
|
||||
}
|
||||
errs.push(...errors)
|
||||
} else {
|
||||
clone.push(res.object as A)
|
||||
}
|
||||
}
|
||||
|
||||
if (errs.length > 0) {
|
||||
return {
|
||||
error: errs
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
object: clone
|
||||
}
|
||||
}
|
||||
|
||||
public override transform(input: A[]): A[] {
|
||||
const clone = []
|
||||
for (const item of super.transform(input)) {
|
||||
clone.push(this.values.transform(item))
|
||||
}
|
||||
|
||||
return clone
|
||||
}
|
||||
|
||||
/**
|
||||
* transform the array so it only contains one of each elements
|
||||
*/
|
||||
public unique(): this {
|
||||
this.transforms.push((input) => input.filter((it, idx) => input.indexOf(it) === idx))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
public override isOfType(input: unknown): input is Array<A> {
|
||||
return Array.isArray(input)
|
||||
}
|
||||
|
||||
// public override toJSON(): JSONSchemaItem {
|
||||
// return {
|
||||
// type: 'array',
|
||||
// childs: this.values
|
||||
// }
|
||||
// }
|
||||
}
|
8
src/libs/Schema/Items/SchemaBoolean.ts
Normal file
8
src/libs/Schema/Items/SchemaBoolean.ts
Normal file
@ -0,0 +1,8 @@
|
||||
import SchemaItem from '../SchemaItem'
|
||||
|
||||
export default class SchemaBoolean extends SchemaItem<boolean> {
|
||||
|
||||
public override isOfType(input: unknown): input is boolean {
|
||||
return typeof input === 'boolean'
|
||||
}
|
||||
}
|
48
src/libs/Schema/Items/SchemaDate.ts
Normal file
48
src/libs/Schema/Items/SchemaDate.ts
Normal file
@ -0,0 +1,48 @@
|
||||
import SchemaItem from '../SchemaItem'
|
||||
|
||||
export default class SchemaDate extends SchemaItem<Date> {
|
||||
|
||||
public parseString(): this {
|
||||
this.parseActions.push((input) => typeof input === 'string' ? new Date(input) : input)
|
||||
return this
|
||||
}
|
||||
|
||||
public min(value: Date, message?: string): this {
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return input >= value
|
||||
},
|
||||
message: message
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
public parseFromExcelString(): this {
|
||||
this.parseActions.push((input) => {
|
||||
if (typeof input !== 'string') {
|
||||
return input
|
||||
}
|
||||
const days = parseFloat(input)
|
||||
const millis = days * 24 * 60 * 60 * 1000
|
||||
const date = new Date('1900-01-01')
|
||||
date.setTime(date.getTime() + millis)
|
||||
return date
|
||||
})
|
||||
return this
|
||||
}
|
||||
|
||||
public max(value: Date, message?: string): this {
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return input <= value
|
||||
},
|
||||
message: message
|
||||
})
|
||||
return this
|
||||
}
|
||||
|
||||
public override isOfType(input: unknown): input is Date {
|
||||
return input instanceof Date && !isNaN(input.getTime())
|
||||
}
|
||||
}
|
21
src/libs/Schema/Items/SchemaFile.ts
Normal file
21
src/libs/Schema/Items/SchemaFile.ts
Normal file
@ -0,0 +1,21 @@
|
||||
import SchemaItem from '../SchemaItem'
|
||||
|
||||
export default class SchemaFile extends SchemaItem<File> {
|
||||
constructor () {
|
||||
super()
|
||||
this.parseActions.push((input) => this.isOfType(input) && input.size > 0 ? input : undefined)
|
||||
}
|
||||
|
||||
public extension(ext: string, message?: string): this {
|
||||
this.validations.push({
|
||||
fn: (input) => input.name.endsWith(ext),
|
||||
message
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
public override isOfType(input: unknown): input is File {
|
||||
return input instanceof File
|
||||
}
|
||||
}
|
65
src/libs/Schema/Items/SchemaNullable.ts
Normal file
65
src/libs/Schema/Items/SchemaNullable.ts
Normal file
@ -0,0 +1,65 @@
|
||||
import type { ValidationResult } from '..'
|
||||
import SchemaItem from '../SchemaItem'
|
||||
import { isNull } from '../utils'
|
||||
|
||||
export default class SchemaNullable<A> extends SchemaItem<A | undefined | null> {
|
||||
|
||||
public constructor(private readonly item: SchemaItem<A>) {
|
||||
super()
|
||||
}
|
||||
|
||||
public emptyAsNull(): this {
|
||||
this.parseActions.push((input) => {
|
||||
if (typeof input === 'string' && input === '') {
|
||||
return null
|
||||
}
|
||||
return input
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
public falthyAsNull(): this {
|
||||
this.parseActions.push((input) => {
|
||||
if (!input) {
|
||||
return null
|
||||
}
|
||||
return input
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
public override transform(input: A | null | undefined): A | null | undefined {
|
||||
const transformed = super.transform(input)
|
||||
|
||||
if (isNull(transformed) || isNull(input)) {
|
||||
return transformed
|
||||
}
|
||||
|
||||
return this.item.transform(input)
|
||||
}
|
||||
|
||||
public override validate(input: A | null | undefined): ValidationResult<A | null | undefined> {
|
||||
if (isNull(input)) {
|
||||
return {
|
||||
object: input
|
||||
}
|
||||
}
|
||||
return this.item.validate(input)
|
||||
}
|
||||
|
||||
public override parse(input: unknown): (A | null | undefined) | unknown {
|
||||
const parsed = super.parse(input)
|
||||
|
||||
if (isNull(parsed) || isNull(input)) {
|
||||
return parsed
|
||||
}
|
||||
|
||||
return this.item.parse(input)
|
||||
}
|
||||
|
||||
public override isOfType(input: unknown): input is A | undefined | null {
|
||||
return isNull(input) || this.item.isOfType(input)
|
||||
}
|
||||
}
|
89
src/libs/Schema/Items/SchemaNumber.ts
Normal file
89
src/libs/Schema/Items/SchemaNumber.ts
Normal file
@ -0,0 +1,89 @@
|
||||
import SchemaItem from '../SchemaItem'
|
||||
|
||||
export default class SchemaNumber extends SchemaItem<number> {
|
||||
|
||||
public min(...params: Parameters<SchemaNumber['gte']>): this {
|
||||
return this.gte(...params)
|
||||
}
|
||||
|
||||
public max(...params: Parameters<SchemaNumber['lte']>): this {
|
||||
return this.lte(...params)
|
||||
}
|
||||
|
||||
/**
|
||||
* validate that the number is less or equal than {@link value}
|
||||
* @param value the maxumum value (inclusive)
|
||||
* @param message the message sent if not valid
|
||||
*/
|
||||
public lte(value: number, message?: string): this {
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return input <= value
|
||||
},
|
||||
message: message
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* validate that the number is more or equal than {@link value}
|
||||
* @param value the minimum value (inclusive)
|
||||
* @param message the message sent if not valid
|
||||
*/
|
||||
public gte(value: number, message?: string): this {
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return input >= value
|
||||
},
|
||||
message: message
|
||||
})
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* validate that the number is less than {@link value}
|
||||
* @param value the maxumum value (exclusive)
|
||||
* @param message the message sent if not valid
|
||||
*/
|
||||
public lt(value: number, message?: string): this {
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return input < value
|
||||
},
|
||||
message: message
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* validate that the number is more than {@link value}
|
||||
* @param value the minimum value (exclusive)
|
||||
* @param message the message sent if not valid
|
||||
*/
|
||||
public gt(value: number, message?: string): this {
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return input > value
|
||||
},
|
||||
message: message
|
||||
})
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to parse strings before validating
|
||||
*/
|
||||
public parseString(): this {
|
||||
this.parseActions.push((input) =>
|
||||
typeof input === 'string' ? Number.parseFloat(input) : input
|
||||
)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
public override isOfType(input: unknown): input is number {
|
||||
return typeof input === 'number' && !Number.isNaN(input)
|
||||
}
|
||||
}
|
88
src/libs/Schema/Items/SchemaRecord.ts
Normal file
88
src/libs/Schema/Items/SchemaRecord.ts
Normal file
@ -0,0 +1,88 @@
|
||||
import { isObject, objectLoop, objectRemap } from '@dzeio/object-util'
|
||||
import type { ValidationError, ValidationResult } from '..'
|
||||
import SchemaItem from '../SchemaItem'
|
||||
|
||||
export default class SchemaRecord<A extends string | number | symbol, B> extends SchemaItem<Record<A, B>> {
|
||||
|
||||
public constructor(
|
||||
private readonly key: SchemaItem<A>,
|
||||
private readonly values: SchemaItem<B>
|
||||
) {
|
||||
super()
|
||||
}
|
||||
|
||||
public override parse(input: unknown): unknown {
|
||||
input = super.parse(input)
|
||||
|
||||
if (!this.isOfType(input)) {
|
||||
return input
|
||||
}
|
||||
|
||||
const finalObj: Record<A, B> = {} as Record<A, B>
|
||||
const error = objectLoop(input, (value, key) => {
|
||||
const res1 = this.key.parse(key)
|
||||
const res2 = this.values.parse(value)
|
||||
if (typeof res1 !== 'string' && typeof res1 !== 'number') {
|
||||
return false
|
||||
}
|
||||
// @ts-expect-error normal behavior
|
||||
finalObj[res1] = res2
|
||||
return true
|
||||
})
|
||||
if (error) {
|
||||
return input
|
||||
}
|
||||
return finalObj
|
||||
}
|
||||
|
||||
public override transform(input: Record<A, B>): Record<A, B> {
|
||||
return objectRemap(super.transform(input), (value, key) => {
|
||||
return {
|
||||
key: this.key.transform(key),
|
||||
value: this.values.transform(value)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
public override validate(input: Record<A, B>, fast = false): ValidationResult<Record<A, B>> {
|
||||
const tmp = super.validate(input)
|
||||
if (tmp.error) {
|
||||
return tmp
|
||||
}
|
||||
|
||||
const errs: Array<ValidationError> = []
|
||||
const finalObj: Record<A, B> = {} as Record<A, B>
|
||||
|
||||
objectLoop(tmp.object, (value, key) => {
|
||||
const res1 = this.key.validate(key)
|
||||
const res2 = this.values.validate(value)
|
||||
const localErrs = (res1.error ?? []).concat(...(res2.error ?? []))
|
||||
if (localErrs.length > 0) {
|
||||
errs.push(...localErrs.map((it) => ({
|
||||
message: it.message,
|
||||
field: it.field ? `${key as string}.${it.field}` : key.toString()
|
||||
})))
|
||||
return !fast
|
||||
} else {
|
||||
// @ts-expect-error the check in the if assure the typing below
|
||||
finalObj[res1.object] = res2.object
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
if (errs.length > 0) {
|
||||
return {
|
||||
error: errs
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
object: finalObj
|
||||
}
|
||||
}
|
||||
|
||||
public override isOfType(input: unknown): input is Record<A, B> {
|
||||
return isObject(input) && Object.prototype.toString.call(input) === '[object Object]'
|
||||
}
|
||||
}
|
76
src/libs/Schema/Items/SchemaString.ts
Normal file
76
src/libs/Schema/Items/SchemaString.ts
Normal file
@ -0,0 +1,76 @@
|
||||
import SchemaItem from '../SchemaItem'
|
||||
import SchemaNullable from './SchemaNullable'
|
||||
|
||||
export default class SchemaString extends SchemaItem<string> {
|
||||
/**
|
||||
* force the input text to be a minimum of `value` size
|
||||
* @param value the minimum length of the text
|
||||
* @param message the message to display on an error
|
||||
*/
|
||||
public min(value: number, message?: string): SchemaString {
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return input.length >= value
|
||||
},
|
||||
message: message
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* force the input text to be a maximum of `value` size
|
||||
* @param value the maximum length of the text
|
||||
* @param message the message to display on an error
|
||||
*/
|
||||
public max(value: number, message?: string): SchemaString {
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return input.length <= value
|
||||
},
|
||||
message: message
|
||||
})
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* the value must not be empty (`''`)
|
||||
* @param message
|
||||
* @returns
|
||||
*/
|
||||
public notEmpty(message?: string): this {
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return input !== ''
|
||||
},
|
||||
message: message
|
||||
})
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* note: this nullable MUST be used last as it change the type of the returned function
|
||||
*/
|
||||
public nullable() {
|
||||
return new SchemaNullable(this)
|
||||
}
|
||||
|
||||
/**
|
||||
* force the input text to respect a Regexp
|
||||
* @param regex the regex to validate against
|
||||
* @param message the message to display on an error
|
||||
*/
|
||||
public regex(regex: RegExp, message?: string): SchemaString {
|
||||
this.validations.push({
|
||||
fn(input) {
|
||||
return regex.test(input)
|
||||
},
|
||||
message
|
||||
})
|
||||
return this
|
||||
}
|
||||
|
||||
public override isOfType(input: unknown): input is string {
|
||||
return typeof input === 'string'
|
||||
}
|
||||
}
|
25
src/libs/Schema/README.md
Normal file
25
src/libs/Schema/README.md
Normal file
@ -0,0 +1,25 @@
|
||||
a Full featured and lightweight Schema validation/parsing library
|
||||
|
||||
it is meant to be used for input validation
|
||||
|
||||
example :
|
||||
|
||||
```ts
|
||||
import Schema, { s, type SchemaInfer } from 'libs/Schema'
|
||||
|
||||
const schema = new Schema({
|
||||
test: s.record(s.string(), s.object({
|
||||
a: s.number().parseString().min(3, 'a is too small')
|
||||
}))
|
||||
})
|
||||
|
||||
const t = {
|
||||
test: {
|
||||
b: {a: '34'}
|
||||
}
|
||||
}
|
||||
|
||||
// validate that `t` is coherent with the schema above
|
||||
const { object, error } = schema.validate(t)
|
||||
console.log(object, error)
|
||||
```
|
169
src/libs/Schema/SchemaItem.ts
Normal file
169
src/libs/Schema/SchemaItem.ts
Normal file
@ -0,0 +1,169 @@
|
||||
import type { ValidationResult } from '.'
|
||||
import Schema from '.'
|
||||
import { isNull } from './utils'
|
||||
|
||||
/**
 * Customizable messages used by the schema validation engine.
 */
export interface Messages {
	// fallback error message used when a validation provides no dedicated message
	globalInvalid: string
}
|
||||
|
||||
/**
|
||||
* An element of a schema
|
||||
*/
|
||||
export default abstract class SchemaItem<T> {
|
||||
/**
|
||||
* get additionnal attributes used to make the Schema work with outside libs
|
||||
*/
|
||||
public attributes: Array<string> = []
|
||||
|
||||
/**
|
||||
* the list of validations
|
||||
*/
|
||||
protected validations: Array<{
|
||||
fn: (input: T) => boolean
|
||||
message?: string | undefined
|
||||
}> = []
|
||||
|
||||
protected parseActions: Array<(input: unknown) => T | unknown> = []
|
||||
protected transforms: Array<(input: T) => T> = []
|
||||
|
||||
/**
|
||||
* set the list of attributes for the item of the schema
|
||||
* @param attributes the attributes
|
||||
*/
|
||||
public attr(...attributes: Array<string>) {
|
||||
this.attributes = attributes
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* set the default value of the schema element
|
||||
* @param value the default value
|
||||
* @param strict if strict, it will use it for null/undefined, else it will check for falthy values
|
||||
*/
|
||||
public defaultValue(value: T, strict = true) {
|
||||
this.parseActions.push((input) => {
|
||||
if (strict && isNull(input)) {
|
||||
return value
|
||||
}
|
||||
if (!value) {
|
||||
return input
|
||||
}
|
||||
return input
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param values the possible values the field can be
|
||||
* @param message the message returned if it does not respect the value
|
||||
*/
|
||||
public in(values: Array<T>, message?: string) {
|
||||
this.validations.push({
|
||||
fn: (input) => values.includes(input),
|
||||
message
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to parse the input from another format
|
||||
*
|
||||
* @param input the input to transform, it is done before validation, so the value can be anything
|
||||
* @returns the transformed value
|
||||
*/
|
||||
public parse(input: unknown): T | unknown {
|
||||
for (const transform of this.parseActions) {
|
||||
const tmp = transform(input)
|
||||
if (this.isOfType(tmp)) {
|
||||
return tmp
|
||||
}
|
||||
}
|
||||
|
||||
return input
|
||||
}
|
||||
|
||||
/**
|
||||
* transform a valid value
|
||||
*
|
||||
* @param input the input to transform, it MUST be validated beforehand
|
||||
* @returns the transformed value
|
||||
*/
|
||||
public transform(input: T): T {
|
||||
let res = input
|
||||
|
||||
for (const action of this.transforms) {
|
||||
res = action(res)
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
/**
|
||||
* validate that the input is valid or not
|
||||
* @param input the input to validate
|
||||
* @param fast if true the validation stops at the first error
|
||||
* @returns a string if it's not valid, else null
|
||||
*/
|
||||
public validate(input: T, fast = false): ValidationResult<T> {
|
||||
for (const validation of this.validations) {
|
||||
if (!validation.fn(input as T)) {
|
||||
return {
|
||||
error: [{
|
||||
message: validation.message ?? Schema.messages.globalInvalid
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
object: input as T
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* validate that the input value is of the type of the schema item
|
||||
*
|
||||
* it makes others functions easier to works with
|
||||
* @param input the input to validate
|
||||
*/
|
||||
public abstract isOfType(input: unknown): input is T
|
||||
|
||||
// public abstract toJSON(): JSONSchemaItem
|
||||
}
|
||||
|
||||
// primitive values that can be carried inside a JSON schema description
type Parseable = string | number | boolean

/**
 * JSON description of a single validator attached to a schema item
 */
export interface ValidatorJSON {
	/**
	 * the function name (ex: `min`, `max`)
	 */
	name: string
	/**
	 * the function parameters
	 */
	params?: Array<Parseable>
}

/**
 * JSON description of a single schema item (one field of a schema)
 */
export interface JSONSchemaItem {
	/**
	 * Schema item
	 *
	 * ex: `string`, `number`, `boolean`, ...
	 */
	type: string
	/**
	 * constructor params
	 */
	params?: Array<Parseable>
	/**
	 * list of attributes
	 */
	attributes?: Array<string>
	// validators applied on top of the base type (see ValidatorJSON)
	actions?: Array<ValidatorJSON>
}

/**
 * JSON representation of a full schema: field name -> item description
 */
export type JSONSchema = {
	[a: string]: JSONSchemaItem
}
|
217
src/libs/Schema/index.ts
Normal file
217
src/libs/Schema/index.ts
Normal file
@ -0,0 +1,217 @@
|
||||
import { isObject, objectLoop } from '@dzeio/object-util'
|
||||
import DzeioLiteral from './Items/DzeioLiteral'
|
||||
import SchemaArray from './Items/SchemaArray'
|
||||
import SchemaBoolean from './Items/SchemaBoolean'
|
||||
import SchemaDate from './Items/SchemaDate'
|
||||
import SchemaFile from './Items/SchemaFile'
|
||||
import SchemaNullable from './Items/SchemaNullable'
|
||||
import SchemaNumber from './Items/SchemaNumber'
|
||||
import SchemaRecord from './Items/SchemaRecord'
|
||||
import SchemaString from './Items/SchemaString'
|
||||
import SchemaItem from './SchemaItem'
|
||||
|
||||
export interface ValidationError {
|
||||
message: string
|
||||
field?: string
|
||||
value?: unknown
|
||||
}
|
||||
|
||||
export type ValidationResult<T> = {
|
||||
object: T
|
||||
error?: undefined
|
||||
} | {
|
||||
object?: undefined
|
||||
error: Array<ValidationError>
|
||||
}
|
||||
|
||||
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||
export type Model = Record<string, SchemaItem<any>>
|
||||
|
||||
export type SchemaInfer<S extends Schema> = ModelInfer<S['model']>
|
||||
|
||||
export type ModelInfer<M extends Model> = {
|
||||
[key in keyof M]: ReturnType<M[key]['transform']>
|
||||
}
|
||||
|
||||
/**
|
||||
* A schema to validate input or external datas
|
||||
*/
|
||||
export default class Schema<M extends Model = Model> extends SchemaItem<ModelInfer<Model>> {
|
||||
|
||||
public static messages = {
|
||||
typeInvalid: 'Type of field is not valid',
|
||||
notAnObject: 'the data submitted is not valid',
|
||||
globalInvalid: 'the field is invalid'
|
||||
}
|
||||
|
||||
public constructor(public readonly model: M) {
|
||||
super()
|
||||
}
|
||||
|
||||
/**
|
||||
* See {@link SchemaString}
|
||||
*/
|
||||
public static string(
|
||||
...inputs: ConstructorParameters<typeof SchemaString>
|
||||
) : SchemaString {
|
||||
return new SchemaString(...inputs)
|
||||
}
|
||||
|
||||
public static file(
|
||||
...inputs: ConstructorParameters<typeof SchemaFile>
|
||||
): SchemaFile {
|
||||
return new SchemaFile(...inputs)
|
||||
}
|
||||
|
||||
public static number(
|
||||
...inputs: ConstructorParameters<typeof SchemaNumber>
|
||||
): SchemaNumber {
|
||||
return new SchemaNumber(...inputs)
|
||||
}
|
||||
|
||||
public static date(
|
||||
...inputs: ConstructorParameters<typeof SchemaDate>
|
||||
): SchemaDate {
|
||||
return new SchemaDate(...inputs)
|
||||
}
|
||||
|
||||
public static literal<T>(
|
||||
...inputs: ConstructorParameters<typeof DzeioLiteral<T>>
|
||||
): DzeioLiteral<T> {
|
||||
return new DzeioLiteral<T>(...inputs)
|
||||
}
|
||||
|
||||
public static object<T extends Model>(
|
||||
...inputs: ConstructorParameters<typeof Schema<T>>
|
||||
): Schema<T> {
|
||||
return new Schema(...inputs)
|
||||
}
|
||||
|
||||
public static record<A extends string | number, B>(
|
||||
...inputs: ConstructorParameters<typeof SchemaRecord<A, B>>
|
||||
): SchemaRecord<A, B> {
|
||||
return new SchemaRecord<A, B>(...inputs)
|
||||
}
|
||||
|
||||
public static array<A>(
|
||||
...inputs: ConstructorParameters<typeof SchemaArray<A>>
|
||||
): SchemaArray<A> {
|
||||
return new SchemaArray<A>(...inputs)
|
||||
}
|
||||
|
||||
public static nullable<A>(
|
||||
...inputs: ConstructorParameters<typeof SchemaNullable<A>>
|
||||
): SchemaNullable<A> {
|
||||
return new SchemaNullable<A>(...inputs)
|
||||
}
|
||||
|
||||
public static boolean(
|
||||
...inputs: ConstructorParameters<typeof SchemaBoolean>
|
||||
): SchemaBoolean {
|
||||
return new SchemaBoolean(...inputs)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param query the URL params to validate
|
||||
* @returns
|
||||
*/
|
||||
public validateQuery(query: URLSearchParams, fast = false): ReturnType<Schema<M>['validate']> {
|
||||
const record: Record<string, unknown> = {}
|
||||
for (const [key, value] of query) {
|
||||
record[key] = value
|
||||
}
|
||||
|
||||
return this.validate(record, fast)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param form the form to validate
|
||||
*/
|
||||
public validateForm(form: HTMLFormElement, fast = false): ReturnType<Schema<M>['validate']> {
|
||||
const data = new FormData(form)
|
||||
return this.validateFormData(data, fast)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param data the FormData to validate
|
||||
* @returns
|
||||
*/
|
||||
public validateFormData(data: FormData, fast = false): ReturnType<Schema<M>['validate']> {
|
||||
const record: Record<string, unknown> = {}
|
||||
for (const [key, value] of data) {
|
||||
const isArray = this.model[key]?.isOfType([]) ?? false
|
||||
record[key] = isArray ? data.getAll(key) : value
|
||||
}
|
||||
|
||||
return this.validate(record, fast)
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param input the data to validate
|
||||
* @param options additionnal validation options
|
||||
* @returns blablabla
|
||||
*/
|
||||
public override validate(input: unknown, fast = false): ValidationResult<SchemaInfer<this>> {
|
||||
if (!isObject(input)) {
|
||||
return {
|
||||
error: [{
|
||||
message: Schema.messages.notAnObject
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
const errors: Array<ValidationError> = []
|
||||
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||
const res: ModelInfer<M> = {} as any
|
||||
objectLoop(this.model, (v, k) => {
|
||||
// parse value from other formats
|
||||
const value = v.parse(input[k])
|
||||
|
||||
// validate that the value is of type
|
||||
if (!v.isOfType(value)) {
|
||||
errors.push({
|
||||
message: Schema.messages.typeInvalid,
|
||||
field: k,
|
||||
value: value
|
||||
})
|
||||
return !fast
|
||||
}
|
||||
|
||||
// run validations
|
||||
const invalid = v.validate(value)
|
||||
if (invalid.error) {
|
||||
errors.push(...invalid.error.map((it) => ({
|
||||
message: it.message,
|
||||
field: it.field ? `${k}.${it.field}` : k
|
||||
})))
|
||||
return !fast
|
||||
}
|
||||
|
||||
// transform and assign final value
|
||||
// @ts-expect-error normal behavior
|
||||
res[k] = v.transform(value)
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
if (errors.length > 0) {
|
||||
return {
|
||||
error: errors
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
object: res
|
||||
}
|
||||
}
|
||||
|
||||
public override isOfType(input: unknown): input is ModelInfer<Model> {
|
||||
return isObject(input)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* alias of {@link Schema}
|
||||
*/
|
||||
export const s = Schema
|
3
src/libs/Schema/utils.ts
Normal file
3
src/libs/Schema/utils.ts
Normal file
@ -0,0 +1,3 @@
|
||||
export function isNull(value: unknown): value is undefined | null {
|
||||
return typeof value === 'undefined' || value === null
|
||||
}
|
234
src/models/Adapters/AdapterUtils.ts
Normal file
234
src/models/Adapters/AdapterUtils.ts
Normal file
@ -0,0 +1,234 @@
|
||||
import { objectFind, objectLoop } from '@dzeio/object-util'
|
||||
import { Sort, type Query, type QueryList, type QueryValues } from 'models/Query'
|
||||
|
||||
export declare type AllowedValues = string | number | bigint | boolean | null | undefined
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
export function filter<T extends object>(query: Query<T>, results: Array<T>, options?: { debug?: boolean }): {filtered: Array<T>, unpaginatedLength: number} {
|
||||
if (options?.debug) {
|
||||
console.log('Query', query)
|
||||
}
|
||||
// filter
|
||||
let filtered = results.filter((it) => {
|
||||
const res = objectLoop(query, (value, key) => {
|
||||
if (key === '$or') {
|
||||
for (const sub of value as any) {
|
||||
const final = filterEntry(sub, it)
|
||||
// eslint-disable-next-line max-depth
|
||||
if (final) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
if ((key as string).startsWith('$')) {
|
||||
return true
|
||||
}
|
||||
return filterEntry(query, it)
|
||||
})
|
||||
// console.log(it, res)
|
||||
return res
|
||||
})
|
||||
if (options?.debug) {
|
||||
console.log('postFilters', filtered)
|
||||
}
|
||||
|
||||
// sort
|
||||
if (query.$sort) {
|
||||
// temp until better solution is found
|
||||
const first = objectFind(query.$sort, () => true)
|
||||
filtered = filtered.sort((objA, objB) => {
|
||||
const a = objA[first!.key]
|
||||
const b = objB[first!.key]
|
||||
const ascend = first?.value !== Sort.DESC
|
||||
if (typeof a === 'number' && typeof b === 'number') {
|
||||
if (ascend) {
|
||||
return b - a
|
||||
} else {
|
||||
return a - b
|
||||
}
|
||||
}
|
||||
if (a instanceof Date && b instanceof Date) {
|
||||
if (ascend) {
|
||||
return a.getTime() - b.getTime()
|
||||
} else {
|
||||
return b.getTime() - a.getTime()
|
||||
}
|
||||
}
|
||||
if (typeof a === 'string' && typeof b === 'string') {
|
||||
if (ascend) {
|
||||
return a.localeCompare(b)
|
||||
} else {
|
||||
return b.localeCompare(a)
|
||||
}
|
||||
|
||||
}
|
||||
if (ascend) {
|
||||
return a > b ? 1 : -1
|
||||
}
|
||||
return a > b ? -1 : 1
|
||||
})
|
||||
}
|
||||
if (options?.debug) {
|
||||
console.log('postSort', filtered)
|
||||
}
|
||||
|
||||
// length of the query assuming a single page
|
||||
const unpaginatedLength = filtered.length
|
||||
// limit
|
||||
if (query.$offset || query.$limit) {
|
||||
const offset = query.$offset ?? 0
|
||||
filtered = filtered.slice(offset, offset + (query.$limit ?? Infinity))
|
||||
}
|
||||
if (options?.debug) {
|
||||
console.log('postLimit', filtered)
|
||||
}
|
||||
|
||||
return { filtered, unpaginatedLength }
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param query the query of the entry
|
||||
* @param item the implementation of the item
|
||||
* @returns if it should be kept or not
|
||||
*/
|
||||
export function filterEntry<T extends object>(query: QueryList<T>, item: T): boolean {
|
||||
// eslint-disable-next-line complexity
|
||||
const res = objectLoop(query as any, (queryValue, key: keyof typeof query) => {
|
||||
/**
|
||||
* TODO: handle $keys
|
||||
*/
|
||||
if ((key as string).startsWith('$')) {
|
||||
return true
|
||||
}
|
||||
|
||||
return filterValue(item[key], queryValue)
|
||||
})
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
/**
|
||||
* indicate if a value should be kept by an ENTIRE query
|
||||
*
|
||||
* @param value the value to filter
|
||||
* @param query the full query
|
||||
* @returns if the query should keep the value or not
|
||||
*/
|
||||
function filterValue<T extends AllowedValues>(value: any, query: QueryValues<T>) {
|
||||
if (typeof query !== 'object' || query === null || query instanceof RegExp || Array.isArray(query)) {
|
||||
return filterItem(value, query)
|
||||
}
|
||||
|
||||
// loop through each keys of the query
|
||||
// eslint-disable-next-line arrow-body-style
|
||||
return objectLoop(query, (querySubValue: any, queryKey: any) => {
|
||||
return filterItem(value, {[queryKey]: querySubValue } as QueryValues<T>)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param value the value to check
|
||||
* @param query a SINGLE query to check against
|
||||
* @returns if the value should be kept or not
|
||||
*/
|
||||
// eslint-disable-next-line complexity
|
||||
function filterItem(value: any, query: QueryValues<AllowedValues>): boolean {
|
||||
/**
|
||||
* check if the value is null
|
||||
*/
|
||||
if (query === null) {
|
||||
return typeof value === 'undefined' || value === null
|
||||
}
|
||||
|
||||
if (query instanceof RegExp) {
|
||||
return query.test(typeof value === 'string' ? value : value.toString())
|
||||
}
|
||||
|
||||
/**
|
||||
* ?!?
|
||||
*/
|
||||
if (value === null || typeof value === 'undefined') {
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* strict value check by default
|
||||
*/
|
||||
if (!(typeof query === 'object')) {
|
||||
return query === value
|
||||
}
|
||||
|
||||
/**
|
||||
* Array checking and $in
|
||||
*/
|
||||
if (Array.isArray(query) || '$in' in query) {
|
||||
const arr = Array.isArray(query) ? query : query.$in as Array<AllowedValues>
|
||||
return arr.includes(value)
|
||||
}
|
||||
|
||||
if ('$inc' in query) {
|
||||
return (value.toString() as string).toLowerCase().includes(query.$inc!.toString()!.toLowerCase())
|
||||
}
|
||||
|
||||
if ('$eq' in query) {
|
||||
return query.$eq === value
|
||||
}
|
||||
|
||||
/**
|
||||
* numbers specific cases for numbers
|
||||
*/
|
||||
if ('$gt' in query) {
|
||||
value = value instanceof Date ? value.getTime() : value
|
||||
const comparedValue = query.$gt instanceof Date ? query.$gt.getTime() : query.$gt
|
||||
return typeof value === 'number' && typeof comparedValue === 'number' && value > comparedValue
|
||||
}
|
||||
|
||||
if ('$lt' in query) {
|
||||
value = value instanceof Date ? value.getTime() : value
|
||||
const comparedValue = query.$lt instanceof Date ? query.$lt.getTime() : query.$lt
|
||||
return typeof value === 'number' && typeof comparedValue === 'number' && value < comparedValue
|
||||
}
|
||||
|
||||
if ('$gte' in query) {
|
||||
value = value instanceof Date ? value.getTime() : value
|
||||
const comparedValue = query.$gte instanceof Date ? query.$gte.getTime() : query.$gte
|
||||
return typeof value === 'number' && typeof comparedValue === 'number' && value >= comparedValue
|
||||
}
|
||||
|
||||
if ('$lte' in query) {
|
||||
value = value instanceof Date ? value.getTime() : value
|
||||
const comparedValue = query.$lte instanceof Date ? query.$lte.getTime() : query.$lte
|
||||
return typeof value === 'number' && typeof comparedValue === 'number' && value <= comparedValue
|
||||
}
|
||||
|
||||
if ('$len' in query && Array.isArray(value)) {
|
||||
return value.length === query.$len
|
||||
}
|
||||
|
||||
/**
|
||||
* Logical Operators
|
||||
*/
|
||||
if ('$or' in query && Array.isArray(query.$or)) {
|
||||
return !!query.$or.find((it) => filterValue(value, it as QueryValues<any>))
|
||||
}
|
||||
if ('$and' in query && Array.isArray(query.$and)) {
|
||||
return !query.$and.find((it) => !filterValue(value, it as QueryValues<any>))
|
||||
}
|
||||
|
||||
if ('$not' in query) {
|
||||
return !filterValue(value, query.$not as QueryValues<any>)
|
||||
}
|
||||
|
||||
if ('$nor' in query && Array.isArray(query.$nor)) {
|
||||
return !query.$nor.find((it) => filterValue(value, it as QueryValues<any>))
|
||||
}
|
||||
|
||||
if ('$nand' in query && Array.isArray(query.$nand)) {
|
||||
return !!query.$nand.find((it) => !filterValue(value, it as QueryValues<any>))
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
54
src/models/Adapters/CSVAdapter.ts
Normal file
54
src/models/Adapters/CSVAdapter.ts
Normal file
@ -0,0 +1,54 @@
|
||||
import CSV, { type CSVOptions } from 'libs/FileFormats/CSV'
|
||||
import type Schema from 'libs/Schema'
|
||||
import type { SchemaInfer } from 'libs/Schema'
|
||||
import fs from 'node:fs'
|
||||
import type { Query } from '../Query'
|
||||
import { filter } from './AdapterUtils'
|
||||
import type DaoAdapter from './DaoAdapter'
|
||||
import type { DBPull } from './DaoAdapter'
|
||||
|
||||
|
||||
/**
 * Read-only DaoAdapter backed by a CSV file.
 *
 * The file is read and parsed once, synchronously, at construction time;
 * only `read` is implemented — every write operation throws.
 */
export default class CSVAdapter<T extends Schema> implements DaoAdapter<T> {

	// the parsed CSV rows, loaded once in the constructor
	private data: Array<SchemaInfer<T>>

	/**
	 * @param schema the schema describing a row of the CSV
	 * @param serverPath filesystem path of the CSV file
	 * @param csvOptions options forwarded to the CSV parser
	 */
	public constructor(
		public readonly schema: T,
		public readonly serverPath: string,
		private readonly csvOptions?: CSVOptions
	) {
		// NOTE(review): synchronous read at construction — fine for startup,
		// confirm this adapter is never constructed on a hot request path
		const data = fs.readFileSync(serverPath, 'utf-8')
		this.data = CSV.parse(data, csvOptions) as Array<SchemaInfer<T>>
	}

	// not supported: the CSV source is read-only
	public async create(_obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
		throw new Error('not implemented')
	}

	/**
	 * Query the in-memory rows (filter/sort/paginate via `filter`).
	 *
	 * NOTE(review): `page`/`pageTotal` are hard-coded to 1 even when the
	 * query sets $limit/$offset — confirm this is intended
	 */
	// eslint-disable-next-line complexity
	public async read(query?: Query<SchemaInfer<T>> | undefined): Promise<DBPull<T>> {

		const res = filter(query ?? {}, this.data)

		return {
			rows: res.filtered.length,
			rowsTotal: res.unpaginatedLength,
			page: 1,
			pageTotal: 1,
			data: res.filtered
		}
	}

	// not supported: the CSV source is read-only
	public async update(_obj: SchemaInfer<T>): Promise<SchemaInfer<T> | null> {
		throw new Error('not implemented')
	}

	// not supported: the CSV source is read-only
	public async patch(_id: string, _obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
		throw new Error('not implemented')

	}

	// not supported: the CSV source is read-only
	public async delete(obj: SchemaInfer<T>): Promise<boolean> {
		throw new Error('not implemented')
	}
}
|
433
src/models/Adapters/CassandraAdapter.ts.old
Normal file
433
src/models/Adapters/CassandraAdapter.ts.old
Normal file
@ -0,0 +1,433 @@
|
||||
/* eslint-disable @typescript-eslint/no-non-null-assertion */
|
||||
import { objectFind, objectKeys, objectLoop, objectMap, objectRemap, objectValues } from '@dzeio/object-util'
|
||||
import type { ArrayOrObject } from 'cassandra-driver'
|
||||
import crypto from 'node:crypto'
|
||||
import { Sort, type Query } from '../Query'
|
||||
import { filter } from './AdapterUtils'
|
||||
import type DaoAdapter from './DaoAdapter'
|
||||
import type { DBPull } from './DaoAdapter'
|
||||
|
||||
/**
|
||||
* @deprecated need to be moved to the new Schema system
|
||||
*
|
||||
* (won't be done because we mostly won't be using it again...)
|
||||
*/
|
||||
export default class CassandraAdapter<T extends Schema> implements DaoAdapter<T> {
|
||||
|
||||
private id!: Array<string>
|
||||
|
||||
public constructor(
|
||||
/**
|
||||
* the schema used by Cassandra
|
||||
*/
|
||||
public readonly schema: Schema<T>,
|
||||
/**
|
||||
* the table name
|
||||
*/
|
||||
public readonly table: string,
|
||||
/**
|
||||
* the id(s)
|
||||
*/
|
||||
id?: keyof T | Array<keyof T>,
|
||||
|
||||
/**
|
||||
* other secondary keys necessary to update data
|
||||
*/
|
||||
private readonly partitionKeys?: Array<keyof T>,
|
||||
/**
|
||||
* additionnal options to make the adapter work
|
||||
*/
|
||||
public readonly options: {
|
||||
/**
|
||||
* log the requests made to cassandra
|
||||
*/
|
||||
debug?: boolean
|
||||
} = {}
|
||||
) {
|
||||
if (!id) {
|
||||
objectLoop(schema.model, (value, key) => {
|
||||
if (!isSchemaItem(value)) {
|
||||
return true
|
||||
}
|
||||
if (!value.database?.unique) {
|
||||
return true
|
||||
}
|
||||
id = key
|
||||
return false
|
||||
})
|
||||
} else {
|
||||
this.id = typeof id === 'string' ? [id] : id as Array<string>
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: make it clearer what it does
|
||||
public async create(obj: Partial<Implementation<T>>): Promise<Implementation<T> | null> {
|
||||
|
||||
objectLoop(this.schema.model, (item, key) => {
|
||||
if (isSchemaItem(item) && (item.database?.created || item.database?.updated)) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = new Date()
|
||||
} else if (isSchemaItem(item) && item.database?.auto && !obj[key]) {
|
||||
if (item.type === String) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = crypto.randomBytes(16).toString('hex')
|
||||
} else {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = crypto.randomBytes(16).readUint32BE()
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const clone = this.schema.parse(obj)
|
||||
if (!clone) {
|
||||
throw new Error('Invalid data given to create the final object')
|
||||
}
|
||||
|
||||
const keys = objectKeys(clone)
|
||||
const keysStr = keys.join(', ')
|
||||
const values = keys.fill('?').join(', ')
|
||||
const req = `INSERT INTO ${this.table} (${keysStr}) VALUES (${values});`
|
||||
const client = (await Client.get())!
|
||||
|
||||
const params = objectMap(clone as any, (value, key) => this.valueToDB(key as any, value))
|
||||
|
||||
if (this.options?.debug) {
|
||||
console.log(req, params)
|
||||
}
|
||||
|
||||
try {
|
||||
await client.execute(req, params, { prepare: true })
|
||||
} catch (e) {
|
||||
console.log(e, req, params)
|
||||
return null
|
||||
}
|
||||
return this.schema.parse(clone)
|
||||
}
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
public async read(query?: Query<Implementation<T>> | undefined): Promise<DBPull<T>> {
|
||||
let req: Array<string> = ['SELECT', '*', 'FROM', this.table]
|
||||
const params: ArrayOrObject = []
|
||||
|
||||
// list of the differents items in the WHERE statement
|
||||
const whereItems: Array<string> = []
|
||||
// if ((query?.where?.length ?? 0) > 0 && (query?.where?.length !== 1 || query?.where?.[0]?.[1] !== 'includes')) {
|
||||
// for (const it of query?.where ?? []) {
|
||||
// // eslint-disable-next-line max-depth
|
||||
// switch (it[1]) {
|
||||
// case 'in':
|
||||
// // eslint-disable-next-line no-case-declarations
|
||||
// const arr = it[2] as Array<any>
|
||||
|
||||
// whereItems.push(`${String(it[0])} IN (${arr.map(() => '?').join(',')})`)
|
||||
// params.push(...arr)
|
||||
// break
|
||||
|
||||
// case 'equal':
|
||||
// whereItems.push(`${String(it[0])} = ?`)
|
||||
// params.push(it[2])
|
||||
// break
|
||||
|
||||
// case 'after':
|
||||
// whereItems.push(`${String(it[0])} >= ?`)
|
||||
// params.push(it[2])
|
||||
// break
|
||||
|
||||
// case 'before':
|
||||
// whereItems.push(`${String(it[0])} <= ?`)
|
||||
// params.push(it[2])
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
if (whereItems.length > 0) {
|
||||
req.push('WHERE')
|
||||
for (let idx = 0; idx < whereItems.length; idx++) {
|
||||
const item = whereItems[idx] as string
|
||||
if (idx > 0) {
|
||||
req.push('AND')
|
||||
}
|
||||
req.push(item)
|
||||
}
|
||||
}
|
||||
|
||||
// ORDER BY (not working as we want :()
|
||||
// const sort = query?.$sort
|
||||
// if (sort && sort.length >= 1) {
|
||||
// const suffix = sort[0]?.[1] === 'asc' ? 'ASC' : 'DESC'
|
||||
// req = req.concat(['ORDER', 'BY', sort[0]?.[0] as string, suffix])
|
||||
// }
|
||||
|
||||
// LIMIT (not working because of ORDER BY)
|
||||
// const page: number = query?.page ?? 0
|
||||
// const pageLimit: number | null = query?.limit ?? null
|
||||
// let limit: number | null = null
|
||||
// if (pageLimit && pageLimit > 0) {
|
||||
// limit = pageLimit * (page + 1)
|
||||
// req = req.concat(['LIMIT', limit.toString()])
|
||||
// }
|
||||
|
||||
// ALLOWW FILTERING
|
||||
req = req.concat(['ALLOW', 'FILTERING'])
|
||||
const client = (await Client.get())!
|
||||
|
||||
if (this.options?.debug) {
|
||||
console.log(req, params)
|
||||
}
|
||||
|
||||
let res: Array<Record<string, any>>
|
||||
try {
|
||||
res = await client.execute(req.join(' '), params)
|
||||
} catch (error) {
|
||||
console.error('error running request')
|
||||
console.error(req, params)
|
||||
throw error
|
||||
}
|
||||
if (!res) {
|
||||
return {
|
||||
rows: 0,
|
||||
pageTotal: 0,
|
||||
page: 1,
|
||||
rowsTotal: 0,
|
||||
data: []
|
||||
}
|
||||
}
|
||||
|
||||
let dataset = res
|
||||
.map((obj) => objectRemap(this.schema.model, (_, key) => ({
|
||||
key,
|
||||
value: this.dbToValue(key, obj[key])
|
||||
})))
|
||||
.map((obj) => {
|
||||
objectLoop(this.schema.model, (item, key) => {
|
||||
if (Array.isArray(item) && !obj[key]) {
|
||||
obj[key] = []
|
||||
}
|
||||
})
|
||||
|
||||
return obj
|
||||
})
|
||||
.map((it) => this.schema.parse(it))
|
||||
.filter((it): it is Implementation<T> => !!it)
|
||||
|
||||
/**
|
||||
* POST QUERY TREATMENT
|
||||
*/
|
||||
// if ((query?.where?.length ?? 0) > 0) {
|
||||
// for (const it of query?.where ?? []) {
|
||||
// // eslint-disable-next-line max-depth
|
||||
// switch (it[1]) {
|
||||
// case 'includes':
|
||||
// dataset = dataset.filter((entry) => entry[it[0]]?.toString()?.includes(it[2]))
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// sort
|
||||
// const sort = query?.$sort
|
||||
// if (sort) {
|
||||
// const sortKey = sort ? sort[0]![0] : objectFind(this.schema.model, (value) => {
|
||||
// if (!isSchemaItem(value)) {
|
||||
// return false
|
||||
// }
|
||||
// return !!value.database?.created
|
||||
// })
|
||||
// const sortValue = sort ? sort[0]![1] : 'asc'
|
||||
// if (sortKey && sortValue) {
|
||||
// if (sortValue === 'asc') {
|
||||
// dataset = dataset.sort((a, b) => b[sortKey as string]! > a[sortKey as string]! ? 1 : -1)
|
||||
// } else {
|
||||
// dataset = dataset.sort((a, b) => b[sortKey as string]! < a[sortKey as string]! ? 1 : -1)
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// console.log(res.rows, req)
|
||||
// post request processing
|
||||
// if (limit) {
|
||||
// dataset = dataset.slice(page * (query?.limit ?? 0), limit)
|
||||
// }
|
||||
|
||||
// length of the query assuming a single page
|
||||
let unpaginatedLength = dataset.length
|
||||
// temp modification of comportement to use the new and better query system
|
||||
if ((!query || !query?.$sort) && objectFind(this.schema.model, (_, key) => key === 'created')) {
|
||||
// temp fix for the sorting algorithm
|
||||
if (!query) {
|
||||
// @ts-expect-error normal currently
|
||||
query = { $sort: { created: Sort.DESC }}
|
||||
} else {
|
||||
query.$sort = { created: Sort.DESC }
|
||||
}
|
||||
}
|
||||
if (query) {
|
||||
const { filtered, unpaginatedLength: ul } = filter(query, dataset, this.options)
|
||||
dataset = filtered
|
||||
unpaginatedLength = ul
|
||||
}
|
||||
|
||||
// console.log(res)
|
||||
const pageLimit = query?.$limit ?? 10
|
||||
const pageOffset = query?.$offset ?? 0
|
||||
return {
|
||||
rows: dataset.length,
|
||||
rowsTotal: unpaginatedLength,
|
||||
page: Math.floor(pageOffset / pageLimit),
|
||||
pageTotal: Math.max(1, Math.ceil(unpaginatedLength / pageLimit)),
|
||||
data: dataset
|
||||
}
|
||||
}
|
||||
|
||||
public async update(obj: Implementation<T>): Promise<Implementation<T> | null> {
|
||||
return this.patch(obj)
|
||||
}
|
||||
|
||||
public async patch(id: Partial<Implementation<T>>): Promise<Implementation<T> | null>
|
||||
public async patch(id: string, obj: Partial<Implementation<T>>): Promise<Implementation<T> | null>
|
||||
// eslint-disable-next-line complexity
|
||||
public async patch(id: string | Partial<Implementation<T>>, obj?: Partial<Implementation<T>>): Promise<Implementation<T> | null> {
|
||||
|
||||
if (!obj) {
|
||||
if (typeof id === 'string') {
|
||||
return null
|
||||
}
|
||||
obj = {...id} as Partial<Implementation<T>>
|
||||
}
|
||||
|
||||
// update the updated time
|
||||
objectLoop(this.schema.model, (item, key) => {
|
||||
if (isSchemaItem(item) && item.database?.updated) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = new Date()
|
||||
}
|
||||
})
|
||||
|
||||
// build the request parts
|
||||
const parts: Array<string> = ['UPDATE', this.table, 'SET']
|
||||
const params: Array<any> = []
|
||||
|
||||
// remove ids
|
||||
const ids = Array.isArray(this.id) ? this.id : [this.id]
|
||||
for (const tmp of ids) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
|
||||
delete obj[tmp]
|
||||
}
|
||||
|
||||
// map the items to update
|
||||
const keys = objectMap(obj as {}, (_, key) => `${key}=?`)
|
||||
parts.push(keys.join(', '))
|
||||
params.push(...objectValues(obj as {}))
|
||||
|
||||
// filter by the ids
|
||||
parts.push('WHERE')
|
||||
const read: Partial<any> = {}
|
||||
for (let idx = 0; idx < ids.length; idx++) {
|
||||
const key = ids[idx] as string
|
||||
|
||||
if (idx > 0) {
|
||||
parts.push('AND')
|
||||
}
|
||||
parts.push(`${key}=?`)
|
||||
const value = obj[key] ?? (typeof id === 'string' ? id : id[key])
|
||||
read[key] = this.valueToDB(key, value)
|
||||
if (!value) {
|
||||
throw new Error(`Missing id (${key})`)
|
||||
}
|
||||
params.push(value)
|
||||
}
|
||||
|
||||
if (this.partitionKeys && this.partitionKeys?.length > 0) {
|
||||
const { data } = await this.read(read)
|
||||
const item = data[0]
|
||||
for (const key of this.partitionKeys) {
|
||||
parts.push('AND', `${key as string}=?`)
|
||||
params.push(this.valueToDB(key, item![key]))
|
||||
}
|
||||
}
|
||||
|
||||
const req = parts.join(' ')
|
||||
const client = await Client.get()
|
||||
|
||||
if (this.options?.debug) {
|
||||
console.log(req, params)
|
||||
}
|
||||
|
||||
try {
|
||||
const res = await client?.execute(req, params)
|
||||
// console.log(res, req)
|
||||
if (this.options?.debug) {
|
||||
console.log('post patch result', res, req)
|
||||
}
|
||||
return (await this.read(read)).data[0] ?? null
|
||||
} catch (e) {
|
||||
console.log(e, req, params)
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
public async delete(obj: Implementation<T>): Promise<boolean> {
|
||||
const parts = ['DELETE', 'FROM', this.table, 'WHERE']
|
||||
const params: ArrayOrObject = []
|
||||
|
||||
objectLoop(obj as {}, (value, key) => {
|
||||
let allowedWheres = ([] as Array<any>).concat(Array.isArray(this.id) ? this.id : [this.id])
|
||||
if (this.partitionKeys) {
|
||||
allowedWheres.push(...this.partitionKeys )
|
||||
}
|
||||
if (!allowedWheres.includes(key)) {
|
||||
return
|
||||
}
|
||||
if (parts.length > 4) {
|
||||
parts.push('AND')
|
||||
}
|
||||
parts.push(`${key}=?`)
|
||||
params.push(value)
|
||||
})
|
||||
|
||||
const client = await Client.get()
|
||||
|
||||
if (this.options?.debug) {
|
||||
console.log(parts, params)
|
||||
}
|
||||
|
||||
try {
|
||||
await client!.execute(parts.join(' '), params)
|
||||
} catch (e) {
|
||||
console.error(e, parts, params)
|
||||
throw e
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
	/**
	 * Serialize an application-side value for storage in Cassandra.
	 *
	 * @param key the model key, used to look up the declared column type
	 * @param value the application-side value
	 * @returns the DB-side value; object-typed columns are JSON-stringified,
	 *          everything else (including nullish values) passes through
	 */
	private valueToDB(key: keyof T, value: any): string | number | boolean | Date {
		const item = this.schema.model[key] as Item
		const type = isSchemaItem(item) ? item.type : item

		// complex (object) columns are stored as JSON text
		if (typeof type === 'object' && !Array.isArray(type) && !(value instanceof Date)) {
			return JSON.stringify(value)
		}

		// nullish values pass through untouched
		if (typeof value === 'undefined' || value === null) {
			return value
		}

		return value
	}
|
||||
|
||||
	/**
	 * Deserialize a Cassandra-stored value back to its application-side form.
	 *
	 * Inverse of `valueToDB`: object-typed columns are JSON-parsed,
	 * everything else (including nullish values) passes through.
	 *
	 * @param key the model key, used to look up the declared column type
	 * @param value the DB-side value
	 * @returns the application-side value
	 */
	private dbToValue(key: keyof T, value: string | number | boolean | Date): any {
		const item = this.schema.model[key] as Item
		const type = isSchemaItem(item) ? item.type : item

		// complex (object) columns were stored as JSON text
		if (typeof type === 'object' && !Array.isArray(type) && !(value instanceof Date)) {
			return JSON.parse(value as string)
		}

		// nullish values pass through untouched
		if (typeof value === 'undefined' || value === null) {
			return value
		}

		return value
	}
|
||||
}
|
@ -1,16 +1,46 @@
|
||||
import type Schema from 'libs/Schema'
|
||||
import type { Model, ModelInfer, SchemaInfer } from 'libs/Schema'
|
||||
import type { Query } from '../Query'
|
||||
|
||||
export interface DBPull<T extends Schema> {
|
||||
/**
|
||||
* total number of rows that are valid with the specified query
|
||||
*/
|
||||
rows: number
|
||||
/**
|
||||
* total number of rows in the table
|
||||
*/
|
||||
rowsTotal: number
|
||||
|
||||
/**
|
||||
* current page number
|
||||
*/
|
||||
page: number
|
||||
|
||||
/**
|
||||
* total amount of pages
|
||||
*/
|
||||
pageTotal: number
|
||||
|
||||
/**
|
||||
* the data fetched
|
||||
*/
|
||||
data: Array<SchemaInfer<T>>
|
||||
}
|
||||
|
||||
/**
|
||||
* the Dao is the object that connect the Database or source to the application layer
|
||||
* the DaoAdapter is the object that connect the Database or source to the application layer
|
||||
*
|
||||
* you MUST call it through the `DaoFactory` file
|
||||
*/
|
||||
export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
export default abstract class DaoAdapter<M extends Model = Model> {
|
||||
/**
|
||||
* insert a new object into the source
|
||||
*
|
||||
* @param obj the object to create
|
||||
* @returns the object with it's id filled if create or null otherwise
|
||||
*/
|
||||
abstract create(obj: Omit<Object, 'id' | 'created' | 'updated'>): Promise<Object | null>
|
||||
abstract create(obj: Partial<ModelInfer<M>>): Promise<ModelInfer<M> | null>
|
||||
|
||||
/**
|
||||
* insert a new object into the source
|
||||
@ -18,7 +48,7 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
* @param obj the object to create
|
||||
* @returns the object with it's id filled if create or null otherwise
|
||||
*/
|
||||
public insert: Dao<Object>['create'] = (obj: Parameters<Dao<Object>['create']>[0]) =>
|
||||
public insert: DaoAdapter<ModelInfer<M>>['create'] = (obj: Parameters<DaoAdapter<ModelInfer<M>>['create']>[0]) =>
|
||||
this.create(obj)
|
||||
|
||||
/**
|
||||
@ -27,7 +57,7 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
|
||||
* @returns an array containing the list of elements that match with the query
|
||||
*/
|
||||
abstract findAll(query?: Partial<Object>): Promise<Array<Object>>
|
||||
abstract read(query?: Query<ModelInfer<M>> | undefined): Promise<DBPull<Schema<M>>>
|
||||
|
||||
/**
|
||||
* find the list of objects having elements from the query
|
||||
@ -35,8 +65,17 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
|
||||
* @returns an array containing the list of elements that match with the query
|
||||
*/
|
||||
public find: Dao<Object>['findAll'] = (query: Parameters<Dao<Object>['findAll']>[0]) =>
|
||||
this.findAll(query)
|
||||
public findAll: DaoAdapter<ModelInfer<M>>['read'] = (query: Parameters<DaoAdapter<ModelInfer<M>>['read']>[0]) =>
|
||||
this.read(query)
|
||||
|
||||
/**
|
||||
* find the list of objects having elements from the query
|
||||
*
|
||||
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
|
||||
* @returns an array containing the list of elements that match with the query
|
||||
*/
|
||||
public find: DaoAdapter<ModelInfer<M>>['read'] = (query: Parameters<DaoAdapter<ModelInfer<M>>['read']>[0]) =>
|
||||
this.read(query)
|
||||
|
||||
/**
|
||||
* find an object by it's id
|
||||
@ -46,8 +85,8 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
* @param id the id of the object
|
||||
* @returns
|
||||
*/
|
||||
public findById(id: Object['id']): Promise<Object | null> {
|
||||
return this.findOne({ id: id } as Partial<Object>)
|
||||
public findById(id: ModelInfer<M>['id']): Promise<ModelInfer<M> | null> {
|
||||
return this.findOne({ id: id } as Partial<ModelInfer<M>>)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -58,7 +97,7 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
* @param id the id of the object
|
||||
* @returns
|
||||
*/
|
||||
public get(id: Object['id']) {
|
||||
public get(id: ModelInfer<M>['id']) {
|
||||
return this.findById(id)
|
||||
}
|
||||
|
||||
@ -68,8 +107,8 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
|
||||
* @returns the first element matching with the query or null otherwise
|
||||
*/
|
||||
public async findOne(query?: Partial<Object>): Promise<Object | null> {
|
||||
return (await this.findAll(query))[0] ?? null
|
||||
public async findOne(query?: Partial<ModelInfer<M>>): Promise<ModelInfer<M> | null> {
|
||||
return (await this.findAll(query)).data[0] ?? null
|
||||
}
|
||||
|
||||
/**
|
||||
@ -80,14 +119,14 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
* @param obj the object to update
|
||||
* @returns an object if it was able to update or null otherwise
|
||||
*/
|
||||
abstract update(obj: Object): Promise<Object | null>
|
||||
abstract update(obj: Partial<ModelInfer<M>>): Promise<ModelInfer<M> | null>
|
||||
|
||||
/**
|
||||
* change some elements from the object and return the object updated
|
||||
* @param id the id of the object
|
||||
* @param changegs the change to make
|
||||
*/
|
||||
public async patch(id: string, changes: Partial<Object>): Promise<Object | null> {
|
||||
public async patch(id: ModelInfer<M>['id'], changes: Partial<ModelInfer<M>>): Promise<ModelInfer<M> | null> {
|
||||
const query = await this.findById(id)
|
||||
if (!query) {
|
||||
return null
|
||||
@ -100,8 +139,8 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
* @returns the object is updated/inserted or null otherwise
|
||||
*/
|
||||
public async upsert(
|
||||
object: Object | Omit<Object, 'id' | 'created' | 'updated'>
|
||||
): Promise<Object | null> {
|
||||
object: Partial<ModelInfer<M>>
|
||||
): Promise<ModelInfer<M> | null> {
|
||||
if ('id' in object) {
|
||||
return this.update(object)
|
||||
}
|
||||
@ -114,5 +153,5 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
|
||||
*
|
||||
* @returns if the object was deleted or not (if object is not in db it will return true)
|
||||
*/
|
||||
abstract delete(obj: Object): Promise<boolean>
|
||||
abstract delete(obj: ModelInfer<M>): Promise<boolean>
|
||||
}
|
221
src/models/Adapters/FSAdapter.ts
Normal file
221
src/models/Adapters/FSAdapter.ts
Normal file
@ -0,0 +1,221 @@
|
||||
/* eslint-disable @typescript-eslint/no-unused-vars */
|
||||
import { objectLoop } from '@dzeio/object-util'
|
||||
import archiver from 'archiver'
|
||||
import type Schema from 'libs/Schema'
|
||||
import type { Model, ModelInfer } from 'libs/Schema'
|
||||
import type SchemaBuffer from 'libs/Schema/Items/SchemaBuffer'
|
||||
import type SchemaNumber from 'libs/Schema/Items/SchemaNumber'
|
||||
import type SchemaString from 'libs/Schema/Items/SchemaString'
|
||||
import fileSystem from 'node:fs'
|
||||
import fs from 'node:fs/promises'
|
||||
import type { Query } from '../Query'
|
||||
import type DaoAdapter from './DaoAdapter'
|
||||
import type { DBPull } from './DaoAdapter'
|
||||
|
||||
interface FS extends Model {
|
||||
filename: SchemaString
|
||||
path: SchemaString
|
||||
// eslint-disable-next-line no-undef
|
||||
data: SchemaBuffer
|
||||
type: SchemaString
|
||||
size: SchemaNumber
|
||||
}
|
||||
|
||||
export default class FSAdapter<T extends FS> implements DaoAdapter<Schema<T>> {
|
||||
|
||||
public constructor(
|
||||
public readonly schema: Schema<T>,
|
||||
public readonly basePath: string
|
||||
) {
|
||||
if (basePath.endsWith('/')) {
|
||||
console.warn('the base path should not end wiath a "/", removing it')
|
||||
basePath = basePath.slice(0, basePath.lastIndexOf('/'))
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: make it clearer what it does
|
||||
public async create(obj: Partial<ModelInfer<T>>): Promise<ModelInfer<T> | null> {
|
||||
const realPath = this.getFullPath(obj.path!)
|
||||
|
||||
const finalFolder = realPath.slice(0, realPath.lastIndexOf('/'))
|
||||
|
||||
console.log('making the directory', finalFolder)
|
||||
await fs.mkdir(finalFolder, { recursive: true })
|
||||
|
||||
if (obj.type === 'file') {
|
||||
console.log('getting the data', finalFolder)
|
||||
const data = obj.data
|
||||
|
||||
console.log('writing to', realPath)
|
||||
if ((data as any) instanceof Buffer) {
|
||||
await fs.writeFile(realPath, data as Buffer)
|
||||
} else {
|
||||
await fs.writeFile(realPath, data as string)
|
||||
}
|
||||
return obj as ModelInfer<T>
|
||||
}
|
||||
|
||||
console.log('making the final directory', realPath)
|
||||
await fs.mkdir(realPath)
|
||||
return obj as ModelInfer<T>
|
||||
}
|
||||
|
||||
public async createZippedBufferFromDirectory(directoryPath: string) {
|
||||
const archive = archiver('zip', {zlib: {level: 9}})
|
||||
archive.on('error', (err) => {
|
||||
throw err
|
||||
})
|
||||
archive.on('warning', (err) => {
|
||||
if (err.code === 'ENOENT') {
|
||||
console.log('warning: ', err)
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
})
|
||||
const fileName = `${this.basePath}/zip/${directoryPath.split(this.basePath)[1]}.zip`
|
||||
fs.mkdir(fileName.slice(0, fileName.lastIndexOf('/')), {recursive: true})
|
||||
const output = fileSystem.createWriteStream(fileName)
|
||||
archive.pipe(output)
|
||||
archive.directory(directoryPath, false)
|
||||
|
||||
const timeout = (cb: (value: (value: unknown) => void) => void, interval: number) => () =>
|
||||
new Promise((resolve) => {
|
||||
setTimeout(() => cb(resolve), interval)
|
||||
})
|
||||
const onTimeout = (seconds: number) => timeout((resolve) =>
|
||||
resolve(`Timed out while zipping ${directoryPath}`), seconds * 1000)()
|
||||
const error = await Promise.race([archive.finalize(), onTimeout(60)])
|
||||
if (typeof error === 'string') {
|
||||
console.log('Error:', error)
|
||||
return null
|
||||
}
|
||||
return await fs.readFile(fileName)
|
||||
}
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
public async read(query?: Query<ModelInfer<T>> | undefined): Promise<DBPull<Schema<T>>> {
|
||||
|
||||
const localPath = query?.path as string ?? ''
|
||||
|
||||
const realPath = this.getFullPath(localPath)
|
||||
|
||||
console.log('get the full path', realPath)
|
||||
|
||||
try {
|
||||
const stats = await fs.stat(realPath)
|
||||
|
||||
const files: Array<ModelInfer<T>> = []
|
||||
if (stats.isDirectory()) {
|
||||
const dirFiles = await fs.readdir(realPath)
|
||||
// eslint-disable-next-line max-depth
|
||||
// if (toZip === true) { // put queried file/folder in a zip file
|
||||
// const buffer = await this.createZippedBufferFromDirectory(realPath)
|
||||
// // eslint-disable-next-line max-depth
|
||||
// if (buffer !== null) {
|
||||
// files.push({
|
||||
// path: localPath,
|
||||
// filename: localPath.slice(localPath.lastIndexOf('/') + 1),
|
||||
// data: buffer,
|
||||
// type: 'file',
|
||||
// size: buffer.length,
|
||||
// } as ModelInfer<T>)
|
||||
// }
|
||||
// } else { // return every sub files
|
||||
for await (const file of dirFiles) {
|
||||
files.push(await this.readFile(`${localPath}/${file}`))
|
||||
}
|
||||
// }
|
||||
} else {
|
||||
files.push(await this.readFile(localPath))
|
||||
}
|
||||
|
||||
const pageLimit = query?.$limit ?? Infinity
|
||||
const pageOffset = query?.$offset ?? 0
|
||||
return {
|
||||
rows: files.length,
|
||||
rowsTotal: files.length,
|
||||
page: Math.floor(pageOffset / pageLimit),
|
||||
pageTotal: Math.max(1, Math.ceil(files.length / pageLimit)),
|
||||
data: files.slice(pageOffset, pageOffset + pageLimit)
|
||||
}
|
||||
} catch {
|
||||
return {
|
||||
rows: 0,
|
||||
rowsTotal: 0,
|
||||
page: 0,
|
||||
pageTotal: 0,
|
||||
data: []
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async update(_obj: ModelInfer<T>): Promise<ModelInfer<T> | null> {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
public async patch(_id: string, _obj: Partial<ModelInfer<T>>): Promise<ModelInfer<T> | null> {
|
||||
throw new Error('not implemented')
|
||||
|
||||
}
|
||||
|
||||
public async delete(obj: ModelInfer<T>): Promise<boolean> {
|
||||
const localPath = obj?.path as string ?? ''
|
||||
const realPath = this.getFullPath(localPath)
|
||||
|
||||
try {
|
||||
const stats = await fs.stat(realPath)
|
||||
if (!stats) {
|
||||
return false
|
||||
}
|
||||
fs.rm(realPath, { recursive: true, force: true })
|
||||
return true
|
||||
} catch {
|
||||
console.error('Could not remove file', localPath)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
private getFullPath(localPath?: string): string {
|
||||
if (localPath && !localPath?.startsWith('/')) {
|
||||
console.warn('Your path should start with a "/", adding it')
|
||||
localPath = (`/${localPath}`)
|
||||
}
|
||||
|
||||
let realPath = this.basePath + (localPath ? localPath : '')
|
||||
|
||||
if (realPath.includes('\\')) {
|
||||
realPath = realPath.replace(/\\/g, '/')
|
||||
}
|
||||
|
||||
return realPath
|
||||
}
|
||||
|
||||
private async readFile(localPath: string): Promise<ModelInfer<T>> {
|
||||
|
||||
const path = this.getFullPath(localPath)
|
||||
console.log('reading file at', path)
|
||||
const stats = await fs.stat(path)
|
||||
const type = stats.isFile() ? 'file' : 'directory'
|
||||
console.log('file is a', type)
|
||||
|
||||
const obj: ModelInfer<T> = {
|
||||
path: localPath,
|
||||
filename: localPath.slice(localPath.lastIndexOf('/') + 1),
|
||||
data: type === 'file' ? await fs.readFile(path) : '',
|
||||
type: type,
|
||||
size: stats.size
|
||||
} as any
|
||||
|
||||
objectLoop(this.schema.model, (item, key) => {
|
||||
if (item.attributes.includes('db:created')) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = stats.ctime
|
||||
} else if (item.attributes.includes('db:updated')) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = stats.mtime
|
||||
}
|
||||
})
|
||||
|
||||
return obj
|
||||
}
|
||||
}
|
221
src/models/Adapters/LDAPAdapter.ts
Normal file
221
src/models/Adapters/LDAPAdapter.ts
Normal file
@ -0,0 +1,221 @@
|
||||
/* eslint-disable @typescript-eslint/no-unused-vars */
|
||||
import { objectClone, objectLoop, objectMap, objectOmit, objectRemap } from '@dzeio/object-util'
|
||||
import ldap from 'ldapjs'
|
||||
import type Schema from 'libs/Schema'
|
||||
import type { SchemaInfer } from 'libs/Schema'
|
||||
import type DaoAdapter from 'models/Adapters/DaoAdapter'
|
||||
import type { DBPull } from 'models/Adapters/DaoAdapter'
|
||||
import type { Query } from 'models/Query'
|
||||
import { filter } from './AdapterUtils'
|
||||
type LDAPFields = 'uid' | 'mail' | 'givenname' | 'sn' | 'jpegphoto' | 'password'
|
||||
|
||||
export default class LDAPAdapter<T extends Schema> implements DaoAdapter<T> {
|
||||
|
||||
private reverseReference: Partial<Record<LDAPFields | string, keyof T>> = {}
|
||||
private attributes: Array<LDAPFields | string> = []
|
||||
|
||||
public constructor(
|
||||
public readonly schema: T,
|
||||
public readonly options: {
|
||||
url: string
|
||||
dnSuffix: string
|
||||
admin: {
|
||||
dn?: string | undefined
|
||||
username?: string | undefined
|
||||
password: string
|
||||
}
|
||||
fieldsCorrespondance?: Partial<Record<keyof SchemaInfer<T>, LDAPFields | string>>
|
||||
}
|
||||
) {
|
||||
objectLoop(options.fieldsCorrespondance ?? {}, (value, key) => {
|
||||
this.reverseReference[value] = key
|
||||
this.attributes.push(value)
|
||||
})
|
||||
}
|
||||
|
||||
// TODO: make it clearer what it does
|
||||
public async create(_obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
public async read(query?: Query<SchemaInfer<T>> | undefined): Promise<DBPull<T>> {
|
||||
const passwordField = this.options.fieldsCorrespondance?.password ?? 'password'
|
||||
const doLogin = !!query?.[passwordField]
|
||||
|
||||
const emptyResult = {
|
||||
rows: 0,
|
||||
rowsTotal: 0,
|
||||
page: 1,
|
||||
pageTotal: 0,
|
||||
data: []
|
||||
}
|
||||
|
||||
if (!query) {
|
||||
return emptyResult
|
||||
}
|
||||
|
||||
// console.log(await this.ldapFind({mail: 'f.bouillon@aptatio.com'}))
|
||||
|
||||
const userdn = objectMap(query, (value, key) => `${(this.options.fieldsCorrespondance as any)[key] ?? key}=${value}`)
|
||||
?.filter((it) => it.slice(0, it.indexOf('=')) !== passwordField)
|
||||
?.join(',')
|
||||
if (!doLogin) {
|
||||
const bind = this.options.admin.dn ?? `cn=${this.options.admin.username},${this.options.dnSuffix}`
|
||||
try {
|
||||
const client = await this.bind(bind, this.options.admin.password)
|
||||
// @ts-expect-error nique ta mere
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
const results = (await this.ldapFind(client, objectMap(query, (value, key) => ({key: this.options.fieldsCorrespondance?.[key], value: value}))
|
||||
)).map((it) => this.schema.parse(
|
||||
objectRemap(it, (value, key) => ({key: this.reverseReference[key.toLowerCase() as string] as string, value: value}))
|
||||
)).filter((it): it is SchemaInfer<T> => !!it)
|
||||
|
||||
const res = filter(query, results)
|
||||
|
||||
return {
|
||||
rows: res.filtered.length,
|
||||
rowsTotal: results.length,
|
||||
page: 1,
|
||||
pageTotal: 1,
|
||||
data: res.filtered
|
||||
}
|
||||
} catch {
|
||||
return emptyResult
|
||||
}
|
||||
}
|
||||
|
||||
// password authentication
|
||||
try {
|
||||
const clone = objectClone(query)
|
||||
delete clone.password
|
||||
|
||||
// find using admin privileges
|
||||
const res = await this.read(clone)
|
||||
const user = res.data[0]
|
||||
if (!user) {
|
||||
return emptyResult
|
||||
}
|
||||
const password = query.password as string ?? ''
|
||||
const client = await this.bind(`uid=${user[this.reverseReference.uid as keyof typeof user]!},${this.options.dnSuffix}`, password)
|
||||
// @ts-expect-error nique x2
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
const results = (await this.ldapFind(client, objectMap(clone, (value, key) => {
|
||||
const finalKey = this.options.fieldsCorrespondance?.[key]
|
||||
|
||||
return {key: finalKey, value: value}
|
||||
})
|
||||
)).map((it) => this.schema.parse(
|
||||
objectRemap(it, (value, key) => ({ key: this.reverseReference[key as string] as string, value: value }))
|
||||
)).filter((it): it is SchemaInfer<T> => !!it)
|
||||
|
||||
const final = filter(objectOmit(query, 'password'), results)
|
||||
// console.log(final, query, results)
|
||||
|
||||
if (final.filtered.length !== 1) {
|
||||
return emptyResult
|
||||
}
|
||||
|
||||
return {
|
||||
rows: final.filtered.length,
|
||||
rowsTotal: results.length,
|
||||
page: 1,
|
||||
pageTotal: 1,
|
||||
data: final.filtered
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
console.log('error, user not found', e)
|
||||
return emptyResult
|
||||
}
|
||||
}
|
||||
|
||||
public async update(_obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
public async patch(_id: string, _obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
public async delete(_obj: Partial<SchemaInfer<T>>): Promise<boolean> {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
private bind(dn: string, password: string): Promise<ldap.Client> {
|
||||
const client = ldap.createClient({
|
||||
url: this.options.url
|
||||
})
|
||||
return new Promise<ldap.Client>((res, rej) => {
|
||||
console.log('binding as', dn)
|
||||
client.on('connect', () => {
|
||||
client.bind(dn, password, (err) => {
|
||||
if (err) {
|
||||
console.error('error binding as', dn, err)
|
||||
client.unbind()
|
||||
rej(err)
|
||||
return
|
||||
}
|
||||
console.log('binded as', dn)
|
||||
res(client)
|
||||
})
|
||||
})
|
||||
.on('timeout', (err) => rej(err))
|
||||
.on('connectTimeout', (err) => rej(err))
|
||||
.on('error', (err) => rej(err))
|
||||
.on('connectError', (err) => rej(err))
|
||||
})
|
||||
}
|
||||
|
||||
private async ldapFind(client: ldap.Client, filters: Array<{key: LDAPFields, value: string}>): Promise<Array<Record<LDAPFields, string | Array<string> | undefined>>> {
|
||||
|
||||
if (filters.length === 0) {
|
||||
return []
|
||||
}
|
||||
const firstFilter = filters.shift()!
|
||||
return new Promise<Array<Record<LDAPFields, string | Array<string> | undefined>>>((res, rej) => {
|
||||
const users: Array<Record<LDAPFields, string | Array<string> | undefined>> = []
|
||||
const filter = {
|
||||
attribute: firstFilter.key as any,
|
||||
value: firstFilter.value,
|
||||
}
|
||||
console.log('Searching on LDAP')
|
||||
client.search(
|
||||
this.options.dnSuffix, {
|
||||
filter: new ldap.EqualityFilter(filter),
|
||||
// filter: `${filter.attribute}:caseExactMatch:=${filter.value}`,
|
||||
scope: 'sub',
|
||||
attributes: this.attributes
|
||||
}, (err, search) => {
|
||||
if (err) {
|
||||
rej(err)
|
||||
}
|
||||
// console.log('search', search, err)
|
||||
search.on('searchEntry', (entry) => {
|
||||
users.push(this.parseUser(entry))
|
||||
}).on('error', (err2) => {
|
||||
rej(err2)
|
||||
client.unbind()
|
||||
console.error('error in search lol', err2)
|
||||
}).on('end', () => {
|
||||
console.log(users)
|
||||
res(users)
|
||||
|
||||
client.unbind()
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
private parseUser(usr: ldap.SearchEntry): Record<LDAPFields, string | Array<string> | undefined> {
|
||||
const user: Record<string, string | Array<string> | undefined> = { dn: usr.objectName ?? undefined }
|
||||
|
||||
for (const attribute of usr.attributes) {
|
||||
user[attribute.type] = attribute.values.length === 1 ? attribute.values[0] : attribute.values
|
||||
|
||||
}
|
||||
|
||||
return user
|
||||
}
|
||||
}
|
69
src/models/Adapters/MultiAdapter.ts
Normal file
69
src/models/Adapters/MultiAdapter.ts
Normal file
@ -0,0 +1,69 @@
|
||||
import type Schema from 'libs/Schema'
|
||||
import type { SchemaInfer } from 'libs/Schema'
|
||||
import type DaoAdapter from 'models/Adapters/DaoAdapter'
|
||||
|
||||
export default class MultiAdapter<T extends Schema> implements DaoAdapter<T> {
|
||||
|
||||
public constructor(
|
||||
public readonly schema: T,
|
||||
public readonly adapters: Array<{
|
||||
adapter: DaoAdapter<T>
|
||||
fields: Array<keyof T>
|
||||
/**
|
||||
* a field from the main adapter that will backreference the child adapter
|
||||
*/
|
||||
childReference?: keyof T
|
||||
}> = []
|
||||
) {}
|
||||
|
||||
// TODO: make it clearer what it does
|
||||
public async create(obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
|
||||
let final: SchemaInfer<T> = {} as any
|
||||
// start by processing the childs
|
||||
for (const adapter of this.adapters.sort((a) => a.childReference ? -1 : 1)) {
|
||||
const partialObject: Partial<SchemaInfer<T>> = {}
|
||||
for (const key of adapter.fields) {
|
||||
partialObject[key] = obj[key]
|
||||
}
|
||||
const res = await adapter.adapter.create!(partialObject as any)
|
||||
if (res && adapter.childReference) {
|
||||
obj[adapter.childReference] = res[adapter.childReference]
|
||||
}
|
||||
final = {...final, ...res}
|
||||
}
|
||||
return final
|
||||
}
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
// public async read(query?: Query<SchemaInfer<T>> | undefined): Promise<DBPull<T>> {
|
||||
// let final: SchemaInfer<T> = {} as any
|
||||
// // start by processing the childs
|
||||
// for (const adapter of this.adapters.sort((a) => a.childReference ? -1 : 1)) {
|
||||
// const partialObject: Partial<SchemaInfer<T>> = {}
|
||||
// for (const key of adapter.fields) {
|
||||
// partialObject[key] = obj[key]
|
||||
// }
|
||||
// const res = await adapter.adapter.read!(query)
|
||||
// if (res && adapter.childReference) {
|
||||
// obj[adapter.childReference] = res[adapter.childReference]
|
||||
// }
|
||||
// final = {...final, ...res}
|
||||
// }
|
||||
// // step 2 merge elements
|
||||
// return final
|
||||
// }
|
||||
|
||||
public async update(_obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
|
||||
throw new Error('not implemented')
|
||||
}
|
||||
|
||||
public async patch(_id: string, _obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
|
||||
throw new Error('not implemented')
|
||||
|
||||
}
|
||||
|
||||
public async delete(_obj: Partial<SchemaInfer<T>>): Promise<boolean> {
|
||||
throw new Error('not implemented')
|
||||
|
||||
}
|
||||
}
|
324
src/models/Adapters/PostgresAdapter.ts
Normal file
324
src/models/Adapters/PostgresAdapter.ts
Normal file
@ -0,0 +1,324 @@
|
||||
/* eslint-disable @typescript-eslint/no-non-null-assertion */
|
||||
import { objectFind, objectKeys, objectLoop, objectMap, objectRemap, objectValues } from '@dzeio/object-util'
|
||||
|
||||
import type Schema from 'libs/Schema'
|
||||
import type { SchemaInfer } from 'libs/Schema'
|
||||
import type SchemaItem from 'libs/Schema/SchemaItem'
|
||||
import crypto from 'node:crypto'
|
||||
import PostgresClient from '../Clients/PostgresClient'
|
||||
import { Sort, type Query } from '../Query'
|
||||
import { filter } from './AdapterUtils'
|
||||
import type { DBPull } from './DaoAdapter'
|
||||
import DaoAdapter from './DaoAdapter'
|
||||
|
||||
const specialKeywords = ['user', 'end'] as const
|
||||
|
||||
export default class PostgresAdapter<T extends Schema> extends DaoAdapter<T['model']> {
|
||||
|
||||
private id: Array<string> = []
|
||||
|
||||
public constructor(
|
||||
/**
|
||||
* the schema used by Cassandra
|
||||
*/
|
||||
public readonly schema: T,
|
||||
/**
|
||||
* the table name
|
||||
*/
|
||||
public readonly table: string,
|
||||
|
||||
/**
|
||||
* additionnal options to make the adapter work
|
||||
*/
|
||||
private readonly options?: {
|
||||
/**
|
||||
* log the requests made to cassandra
|
||||
*/
|
||||
debug?: boolean
|
||||
}
|
||||
) {
|
||||
super()
|
||||
objectLoop(this.schema.model, (schema, key) => {
|
||||
if (schema.attributes.includes('db:auto')) {
|
||||
this.id.push(key)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// TODO: make it clearer what it does
|
||||
public async create(obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
|
||||
// handle automated values
|
||||
objectLoop(this.schema.model, (item, key) => {
|
||||
if (item.attributes.includes('db:created') || item.attributes.includes('db:updated')) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = new Date()
|
||||
} else if (item.attributes.includes('db:auto') && !obj[key]) {
|
||||
if (item.isOfType('')) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = crypto.randomBytes(16).toString('hex')
|
||||
} else if (item.isOfType(123)) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = crypto.randomBytes(16).readUint32BE()
|
||||
} else {
|
||||
throw new Error('cannot generate ID because it is not compatible with it')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// parse the data with the Schema
|
||||
const { object: clone, error} = this.schema.validate(obj)
|
||||
if (error) {
|
||||
console.error(error)
|
||||
throw new Error('Invalid data given to create the final object')
|
||||
}
|
||||
|
||||
// prepare the database query
|
||||
const keys = objectKeys(clone)
|
||||
.map((it) => {
|
||||
if (specialKeywords.includes(it)) { // handle the special keyword
|
||||
return `"${it}"`
|
||||
}
|
||||
return it
|
||||
})
|
||||
const keysStr = keys.join(', ')
|
||||
const values = keys.map((_, idx) => `$${idx+1}`).join(', ')
|
||||
const req = `INSERT INTO ${this.table} (${keysStr}) VALUES (${values});`
|
||||
const client = await PostgresClient.get()
|
||||
|
||||
const params = objectMap(clone as any, (value, key) => this.valueToDB(key as any, value))
|
||||
|
||||
if (this.options?.debug) {
|
||||
console.log(req, params) // 27 from 1 36 from 0
|
||||
}
|
||||
|
||||
// send to the database
|
||||
try {
|
||||
await client.execute(req, params)
|
||||
} catch (e) {
|
||||
console.log(e, req, params)
|
||||
return null
|
||||
}
|
||||
return this.schema.validate(clone).object ?? null
|
||||
}
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
public async read(query?: Query<SchemaInfer<T>> | undefined): Promise<DBPull<T>> {
|
||||
// prepare the request to the database based on the query parameters
|
||||
let req: Array<string> = ['SELECT', '*', 'FROM', this.table]
|
||||
|
||||
const client = await PostgresClient.get()
|
||||
|
||||
if (this.options?.debug) {
|
||||
console.log(req)
|
||||
}
|
||||
|
||||
// read from the database
|
||||
let res: Array<Record<string, any>>
|
||||
try {
|
||||
res = await client.execute(`${req.join(' ')}`)
|
||||
} catch (error) {
|
||||
console.error('error running request')
|
||||
console.error(req)
|
||||
throw error
|
||||
}
|
||||
if (!res) {
|
||||
return {
|
||||
rows: 0,
|
||||
pageTotal: 0,
|
||||
page: 1,
|
||||
rowsTotal: 0,
|
||||
data: []
|
||||
}
|
||||
}
|
||||
|
||||
if (this.options?.debug) {
|
||||
console.log('preEdits', res)
|
||||
}
|
||||
|
||||
// post-process the data from the database
|
||||
const raw = res
|
||||
.map((obj) => {
|
||||
// remap to use system value instead of db values
|
||||
obj = objectRemap(this.schema.model, (_, key) => ({
|
||||
key,
|
||||
value: this.dbToValue(key as any, (obj as any)[key])
|
||||
}))
|
||||
|
||||
// validate the schema
|
||||
const res = this.schema.validate(obj)
|
||||
if (res.object) {
|
||||
return res.object
|
||||
}
|
||||
console.log(res.error)
|
||||
return null
|
||||
})
|
||||
.filter((it): it is SchemaInfer<T> => !!it)
|
||||
|
||||
// temp modification of comportement to use the new and better query system
|
||||
if ((!query || !query?.$sort) && objectFind(this.schema.model, (_, key) => key === 'created')) {
|
||||
// temp fix for the sorting algorithm
|
||||
if (!query) {
|
||||
// @ts-expect-error normal currently
|
||||
query = { $sort: { created: Sort.DESC }}
|
||||
} else {
|
||||
query.$sort = { created: Sort.DESC }
|
||||
}
|
||||
}
|
||||
let dataset = raw
|
||||
|
||||
|
||||
if (this.options?.debug) {
|
||||
console.log('preFilters', dataset)
|
||||
}
|
||||
|
||||
if (query) {
|
||||
dataset = filter(query, dataset, this.options).filtered
|
||||
}
|
||||
return {
|
||||
rows: dataset.length ?? 0,
|
||||
rowsTotal: res.length ?? 0,
|
||||
page: 1,
|
||||
pageTotal: 1,
|
||||
// page: page,
|
||||
// pageTotal: pageLimit ? res.rowLength / pageLimit : 1,
|
||||
data: dataset
|
||||
}
|
||||
}
|
||||
|
||||
public async update(obj: SchemaInfer<T>): Promise<SchemaInfer<T> | null> {
|
||||
return this.patch(obj)
|
||||
}
|
||||
|
||||
public async patch(id: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null>
|
||||
public async patch(id: string, obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null>
|
||||
// eslint-disable-next-line complexity
|
||||
public async patch(id: string | Partial<SchemaInfer<T>>, obj?: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
|
||||
|
||||
if (!obj) {
|
||||
if (typeof id === 'string') {
|
||||
return null
|
||||
}
|
||||
obj = {...id} as Partial<SchemaInfer<T>>
|
||||
}
|
||||
|
||||
// const tmp = this.schema.validate(obj)
|
||||
// // if (tmp.error) {
|
||||
// // throw new Error(`obj invalid can\'t patch ${JSON.stringify(tmp.error)}`)
|
||||
// // }
|
||||
|
||||
// obj = tmp.object
|
||||
|
||||
// update the updated time
|
||||
objectLoop(this.schema.model, (item, key) => {
|
||||
if (item.attributes.includes('db:updated')) {
|
||||
// @ts-expect-error things get validated anyway
|
||||
obj[key] = new Date()
|
||||
}
|
||||
})
|
||||
|
||||
// build the request parts
|
||||
const parts: Array<string> = ['UPDATE', this.table, 'SET']
|
||||
const params: Array<any> = []
|
||||
|
||||
// remove ids
|
||||
for (const tmp of this.id) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
|
||||
delete obj[tmp]
|
||||
}
|
||||
|
||||
// map the items to update
|
||||
const keys = objectMap(obj as {}, (_, key, idx) => {
|
||||
if (specialKeywords.includes(key)) {
|
||||
return `"${key}"=$${idx+1}`
|
||||
}
|
||||
|
||||
return `${key}=$${idx+1}`
|
||||
})
|
||||
parts.push(keys.join(', '))
|
||||
params.push(...objectValues(obj as {}))
|
||||
|
||||
// filter by the ids
|
||||
parts.push('WHERE')
|
||||
const read: Partial<any> = {}
|
||||
for (let idx = 0; idx < this.id.length; idx++) {
|
||||
const key = this.id[idx] as string
|
||||
|
||||
if (idx > 0) {
|
||||
parts.push('AND')
|
||||
}
|
||||
parts.push(`${key}=$${params.length+1}`)
|
||||
const value = obj[key] ?? (typeof id === 'string' ? id : id[key])
|
||||
read[key] = this.valueToDB(key as any, value)
|
||||
if (!value) {
|
||||
throw new Error(`Missing id (${key})`)
|
||||
}
|
||||
params.push(value)
|
||||
}
|
||||
|
||||
const req = parts.join(' ')
|
||||
const client = await PostgresClient.get()
|
||||
|
||||
if (this.options?.debug) {
|
||||
console.log(req, params)
|
||||
}
|
||||
|
||||
try {
|
||||
const res = await client!.execute(req, params)
|
||||
// console.log(res, req)
|
||||
if (this.options?.debug) {
|
||||
console.log('post patch result', res, req)
|
||||
}
|
||||
return (await this.read(read)).data[0] ?? null
|
||||
} catch (e) {
|
||||
console.log(e, req, params)
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
public async delete(obj: SchemaInfer<T>): Promise<boolean> {
|
||||
const parts = ['DELETE', 'FROM', this.table, 'WHERE']
|
||||
|
||||
objectLoop(obj as {}, (value, key, idx) => {
|
||||
if (idx > 0) {
|
||||
parts.push('AND')
|
||||
}
|
||||
parts.push(`${key}=${value}`)
|
||||
})
|
||||
|
||||
const client = await PostgresClient.get()
|
||||
|
||||
if (this.options?.debug) {}
|
||||
|
||||
try {
|
||||
await client!.execute(`${parts.join(' ')}`)
|
||||
} catch (e) {
|
||||
console.error(e, parts)
|
||||
throw e
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/**
 * Serialize a runtime value into a form Postgres can store.
 *
 * Columns whose schema item is object-typed are stored as JSON strings;
 * every other value passes through untouched.
 */
private valueToDB(key: keyof T, value: any): string | number | boolean | Date {
	const schemaItem: SchemaItem<unknown> = (this.schema.model as any)[key]
	return schemaItem.isOfType({}) ? JSON.stringify(value) : value
}
|
||||
|
||||
/**
 * Deserialize a database value back into its runtime form.
 *
 * Number-typed columns returned as strings are parsed to floats and
 * object-typed columns returned as strings are JSON-parsed; anything
 * else is returned unchanged.
 */
private dbToValue(key: keyof T, value: string | number | boolean | Date): any {
	const schemaItem: SchemaItem<unknown> = (this.schema.model as any)[key]

	if (typeof value === 'string') {
		if (schemaItem.isOfType(543)) {
			return parseFloat(value)
		}
		if (schemaItem.isOfType({})) {
			return JSON.parse(value)
		}
	}

	return value
}
|
||||
}
|
130
src/models/Clients/CassandraClient.ts
Normal file
130
src/models/Clients/CassandraClient.ts
Normal file
@ -0,0 +1,130 @@
|
||||
import { objectRemap } from '@dzeio/object-util'
|
||||
import Cassandra from 'cassandra-driver'
|
||||
import { getEnv, requireEnv } from 'libs/Env'
|
||||
import Client from './Client'
|
||||
|
||||
export default class CassandraClient extends Client {
|
||||
|
||||
private static instance: CassandraClient | null = null
|
||||
private client?: Cassandra.Client | null = null
|
||||
|
||||
|
||||
public async getVersion(): Promise<number> {
|
||||
try {
|
||||
await this.execute(`USE ${requireEnv('CASSANDRA_DATABASE')}`)
|
||||
} catch (e) {
|
||||
// database not found
|
||||
console.log('database not found', e)
|
||||
return -1
|
||||
}
|
||||
try {
|
||||
const res = await this.execute('SELECT value FROM settings WHERE id = \'db_version\'')
|
||||
const value = res[0]?.value
|
||||
if (value.includes('T')) {
|
||||
return new Date(value).getTime()
|
||||
}
|
||||
return Number.parseInt(value)
|
||||
} catch (e) {
|
||||
// table does not exists
|
||||
console.log('Settings table does not exists', e)
|
||||
return -1
|
||||
}
|
||||
}
|
||||
|
||||
public override async setVersion(version: number): Promise<void> {
|
||||
await this.execute(`
|
||||
UPDATE settings SET value = ? WHERE id = 'db_version';
|
||||
`.trim(), [version.toString()])
|
||||
}
|
||||
|
||||
public async execute(query: string, params?: Array<unknown> | object, options?: Cassandra.QueryOptions): Promise<Array<Record<string, any>>> {
|
||||
if (!this.client || this.client.getState().getConnectedHosts().length === 0) {
|
||||
throw new Error('not connected to the database !')
|
||||
}
|
||||
|
||||
const res = await this.client.execute(query, params, options)
|
||||
// if (query.includes('users'))
|
||||
// console.log(res)
|
||||
|
||||
|
||||
return res.rows?.map((it) => objectRemap(it.keys(), (key: string) => ({key: key, value: it.get(key)}))) ?? []
|
||||
}
|
||||
|
||||
/**
|
||||
* get the connexion to cassandra, it will try until it succedeed
|
||||
*/
|
||||
public static async get() {
|
||||
const client = CassandraClient.instance ?? new CassandraClient()
|
||||
CassandraClient.instance = client
|
||||
return client
|
||||
}
|
||||
|
||||
/**
|
||||
* connect to Cassandra
|
||||
*/
|
||||
// eslint-disable-next-line complexity
|
||||
public async connect() {
|
||||
if (await this.isReady()) {
|
||||
return
|
||||
}
|
||||
|
||||
console.log('connecting to cassandra')
|
||||
let authProvider: Cassandra.auth.AuthProvider|undefined
|
||||
|
||||
const method = getEnv('CASSANDRA_AUTH_METHOD')
|
||||
if (method) {
|
||||
// eslint-disable-next-line max-depth
|
||||
switch (method.toLowerCase()) {
|
||||
case 'passwordauthenticator':
|
||||
case 'plaintext':
|
||||
authProvider = new Cassandra.auth.PlainTextAuthProvider(
|
||||
requireEnv('CASSANDRA_USERNAME'),
|
||||
requireEnv('CASSANDRA_PASSWORD')
|
||||
)
|
||||
break
|
||||
case 'dseplaintext':
|
||||
authProvider = new Cassandra.auth.DsePlainTextAuthProvider(
|
||||
requireEnv('CASSANDRA_USERNAME'),
|
||||
requireEnv('CASSANDRA_PASSWORD'),
|
||||
getEnv('CASSANDRA_AUTHORIZATION_ID')
|
||||
)
|
||||
break
|
||||
case 'none':
|
||||
break
|
||||
default:
|
||||
console.error('Please use a valid CASSANDRA_AUTH_METHOD value (none|plaintext|dseplaintext)')
|
||||
throw new Error('Please use a valid CASSANDRA_AUTH_METHOD value (none|plaintext|dseplaintext)')
|
||||
}
|
||||
}
|
||||
|
||||
this.client = new Cassandra.Client({
|
||||
contactPoints: [requireEnv('CASSANDRA_CONTACT_POINT')],
|
||||
authProvider: authProvider as Cassandra.auth.AuthProvider,
|
||||
localDataCenter: getEnv('CASSANDRA_LOCAL_DATA_CENTER', 'datacenter1')
|
||||
})
|
||||
// this.client.on('log', (level, loggerName, message, furtherInfo) => {
|
||||
// console.log(`${level} - ${loggerName}: ${message}`);
|
||||
// })
|
||||
|
||||
try {
|
||||
await this.client.connect()
|
||||
} catch (e) {
|
||||
this.client = null
|
||||
console.error(e)
|
||||
throw new Error('Error connecting to Cassandra')
|
||||
}
|
||||
// try {
|
||||
// await Migration.migrateToLatest()
|
||||
// } catch (e) {
|
||||
// this.migrated = -1
|
||||
// console.error(e)
|
||||
// throw new Error('An error occured while migrating')
|
||||
// }
|
||||
// this.migrated = 1
|
||||
|
||||
}
|
||||
|
||||
public async isReady(): Promise<boolean> {
|
||||
return !!this.client && this.client.getState().getConnectedHosts().length >= 1
|
||||
}
|
||||
}
|
137
src/models/Clients/Client.ts
Normal file
137
src/models/Clients/Client.ts
Normal file
@ -0,0 +1,137 @@
|
||||
import config from 'models/config'
|
||||
import type MigrationObj from 'models/Migrations'
|
||||
|
||||
export enum ConnectionStatus {
|
||||
DISCONNECTED,
|
||||
MIGRATING,
|
||||
READY
|
||||
}
|
||||
|
||||
export interface ClientStatic<C extends Client = Client> {
|
||||
get(): Promise<C>
|
||||
}
|
||||
|
||||
export default abstract class Client {
|
||||
|
||||
|
||||
public status: ConnectionStatus = ConnectionStatus.DISCONNECTED
|
||||
|
||||
/**
|
||||
* -1: unknown
|
||||
* 0: migrating
|
||||
* 1: migrated
|
||||
*/
|
||||
public migrationStatus = -1
|
||||
|
||||
/**
|
||||
* get the current migration version
|
||||
*
|
||||
* -1 nothing/error
|
||||
* 0+ current migration
|
||||
*/
|
||||
public abstract getVersion(): Promise<number>
|
||||
public abstract setVersion(version: number): Promise<void>
|
||||
|
||||
public abstract execute(query: string, params?: Array<unknown> | object, ...options: Array<any>): Promise<Array<Record<string, unknown>>>
|
||||
|
||||
public abstract connect(): Promise<void>
|
||||
|
||||
/**
|
||||
* Migrate the database to the latest version
|
||||
*/
|
||||
public async migrateToLatest() {
|
||||
const migrations = this.getMigrations()
|
||||
const latest = migrations[migrations.length - 1]
|
||||
if (!latest) {
|
||||
return
|
||||
}
|
||||
return await this.migrateTo(latest.date)
|
||||
}
|
||||
|
||||
public getMigrations(): ReadonlyArray<MigrationObj> {
|
||||
return config.migrations as ReadonlyArray<MigrationObj>
|
||||
}
|
||||
|
||||
/**
|
||||
* migrate to a specific date in time
|
||||
* @param date the date to try to migrate to
|
||||
*/
|
||||
public async migrateTo(date: number) {
|
||||
this.migrationStatus = 0
|
||||
|
||||
let version = await this.getVersion()
|
||||
|
||||
const migrations = this.getMigrations()
|
||||
|
||||
const time = !version ? -1 : version
|
||||
|
||||
console.log('Current DB version', version)
|
||||
// same version, don't to anything
|
||||
if (date === time) {
|
||||
this.migrationStatus = 1
|
||||
return
|
||||
}
|
||||
console.log('\x1b[35mCurrent DB version', version, '\x1b[0m')
|
||||
|
||||
// run up migrations
|
||||
if (time < date) {
|
||||
console.log('\x1b[35m', 'Migrating up to', date, '\x1b[0m')
|
||||
const migrationsToRun = migrations.filter((it) => it.date > time && it.date <= date)
|
||||
for (const migration of migrationsToRun) {
|
||||
console.log('\x1b[35m', 'Migrating from', version, 'to', migration.date, '\x1b[0m')
|
||||
await migration.up(this)
|
||||
await this.setVersion(migration.date)
|
||||
version = migration.date
|
||||
}
|
||||
} else { // run down migrations
|
||||
console.log('\x1b[35m', 'Migrating down to', date, '\x1b[0m')
|
||||
const migrationsToRun = migrations.filter((it) => it.date < time && it.date >= date)
|
||||
.toReversed()
|
||||
for (const migration of migrationsToRun) {
|
||||
console.log('\x1b[35m', 'Migrating from', version, 'to', migration.date, '\x1b[0m')
|
||||
await migration.down?.(this)
|
||||
await this.setVersion(migration.date)
|
||||
version = migration.date
|
||||
}
|
||||
}
|
||||
console.log('\x1b[32mDone migrating\x1b[0m')
|
||||
this.migrationStatus = 1
|
||||
}
|
||||
|
||||
// public getStatus(): Promise<ClientStatus>
|
||||
|
||||
// public abstract isMigrated(): Promise<boolean>
|
||||
|
||||
/**
|
||||
* indicate if the client is ready for new requests (not if migrations are done or not)
|
||||
*/
|
||||
public abstract isReady(): Promise<boolean>
|
||||
|
||||
/**
|
||||
* wait until every migrations are done or fail
|
||||
*/
|
||||
public async waitForMigrations(): Promise<void> {
|
||||
if (this.migrationStatus === -1) {
|
||||
await this.migrateToLatest()
|
||||
}
|
||||
while (!await this.isMigrated()) {
|
||||
console.log('waiting...')
|
||||
await new Promise((res) => setTimeout(res, 100))
|
||||
}
|
||||
}
|
||||
|
||||
public async isMigrated(): Promise<boolean> {
|
||||
return this.migrationStatus === 1
|
||||
// if (this.migrationStatus < 1) {
|
||||
// return false
|
||||
// } else if (this.migrationStatus === 1) {
|
||||
// return
|
||||
// }
|
||||
// const migrations = this.getMigrations()
|
||||
// const last = migrations[migrations.length - 1]
|
||||
// if (!last) {
|
||||
// return true
|
||||
// }
|
||||
// return last.date === await this.getVersion()
|
||||
}
|
||||
}
|
71
src/models/Clients/PostgresClient.ts
Normal file
71
src/models/Clients/PostgresClient.ts
Normal file
@ -0,0 +1,71 @@
|
||||
import { wait } from 'libs/AsyncUtils'
|
||||
import { getEnv, requireEnv } from 'libs/Env'
|
||||
import pg from 'pg'
|
||||
import Client from '.'
|
||||
const Postgres = pg.Client
|
||||
|
||||
// biome-ignore lint/complexity/noStaticOnlyClass: <explanation>
|
||||
export default class PostgresClient extends Client {
|
||||
private static instance: PostgresClient = new PostgresClient()
|
||||
private client?: pg.Client | null
|
||||
public override async getVersion(): Promise<number> {
|
||||
try {
|
||||
const res = await this.execute(`SELECT value FROM settings WHERE id = 'db_version'`)
|
||||
|
||||
const value = res[0]?.value
|
||||
if (!value) {
|
||||
return -1
|
||||
}
|
||||
return Number.parseInt(value)
|
||||
} catch (e) {
|
||||
// table does not exists
|
||||
console.log('Settings table does not exists', e)
|
||||
return -1
|
||||
}
|
||||
}
|
||||
public override async setVersion(version: number): Promise<void> {
|
||||
await this.execute(`UPDATE settings SET value = $1 WHERE id = 'db_version';`, [version.toString()])
|
||||
}
|
||||
public override async execute(query: string, params?: Array<unknown> | object, ...options: Array<any>): Promise<Array<Record<string, unknown>>> {
|
||||
if (!this.client || !await this.isReady()) {
|
||||
throw new Error('not connected')
|
||||
}
|
||||
const res = await this.client.query<Record<string, unknown>>(query, params)
|
||||
return res.rows
|
||||
}
|
||||
public override async connect(): Promise<void> {
|
||||
if (this.client) {
|
||||
return
|
||||
}
|
||||
this.client = new Postgres({
|
||||
host: requireEnv('POSTGRES_HOST'),
|
||||
user: requireEnv('POSTGRES_USERNAME'),
|
||||
password: requireEnv('POSTGRES_PASSWORD'),
|
||||
port: parseInt(getEnv('POSTGRES_PORT', '5432')),
|
||||
database: requireEnv('POSTGRES_DATABASE', 'projectmanager'),
|
||||
// debug(connection, query, parameters, paramTypes) {
|
||||
// console.log(`${query}, ${parameters}`);
|
||||
// },
|
||||
})
|
||||
.on('end', () => {
|
||||
this.client = null
|
||||
})
|
||||
try {
|
||||
await this.client.connect()
|
||||
} catch (e) {
|
||||
this.client = null
|
||||
console.error(e)
|
||||
throw new Error('Error connecting to Postgres')
|
||||
}
|
||||
}
|
||||
public override async isReady(): Promise<boolean> {
|
||||
return !!this.client
|
||||
}
|
||||
|
||||
/**
|
||||
* get the connexion to cassandra, it will try until it succedeed
|
||||
*/
|
||||
public static async get() {
|
||||
return PostgresClient.instance
|
||||
}
|
||||
}
|
@ -1,24 +1,19 @@
|
||||
/**
|
||||
* TODO:
|
||||
* Add to `DaoItem` your model name
|
||||
* Add to the function `initDao` the Dao
|
||||
*/
|
||||
|
||||
/**
|
||||
* the different Daos that can be initialized
|
||||
*
|
||||
* Touch this interface to define which key is linked to which Dao
|
||||
*/
|
||||
interface DaoItem {}
|
||||
import type { default as Dao, default as DaoAdapter } from './Adapters/DaoAdapter'
|
||||
import config from './config'
|
||||
|
||||
/**
|
||||
* Class to get any DAO
|
||||
*/
|
||||
|
||||
// biome-ignore lint/complexity/noStaticOnlyClass: <explanation>
|
||||
export default class DaoFactory {
|
||||
/**
|
||||
* reference of the different Daos for a correct singleton implementation
|
||||
* get the total list of daos available
|
||||
* @returns return the list of daos available
|
||||
*/
|
||||
private static daos: Partial<DaoItem> = {}
|
||||
public static getAll(): Record<string, DaoAdapter> {
|
||||
return config.models
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a a dao by its key
|
||||
@ -28,27 +23,15 @@ export default class DaoFactory {
|
||||
* @param key the dao key to get
|
||||
* @returns the Dao you want as a singleton
|
||||
*/
|
||||
public static get<Key extends keyof DaoItem>(key: Key): DaoItem[Key] {
|
||||
if (!(key in this.daos)) {
|
||||
const dao = this.initDao(key)
|
||||
if (!dao) {
|
||||
throw new Error(`${key} has no valid Dao`)
|
||||
}
|
||||
this.daos[key] = dao as DaoItem[Key]
|
||||
}
|
||||
return this.daos[key] as DaoItem[Key]
|
||||
public static get<Key extends keyof typeof config['models']>(key: Key): typeof config['models'][Key] {
|
||||
return config.models[key]
|
||||
}
|
||||
|
||||
/**
|
||||
* init a dao by its key, it does not care if it exists or not
|
||||
*
|
||||
* @param item the element to init
|
||||
* @returns a new initialized dao or undefined if no dao is linked
|
||||
* get the main client linked to migrations
|
||||
* @returns the main client
|
||||
*/
|
||||
private static initDao(item: keyof DaoItem): any | undefined {
|
||||
switch (item) {
|
||||
default:
|
||||
return undefined
|
||||
}
|
||||
public static async client(): ReturnType<(typeof config.mainClient)['get']> {
|
||||
return config.mainClient.get()
|
||||
}
|
||||
}
|
||||
|
35
src/models/Migrations/Example.ts
Normal file
35
src/models/Migrations/Example.ts
Normal file
@ -0,0 +1,35 @@
|
||||
import type Client from 'models/Clients/Client'
|
||||
import type Migration from './Migration'
|
||||
|
||||
/**
|
||||
* A system migration
|
||||
* the file need then to be imported into the `models/config.ts` file
|
||||
*/
|
||||
export default {
|
||||
/** SET THE DATE IN ISO FORMAT HERE */
|
||||
date: Date.UTC(2024, 3, 26, 11, 55, 28),
|
||||
async up(client: Client): Promise<boolean> {
|
||||
const requests: Array<string> = [
|
||||
|
||||
]
|
||||
|
||||
for await (const request of requests) {
|
||||
await client.execute(request)
|
||||
}
|
||||
|
||||
return true
|
||||
},
|
||||
async down(client: Client) {
|
||||
const requests: Array<string> = [
|
||||
|
||||
]
|
||||
|
||||
for await (const request of requests) {
|
||||
try {
|
||||
await client.execute(request)
|
||||
} catch {}
|
||||
}
|
||||
|
||||
return true
|
||||
},
|
||||
} as Migration
|
10
src/models/Migrations/Migration.d.ts
vendored
Normal file
10
src/models/Migrations/Migration.d.ts
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
import type Client from 'models/Clients/Client'
|
||||
|
||||
export default interface Migration {
|
||||
/**
|
||||
* timestamp in UTC
|
||||
*/
|
||||
date: number
|
||||
up(client: Client): Promise<boolean>
|
||||
down?(client: Client): Promise<boolean>
|
||||
}
|
149
src/models/Query.ts
Normal file
149
src/models/Query.ts
Normal file
@ -0,0 +1,149 @@
|
||||
interface QueryRootFilters<Obj extends Record<string, unknown>> {
|
||||
/**
|
||||
* one of the results should be true to be true
|
||||
*/
|
||||
$or?: Array<QueryList<Obj>>
|
||||
/**
|
||||
* every results should be false to be true
|
||||
*/
|
||||
$nor?: Array<QueryList<Obj>>
|
||||
/**
|
||||
* (default) make sure every sub queries return true
|
||||
*/
|
||||
$and?: Array<QueryList<Obj>>
|
||||
/**
|
||||
* at least one result must be false
|
||||
*/
|
||||
$nand?: Array<QueryList<Obj>>
|
||||
/**
|
||||
* invert the result from the following query
|
||||
*/
|
||||
$not?: QueryList<Obj>
|
||||
/**
|
||||
* define a precise offset of the data you fetched
|
||||
*/
|
||||
$offset?: number
|
||||
/**
|
||||
* limit the number of elements returned from the dataset
|
||||
*/
|
||||
$limit?: number
|
||||
/**
|
||||
* sort the data the way you want with each keys being priorized
|
||||
*
|
||||
* ex:
|
||||
* {a: Sort.DESC, b: Sort.ASC}
|
||||
*
|
||||
* will sort first by a and if equal will sort by b
|
||||
*/
|
||||
$sort?: SortInterface<Obj>
|
||||
}
|
||||
|
||||
/**
|
||||
* Logical operators that can be used to filter data
|
||||
*/
|
||||
export type QueryLogicalOperator<Value> = {
|
||||
/**
|
||||
* one of the results should be true to be true
|
||||
*/
|
||||
$or: Array<QueryValues<Value>>
|
||||
} | {
|
||||
/**
|
||||
* every results should be false to be true
|
||||
*/
|
||||
$nor: Array<QueryValues<Value>>
|
||||
} | {
|
||||
/**
|
||||
* at least one result must be false
|
||||
*/
|
||||
$nand: Array<QueryValues<Value>>
|
||||
} | {
|
||||
/**
|
||||
* (default) make sure every sub queries return true
|
||||
*/
|
||||
$and: Array<QueryValues<Value>>
|
||||
} | {
|
||||
/**
|
||||
* invert the result from the following query
|
||||
*/
|
||||
$not: QueryValues<Value>
|
||||
}
|
||||
|
||||
/**
|
||||
* differents comparisons operators that can be used to filter data
|
||||
*/
|
||||
export type QueryComparisonOperator<Value> = {
|
||||
/**
|
||||
* the remote source value must be absolutelly equal to the proposed value
|
||||
*/
|
||||
$eq: Value | null
|
||||
} | {
|
||||
/**
|
||||
* the remote source value must be greater than the proposed value
|
||||
*/
|
||||
$gt: number | Date
|
||||
} | {
|
||||
/**
|
||||
* the remote source value must be lesser than the proposed value
|
||||
*/
|
||||
$lt: number | Date
|
||||
} | {
|
||||
/**
|
||||
* the remote source value must be greater or equal than the proposed value
|
||||
*/
|
||||
$gte: number | Date
|
||||
} | {
|
||||
/**
|
||||
* the remote source value must be lesser or equal than the proposed value
|
||||
*/
|
||||
$lte: number | Date
|
||||
} | {
|
||||
/**
|
||||
* the remote source value must be one of the proposed values
|
||||
*/
|
||||
$in: Array<Value>
|
||||
} | {
|
||||
/**
|
||||
* (for string only) part of the proposed value must be in the remote source
|
||||
*/
|
||||
$inc: Value | null
|
||||
}
|
||||
|
||||
export type QueryList<Obj extends Record<string, unknown>> = {
|
||||
[Key in keyof Obj]?: QueryValues<Obj[Key]>
|
||||
}
|
||||
|
||||
/**
|
||||
* Differents values the element can take
|
||||
* if null it will check if it is NULL on the remote
|
||||
* if array it will check oneOf
|
||||
* if RegExp it will check if regexp match
|
||||
*/
|
||||
export type QueryValues<Value> = Value |
|
||||
null |
|
||||
Array<Value> |
|
||||
RegExp |
|
||||
QueryComparisonOperator<Value> |
|
||||
QueryLogicalOperator<Value>
|
||||
|
||||
/**
|
||||
* The query element that allows you to query different elements
|
||||
*/
|
||||
export type Query<Obj extends Record<string, unknown>> = QueryList<Obj> & QueryRootFilters<Obj>
|
||||
|
||||
/**
|
||||
* sorting interface with priority
|
||||
*/
|
||||
export type SortInterface<Obj extends Record<string, unknown>> = {
|
||||
[Key in keyof Obj]?: Sort
|
||||
}
|
||||
|
||||
export enum Sort {
|
||||
/**
|
||||
* Sort the values from the lowest to the largest
|
||||
*/
|
||||
ASC,
|
||||
/**
|
||||
* Sort the values form the largest to the lowest
|
||||
*/
|
||||
DESC
|
||||
}
|
48
src/models/config.ts
Normal file
48
src/models/config.ts
Normal file
@ -0,0 +1,48 @@
|
||||
import Schema from 'libs/Schema'
|
||||
import type Dao from './Adapters/DaoAdapter'
|
||||
import PostgresAdapter from './Adapters/PostgresAdapter'
|
||||
import CassandraClient from './Clients/CassandraClient'
|
||||
import type { ClientStatic } from './Clients/Client'
|
||||
import type Migration from './Migrations/Migration'
|
||||
|
||||
// @ts-ignore
|
||||
interface Config {
|
||||
/**
|
||||
* the main client is responsible for the Migration system
|
||||
*/
|
||||
mainClient: ClientStatic
|
||||
|
||||
/**
|
||||
* define every models of the application
|
||||
*/
|
||||
models: Record<string, Dao>
|
||||
|
||||
/**
|
||||
* Define the application migrations
|
||||
*/
|
||||
migrations: Array<Migration>
|
||||
}
|
||||
|
||||
const config = {
|
||||
/**
|
||||
* the main client is responsible for the Migration system
|
||||
*/
|
||||
mainClient: CassandraClient as ClientStatic<CassandraClient>,
|
||||
|
||||
/**
|
||||
* define every models of the application
|
||||
*/
|
||||
models: {
|
||||
session: new PostgresAdapter(new Schema({}), 'pouet')
|
||||
// session: new Dao(Session, new CassandraAdapter(Session, 'Session', 'id')),
|
||||
},
|
||||
|
||||
/**
|
||||
* Define the application migrations
|
||||
*/
|
||||
migrations: [
|
||||
// Migration20240326115528
|
||||
]
|
||||
} as const
|
||||
|
||||
export default config
|
@ -1,10 +1,9 @@
|
||||
/// <reference types="vitest" />
|
||||
import { getViteConfig } from 'astro/config'
|
||||
// import { configDefaults } from 'vitest/config'
|
||||
|
||||
export default getViteConfig({
|
||||
test: {
|
||||
include: ['./tests/**.ts']
|
||||
include: ['./tests/**/*.ts']
|
||||
/* for example, use global to avoid globals imports (describe, test, expect): */
|
||||
// globals: true,
|
||||
}
|
||||
|
Loading…
x
Reference in New Issue
Block a user