feat: Filemagedon
Some checks failed
Build, check & Test / run (push) Failing after 1m45s
Lint / run (push) Failing after 48s
Build Docker Image / build_docker (push) Failing after 3m18s

Signed-off-by: Avior <git@avior.me>
This commit is contained in:
2024-09-11 14:38:58 +02:00
parent 3e91597dca
commit bc97d9106b
45 changed files with 4548 additions and 64 deletions

View File

@@ -0,0 +1,234 @@
import { objectFind, objectLoop } from '@dzeio/object-util'
import { Sort, type Query, type QueryList, type QueryValues } from 'models/Query'
export declare type AllowedValues = string | number | bigint | boolean | null | undefined
// eslint-disable-next-line complexity
export function filter<T extends object>(query: Query<T>, results: Array<T>, options?: { debug?: boolean }): {filtered: Array<T>, unpaginatedLength: number} {
if (options?.debug) {
console.log('Query', query)
}
// filter
let filtered = results.filter((it) => {
const res = objectLoop(query, (value, key) => {
if (key === '$or') {
for (const sub of value as any) {
const final = filterEntry(sub, it)
// eslint-disable-next-line max-depth
if (final) {
return true
}
}
return false
}
if ((key as string).startsWith('$')) {
return true
}
return filterEntry(query, it)
})
// console.log(it, res)
return res
})
if (options?.debug) {
console.log('postFilters', filtered)
}
// sort
if (query.$sort) {
// temp until better solution is found
const first = objectFind(query.$sort, () => true)
filtered = filtered.sort((objA, objB) => {
const a = objA[first!.key]
const b = objB[first!.key]
const ascend = first?.value !== Sort.DESC
if (typeof a === 'number' && typeof b === 'number') {
if (ascend) {
return b - a
} else {
return a - b
}
}
if (a instanceof Date && b instanceof Date) {
if (ascend) {
return a.getTime() - b.getTime()
} else {
return b.getTime() - a.getTime()
}
}
if (typeof a === 'string' && typeof b === 'string') {
if (ascend) {
return a.localeCompare(b)
} else {
return b.localeCompare(a)
}
}
if (ascend) {
return a > b ? 1 : -1
}
return a > b ? -1 : 1
})
}
if (options?.debug) {
console.log('postSort', filtered)
}
// length of the query assuming a single page
const unpaginatedLength = filtered.length
// limit
if (query.$offset || query.$limit) {
const offset = query.$offset ?? 0
filtered = filtered.slice(offset, offset + (query.$limit ?? Infinity))
}
if (options?.debug) {
console.log('postLimit', filtered)
}
return { filtered, unpaginatedLength }
}
/**
*
* @param query the query of the entry
* @param item the implementation of the item
* @returns if it should be kept or not
*/
export function filterEntry<T extends object>(query: QueryList<T>, item: T): boolean {
// eslint-disable-next-line complexity
const res = objectLoop(query as any, (queryValue, key: keyof typeof query) => {
/**
* TODO: handle $keys
*/
if ((key as string).startsWith('$')) {
return true
}
return filterValue(item[key], queryValue)
})
return res
}
/**
* indicate if a value should be kept by an ENTIRE query
*
* @param value the value to filter
* @param query the full query
* @returns if the query should keep the value or not
*/
function filterValue<T extends AllowedValues>(value: any, query: QueryValues<T>) {
if (typeof query !== 'object' || query === null || query instanceof RegExp || Array.isArray(query)) {
return filterItem(value, query)
}
// loop through each keys of the query
// eslint-disable-next-line arrow-body-style
return objectLoop(query, (querySubValue: any, queryKey: any) => {
return filterItem(value, {[queryKey]: querySubValue } as QueryValues<T>)
})
}
/**
*
* @param value the value to check
* @param query a SINGLE query to check against
* @returns if the value should be kept or not
*/
// eslint-disable-next-line complexity
function filterItem(value: any, query: QueryValues<AllowedValues>): boolean {
/**
* check if the value is null
*/
if (query === null) {
return typeof value === 'undefined' || value === null
}
if (query instanceof RegExp) {
return query.test(typeof value === 'string' ? value : value.toString())
}
/**
* ?!?
*/
if (value === null || typeof value === 'undefined') {
return false
}
/**
* strict value check by default
*/
if (!(typeof query === 'object')) {
return query === value
}
/**
* Array checking and $in
*/
if (Array.isArray(query) || '$in' in query) {
const arr = Array.isArray(query) ? query : query.$in as Array<AllowedValues>
return arr.includes(value)
}
if ('$inc' in query) {
return (value.toString() as string).toLowerCase().includes(query.$inc!.toString()!.toLowerCase())
}
if ('$eq' in query) {
return query.$eq === value
}
/**
* numbers specific cases for numbers
*/
if ('$gt' in query) {
value = value instanceof Date ? value.getTime() : value
const comparedValue = query.$gt instanceof Date ? query.$gt.getTime() : query.$gt
return typeof value === 'number' && typeof comparedValue === 'number' && value > comparedValue
}
if ('$lt' in query) {
value = value instanceof Date ? value.getTime() : value
const comparedValue = query.$lt instanceof Date ? query.$lt.getTime() : query.$lt
return typeof value === 'number' && typeof comparedValue === 'number' && value < comparedValue
}
if ('$gte' in query) {
value = value instanceof Date ? value.getTime() : value
const comparedValue = query.$gte instanceof Date ? query.$gte.getTime() : query.$gte
return typeof value === 'number' && typeof comparedValue === 'number' && value >= comparedValue
}
if ('$lte' in query) {
value = value instanceof Date ? value.getTime() : value
const comparedValue = query.$lte instanceof Date ? query.$lte.getTime() : query.$lte
return typeof value === 'number' && typeof comparedValue === 'number' && value <= comparedValue
}
if ('$len' in query && Array.isArray(value)) {
return value.length === query.$len
}
/**
* Logical Operators
*/
if ('$or' in query && Array.isArray(query.$or)) {
return !!query.$or.find((it) => filterValue(value, it as QueryValues<any>))
}
if ('$and' in query && Array.isArray(query.$and)) {
return !query.$and.find((it) => !filterValue(value, it as QueryValues<any>))
}
if ('$not' in query) {
return !filterValue(value, query.$not as QueryValues<any>)
}
if ('$nor' in query && Array.isArray(query.$nor)) {
return !query.$nor.find((it) => filterValue(value, it as QueryValues<any>))
}
if ('$nand' in query && Array.isArray(query.$nand)) {
return !!query.$nand.find((it) => !filterValue(value, it as QueryValues<any>))
}
return false
}

View File

@@ -0,0 +1,54 @@
import CSV, { type CSVOptions } from 'libs/FileFormats/CSV'
import type Schema from 'libs/Schema'
import type { SchemaInfer } from 'libs/Schema'
import fs from 'node:fs'
import type { Query } from '../Query'
import { filter } from './AdapterUtils'
import type DaoAdapter from './DaoAdapter'
import type { DBPull } from './DaoAdapter'
export default class CSVAdapter<T extends Schema> implements DaoAdapter<T> {
private data: Array<SchemaInfer<T>>
public constructor(
public readonly schema: T,
public readonly serverPath: string,
private readonly csvOptions?: CSVOptions
) {
const data = fs.readFileSync(serverPath, 'utf-8')
this.data = CSV.parse(data, csvOptions) as Array<SchemaInfer<T>>
}
public async create(_obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
throw new Error('not implemented')
}
// eslint-disable-next-line complexity
public async read(query?: Query<SchemaInfer<T>> | undefined): Promise<DBPull<T>> {
const res = filter(query ?? {}, this.data)
return {
rows: res.filtered.length,
rowsTotal: res.unpaginatedLength,
page: 1,
pageTotal: 1,
data: res.filtered
}
}
public async update(_obj: SchemaInfer<T>): Promise<SchemaInfer<T> | null> {
throw new Error('not implemented')
}
public async patch(_id: string, _obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
throw new Error('not implemented')
}
public async delete(obj: SchemaInfer<T>): Promise<boolean> {
throw new Error('not implemented')
}
}

View File

@@ -0,0 +1,433 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { objectFind, objectKeys, objectLoop, objectMap, objectRemap, objectValues } from '@dzeio/object-util'
import type { ArrayOrObject } from 'cassandra-driver'
import crypto from 'node:crypto'
import { Sort, type Query } from '../Query'
import { filter } from './AdapterUtils'
import type DaoAdapter from './DaoAdapter'
import type { DBPull } from './DaoAdapter'
/**
* @deprecated need to be moved to the new Schema system
*
* (won't be done because we mostly won't be using it again...)
*/
export default class CassandraAdapter<T extends Schema> implements DaoAdapter<T> {
private id!: Array<string>
public constructor(
/**
* the schema used by Cassandra
*/
public readonly schema: Schema<T>,
/**
* the table name
*/
public readonly table: string,
/**
* the id(s)
*/
id?: keyof T | Array<keyof T>,
/**
* other secondary keys necessary to update data
*/
private readonly partitionKeys?: Array<keyof T>,
/**
* additionnal options to make the adapter work
*/
public readonly options: {
/**
* log the requests made to cassandra
*/
debug?: boolean
} = {}
) {
if (!id) {
objectLoop(schema.model, (value, key) => {
if (!isSchemaItem(value)) {
return true
}
if (!value.database?.unique) {
return true
}
id = key
return false
})
} else {
this.id = typeof id === 'string' ? [id] : id as Array<string>
}
}
// TODO: make it clearer what it does
public async create(obj: Partial<Implementation<T>>): Promise<Implementation<T> | null> {
objectLoop(this.schema.model, (item, key) => {
if (isSchemaItem(item) && (item.database?.created || item.database?.updated)) {
// @ts-expect-error things get validated anyway
obj[key] = new Date()
} else if (isSchemaItem(item) && item.database?.auto && !obj[key]) {
if (item.type === String) {
// @ts-expect-error things get validated anyway
obj[key] = crypto.randomBytes(16).toString('hex')
} else {
// @ts-expect-error things get validated anyway
obj[key] = crypto.randomBytes(16).readUint32BE()
}
}
})
const clone = this.schema.parse(obj)
if (!clone) {
throw new Error('Invalid data given to create the final object')
}
const keys = objectKeys(clone)
const keysStr = keys.join(', ')
const values = keys.fill('?').join(', ')
const req = `INSERT INTO ${this.table} (${keysStr}) VALUES (${values});`
const client = (await Client.get())!
const params = objectMap(clone as any, (value, key) => this.valueToDB(key as any, value))
if (this.options?.debug) {
console.log(req, params)
}
try {
await client.execute(req, params, { prepare: true })
} catch (e) {
console.log(e, req, params)
return null
}
return this.schema.parse(clone)
}
// eslint-disable-next-line complexity
public async read(query?: Query<Implementation<T>> | undefined): Promise<DBPull<T>> {
let req: Array<string> = ['SELECT', '*', 'FROM', this.table]
const params: ArrayOrObject = []
// list of the differents items in the WHERE statement
const whereItems: Array<string> = []
// if ((query?.where?.length ?? 0) > 0 && (query?.where?.length !== 1 || query?.where?.[0]?.[1] !== 'includes')) {
// for (const it of query?.where ?? []) {
// // eslint-disable-next-line max-depth
// switch (it[1]) {
// case 'in':
// // eslint-disable-next-line no-case-declarations
// const arr = it[2] as Array<any>
// whereItems.push(`${String(it[0])} IN (${arr.map(() => '?').join(',')})`)
// params.push(...arr)
// break
// case 'equal':
// whereItems.push(`${String(it[0])} = ?`)
// params.push(it[2])
// break
// case 'after':
// whereItems.push(`${String(it[0])} >= ?`)
// params.push(it[2])
// break
// case 'before':
// whereItems.push(`${String(it[0])} <= ?`)
// params.push(it[2])
// break
// }
// }
// }
if (whereItems.length > 0) {
req.push('WHERE')
for (let idx = 0; idx < whereItems.length; idx++) {
const item = whereItems[idx] as string
if (idx > 0) {
req.push('AND')
}
req.push(item)
}
}
// ORDER BY (not working as we want :()
// const sort = query?.$sort
// if (sort && sort.length >= 1) {
// const suffix = sort[0]?.[1] === 'asc' ? 'ASC' : 'DESC'
// req = req.concat(['ORDER', 'BY', sort[0]?.[0] as string, suffix])
// }
// LIMIT (not working because of ORDER BY)
// const page: number = query?.page ?? 0
// const pageLimit: number | null = query?.limit ?? null
// let limit: number | null = null
// if (pageLimit && pageLimit > 0) {
// limit = pageLimit * (page + 1)
// req = req.concat(['LIMIT', limit.toString()])
// }
// ALLOWW FILTERING
req = req.concat(['ALLOW', 'FILTERING'])
const client = (await Client.get())!
if (this.options?.debug) {
console.log(req, params)
}
let res: Array<Record<string, any>>
try {
res = await client.execute(req.join(' '), params)
} catch (error) {
console.error('error running request')
console.error(req, params)
throw error
}
if (!res) {
return {
rows: 0,
pageTotal: 0,
page: 1,
rowsTotal: 0,
data: []
}
}
let dataset = res
.map((obj) => objectRemap(this.schema.model, (_, key) => ({
key,
value: this.dbToValue(key, obj[key])
})))
.map((obj) => {
objectLoop(this.schema.model, (item, key) => {
if (Array.isArray(item) && !obj[key]) {
obj[key] = []
}
})
return obj
})
.map((it) => this.schema.parse(it))
.filter((it): it is Implementation<T> => !!it)
/**
* POST QUERY TREATMENT
*/
// if ((query?.where?.length ?? 0) > 0) {
// for (const it of query?.where ?? []) {
// // eslint-disable-next-line max-depth
// switch (it[1]) {
// case 'includes':
// dataset = dataset.filter((entry) => entry[it[0]]?.toString()?.includes(it[2]))
// break
// }
// }
// }
// sort
// const sort = query?.$sort
// if (sort) {
// const sortKey = sort ? sort[0]![0] : objectFind(this.schema.model, (value) => {
// if (!isSchemaItem(value)) {
// return false
// }
// return !!value.database?.created
// })
// const sortValue = sort ? sort[0]![1] : 'asc'
// if (sortKey && sortValue) {
// if (sortValue === 'asc') {
// dataset = dataset.sort((a, b) => b[sortKey as string]! > a[sortKey as string]! ? 1 : -1)
// } else {
// dataset = dataset.sort((a, b) => b[sortKey as string]! < a[sortKey as string]! ? 1 : -1)
// }
// }
// }
// console.log(res.rows, req)
// post request processing
// if (limit) {
// dataset = dataset.slice(page * (query?.limit ?? 0), limit)
// }
// length of the query assuming a single page
let unpaginatedLength = dataset.length
// temp modification of comportement to use the new and better query system
if ((!query || !query?.$sort) && objectFind(this.schema.model, (_, key) => key === 'created')) {
// temp fix for the sorting algorithm
if (!query) {
// @ts-expect-error normal currently
query = { $sort: { created: Sort.DESC }}
} else {
query.$sort = { created: Sort.DESC }
}
}
if (query) {
const { filtered, unpaginatedLength: ul } = filter(query, dataset, this.options)
dataset = filtered
unpaginatedLength = ul
}
// console.log(res)
const pageLimit = query?.$limit ?? 10
const pageOffset = query?.$offset ?? 0
return {
rows: dataset.length,
rowsTotal: unpaginatedLength,
page: Math.floor(pageOffset / pageLimit),
pageTotal: Math.max(1, Math.ceil(unpaginatedLength / pageLimit)),
data: dataset
}
}
public async update(obj: Implementation<T>): Promise<Implementation<T> | null> {
return this.patch(obj)
}
public async patch(id: Partial<Implementation<T>>): Promise<Implementation<T> | null>
public async patch(id: string, obj: Partial<Implementation<T>>): Promise<Implementation<T> | null>
// eslint-disable-next-line complexity
public async patch(id: string | Partial<Implementation<T>>, obj?: Partial<Implementation<T>>): Promise<Implementation<T> | null> {
if (!obj) {
if (typeof id === 'string') {
return null
}
obj = {...id} as Partial<Implementation<T>>
}
// update the updated time
objectLoop(this.schema.model, (item, key) => {
if (isSchemaItem(item) && item.database?.updated) {
// @ts-expect-error things get validated anyway
obj[key] = new Date()
}
})
// build the request parts
const parts: Array<string> = ['UPDATE', this.table, 'SET']
const params: Array<any> = []
// remove ids
const ids = Array.isArray(this.id) ? this.id : [this.id]
for (const tmp of ids) {
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
delete obj[tmp]
}
// map the items to update
const keys = objectMap(obj as {}, (_, key) => `${key}=?`)
parts.push(keys.join(', '))
params.push(...objectValues(obj as {}))
// filter by the ids
parts.push('WHERE')
const read: Partial<any> = {}
for (let idx = 0; idx < ids.length; idx++) {
const key = ids[idx] as string
if (idx > 0) {
parts.push('AND')
}
parts.push(`${key}=?`)
const value = obj[key] ?? (typeof id === 'string' ? id : id[key])
read[key] = this.valueToDB(key, value)
if (!value) {
throw new Error(`Missing id (${key})`)
}
params.push(value)
}
if (this.partitionKeys && this.partitionKeys?.length > 0) {
const { data } = await this.read(read)
const item = data[0]
for (const key of this.partitionKeys) {
parts.push('AND', `${key as string}=?`)
params.push(this.valueToDB(key, item![key]))
}
}
const req = parts.join(' ')
const client = await Client.get()
if (this.options?.debug) {
console.log(req, params)
}
try {
const res = await client?.execute(req, params)
// console.log(res, req)
if (this.options?.debug) {
console.log('post patch result', res, req)
}
return (await this.read(read)).data[0] ?? null
} catch (e) {
console.log(e, req, params)
}
return null
}
public async delete(obj: Implementation<T>): Promise<boolean> {
const parts = ['DELETE', 'FROM', this.table, 'WHERE']
const params: ArrayOrObject = []
objectLoop(obj as {}, (value, key) => {
let allowedWheres = ([] as Array<any>).concat(Array.isArray(this.id) ? this.id : [this.id])
if (this.partitionKeys) {
allowedWheres.push(...this.partitionKeys )
}
if (!allowedWheres.includes(key)) {
return
}
if (parts.length > 4) {
parts.push('AND')
}
parts.push(`${key}=?`)
params.push(value)
})
const client = await Client.get()
if (this.options?.debug) {
console.log(parts, params)
}
try {
await client!.execute(parts.join(' '), params)
} catch (e) {
console.error(e, parts, params)
throw e
}
return true
}
private valueToDB(key: keyof T, value: any): string | number | boolean | Date {
const item = this.schema.model[key] as Item
const type = isSchemaItem(item) ? item.type : item
if (typeof type === 'object' && !Array.isArray(type) && !(value instanceof Date)) {
return JSON.stringify(value)
}
if (typeof value === 'undefined' || value === null) {
return value
}
return value
}
private dbToValue(key: keyof T, value: string | number | boolean | Date): any {
const item = this.schema.model[key] as Item
const type = isSchemaItem(item) ? item.type : item
if (typeof type === 'object' && !Array.isArray(type) && !(value instanceof Date)) {
return JSON.parse(value as string)
}
if (typeof value === 'undefined' || value === null) {
return value
}
return value
}
}

View File

@@ -1,16 +1,46 @@
import type Schema from 'libs/Schema'
import type { Model, ModelInfer, SchemaInfer } from 'libs/Schema'
import type { Query } from '../Query'
export interface DBPull<T extends Schema> {
/**
* total number of rows that are valid with the specified query
*/
rows: number
/**
* total number of rows in the table
*/
rowsTotal: number
/**
* current page number
*/
page: number
/**
* total amount of pages
*/
pageTotal: number
/**
* the data fetched
*/
data: Array<SchemaInfer<T>>
}
/**
* the Dao is the object that connect the Database or source to the application layer
* the DaoAdapter is the object that connect the Database or source to the application layer
*
* you MUST call it through the `DaoFactory` file
*/
export default abstract class Dao<Object extends { id: any } = { id: any }> {
export default abstract class DaoAdapter<M extends Model = Model> {
/**
* insert a new object into the source
*
* @param obj the object to create
* @returns the object with it's id filled if create or null otherwise
*/
abstract create(obj: Omit<Object, 'id' | 'created' | 'updated'>): Promise<Object | null>
abstract create(obj: Partial<ModelInfer<M>>): Promise<ModelInfer<M> | null>
/**
* insert a new object into the source
@@ -18,7 +48,7 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
* @param obj the object to create
* @returns the object with it's id filled if create or null otherwise
*/
public insert: Dao<Object>['create'] = (obj: Parameters<Dao<Object>['create']>[0]) =>
public insert: DaoAdapter<ModelInfer<M>>['create'] = (obj: Parameters<DaoAdapter<ModelInfer<M>>['create']>[0]) =>
this.create(obj)
/**
@@ -27,7 +57,7 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
* @returns an array containing the list of elements that match with the query
*/
abstract findAll(query?: Partial<Object>): Promise<Array<Object>>
abstract read(query?: Query<ModelInfer<M>> | undefined): Promise<DBPull<Schema<M>>>
/**
* find the list of objects having elements from the query
@@ -35,8 +65,17 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
* @returns an array containing the list of elements that match with the query
*/
public find: Dao<Object>['findAll'] = (query: Parameters<Dao<Object>['findAll']>[0]) =>
this.findAll(query)
public findAll: DaoAdapter<ModelInfer<M>>['read'] = (query: Parameters<DaoAdapter<ModelInfer<M>>['read']>[0]) =>
this.read(query)
/**
* find the list of objects having elements from the query
*
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
* @returns an array containing the list of elements that match with the query
*/
public find: DaoAdapter<ModelInfer<M>>['read'] = (query: Parameters<DaoAdapter<ModelInfer<M>>['read']>[0]) =>
this.read(query)
/**
* find an object by it's id
@@ -46,8 +85,8 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
* @param id the id of the object
* @returns
*/
public findById(id: Object['id']): Promise<Object | null> {
return this.findOne({ id: id } as Partial<Object>)
public findById(id: ModelInfer<M>['id']): Promise<ModelInfer<M> | null> {
return this.findOne({ id: id } as Partial<ModelInfer<M>>)
}
/**
@@ -58,7 +97,7 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
* @param id the id of the object
* @returns
*/
public get(id: Object['id']) {
public get(id: ModelInfer<M>['id']) {
return this.findById(id)
}
@@ -68,8 +107,8 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
* @param query a partial object which filter depending on the elements, if not set it will fetch everything
* @returns the first element matching with the query or null otherwise
*/
public async findOne(query?: Partial<Object>): Promise<Object | null> {
return (await this.findAll(query))[0] ?? null
public async findOne(query?: Partial<ModelInfer<M>>): Promise<ModelInfer<M> | null> {
return (await this.findAll(query)).data[0] ?? null
}
/**
@@ -80,14 +119,14 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
* @param obj the object to update
* @returns an object if it was able to update or null otherwise
*/
abstract update(obj: Object): Promise<Object | null>
abstract update(obj: Partial<ModelInfer<M>>): Promise<ModelInfer<M> | null>
/**
* change some elements from the object and return the object updated
* @param id the id of the object
 * @param changes the change to make
*/
public async patch(id: string, changes: Partial<Object>): Promise<Object | null> {
public async patch(id: ModelInfer<M>['id'], changes: Partial<ModelInfer<M>>): Promise<ModelInfer<M> | null> {
const query = await this.findById(id)
if (!query) {
return null
@@ -100,8 +139,8 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
* @returns the object is updated/inserted or null otherwise
*/
public async upsert(
object: Object | Omit<Object, 'id' | 'created' | 'updated'>
): Promise<Object | null> {
object: Partial<ModelInfer<M>>
): Promise<ModelInfer<M> | null> {
if ('id' in object) {
return this.update(object)
}
@@ -114,5 +153,5 @@ export default abstract class Dao<Object extends { id: any } = { id: any }> {
*
* @returns if the object was deleted or not (if object is not in db it will return true)
*/
abstract delete(obj: Object): Promise<boolean>
abstract delete(obj: ModelInfer<M>): Promise<boolean>
}

View File

@@ -0,0 +1,221 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import { objectLoop } from '@dzeio/object-util'
import archiver from 'archiver'
import type Schema from 'libs/Schema'
import type { Model, ModelInfer } from 'libs/Schema'
import type SchemaBuffer from 'libs/Schema/Items/SchemaBuffer'
import type SchemaNumber from 'libs/Schema/Items/SchemaNumber'
import type SchemaString from 'libs/Schema/Items/SchemaString'
import fileSystem from 'node:fs'
import fs from 'node:fs/promises'
import type { Query } from '../Query'
import type DaoAdapter from './DaoAdapter'
import type { DBPull } from './DaoAdapter'
interface FS extends Model {
filename: SchemaString
path: SchemaString
// eslint-disable-next-line no-undef
data: SchemaBuffer
type: SchemaString
size: SchemaNumber
}
export default class FSAdapter<T extends FS> implements DaoAdapter<Schema<T>> {
public constructor(
public readonly schema: Schema<T>,
public readonly basePath: string
) {
if (basePath.endsWith('/')) {
console.warn('the base path should not end wiath a "/", removing it')
basePath = basePath.slice(0, basePath.lastIndexOf('/'))
}
}
// TODO: make it clearer what it does
public async create(obj: Partial<ModelInfer<T>>): Promise<ModelInfer<T> | null> {
const realPath = this.getFullPath(obj.path!)
const finalFolder = realPath.slice(0, realPath.lastIndexOf('/'))
console.log('making the directory', finalFolder)
await fs.mkdir(finalFolder, { recursive: true })
if (obj.type === 'file') {
console.log('getting the data', finalFolder)
const data = obj.data
console.log('writing to', realPath)
if ((data as any) instanceof Buffer) {
await fs.writeFile(realPath, data as Buffer)
} else {
await fs.writeFile(realPath, data as string)
}
return obj as ModelInfer<T>
}
console.log('making the final directory', realPath)
await fs.mkdir(realPath)
return obj as ModelInfer<T>
}
public async createZippedBufferFromDirectory(directoryPath: string) {
const archive = archiver('zip', {zlib: {level: 9}})
archive.on('error', (err) => {
throw err
})
archive.on('warning', (err) => {
if (err.code === 'ENOENT') {
console.log('warning: ', err)
} else {
throw err
}
})
const fileName = `${this.basePath}/zip/${directoryPath.split(this.basePath)[1]}.zip`
fs.mkdir(fileName.slice(0, fileName.lastIndexOf('/')), {recursive: true})
const output = fileSystem.createWriteStream(fileName)
archive.pipe(output)
archive.directory(directoryPath, false)
const timeout = (cb: (value: (value: unknown) => void) => void, interval: number) => () =>
new Promise((resolve) => {
setTimeout(() => cb(resolve), interval)
})
const onTimeout = (seconds: number) => timeout((resolve) =>
resolve(`Timed out while zipping ${directoryPath}`), seconds * 1000)()
const error = await Promise.race([archive.finalize(), onTimeout(60)])
if (typeof error === 'string') {
console.log('Error:', error)
return null
}
return await fs.readFile(fileName)
}
// eslint-disable-next-line complexity
public async read(query?: Query<ModelInfer<T>> | undefined): Promise<DBPull<Schema<T>>> {
const localPath = query?.path as string ?? ''
const realPath = this.getFullPath(localPath)
console.log('get the full path', realPath)
try {
const stats = await fs.stat(realPath)
const files: Array<ModelInfer<T>> = []
if (stats.isDirectory()) {
const dirFiles = await fs.readdir(realPath)
// eslint-disable-next-line max-depth
// if (toZip === true) { // put queried file/folder in a zip file
// const buffer = await this.createZippedBufferFromDirectory(realPath)
// // eslint-disable-next-line max-depth
// if (buffer !== null) {
// files.push({
// path: localPath,
// filename: localPath.slice(localPath.lastIndexOf('/') + 1),
// data: buffer,
// type: 'file',
// size: buffer.length,
// } as ModelInfer<T>)
// }
// } else { // return every sub files
for await (const file of dirFiles) {
files.push(await this.readFile(`${localPath}/${file}`))
}
// }
} else {
files.push(await this.readFile(localPath))
}
const pageLimit = query?.$limit ?? Infinity
const pageOffset = query?.$offset ?? 0
return {
rows: files.length,
rowsTotal: files.length,
page: Math.floor(pageOffset / pageLimit),
pageTotal: Math.max(1, Math.ceil(files.length / pageLimit)),
data: files.slice(pageOffset, pageOffset + pageLimit)
}
} catch {
return {
rows: 0,
rowsTotal: 0,
page: 0,
pageTotal: 0,
data: []
}
}
}
public async update(_obj: ModelInfer<T>): Promise<ModelInfer<T> | null> {
throw new Error('not implemented')
}
public async patch(_id: string, _obj: Partial<ModelInfer<T>>): Promise<ModelInfer<T> | null> {
throw new Error('not implemented')
}
public async delete(obj: ModelInfer<T>): Promise<boolean> {
const localPath = obj?.path as string ?? ''
const realPath = this.getFullPath(localPath)
try {
const stats = await fs.stat(realPath)
if (!stats) {
return false
}
fs.rm(realPath, { recursive: true, force: true })
return true
} catch {
console.error('Could not remove file', localPath)
return false
}
}
private getFullPath(localPath?: string): string {
if (localPath && !localPath?.startsWith('/')) {
console.warn('Your path should start with a "/", adding it')
localPath = (`/${localPath}`)
}
let realPath = this.basePath + (localPath ? localPath : '')
if (realPath.includes('\\')) {
realPath = realPath.replace(/\\/g, '/')
}
return realPath
}
private async readFile(localPath: string): Promise<ModelInfer<T>> {
const path = this.getFullPath(localPath)
console.log('reading file at', path)
const stats = await fs.stat(path)
const type = stats.isFile() ? 'file' : 'directory'
console.log('file is a', type)
const obj: ModelInfer<T> = {
path: localPath,
filename: localPath.slice(localPath.lastIndexOf('/') + 1),
data: type === 'file' ? await fs.readFile(path) : '',
type: type,
size: stats.size
} as any
objectLoop(this.schema.model, (item, key) => {
if (item.attributes.includes('db:created')) {
// @ts-expect-error things get validated anyway
obj[key] = stats.ctime
} else if (item.attributes.includes('db:updated')) {
// @ts-expect-error things get validated anyway
obj[key] = stats.mtime
}
})
return obj
}
}

View File

@@ -0,0 +1,221 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import { objectClone, objectLoop, objectMap, objectOmit, objectRemap } from '@dzeio/object-util'
import ldap from 'ldapjs'
import type Schema from 'libs/Schema'
import type { SchemaInfer } from 'libs/Schema'
import type DaoAdapter from 'models/Adapters/DaoAdapter'
import type { DBPull } from 'models/Adapters/DaoAdapter'
import type { Query } from 'models/Query'
import { filter } from './AdapterUtils'
/** LDAP attribute names the adapter knows how to map onto the schema */
type LDAPFields = 'uid' | 'mail' | 'givenname' | 'sn' | 'jpegphoto' | 'password'

/**
 * DaoAdapter backed by an LDAP directory.
 *
 * Only `read` is implemented: either an admin-bound search, or, when the
 * query carries a password, a search that re-binds as the matched user to
 * validate that password.
 */
export default class LDAPAdapter<T extends Schema> implements DaoAdapter<T> {
	/** LDAP attribute -> schema key (inverse of `options.fieldsCorrespondance`) */
	private reverseReference: Partial<Record<LDAPFields | string, keyof T>> = {}
	/** LDAP attributes requested on every search */
	private attributes: Array<LDAPFields | string> = []

	public constructor(
		public readonly schema: T,
		public readonly options: {
			url: string
			dnSuffix: string
			admin: {
				dn?: string | undefined
				username?: string | undefined
				password: string
			}
			/** schema key -> LDAP attribute name */
			fieldsCorrespondance?: Partial<Record<keyof SchemaInfer<T>, LDAPFields | string>>
		}
	) {
		objectLoop(options.fieldsCorrespondance ?? {}, (value, key) => {
			this.reverseReference[value] = key
			this.attributes.push(value)
		})
	}

	// TODO: make it clearer what it does
	public async create(_obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
		throw new Error('not implemented')
	}

	/**
	 * search the directory
	 *
	 * when the query contains a password the search authenticates as the
	 * matched user (login flow), otherwise it binds with the admin credentials
	 *
	 * @param query the query to filter entries with
	 * @returns a DBPull; empty on any bind/search failure
	 */
	// eslint-disable-next-line complexity
	public async read(query?: Query<SchemaInfer<T>> | undefined): Promise<DBPull<T>> {
		const passwordField = this.options.fieldsCorrespondance?.password ?? 'password'
		// NOTE(review): `passwordField` is the LDAP attribute name while the login
		// path below reads `query.password` (schema key) — confirm both are intended
		const doLogin = !!query?.[passwordField]
		const emptyResult = {
			rows: 0,
			rowsTotal: 0,
			page: 1,
			pageTotal: 0,
			data: []
		}
		if (!query) {
			return emptyResult
		}
		if (!doLogin) {
			// admin-bound search
			const bind = this.options.admin.dn ?? `cn=${this.options.admin.username},${this.options.dnSuffix}`
			try {
				const client = await this.bind(bind, this.options.admin.password)
				// @ts-expect-error the objectMap typings do not carry the key/value union through
				// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
				const results = (await this.ldapFind(client, objectMap(query, (value, key) => ({key: this.options.fieldsCorrespondance?.[key], value: value}))
				)).map((it) => this.schema.parse(
					// LDAP attribute names are case-insensitive: lowercase before reverse-mapping
					objectRemap(it, (value, key) => ({key: this.reverseReference[key.toLowerCase() as string] as string, value: value}))
				)).filter((it): it is SchemaInfer<T> => !!it)
				const res = filter(query, results)
				return {
					rows: res.filtered.length,
					rowsTotal: results.length,
					page: 1,
					pageTotal: 1,
					data: res.filtered
				}
			} catch {
				return emptyResult
			}
		}
		// password authentication
		try {
			const clone = objectClone(query)
			delete clone.password
			// find using admin privileges
			const res = await this.read(clone)
			const user = res.data[0]
			if (!user) {
				return emptyResult
			}
			const password = query.password as string ?? ''
			// re-bind as the user itself so the server validates the password
			const client = await this.bind(`uid=${user[this.reverseReference.uid as keyof typeof user]!},${this.options.dnSuffix}`, password)
			// @ts-expect-error the objectMap typings do not carry the key/value union through
			// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
			const results = (await this.ldapFind(client, objectMap(clone, (value, key) => {
				const finalKey = this.options.fieldsCorrespondance?.[key]
				return {key: finalKey, value: value}
			})
			)).map((it) => this.schema.parse(
				// lowercase for the same case-insensitivity reason as the admin path above
				objectRemap(it, (value, key) => ({ key: this.reverseReference[key.toLowerCase() as string] as string, value: value }))
			)).filter((it): it is SchemaInfer<T> => !!it)
			const final = filter(objectOmit(query, 'password'), results)
			// exactly one entry must remain, otherwise the login is ambiguous/invalid
			if (final.filtered.length !== 1) {
				return emptyResult
			}
			return {
				rows: final.filtered.length,
				rowsTotal: results.length,
				page: 1,
				pageTotal: 1,
				data: final.filtered
			}
		} catch (e) {
			console.log('error, user not found', e)
			return emptyResult
		}
	}

	public async update(_obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
		throw new Error('not implemented')
	}

	public async patch(_id: string, _obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
		throw new Error('not implemented')
	}

	public async delete(_obj: Partial<SchemaInfer<T>>): Promise<boolean> {
		throw new Error('not implemented')
	}

	/**
	 * open a connection to the server and bind as `dn`
	 *
	 * @param dn the distinguished name to bind as
	 * @param password the password for that dn
	 * @returns the bound client (caller is responsible for unbinding)
	 */
	private bind(dn: string, password: string): Promise<ldap.Client> {
		const client = ldap.createClient({
			url: this.options.url
		})
		return new Promise<ldap.Client>((res, rej) => {
			console.log('binding as', dn)
			client.on('connect', () => {
				client.bind(dn, password, (err) => {
					if (err) {
						console.error('error binding as', dn, err)
						client.unbind()
						rej(err)
						return
					}
					console.log('binded as', dn)
					res(client)
				})
			})
				.on('timeout', (err) => rej(err))
				.on('connectTimeout', (err) => rej(err))
				.on('error', (err) => rej(err))
				.on('connectError', (err) => rej(err))
		})
	}

	/**
	 * run a search on the directory
	 *
	 * only the FIRST filter is sent to the server — the remaining refinement is
	 * done in memory by `filter()` in the callers
	 *
	 * @param client a bound client
	 * @param filters the filters (read without mutating the caller's array)
	 * @returns the raw attribute records found
	 */
	private async ldapFind(client: ldap.Client, filters: Array<{key: LDAPFields, value: string}>): Promise<Array<Record<LDAPFields, string | Array<string> | undefined>>> {
		if (filters.length === 0) {
			return []
		}
		// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
		const firstFilter = filters[0]!
		return new Promise<Array<Record<LDAPFields, string | Array<string> | undefined>>>((res, rej) => {
			const users: Array<Record<LDAPFields, string | Array<string> | undefined>> = []
			const filter = {
				attribute: firstFilter.key as any,
				value: firstFilter.value,
			}
			console.log('Searching on LDAP')
			client.search(
				this.options.dnSuffix, {
					filter: new ldap.EqualityFilter(filter),
					// filter: `${filter.attribute}:caseExactMatch:=${filter.value}`,
					scope: 'sub',
					attributes: this.attributes
				}, (err, search) => {
					if (err) {
						rej(err)
						// `search` must not be used after a rejection
						return
					}
					search.on('searchEntry', (entry) => {
						users.push(this.parseUser(entry))
					}).on('error', (err2) => {
						rej(err2)
						client.unbind()
						console.error('error in search lol', err2)
					}).on('end', () => {
						console.log(users)
						res(users)
						client.unbind()
					})
				}
			)
		})
	}

	/**
	 * flatten an LDAP SearchEntry into a plain attribute record
	 * (single-valued attributes become a string, multi-valued an array)
	 */
	private parseUser(usr: ldap.SearchEntry): Record<LDAPFields, string | Array<string> | undefined> {
		const user: Record<string, string | Array<string> | undefined> = { dn: usr.objectName ?? undefined }
		for (const attribute of usr.attributes) {
			user[attribute.type] = attribute.values.length === 1 ? attribute.values[0] : attribute.values
		}
		return user
	}
}

View File

@@ -0,0 +1,69 @@
import type Schema from 'libs/Schema'
import type { SchemaInfer } from 'libs/Schema'
import type DaoAdapter from 'models/Adapters/DaoAdapter'
/**
 * DaoAdapter that spreads an object's fields across several child adapters.
 */
export default class MultiAdapter<T extends Schema> implements DaoAdapter<T> {
	public constructor(
		public readonly schema: T,
		public readonly adapters: Array<{
			adapter: DaoAdapter<T>
			fields: Array<keyof T>
			/**
			 * a field from the main adapter that will backreference the child adapter
			 */
			childReference?: keyof T
		}> = []
	) {}

	/**
	 * create the object by handing each adapter only the fields it owns
	 *
	 * adapters declaring a `childReference` run first so the generated
	 * reference can be propagated to the remaining adapters through `obj`
	 *
	 * @param obj the object to create
	 * @returns the merge of every adapter's creation result
	 */
	public async create(obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
		let final: SchemaInfer<T> = {} as any
		// sort a copy: Array.prototype.sort mutates in place and its comparator
		// takes two arguments (the previous one-argument comparator relied on
		// engine-specific call order)
		const ordered = [...this.adapters].sort((a, b) => Number(!!b.childReference) - Number(!!a.childReference))
		// start by processing the childs
		for (const adapter of ordered) {
			const partialObject: Partial<SchemaInfer<T>> = {}
			for (const key of adapter.fields) {
				partialObject[key] = obj[key]
			}
			const res = await adapter.adapter.create!(partialObject as any)
			if (res && adapter.childReference) {
				// back-propagate the generated reference for the next adapters
				obj[adapter.childReference] = res[adapter.childReference]
			}
			final = {...final, ...res}
		}
		return final
	}

	// eslint-disable-next-line complexity
	// public async read(query?: Query<SchemaInfer<T>> | undefined): Promise<DBPull<T>> {
	// 	let final: SchemaInfer<T> = {} as any
	// 	// start by processing the childs
	// 	for (const adapter of this.adapters.sort((a) => a.childReference ? -1 : 1)) {
	// 		const partialObject: Partial<SchemaInfer<T>> = {}
	// 		for (const key of adapter.fields) {
	// 			partialObject[key] = obj[key]
	// 		}
	// 		const res = await adapter.adapter.read!(query)
	// 		if (res && adapter.childReference) {
	// 			obj[adapter.childReference] = res[adapter.childReference]
	// 		}
	// 		final = {...final, ...res}
	// 	}
	// 	// step 2 merge elements
	// 	return final
	// }

	public async update(_obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
		throw new Error('not implemented')
	}

	public async patch(_id: string, _obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
		throw new Error('not implemented')
	}

	public async delete(_obj: Partial<SchemaInfer<T>>): Promise<boolean> {
		throw new Error('not implemented')
	}
}

View File

@@ -0,0 +1,324 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { objectFind, objectKeys, objectLoop, objectMap, objectRemap, objectValues } from '@dzeio/object-util'
import type Schema from 'libs/Schema'
import type { SchemaInfer } from 'libs/Schema'
import type SchemaItem from 'libs/Schema/SchemaItem'
import crypto from 'node:crypto'
import PostgresClient from '../Clients/PostgresClient'
import { Sort, type Query } from '../Query'
import { filter } from './AdapterUtils'
import type { DBPull } from './DaoAdapter'
import DaoAdapter from './DaoAdapter'
/** column names that must be double-quoted because they are reserved words in Postgres */
const specialKeywords = ['user', 'end'] as const

/**
 * DaoAdapter storing rows in a Postgres table.
 */
export default class PostgresAdapter<T extends Schema> extends DaoAdapter<T['model']> {
	/** keys flagged `db:auto` in the schema — generated on create and used as the row identity */
	private id: Array<string> = []

	public constructor(
		/**
		 * the schema used by Postgres
		 */
		public readonly schema: T,
		/**
		 * the table name
		 */
		public readonly table: string,
		/**
		 * additionnal options to make the adapter work
		 */
		private readonly options?: {
			/**
			 * log the requests made to Postgres
			 */
			debug?: boolean
		}
	) {
		super()
		objectLoop(this.schema.model, (schema, key) => {
			if (schema.attributes.includes('db:auto')) {
				this.id.push(key)
			}
		})
	}

	/**
	 * insert a new row
	 *
	 * fills `db:created`/`db:updated`/`db:auto` fields, validates against the
	 * schema, then runs a parameterized INSERT
	 *
	 * @param obj the object to insert
	 * @returns the validated object, or null when the INSERT failed
	 * @throws when the data does not validate or an id cannot be generated
	 */
	public async create(obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
		// handle automated values
		objectLoop(this.schema.model, (item, key) => {
			if (item.attributes.includes('db:created') || item.attributes.includes('db:updated')) {
				// @ts-expect-error things get validated anyway
				obj[key] = new Date()
			} else if (item.attributes.includes('db:auto') && !obj[key]) {
				if (item.isOfType('')) {
					// @ts-expect-error things get validated anyway
					obj[key] = crypto.randomBytes(16).toString('hex')
				} else if (item.isOfType(123)) {
					// @ts-expect-error things get validated anyway
					obj[key] = crypto.randomBytes(16).readUint32BE()
				} else {
					throw new Error('cannot generate ID because it is not compatible with it')
				}
			}
		})
		// parse the data with the Schema
		const { object: clone, error} = this.schema.validate(obj)
		if (error) {
			console.error(error)
			throw new Error('Invalid data given to create the final object')
		}
		// prepare the database query
		const keys = objectKeys(clone)
			.map((it) => {
				if (specialKeywords.includes(it)) { // handle the special keyword
					return `"${it}"`
				}
				return it
			})
		const keysStr = keys.join(', ')
		// one positional placeholder per column ($1, $2, …)
		const values = keys.map((_, idx) => `$${idx+1}`).join(', ')
		const req = `INSERT INTO ${this.table} (${keysStr}) VALUES (${values});`
		const client = await PostgresClient.get()
		const params = objectMap(clone as any, (value, key) => this.valueToDB(key as any, value))
		if (this.options?.debug) {
			console.log(req, params)
		}
		// send to the database
		try {
			await client.execute(req, params)
		} catch (e) {
			console.log(e, req, params)
			return null
		}
		return this.schema.validate(clone).object ?? null
	}

	/**
	 * read rows from the table
	 *
	 * the whole table is fetched and the query (filters/sort/pagination) is
	 * applied in memory by `filter()`
	 *
	 * @param query the query to filter the dataset with
	 */
	// eslint-disable-next-line complexity
	public async read(query?: Query<SchemaInfer<T>> | undefined): Promise<DBPull<T>> {
		// prepare the request to the database based on the query parameters
		const req: Array<string> = ['SELECT', '*', 'FROM', this.table]
		const client = await PostgresClient.get()
		if (this.options?.debug) {
			console.log(req)
		}
		// read from the database
		let res: Array<Record<string, any>>
		try {
			res = await client.execute(`${req.join(' ')}`)
		} catch (error) {
			console.error('error running request')
			console.error(req)
			throw error
		}
		if (!res) {
			return {
				rows: 0,
				pageTotal: 0,
				page: 1,
				rowsTotal: 0,
				data: []
			}
		}
		if (this.options?.debug) {
			console.log('preEdits', res)
		}
		// post-process the data from the database
		const raw = res
			.map((obj) => {
				// remap to use system value instead of db values
				obj = objectRemap(this.schema.model, (_, key) => ({
					key,
					value: this.dbToValue(key as any, (obj as any)[key])
				}))
				// validate the schema
				const res = this.schema.validate(obj)
				if (res.object) {
					return res.object
				}
				console.log(res.error)
				return null
			})
			.filter((it): it is SchemaInfer<T> => !!it)
		// temp modification of comportement to use the new and better query system
		if ((!query || !query?.$sort) && objectFind(this.schema.model, (_, key) => key === 'created')) {
			// temp fix for the sorting algorithm: default-sort by `created` DESC
			if (!query) {
				// @ts-expect-error normal currently
				query = { $sort: { created: Sort.DESC }}
			} else {
				query.$sort = { created: Sort.DESC }
			}
		}
		let dataset = raw
		if (this.options?.debug) {
			console.log('preFilters', dataset)
		}
		if (query) {
			dataset = filter(query, dataset, this.options).filtered
		}
		return {
			rows: dataset.length,
			rowsTotal: res.length,
			page: 1,
			pageTotal: 1,
			// page: page,
			// pageTotal: pageLimit ? res.rowLength / pageLimit : 1,
			data: dataset
		}
	}

	public async update(obj: SchemaInfer<T>): Promise<SchemaInfer<T> | null> {
		return this.patch(obj)
	}

	public async patch(id: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null>
	public async patch(id: string, obj: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null>
	/**
	 * partially update a row identified by its `db:auto` id fields
	 *
	 * @param id the row id, or the full object when `obj` is omitted
	 * @param obj the fields to update
	 * @returns the row re-read after the update, or null on failure
	 * @throws when an id field cannot be resolved
	 */
	// eslint-disable-next-line complexity
	public async patch(id: string | Partial<SchemaInfer<T>>, obj?: Partial<SchemaInfer<T>>): Promise<SchemaInfer<T> | null> {
		if (!obj) {
			if (typeof id === 'string') {
				// a lone string id carries nothing to update
				return null
			}
			obj = {...id} as Partial<SchemaInfer<T>>
		}
		// update the updated time
		objectLoop(this.schema.model, (item, key) => {
			if (item.attributes.includes('db:updated')) {
				// @ts-expect-error things get validated anyway
				obj[key] = new Date()
			}
		})
		// build the request parts
		const parts: Array<string> = ['UPDATE', this.table, 'SET']
		const params: Array<any> = []
		// remove ids
		for (const tmp of this.id) {
			// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
			delete obj[tmp]
		}
		// map the items to update
		const keys = objectMap(obj as {}, (_, key, idx) => {
			if (specialKeywords.includes(key)) {
				return `"${key}"=$${idx+1}`
			}
			return `${key}=$${idx+1}`
		})
		parts.push(keys.join(', '))
		params.push(...objectValues(obj as {}))
		// filter by the ids
		parts.push('WHERE')
		const read: Partial<any> = {}
		for (let idx = 0; idx < this.id.length; idx++) {
			const key = this.id[idx] as string
			if (idx > 0) {
				parts.push('AND')
			}
			parts.push(`${key}=$${params.length+1}`)
			const value = obj[key] ?? (typeof id === 'string' ? id : id[key])
			// NOTE(review): `read` receives the DB-converted value while `params`
			// pushes the raw one — confirm both paths behave for JSON columns
			read[key] = this.valueToDB(key as any, value)
			if (!value) {
				throw new Error(`Missing id (${key})`)
			}
			params.push(value)
		}
		const req = parts.join(' ')
		const client = await PostgresClient.get()
		if (this.options?.debug) {
			console.log(req, params)
		}
		try {
			const res = await client!.execute(req, params)
			if (this.options?.debug) {
				console.log('post patch result', res, req)
			}
			// re-read so the caller gets the canonical row back
			return (await this.read(read)).data[0] ?? null
		} catch (e) {
			console.log(e, req, params)
		}
		return null
	}

	/**
	 * delete the rows matching every field of `obj`
	 *
	 * @param obj the fields identifying the rows to delete
	 * @returns true when the request succeeded
	 * @throws when the request fails
	 */
	public async delete(obj: SchemaInfer<T>): Promise<boolean> {
		// parameterized DELETE so values are escaped by the driver
		// (the previous version interpolated raw values into the SQL string)
		const parts = ['DELETE', 'FROM', this.table, 'WHERE']
		const params: Array<any> = []
		objectLoop(obj as {}, (value, key, idx) => {
			if (idx > 0) {
				parts.push('AND')
			}
			const column = specialKeywords.includes(key) ? `"${key}"` : key
			parts.push(`${column}=$${params.length+1}`)
			params.push(this.valueToDB(key as any, value))
		})
		const client = await PostgresClient.get()
		if (this.options?.debug) {
			console.log(parts.join(' '), params)
		}
		try {
			await client!.execute(parts.join(' '), params)
		} catch (e) {
			console.error(e, parts)
			throw e
		}
		return true
	}

	/**
	 * convert a schema value into something Postgres can store
	 * (object-typed fields are serialized to JSON strings)
	 */
	private valueToDB(key: keyof T, value: any): string | number | boolean | Date {
		const item: SchemaItem<unknown> = (this.schema.model as any)[key]
		if (item.isOfType({})) {
			return JSON.stringify(value)
		}
		return value
	}

	/**
	 * convert a raw database value back into its schema representation
	 */
	private dbToValue(key: keyof T, value: string | number | boolean | Date): any {
		const item: SchemaItem<unknown> = (this.schema.model as any)[key]
		// numeric columns can come back as strings from the driver
		if (item.isOfType(543) && typeof value === 'string') {
			return parseFloat(value)
		}
		if (item.isOfType({}) && typeof value === 'string') {
			return JSON.parse(value)
		}
		return value
	}
}

View File

@@ -0,0 +1,130 @@
import { objectRemap } from '@dzeio/object-util'
import Cassandra from 'cassandra-driver'
import { getEnv, requireEnv } from 'libs/Env'
import Client from './Client'
/**
 * database Client backed by Apache Cassandra
 */
export default class CassandraClient extends Client {
	// singleton instance handed out by `get()`
	private static instance: CassandraClient | null = null
	// underlying driver client; null/undefined while disconnected
	private client?: Cassandra.Client | null = null

	/**
	 * read the stored migration version from the `settings` table
	 *
	 * @returns the version, or -1 when the keyspace or table is missing
	 */
	public async getVersion(): Promise<number> {
		try {
			await this.execute(`USE ${requireEnv('CASSANDRA_DATABASE')}`)
		} catch (e) {
			// database not found
			console.log('database not found', e)
			return -1
		}
		try {
			const res = await this.execute('SELECT value FROM settings WHERE id = \'db_version\'')
			const value = res[0]?.value
			// presumably a legacy ISO-date version string — TODO confirm
			if (value.includes('T')) {
				return new Date(value).getTime()
			}
			return Number.parseInt(value)
		} catch (e) {
			// table does not exists
			console.log('Settings table does not exists', e)
			return -1
		}
	}

	/**
	 * persist the migration version into the `settings` table
	 */
	public override async setVersion(version: number): Promise<void> {
		await this.execute(`
UPDATE settings SET value = ? WHERE id = 'db_version';
		`.trim(), [version.toString()])
	}

	/**
	 * run a CQL query
	 *
	 * @param query the CQL text
	 * @param params positional or named parameters
	 * @param options driver query options
	 * @returns the result rows as plain records (empty when the query returns none)
	 * @throws when the client is not connected
	 */
	public async execute(query: string, params?: Array<unknown> | object, options?: Cassandra.QueryOptions): Promise<Array<Record<string, any>>> {
		if (!this.client || this.client.getState().getConnectedHosts().length === 0) {
			throw new Error('not connected to the database !')
		}
		const res = await this.client.execute(query, params, options)
		// if (query.includes('users'))
		// console.log(res)
		return res.rows?.map((it) => objectRemap(it.keys(), (key: string) => ({key: key, value: it.get(key)}))) ?? []
	}

	/**
	 * get the shared client instance (does NOT connect by itself)
	 */
	public static async get() {
		const client = CassandraClient.instance ?? new CassandraClient()
		CassandraClient.instance = client
		return client
	}

	/**
	 * connect to Cassandra using the CASSANDRA_* environment variables
	 *
	 * @throws when the auth method is unknown or the connection fails
	 */
	// eslint-disable-next-line complexity
	public async connect() {
		if (await this.isReady()) {
			return
		}
		console.log('connecting to cassandra')
		let authProvider: Cassandra.auth.AuthProvider|undefined
		const method = getEnv('CASSANDRA_AUTH_METHOD')
		if (method) {
			// eslint-disable-next-line max-depth
			switch (method.toLowerCase()) {
				case 'passwordauthenticator':
				case 'plaintext':
					authProvider = new Cassandra.auth.PlainTextAuthProvider(
						requireEnv('CASSANDRA_USERNAME'),
						requireEnv('CASSANDRA_PASSWORD')
					)
					break
				case 'dseplaintext':
					authProvider = new Cassandra.auth.DsePlainTextAuthProvider(
						requireEnv('CASSANDRA_USERNAME'),
						requireEnv('CASSANDRA_PASSWORD'),
						getEnv('CASSANDRA_AUTHORIZATION_ID')
					)
					break
				case 'none':
					break
				default:
					console.error('Please use a valid CASSANDRA_AUTH_METHOD value (none|plaintext|dseplaintext)')
					throw new Error('Please use a valid CASSANDRA_AUTH_METHOD value (none|plaintext|dseplaintext)')
			}
		}
		this.client = new Cassandra.Client({
			contactPoints: [requireEnv('CASSANDRA_CONTACT_POINT')],
			authProvider: authProvider as Cassandra.auth.AuthProvider,
			localDataCenter: getEnv('CASSANDRA_LOCAL_DATA_CENTER', 'datacenter1')
		})
		// this.client.on('log', (level, loggerName, message, furtherInfo) => {
		// console.log(`${level} - ${loggerName}: ${message}`);
		// })
		try {
			await this.client.connect()
		} catch (e) {
			this.client = null
			console.error(e)
			throw new Error('Error connecting to Cassandra')
		}
		// try {
		// await Migration.migrateToLatest()
		// } catch (e) {
		// this.migrated = -1
		// console.error(e)
		// throw new Error('An error occured while migrating')
		// }
		// this.migrated = 1
	}

	/**
	 * ready means connected to at least one host
	 */
	public async isReady(): Promise<boolean> {
		return !!this.client && this.client.getState().getConnectedHosts().length >= 1
	}
}

View File

@@ -0,0 +1,137 @@
import config from 'models/config'
import type MigrationObj from 'models/Migrations'
/** lifecycle states of a database client */
export enum ConnectionStatus {
	DISCONNECTED,
	MIGRATING,
	READY
}

/** static side of a {@link Client} implementation: a singleton accessor */
export interface ClientStatic<C extends Client = Client> {
	get(): Promise<C>
}
/**
 * base class of every database client
 *
 * implements the migration workflow; concrete subclasses provide the
 * connection handling and query execution
 */
export default abstract class Client {
	// current connection state (not updated by this base class itself)
	public status: ConnectionStatus = ConnectionStatus.DISCONNECTED

	/**
	 * -1: unknown
	 * 0: migrating
	 * 1: migrated
	 */
	public migrationStatus = -1

	/**
	 * get the current migration version
	 *
	 * -1 nothing/error
	 * 0+ current migration
	 */
	public abstract getVersion(): Promise<number>

	/** persist the migration version */
	public abstract setVersion(version: number): Promise<void>

	/** run a raw query and return its rows */
	public abstract execute(query: string, params?: Array<unknown> | object, ...options: Array<any>): Promise<Array<Record<string, unknown>>>

	/** open the connection to the database */
	public abstract connect(): Promise<void>

	/**
	 * Migrate the database to the latest version
	 */
	public async migrateToLatest() {
		const migrations = this.getMigrations()
		const latest = migrations[migrations.length - 1]
		if (!latest) {
			// no migration declared: nothing to do
			return
		}
		return await this.migrateTo(latest.date)
	}

	/** list the migrations declared in the application config */
	public getMigrations(): ReadonlyArray<MigrationObj> {
		return config.migrations as ReadonlyArray<MigrationObj>
	}

	/**
	 * migrate to a specific date in time
	 * @param date the date to try to migrate to
	 */
	public async migrateTo(date: number) {
		this.migrationStatus = 0
		let version = await this.getVersion()
		const migrations = this.getMigrations()
		// NOTE(review): a falsy version (0) is coerced to -1 — confirm 0 can never be a real version
		const time = !version ? -1 : version
		console.log('Current DB version', version)
		// same version, don't do anything
		if (date === time) {
			this.migrationStatus = 1
			return
		}
		console.log('\x1b[35mCurrent DB version', version, '\x1b[0m')
		// run up migrations
		if (time < date) {
			console.log('\x1b[35m', 'Migrating up to', date, '\x1b[0m')
			const migrationsToRun = migrations.filter((it) => it.date > time && it.date <= date)
			for (const migration of migrationsToRun) {
				console.log('\x1b[35m', 'Migrating from', version, 'to', migration.date, '\x1b[0m')
				await migration.up(this)
				await this.setVersion(migration.date)
				version = migration.date
			}
		} else { // run down migrations
			console.log('\x1b[35m', 'Migrating down to', date, '\x1b[0m')
			const migrationsToRun = migrations.filter((it) => it.date < time && it.date >= date)
				.toReversed()
			for (const migration of migrationsToRun) {
				console.log('\x1b[35m', 'Migrating from', version, 'to', migration.date, '\x1b[0m')
				// NOTE(review): after a down migration the stored version becomes the date
				// of the migration that was just reverted, not the previous one — confirm
				await migration.down?.(this)
				await this.setVersion(migration.date)
				version = migration.date
			}
		}
		console.log('\x1b[32mDone migrating\x1b[0m')
		this.migrationStatus = 1
	}

	// public getStatus(): Promise<ClientStatus>
	// public abstract isMigrated(): Promise<boolean>

	/**
	 * indicate if the client is ready for new requests (not if migrations are done or not)
	 */
	public abstract isReady(): Promise<boolean>

	/**
	 * wait until every migrations are done or fail
	 */
	public async waitForMigrations(): Promise<void> {
		if (this.migrationStatus === -1) {
			await this.migrateToLatest()
		}
		// NOTE(review): if a migration fails and leaves migrationStatus at 0 this loops forever — confirm
		while (!await this.isMigrated()) {
			console.log('waiting...')
			await new Promise((res) => setTimeout(res, 100))
		}
	}

	public async isMigrated(): Promise<boolean> {
		return this.migrationStatus === 1
		// if (this.migrationStatus < 1) {
		// return false
		// } else if (this.migrationStatus === 1) {
		// return
		// }
		// const migrations = this.getMigrations()
		// const last = migrations[migrations.length - 1]
		// if (!last) {
		// return true
		// }
		// return last.date === await this.getVersion()
	}
}

View File

@@ -0,0 +1,71 @@
import { wait } from 'libs/AsyncUtils'
import { getEnv, requireEnv } from 'libs/Env'
import pg from 'pg'
import Client from '.'
// alias because `pg` only has a default export
const Postgres = pg.Client

// biome-ignore lint/complexity/noStaticOnlyClass: <explanation>
/**
 * database Client backed by PostgreSQL
 */
export default class PostgresClient extends Client {
	// eagerly-created singleton handed out by `get()`
	private static instance: PostgresClient = new PostgresClient()
	// underlying driver client; null/undefined while disconnected
	private client?: pg.Client | null

	/**
	 * read the stored migration version from the `settings` table
	 *
	 * @returns the version, or -1 when it is missing or the table does not exist
	 */
	public override async getVersion(): Promise<number> {
		try {
			const res = await this.execute(`SELECT value FROM settings WHERE id = 'db_version'`)
			const value = res[0]?.value
			if (!value) {
				return -1
			}
			return Number.parseInt(value)
		} catch (e) {
			// table does not exists
			console.log('Settings table does not exists', e)
			return -1
		}
	}

	/**
	 * persist the migration version into the `settings` table
	 */
	public override async setVersion(version: number): Promise<void> {
		await this.execute(`UPDATE settings SET value = $1 WHERE id = 'db_version';`, [version.toString()])
	}

	/**
	 * run a SQL query
	 *
	 * note: the extra `options` are accepted for interface compatibility but unused
	 *
	 * @throws when the client is not connected
	 */
	public override async execute(query: string, params?: Array<unknown> | object, ...options: Array<any>): Promise<Array<Record<string, unknown>>> {
		if (!this.client || !await this.isReady()) {
			throw new Error('not connected')
		}
		const res = await this.client.query<Record<string, unknown>>(query, params)
		return res.rows
	}

	/**
	 * connect to Postgres using the POSTGRES_* environment variables
	 *
	 * @throws when the connection fails
	 */
	public override async connect(): Promise<void> {
		if (this.client) {
			return
		}
		this.client = new Postgres({
			host: requireEnv('POSTGRES_HOST'),
			user: requireEnv('POSTGRES_USERNAME'),
			password: requireEnv('POSTGRES_PASSWORD'),
			port: parseInt(getEnv('POSTGRES_PORT', '5432')),
			// NOTE(review): requireEnv is given a fallback here while getEnv is used
			// elsewhere for defaults — confirm requireEnv accepts a second argument
			database: requireEnv('POSTGRES_DATABASE', 'projectmanager'),
			// debug(connection, query, parameters, paramTypes) {
			// console.log(`${query}, ${parameters}`);
			// },
		})
			.on('end', () => {
				this.client = null
			})
		try {
			await this.client.connect()
		} catch (e) {
			this.client = null
			console.error(e)
			throw new Error('Error connecting to Postgres')
		}
	}

	public override async isReady(): Promise<boolean> {
		return !!this.client
	}

	/**
	 * get the shared Postgres client instance (does NOT connect by itself)
	 */
	public static async get() {
		return PostgresClient.instance
	}
}

View File

@@ -1,24 +1,19 @@
/**
* TODO:
* Add to `DaoItem` your model name
* Add to the function `initDao` the Dao
*/
/**
* the different Daos that can be initialized
*
* Touch this interface to define which key is linked to which Dao
*/
interface DaoItem {}
import type { default as Dao, default as DaoAdapter } from './Adapters/DaoAdapter'
import config from './config'
/**
* Class to get any DAO
*/
// biome-ignore lint/complexity/noStaticOnlyClass: <explanation>
export default class DaoFactory {
/**
* reference of the different Daos for a correct singleton implementation
* get the total list of daos available
* @returns return the list of daos available
*/
private static daos: Partial<DaoItem> = {}
public static getAll(): Record<string, DaoAdapter> {
return config.models
}
/**
* Get a a dao by its key
@@ -28,27 +23,15 @@ export default class DaoFactory {
* @param key the dao key to get
* @returns the Dao you want as a singleton
*/
public static get<Key extends keyof DaoItem>(key: Key): DaoItem[Key] {
if (!(key in this.daos)) {
const dao = this.initDao(key)
if (!dao) {
throw new Error(`${key} has no valid Dao`)
}
this.daos[key] = dao as DaoItem[Key]
}
return this.daos[key] as DaoItem[Key]
public static get<Key extends keyof typeof config['models']>(key: Key): typeof config['models'][Key] {
return config.models[key]
}
/**
* init a dao by its key, it does not care if it exists or not
*
* @param item the element to init
* @returns a new initialized dao or undefined if no dao is linked
* get the main client linked to migrations
* @returns the main client
*/
private static initDao(item: keyof DaoItem): any | undefined {
switch (item) {
default:
return undefined
}
public static async client(): ReturnType<(typeof config.mainClient)['get']> {
return config.mainClient.get()
}
}

View File

@@ -0,0 +1,35 @@
import type Client from 'models/Clients/Client'
import type Migration from './Migration'
/**
* A system migration
* the file need then to be imported into the `models/config.ts` file
*/
const migration = {
	/** SET THE DATE IN ISO FORMAT HERE */
	date: Date.UTC(2024, 3, 26, 11, 55, 28),

	/**
	 * apply the migration: run every request in order, failing fast on error
	 */
	async up(client: Client): Promise<boolean> {
		const requests: Array<string> = [
		]
		// plain sequential awaiting — the array holds strings, not promises,
		// so `for await` was a misuse of async iteration
		for (const request of requests) {
			await client.execute(request)
		}
		return true
	},

	/**
	 * revert the migration: best effort, a failing request is skipped
	 */
	async down(client: Client) {
		const requests: Array<string> = [
		]
		for (const request of requests) {
			try {
				await client.execute(request)
			} catch {
				// best effort: keep reverting the remaining requests
			}
		}
		return true
	},
} as Migration

export default migration

10
src/models/Migrations/Migration.d.ts vendored Normal file
View File

@@ -0,0 +1,10 @@
import type Client from 'models/Clients/Client'
/** a single database migration step */
export default interface Migration {
	/**
	 * timestamp in UTC
	 *
	 * orders the migrations and is stored as the db version once applied
	 */
	date: number
	/** apply the migration; resolves to true on success */
	up(client: Client): Promise<boolean>
	/** optionally revert the migration; resolves to true on success */
	down?(client: Client): Promise<boolean>
}

149
src/models/Query.ts Normal file
View File

@@ -0,0 +1,149 @@
/**
 * root-level operators of a {@link Query}: logical combinators plus
 * pagination and sorting controls
 */
interface QueryRootFilters<Obj extends Record<string, unknown>> {
	/**
	 * one of the results should be true to be true
	 */
	$or?: Array<QueryList<Obj>>
	/**
	 * every result should be false to be true
	 */
	$nor?: Array<QueryList<Obj>>
	/**
	 * (default) make sure every sub query returns true
	 */
	$and?: Array<QueryList<Obj>>
	/**
	 * at least one result must be false
	 */
	$nand?: Array<QueryList<Obj>>
	/**
	 * invert the result of the following query
	 */
	$not?: QueryList<Obj>
	/**
	 * skip this number of records from the start of the dataset
	 */
	$offset?: number
	/**
	 * limit the number of elements returned from the dataset
	 */
	$limit?: number
	/**
	 * sort the data with each key prioritized in order
	 *
	 * ex:
	 * {a: Sort.DESC, b: Sort.ASC}
	 *
	 * will sort first by a and, when equal, by b
	 */
	$sort?: SortInterface<Obj>
}
/**
 * Logical operators that can be used to filter data
 */
export type QueryLogicalOperator<Value> = {
	/**
	 * one of the results should be true to be true
	 */
	$or: Array<QueryValues<Value>>
} | {
	/**
	 * every result should be false to be true
	 */
	$nor: Array<QueryValues<Value>>
} | {
	/**
	 * at least one result must be false
	 */
	$nand: Array<QueryValues<Value>>
} | {
	/**
	 * (default) make sure every sub query returns true
	 */
	$and: Array<QueryValues<Value>>
} | {
	/**
	 * invert the result of the following query
	 */
	$not: QueryValues<Value>
}
/**
 * different comparison operators that can be used to filter data
 */
export type QueryComparisonOperator<Value> = {
	/**
	 * the remote source value must be strictly equal to the proposed value
	 */
	$eq: Value | null
} | {
	/**
	 * the remote source value must be greater than the proposed value
	 */
	$gt: number | Date
} | {
	/**
	 * the remote source value must be lesser than the proposed value
	 */
	$lt: number | Date
} | {
	/**
	 * the remote source value must be greater or equal than the proposed value
	 */
	$gte: number | Date
} | {
	/**
	 * the remote source value must be lesser or equal than the proposed value
	 */
	$lte: number | Date
} | {
	/**
	 * the remote source value must be one of the proposed values
	 */
	$in: Array<Value>
} | {
	/**
	 * (for string only) part of the proposed value must be in the remote source
	 */
	$inc: Value | null
}
/** per-field filters: each key of the object maps to the values it must match */
export type QueryList<Obj extends Record<string, unknown>> = {
	[Key in keyof Obj]?: QueryValues<Obj[Key]>
}

/**
 * Different values the element can take
 * if null it will check if it is NULL on the remote
 * if array it will check oneOf
 * if RegExp it will check if the regexp matches
 */
export type QueryValues<Value> = Value |
	null |
	Array<Value> |
	RegExp |
	QueryComparisonOperator<Value> |
	QueryLogicalOperator<Value>

/**
 * The query element that allows you to query different elements
 */
export type Query<Obj extends Record<string, unknown>> = QueryList<Obj> & QueryRootFilters<Obj>
/**
 * sorting interface with priority (the first key has the highest priority)
 */
export type SortInterface<Obj extends Record<string, unknown>> = {
	[Key in keyof Obj]?: Sort
}

export enum Sort {
	/**
	 * Sort the values from the lowest to the largest
	 */
	ASC,
	/**
	 * Sort the values from the largest to the lowest
	 */
	DESC
}

48
src/models/config.ts Normal file
View File

@@ -0,0 +1,48 @@
import Schema from 'libs/Schema'
import type Dao from './Adapters/DaoAdapter'
import PostgresAdapter from './Adapters/PostgresAdapter'
import CassandraClient from './Clients/CassandraClient'
import type { ClientStatic } from './Clients/Client'
import type Migration from './Migrations/Migration'
// @ts-ignore
// NOTE(review): this interface is not referenced by the `config` constant
// below (which is typed with `as const`) — confirm whether it should be enforced
interface Config {
	/**
	 * the main client is responsible for the Migration system
	 */
	mainClient: ClientStatic
	/**
	 * define every model of the application
	 */
	models: Record<string, Dao>
	/**
	 * Define the application migrations
	 */
	migrations: Array<Migration>
}
const config = {
	/**
	 * the main client is responsible for the Migration system
	 */
	mainClient: CassandraClient as ClientStatic<CassandraClient>,
	/**
	 * define every model of the application
	 */
	models: {
		// NOTE(review): placeholder model (empty schema, table 'pouet') — confirm before production
		session: new PostgresAdapter(new Schema({}), 'pouet')
		// session: new Dao(Session, new CassandraAdapter(Session, 'Session', 'id')),
	},
	/**
	 * Define the application migrations
	 */
	migrations: [
		// Migration20240326115528
	]
} as const

export default config