/** @author fenris */
type int = number;

/** @author fenris */
type float = number;

declare var process: any;

declare var require: any;

declare class Buffer {
    constructor(x: string, modifier?: string);
    static from(x: string, encoding?: string): any;
    toString(modifier?: string): string;
}
declare namespace lib_plankton.base {
    /** @author fenris */
    function environment(): string;
}
/** @author fenris */
type type_pseudopointer<type_value> = {
    value: type_value;
};

/** @author fenris */
declare function pseudopointer_null<type_value>(): type_pseudopointer<type_value>;

/** @author fenris */
declare function pseudopointer_make<type_value>(value: type_value): type_pseudopointer<type_value>;

/** @author fenris */
declare function pseudopointer_isset<type_value>(pseudopointer: type_pseudopointer<type_value>): boolean;

/** @author fenris */
declare function pseudopointer_read<type_value>(pseudopointer: type_pseudopointer<type_value>): type_value;

/** @author fenris */
declare function pseudopointer_write<type_value>(pseudopointer: type_pseudopointer<type_value>, value: type_value): void;
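/*
 * Usage sketch (assumption: illustrative only, not part of the original file). A pseudopointer
 * is simply a boxed value, so it can be shared and mutated through the box:
 *
 *     const counter = pseudopointer_make<int>(0);
 *     pseudopointer_write(counter, pseudopointer_read(counter) + 1);
 *     pseudopointer_isset(counter); // true; counter.value is now 1
 */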
/** @author fenris */
declare var instance_verbosity: int;
/**
 * @desc the ability to check for equality with another element of the same domain
 * @author fenris
 */
interface interface_collatable<type_value> {
    /** @author fenris */
    _collate(value: type_value): boolean;
}

/** @author fenris */
declare function instance_collate<type_value>(value1: (type_value & {
    _collate?: ((value: type_value) => boolean);
}), value2: type_value): boolean;

/**
 * @desc the ability to compare with another element of the same domain for determining if the first is "smaller than or equal to" the latter
 * @author fenris
 */
interface interface_comparable<type_value> {
    /** @author fenris */
    _compare(value: type_value): boolean;
}

/** @author fenris */
declare function instance_compare<type_value>(value1: (type_value & {
    _compare: ((value: type_value) => boolean);
}), value2: type_value): boolean;

/**
 * @desc the ability to create an exact copy
 * @author fenris
 */
interface interface_cloneable<type_value> {
    /** @author fenris */
    _clone(): type_value;
}

/** @author fenris */
declare function instance_clone<type_value>(value: (type_value & {
    _clone?: (() => type_value);
})): type_value;

/** @author fenris */
interface interface_hashable {
    /** @author fenris */
    _hash(): string;
}

/**
 * @desc the ability to generate a string out of the element, which identifies it to a high degree
 * @author fenris
 */
declare function instance_hash<type_value>(value: (type_value & {
    _hash?: (() => string);
})): string;

/** @author fenris */
interface interface_showable {
    /** @author fenris */
    _show(): string;
}

/**
 * @desc the ability to map the element to a textual representation (most likely not injective)
 * @author fenris
 */
declare function instance_show<type_value>(value: (type_value & {
    _show?: (() => string);
})): string;

/** @author frac */
interface interface_decorator<type_core> {
    /** @author frac */
    core: type_core;
}
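/*
 * Usage sketch (assumption: illustrative only). A class opts into the protocols above by
 * implementing the underscore methods; the free functions accept values where the method is
 * optional and presumably fall back to a default behaviour when it is absent:
 *
 *     class class_point implements interface_showable {
 *         constructor(private x: float, private y: float) {}
 *         _show(): string {return ("(" + this.x + "," + this.y + ")");}
 *     }
 *     instance_show(new class_point(1, 2)); // "(1,2)"
 */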
/** @author frac */
declare class class_observer {
    /** @author frac */
    protected counter: int;
    /** @author frac */
    protected actions: {
        [id: string]: (information: Object) => void;
    };
    /** @author frac */
    protected buffer: Array<Object>;
    /** @author frac */
    constructor();
    /** @author frac */
    empty(): boolean;
    /** @author frac */
    flush(): void;
    /** @author frac */
    set(id: string, action: (information: Object) => void): void;
    /** @author frac */
    del(id: string): void;
    /** @author frac */
    add(action: (information: Object) => void): void;
    /** @author frac */
    notify(information?: Object, delayed?: boolean): void;
    /** @author frac */
    rollout(): void;
}
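/*
 * Usage sketch (assumption: illustrative only; the exact buffering semantics of "notify" and
 * "rollout" are not documented in this file):
 *
 *     const observer = new class_observer();
 *     observer.set("logger", information => console.log("event", information));
 *     observer.notify({"kind": "started"});
 *     observer.del("logger");
 */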
/** @author frac */
declare class class_error extends Error {
    /** @author frac */
    protected suberrors: Array<Error>;
    /** @author frac */
    protected mess: string;
    /** @author frac */
    constructor(message: string, suberrors?: Array<Error>);
    /**
     * @override
     * @author frac
     */
    toString(): string;
}
declare namespace lib_plankton.base {
    /**
     * returns the current UNIX timestamp
     *
     * @author fenris
     */
    function get_current_timestamp(rounded?: boolean): float;
    /** */
    function object_merge(core: Record<string, any>, mantle: Record<string, any>): Record<string, any>;
}
declare module lib_plankton.pod {
    /** @author fenris */
    type type_pod<type_value> = {
        kind: ("empty" | "filled");
        value?: type_value;
    };
    /** @author fenris */
    function make_empty<type_value>(): type_pod<type_value>;
    /** @author fenris */
    function make_filled<type_value>(value: type_value): type_pod<type_value>;
    /**
     * whether the pod is filled
     *
     * @author fenris
     */
    function is_filled<type_value>(pod: type_pod<type_value>): boolean;
    /**
     * returns the value stored in the pod wrapper
     *
     * @author fenris
     */
    function cull<type_value>(pod: type_pod<type_value>): type_value;
    /**
     * passes an empty pod on unchanged or applies the given function to the value of a filled pod
     *
     * @author fenris
     */
    function propagate<type_value, type_value_>(pod: type_pod<type_value>, function_: ((value: type_value) => type_value_)): type_pod<type_value_>;
    /** @author fenris */
    function distinguish<type_value, type_result>(pod: type_pod<type_value>, function_empty: (() => type_result), function_filled: ((value: type_value) => type_result)): type_result;
    /** */
    function show<type_value>(pod: type_pod<type_value>, options?: {
        show_value?: ((value: type_value) => string);
    }): string;
}
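/*
 * Usage sketch (assumption: illustrative only). A pod is an explicit "maybe" value; the
 * functional API mirrors the usual option operations:
 *
 *     const pod = lib_plankton.pod.make_filled<int>(21);
 *     const doubled = lib_plankton.pod.propagate(pod, x => (x * 2));
 *     const text = lib_plankton.pod.distinguish(doubled, () => "nothing", x => ("got " + x));
 *     // text === "got 42"
 */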
declare module lib_plankton.pod {
    /** */
    class class_pod<type_value> {
        private subject;
        constructor(subject: type_pod<type_value>);
        tear(): type_pod<type_value>;
        static empty<type_value>(): class_pod<type_value>;
        static filled<type_value>(value: type_value): class_pod<type_value>;
        is_empty(): boolean;
        is_filled(): boolean;
        cull(): type_value;
        show(show_value?: any): string;
        toString(): string;
        propagate<type_value_>(function_: ((value: type_value) => type_value_)): class_pod<type_value_>;
        distinguish<type_result>(function_empty: (() => type_result), function_filled: ((value: type_value) => type_result)): type_result;
    }
}
/**
 * might be completely obsolete
 */
declare namespace lib_plankton.call {
    /** @author fenris */
    type type_promise<type_result, type_reason> = Promise<type_result>;
    /** @author fenris */
    function promise_reject<type_result, type_reason>(reason: type_reason): type_promise<type_result, type_reason>;
    /** @author fenris */
    function promise_resolve<type_result, type_reason>(result: type_result): type_promise<type_result, type_reason>;
    /** @author fenris */
    function promise_make<type_result, type_reason>(executor: (resolve: ((result?: type_result) => void), reject: ((reason?: type_reason) => void)) => void): type_promise<type_result, type_reason>;
    /** @author fenris */
    function promise_then_close<type_result, type_reason>(promise: type_promise<type_result, type_reason>, resolver: ((result: type_result) => void), rejector: ((reason: type_reason) => void)): void;
    /** @author fenris */
    function promise_then_append<type_result, type_reason, type_result_>(promise: type_promise<type_result, type_reason>, resolver: ((result: type_result) => type_promise<type_result_, type_reason>), rejector?: ((reason: type_reason) => type_promise<type_result_, type_reason>)): type_promise<type_result_, type_result>;
    /** @author fenris */
    function promise_all<type_result, type_reason>(promises: Array<type_promise<type_result, type_reason>>): type_promise<Array<type_result>, type_reason>;
    /** @author fenris */
    function promise_chain<type_result, type_reason>(promises: (Array<(input: type_result) => type_promise<type_result, type_reason>>), start?: type_result): type_promise<type_result, type_reason>;
    /** @author fenris */
    function promise_condense<type_element, type_reason>(promises: Array<() => type_promise<type_element, type_reason>>): type_promise<Array<type_element>, type_reason>;
    /** @author fenris */
    function promise_group<type_reason>(promises: Record<string, (() => type_promise<any, type_reason>)>, options?: {
        serial?: boolean;
    }): type_promise<Record<string, any>, type_reason>;
    /** @author fenris */
    function promise_wrap<type_result_inner, type_result_outer, type_reason>(promise: type_promise<type_result_inner, type_reason>, transformator_result: ((reason: type_result_inner) => type_result_outer), transformator_reason?: ((reason: type_reason) => type_reason)): type_promise<type_result_outer, type_reason>;
    /** @author fenris */
    function promise_attach<type_reason>(state: Record<string, any>, promise: type_promise<any, type_reason>, name: string): type_promise<Record<string, any>, type_reason>;
    /** @author fenris */
    function promise_delay<type_result, type_reason>(promise: type_promise<type_result, type_reason>, delay: int): type_promise<type_result, type_reason>;
}
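/*
 * Usage sketch (assumption: illustrative only; judging by the signature, "promise_condense"
 * runs the given thunks and collects their results, similar to Promise.all over lazily
 * started promises — whether they run serially or in parallel is not documented here):
 *
 *     lib_plankton.call.promise_condense<string, Error>([
 *         () => Promise.resolve("a"),
 *         () => Promise.resolve("b"),
 *     ])
 *     .then(values => console.log(values)); // ["a", "b"]
 */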
declare namespace lib_plankton.call {
    /** */
    class CancellablePromise<type_result> extends Promise<type_result> {
        /** */
        private cancelled;
        /** */
        private interval;
        /** */
        private subject;
        /** */
        constructor(executor: ((resolve: any, reject: any) => void));
        /** */
        private clear;
        /** */
        then<type_next_resolved, type_next_rejected>(onfulfilled?: ((value: type_result) => (type_next_resolved | PromiseLike<type_next_resolved>)), onrejected?: ((reason: any) => (type_next_rejected | PromiseLike<type_next_rejected>))): Promise<type_next_resolved | type_next_rejected>;
        /** */
        catch(x: any): Promise<type_result>;
        /** */
        cancel(): void;
    }
}
/**
 * initializer might be obsolete, since promises are reusable after having been resolved or rejected
 */
declare namespace lib_plankton.call {
    /** @author fenris */
    enum enum_initializer_state {
        initial = 0,
        waiting = 1,
        successful = 2,
        failed = 3
    }
    /** @author fenris */
    type type_initializer<type_result, type_reason> = {
        fetcher: (() => type_promise<type_result, type_reason>);
        state?: enum_initializer_state;
        queue: Array<{
            resolve: ((result?: type_result) => void);
            reject: ((reason?: type_reason) => void);
        }>;
        result?: type_result;
        reason?: type_reason;
    };
    /** @author fenris */
    function initializer_make<type_result, type_reason>(fetcher: (() => type_promise<type_result, type_reason>)): type_initializer<type_result, type_reason>;
    /** @author fenris */
    function initializer_reset<type_result, type_reason>(subject: type_initializer<type_result, type_reason>): void;
    /** @author fenris */
    function initializer_state<type_result, type_reason>(subject: type_initializer<type_result, type_reason>): enum_initializer_state;
    /** @author fenris */
    function initializer_get<type_result, type_reason>(subject: type_initializer<type_result, type_reason>): type_promise<type_result, type_reason>;
}
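/*
 * Usage sketch (assumption: illustrative only). An initializer memoizes a one-off asynchronous
 * setup step; judging by the "queue" field, callers arriving while the fetch is pending are
 * queued until it settles:
 *
 *     const init = lib_plankton.call.initializer_make<string, Error>(
 *         () => lib_plankton.file.read("banner.txt")
 *     );
 *     await lib_plankton.call.initializer_get(init); // fetches on first call
 *     await lib_plankton.call.initializer_get(init); // reuses the stored result
 */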
declare namespace lib_plankton.call {
    /** @author fenris */
    type type_deferral<type_input, type_output> = {
        representation: (input: type_input) => Promise<type_output>;
    };
    /**
     * @author fenris
     * @desc activates the deferral and handles its output according to a given procedure
     * @param {(value : type_value)=>void} procedure a function which receives the output of the deferral as argument
     */
    function deferral_use<type_input, type_output>(deferral: type_deferral<type_input, type_output>, input: type_input, procedure: (output: type_output) => void): void;
    /**
     * @author fenris
     * @desc creates a deferral-subject (similar to "new Promise", where "convey" reflects "resolve"/"reject")
     */
    function deferral_make<type_input, type_output>(handler: (input: type_input, convey: (output: type_output) => void) => void): type_deferral<type_input, type_output>;
    /**
     * @author fenris
     * @desc wraps a simple function into a deferral (similar to "Promise.resolve"/"Promise.reject")
     */
    function deferral_wrap<type_input, type_output>(function_: (input: type_input) => type_output): type_deferral<type_input, type_output>;
    /** @author fenris */
    function deferral_id<type_value>(): type_deferral<type_value, type_value>;
    /** @author fenris */
    function deferral_const<type_value>(value: type_value): type_deferral<type_value, type_value>;
    /** @author fenris */
    function deferral_delay<type_output>(output: type_output, delay: int): type_deferral<any, type_output>;
    /**
     * @author fenris
     * @desc connects two deferrals to form a new one; the output of the first is taken as input for the second
     * (similar to "Promise.then" when passing a function which returns a new promise)
     * @param {type_deferral<type_value1>} first a simple deferral
     * @param {(value1 : type_value1)=>type_deferral<type_value2>} second a function depending on a value, returning a deferral
     */
    function deferral_compose_serial<type_input, type_between, type_output>(first: type_deferral<type_input, type_between>, second: type_deferral<type_between, type_output>): type_deferral<type_input, type_output>;
    /** @author fenris */
    function deferral_compose_parallel<type_input, type_output_left, type_output_right>({ "left": deferral_left, "right": deferral_right, }: {
        left: type_deferral<type_input, type_output_left>;
        right: type_deferral<type_input, type_output_right>;
    }): type_deferral<type_input, {
        left: type_output_left;
        right: type_output_right;
    }>;
    /**
     * @author fenris
     * @desc repeatedly applied serial composition
     */
    function deferral_chain<type_value>(members: Array<type_deferral<type_value, type_value>>): type_deferral<type_value, type_value>;
}
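/*
 * Usage sketch (assumption: illustrative only). A deferral is a reusable asynchronous
 * computation from an input to an output; serial composition feeds the output of one into
 * the next:
 *
 *     const parse = lib_plankton.call.deferral_wrap<string, int>(text => parseInt(text, 10));
 *     const double = lib_plankton.call.deferral_wrap<int, int>(x => (x * 2));
 *     const pipeline = lib_plankton.call.deferral_compose_serial(parse, double);
 *     lib_plankton.call.deferral_use(pipeline, "21", result => console.log(result)); // 42
 */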
declare namespace lib_plankton.call {
    /** @author fenris */
    class class_deferral<type_input, type_output> {
        /** @author fenris */
        private subject;
        /** @author fenris */
        private constructor();
        /** @author fenris */
        private static _cram;
        /** @author fenris */
        private static _tear;
        /** @author fenris */
        static make<type_input, type_output>(handler: (input: type_input, convey: (value: type_output) => void) => void): class_deferral<type_input, type_output>;
        /** @author fenris */
        use(input: type_input, procedure: (value: type_output) => void): void;
        /** @author fenris */
        compose_serial<type_output_>(second: class_deferral<type_output, type_output_>): class_deferral<type_input, type_output_>;
        /** @author fenris */
        static chain<type_value>(members: Array<class_deferral<type_value, type_value>>): class_deferral<type_value, type_value>;
        /** @author fenris */
        static wrap<type_input, type_output>(function_: (input: type_input) => type_output): class_deferral<type_input, type_output>;
        /** @author fenris */
        static const_<type_value>(value: type_value): class_deferral<type_value, type_value>;
        /** @author fenris */
        static delay<type_output>(output: type_output, delay: int): class_deferral<any, type_output>;
    }
}
declare namespace lib_plankton.call {
    /**
     * converts the "arguments"-map into an array
     *
     * @param {Object} args
     * @author fenris
     */
    function args2list(args: any): Array<any>;
    /**
     * just the empty function; useful for some callbacks etc.
     *
     * @author fenris
     */
    function nothing(): void;
    /**
     * just the identity; useful for some callbacks etc.; defined as a function instead of a const in order to use type parameters
     *
     * @author fenris
     */
    function id<type_value>(x: type_value): type_value;
    /**
     * the constant function; returns a function which ignores its argument and always yields the given value; useful for some callbacks etc.
     *
     * @author fenris
     */
    function const_<type_value>(x: type_value): ((y: any) => type_value);
    /**
     * composes two functions (i.e. returns a function that returns the result of the successive execution of both input functions)
     *
     * @param {function} function_f
     * @param {function} function_g
     * @author fenris
     */
    function compose<type_x, type_y, type_z>(function_f: ((type_x: any) => type_y), function_g: ((type_y: any) => type_z)): ((value: type_x) => type_z);
    /**
     * transforms a function with sequential input to a function with leveled input; example: add(2,3) = curryfy(add)(2)(3)
     *
     * @param {function} f
     * @return {function} the curried version of the input function
     * @author fenris
     */
    function curryfy(f: Function): Function;
    /** @author fenris */
    function convey(value: any, functions: Array<Function>): any;
    /** @author fenris */
    function timeout(procedure: (() => void), delay_in_seconds: float): int;
    /**
     * Promise version of "setTimeout"
     *
     * @author fenris
     */
    function defer<type_result>(seconds: float, action: (() => type_result)): Promise<type_result>;
    /**
     * checks whether a value counts as "defined"
     *
     * @author neuc
     */
    function is_def<type_value>(obj: type_value, options?: {
        null_is_valid?: boolean;
    }): boolean;
    /**
     * returns the value if it is set (and, when a type is specified, of the correct type); otherwise returns default_value
     *
     * @author neuc
     */
    function def_val(value: any, default_value: any, options?: {
        type?: (null | string);
        null_is_valid?: boolean;
    }): any;
    /**
     * exposes the access to an attribute of a class as a regular function; useful for processing lists of objects
     *
     * @param {string} name the name of the attribute
     * @return {function}
     * @author fenris
     */
    function attribute<type_object, type_attribute>(name: string): ((object: type_object) => type_attribute);
    /**
     * exposes a method of a class as a regular function; useful for processing lists of objects
     *
     * @param {string} name the name of the method
     * @return {function}
     * @author fenris
     */
    function method<type_object, type_output>(name: string): ((object: type_object) => type_output);
    /** @author fenris */
    type type_coproduct = {
        kind: string;
        data?: any;
    };
    /** @author fenris */
    function distinguish<type_output>(coproduct: type_coproduct, handlers: Record<string, ((data?: any) => type_output)>, options?: {
        fallback?: (null | ((coproduct?: type_coproduct) => type_output));
    }): type_output;
    /**
     * for rate_limit_check
     *
     * @author fenris
     */
    type type_mana_snapshot = {
        timestamp: float;
        value: float;
    };
    /**
     * rate limiting algorithm, based on the idea of mana (magic power) in video games:
     * - an actor has a fixed mana capacity, i.e. the maximum amount of available power
     * - an actor has a fixed rate of mana regeneration, i.e. how fast the power is filled up (linear growth)
     * - an action has a defined mana heft, i.e. how much power is required and deducted in order to execute it
     * - mana states are represented by snapshots, i.e. the amount of power at a certain point in time
     *
     * @author fenris
     */
    function rate_limit_check(setup: {
        capacity: float;
        regeneration_rate: float;
        get_snapshot: (() => Promise<(null | type_mana_snapshot)>);
        set_snapshot: ((snapshot: type_mana_snapshot) => Promise<void>);
        update_snapshot: ((timestamp: float, value_increment: float) => Promise<void>);
    }, heft: float): Promise<{
        granted: boolean;
        seconds: (null | float);
    }>;
}
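/*
 * Usage sketch (assumption: illustrative only). "distinguish" dispatches on the "kind" field of
 * a coproduct value and passes the attached "data" to the matching handler; the handler map and
 * values below are hypothetical:
 *
 *     const shape: lib_plankton.call.type_coproduct = {"kind": "circle", "data": {"radius": 2}};
 *     const area = lib_plankton.call.distinguish<float>(
 *         shape,
 *         {
 *             "circle": data => (Math.PI * data.radius * data.radius),
 *             "square": data => (data.side * data.side),
 *         },
 *         {"fallback": () => 0}
 *     );
 */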
declare namespace lib_plankton.email {
    /** */
    function send(smtp_credentials: {
        host: string;
        port: int;
        username: string;
        password: string;
    }, sender: string, receivers: Array<string>, subject: string, content: string): Promise<void>;
}
declare namespace lib_plankton.log {
    /** */
    enum enum_level {
        debug = 0,
        info = 1,
        notice = 2,
        warning = 3,
        error = 4
    }
    /** */
    function level_order(level1: enum_level, level2: enum_level): boolean;
    /** */
    function level_show(level: enum_level): string;
    /** */
    type type_entry = {
        level: enum_level;
        incident: string;
        details: Record<string, any>;
    };
}
/**
 * @deprecated
 * @todo remove
 */
declare namespace lib_plankton.log {
    function level_push(level: int): void;
    function level_pop(): void;
    function indent_push(indent: int): void;
    function indent_pop(): void;
    function indent_inc(): void;
    function indent_dec(): void;
    /** @author fenris */
    function write({ "message": message, "type": type, "prefix": prefix, "level": level, "indent": indent, }: {
        message?: string;
        type?: string;
        prefix?: string;
        level?: int;
        indent?: int;
    }): void;
}
declare namespace lib_plankton.log {
    /** */
    abstract class class_channel {
        /** */
        abstract add(entry: type_entry): void;
    }
}

declare namespace lib_plankton.log {
    /**
     * output for writing log entries to stdout
     */
    class class_channel_stdout extends class_channel {
        /** */
        add(entry: type_entry): void;
    }
}

declare namespace lib_plankton.log {
    /** */
    class class_channel_file extends class_channel {
        /**
         * the path of the log file
         */
        private path;
        /** */
        private human_readable;
        /**
         * [constructor]
         */
        constructor(path: string, human_readable: boolean);
        /** */
        add(entry: type_entry): void;
    }
}

declare namespace lib_plankton.log {
    /** */
    class class_channel_email extends class_channel {
        /** */
        private smtp_credentials;
        /** */
        private sender;
        /** */
        private receivers;
        /**
         * [constructor]
         */
        constructor(smtp_credentials: {
            host: string;
            port: int;
            username: string;
            password: string;
        }, sender: string, receivers: Array<string>);
        /** */
        add(entry: type_entry): void;
    }
}

declare namespace lib_plankton.log {
    /**
     * output for desktop notifications via "libnotify"
     */
    class class_channel_notify extends class_channel {
        /** */
        add(entry: type_entry): void;
    }
}

declare namespace lib_plankton.log {
    /**
     * decorator for filtering out log entries below a certain level threshold
     */
    class class_channel_minlevel extends class_channel {
        /** */
        private core;
        /** */
        private threshold;
        /** */
        constructor(core: class_channel, threshold: enum_level);
        /** */
        add(entry: type_entry): void;
    }
}

declare namespace lib_plankton.log {
    /** */
    function channel_make(description: {
        kind: string;
        data?: {
            [key: string]: any;
        };
    }): class_channel;
    /** */
    type type_configuration = Array<class_channel>;
    /** */
    function conf_default(): type_configuration;
}
declare namespace lib_plankton.log {
    /**
     * pushes a new configuration on the stack and activates it
     */
    function conf_push(channels: type_configuration): void;
    /**
     * pops the currently active configuration from the stack
     */
    function conf_pop(): void;
    /**
     * consumes a log entry, i.e. sends it to the currently active outputs
     */
    function add(entry: type_entry): void;
    /** */
    function debug(incident: string, details?: Record<string, any>): void;
    /** */
    function info(incident: string, details?: Record<string, any>): void;
    /** */
    function notice(incident: string, details?: Record<string, any>): void;
    /** */
    function warning(incident: string, details?: Record<string, any>): void;
    /** */
    function error(incident: string, details?: Record<string, any>): void;
}

declare namespace lib_plankton.log {
}
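/*
 * Usage sketch (assumption: illustrative only; the "kind"/"data" values accepted by
 * "channel_make" are not documented here, so a channel is constructed directly instead):
 *
 *     lib_plankton.log.conf_push([
 *         new lib_plankton.log.class_channel_minlevel(
 *             new lib_plankton.log.class_channel_stdout(),
 *             lib_plankton.log.enum_level.info
 *         ),
 *     ]);
 *     lib_plankton.log.info("server_started", {"port": 8080});
 */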
declare namespace lib_plankton.code {
    /** @author fenris */
    interface interface_code<type_from, type_to> {
        /** @author fenris */
        encode(x: type_from): type_to;
        /** @author fenris */
        decode(x: type_to): type_from;
    }
}

declare namespace lib_plankton.code {
    /** @author fenris */
    type type_code<type_from, type_to> = {
        /** @author fenris */
        encode: (x: type_from) => type_to;
        /** @author fenris */
        decode: (x: type_to) => type_from;
    };
}

declare namespace lib_plankton.code {
    /** @author fenris */
    function inverse_encode<type_from, type_to>(decode: (to: type_to) => type_from, to: type_to): type_from;
    /** @author fenris */
    function inverse_decode<type_from, type_to>(encode: (from: type_from) => type_to, from: type_from): type_to;
}

declare namespace lib_plankton.code {
    /** @author fenris */
    class class_code_inverse<type_from, type_to> implements interface_code<type_to, type_from> {
        /** @author fenris */
        protected subject: interface_code<type_from, type_to>;
        /** @author fenris */
        constructor(subject: interface_code<type_from, type_to>);
        /**
         * @implementation
         * @author fenris
         */
        encode(to: type_to): type_from;
        /**
         * @implementation
         * @author fenris
         */
        decode(from: type_from): type_to;
    }
}

declare namespace lib_plankton.code {
    /** @author fenris */
    function pair_encode<type_from, type_between, type_to>(encode_first: (from: type_from) => type_between, encode_second: (between: type_between) => type_to, from: type_from): type_to;
    /** @author fenris */
    function pair_decode<type_from, type_between, type_to>(decode_first: (between: type_between) => type_from, decode_second: (to: type_to) => type_between, to: type_to): type_from;
}

declare namespace lib_plankton.code {
    /** @author fenris */
    class class_code_pair<type_from, type_between, type_to> implements interface_code<type_from, type_to> {
        /** @author fenris */
        protected first: interface_code<type_from, type_between>;
        /** @author fenris */
        protected second: interface_code<type_between, type_to>;
        /** @author fenris */
        constructor(first: interface_code<type_from, type_between>, second: interface_code<type_between, type_to>);
        /**
         * @implementation
         * @author fenris
         */
        encode(from: type_from): type_to;
        /**
         * @implementation
         * @author fenris
         */
        decode(to: type_to): type_from;
    }
}

declare namespace lib_plankton.code {
    /** @author fenris */
    function chain_encode(encode_links: Array<(from: any) => any>, from: any): any;
    /** @author fenris */
    function chain_decode(decode_links: Array<(to: any) => any>, to: any): any;
}

declare namespace lib_plankton.code {
    /** @author fenris */
    class class_code_chain implements interface_code<any, any> {
        /** @author fenris */
        protected links: Array<interface_code<any, any>>;
        /** @author fenris */
        constructor(links: Array<interface_code<any, any>>);
        /**
         * @implementation
         * @author fenris
         */
        encode(from: any): any;
        /**
         * @implementation
         * @author fenris
         */
        decode(to: any): any;
    }
}

declare namespace lib_plankton.code {
    /** @author Christian Fraß <frass@greenscale.de> */
    type type_flatten_from = Array<{
        [name: string]: any;
    }>;
    /** @author Christian Fraß <frass@greenscale.de> */
    type type_flatten_to = {
        keys: Array<string>;
        data: Array<Array<any>>;
    };
    /** @author Christian Fraß <frass@greenscale.de> */
    function flatten_encode(from: type_flatten_from, keys?: Array<string>): type_flatten_to;
    /** @author Christian Fraß <frass@greenscale.de> */
    function flatten_decode(to: type_flatten_to): type_flatten_from;
}

declare namespace lib_plankton.code {
    /** @author fenris */
    class class_code_flatten implements interface_code<type_flatten_from, type_flatten_to> {
        /** @author fenris */
        constructor();
        /**
         * @implementation
         * @author fenris
         */
        encode(x: type_flatten_from): type_flatten_to;
        /**
         * @implementation
         * @author fenris
         */
        decode(x: type_flatten_to): type_flatten_from;
    }
}
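/*
 * Usage sketch (assumption: illustrative only). "flatten_encode" turns a list of records into a
 * compact keys/rows representation, and "flatten_decode" reverses it; the exact row ordering is
 * assumed here:
 *
 *     const table = lib_plankton.code.flatten_encode([
 *         {"id": 1, "name": "alice"},
 *         {"id": 2, "name": "bob"},
 *     ]);
 *     // table ≈ {"keys": ["id", "name"], "data": [[1, "alice"], [2, "bob"]]}
 *     const records = lib_plankton.code.flatten_decode(table);
 */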
declare namespace lib_plankton.json {
    /** */
    type type_source = any;
    /** */
    type type_target = string;
    /** @author fenris */
    export function encode(source: type_source, options?: {
        formatted?: boolean;
    }): type_target;
    /** @author fenris */
    export function decode(target: type_target): type_source;
    /** @author fenris */
    export function implementation_code(): lib_plankton.code.type_code<type_source, type_target>;
    export {};
}

declare namespace lib_plankton.json {
    /** @author fenris */
    class class_json implements lib_plankton.code.interface_code<any, string> {
        /** @author fenris */
        constructor();
        /**
         * @implementation
         * @author fenris
         */
        encode(x: any): string;
        /**
         * @implementation
         * @author fenris
         */
        decode(x: string): any;
    }
}
declare namespace lib_plankton.file {
    /** @author fenris */
    function exists(path: string): Promise<boolean>;
    /** @author fenris */
    function read(path: string): Promise<string>;
    /** @author fenris */
    function read_buffer(path: string): Promise<Buffer>;
    /** @author fenris */
    function read_stdin(): Promise<string>;
    /** @author fenris */
    function write(path: string, content: string, options?: {
        encoding?: string;
    }): Promise<void>;
    /** @author fenris */
    function write_buffer(path: string, content: Buffer, options?: {}): Promise<void>;
    /** */
    function delete_(path: string): Promise<void>;
}
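/*
 * Usage sketch (assumption: illustrative only; the file name is hypothetical):
 *
 *     async function update_config(): Promise<void> {
 *         if (await lib_plankton.file.exists("conf.json")) {
 *             const data = lib_plankton.json.decode(await lib_plankton.file.read("conf.json"));
 *             data["updated"] = lib_plankton.base.get_current_timestamp();
 *             await lib_plankton.file.write("conf.json", lib_plankton.json.encode(data, {"formatted": true}));
 *         }
 *     }
 */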
declare namespace lib_plankton.object {
    /**
     * @author fenris
     * @deprecated use the "??" operator instead
     */
    function fetch<type_value>(object: Object, fieldname: string, options?: {
        fallback?: type_value;
        escalate?: boolean;
    }): type_value;
    /** */
    function map<type_from, type_to>(object_from: Record<string, type_from>, transformator: ((value_from: type_from, key?: string) => type_to)): Record<string, type_to>;
    /**
     * returns an object containing only certain entries of the input object
     */
    function filter<type_value>(object_from: Record<string, type_value>, predicate: ((value_from: type_value, key?: string) => boolean)): Record<string, type_value>;
    /**
     * converts an array with entries of the form {key,value} into a corresponding object
     *
     * @deprecated use Object.fromEntries instead!
     */
    function from_array<type_value>(array: Array<{
        key: string;
        value: type_value;
    }>): Record<string, type_value>;
    /**
     * converts an object into a corresponding array with entries of the form {key,value}
     *
     * @deprecated use Object.entries instead!
     */
    function to_array<type_value>(object: Record<string, type_value>): Array<{
        key: string;
        value: type_value;
    }>;
    /**
     * returns the list of keys of an object
     *
     * @deprecated use Object.keys instead!
     */
    function keys(object: Record<string, any>): Array<string>;
    /**
     * returns the list of values of an object
     *
     * @deprecated use Object.values instead!
     */
    function values<type_value>(object: Record<string, type_value>): Array<type_value>;
    /**
     * reads a tree-like object at a certain position
     */
    function path_read<type_value>(object: Object, path: string, options?: {
        fallback?: type_value;
        escalate?: boolean;
    }): type_value;
    /**
     * writes a value to a certain position in a tree-like object
     */
    function path_write<type_value>(object: Object, path: string, value: type_value, construct?: boolean): void;
    /**
     * checks whether an object matches a certain pattern
     *
     * @deprecated not very useful
     */
    function matches<type_value_object, type_value_pattern>(object: Record<string, type_value_object>, pattern: Record<string, type_value_pattern>, options?: {
        collate?: ((value_pattern: type_value_pattern, value_object: type_value_object) => boolean);
    }): boolean;
    /**
     * creates a projection of a tree-like object into a flat list-like object
     */
    function flatten(value: any, options?: {
        separator?: string;
        key_for_array_element?: ((index: int) => string);
    }): Record<string, any>;
    /**
     * @deprecated use Object.assign instead!
     */
    function clash(x: Record<string, any>, y: Record<string, any>, options?: {
        overwrite?: boolean;
        hooks?: {
            existing?: ((key?: string, value_old?: any, value_new?: any) => void);
        };
    }): Record<string, any>;
    /**
     * @deprecated use Object.assign instead!
     */
    function patch(core: (null | Record<string, any>), mantle: (null | Record<string, any>), options?: {
        deep?: boolean;
        path?: (null | string);
    }): void;
    /**
     * @deprecated use Object.assign instead!
     */
    function patched(core: Record<string, any>, mantle: Record<string, any>, options?: {
        deep?: boolean;
    }): Record<string, any>;
    /**
     * @deprecated use Object.assign instead!
     */
    function attached(object: Record<string, any>, key: string, value: any): Record<string, any>;
    /** @author fenris */
    function copy(object: Record<string, any>): Record<string, any>;
}
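/*
 * Usage sketch (assumption: illustrative only; the default path separator is assumed to be "."):
 *
 *     const settings: Record<string, any> = {};
 *     lib_plankton.object.path_write(settings, "server.port", 8080, true);
 *     const port = lib_plankton.object.path_read<int>(settings, "server.port", {"fallback": 80});
 *     const flat = lib_plankton.object.flatten(settings); // e.g. {"server.port": 8080}
 */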
declare namespace lib_plankton.pair {
    /** */
    type type_pair<type_first, type_second> = {
        first: type_first;
        second: type_second;
    };
}

declare namespace lib_plankton.pair {
    /** */
    function swap<type_first, type_second>(pair: type_pair<type_first, type_second>): type_pair<type_second, type_first>;
    /** */
    function show<type_first, type_second>(pair: type_pair<type_first, type_second>, options?: {
        show_first?: ((first: type_first) => string);
        show_second?: ((second: type_second) => string);
    }): string;
}
declare namespace lib_plankton.list {
    /** */
    type type_separation<type_element> = {
        yes: Array<type_element>;
        no: Array<type_element>;
    };
    /** */
    type type_result_max<type_element, type_value> = (null | {
        index: int;
        element: type_element;
        value: type_value;
    });
}

declare namespace lib_plankton.list {
    /**
     * returns a list of integer numbers in a certain range
     */
    function range(from: int, to: int, options?: {
        step?: int;
    }): Array<int>;
    /**
     * returns a list of consecutive integer numbers, beginning with 0
     */
    function sequence(length: int): Array<int>;
    /** */
    function from_iterator<type_element>(iterator: Iterator<type_element>): Array<type_element>;
    /** */
    function is_empty<type_element>(list: Array<type_element>): boolean;
    /**
     * combines two lists into one
     *
     * @param {boolean} [options.cut] whether the result list will be as long as the shortest input list, or an exception is thrown if they have different lengths; default: true
     */
    function zip<type_element_first, type_element_second>(list_first: Array<type_element_first>, list_second: Array<type_element_second>, options?: {
        cut?: boolean;
    }): Array<lib_plankton.pair.type_pair<type_element_first, type_element_second>>;
    /**
     * checks whether two lists are equal
     *
     * @todo define common function "equals" and default predicate to
     */
    function equals<type_element>(list1: Array<type_element>, list2: Array<type_element>, options?: {
        collate_element?: ((element1: type_element, element2: type_element) => boolean);
    }): boolean;
    /**
     * creates a list with the elements from the input list which fulfil a certain predicate (~ filter)
     */
    function keep<type_element>(list: Array<type_element>, predicate: ((element: type_element) => boolean)): Array<type_element>;
    /**
     * creates a list with the elements from the input list which do not fulfil a certain predicate (~ dual filter)
     */
    function drop<type_element>(list: Array<type_element>, predicate: ((element: type_element) => boolean)): Array<type_element>;
    /** */
    function filter_inplace<type_element>(list: Array<type_element>, predicate: ((element: type_element) => boolean)): void;
    /**
     * returns a list with no duplicates (like the unix tool "uniq")
     */
    function cleaned<type_element>(list: Array<type_element>, options?: {
        collate_element?: ((x: type_element, y: type_element) => boolean);
    }): Array<type_element>;
    /**
     * creates a binary partition of the list according to a given predicate
     */
    function separate<type_element>(list: Array<type_element>, predicate: ((element: type_element) => boolean)): type_separation<type_element>;
    /** */
    function clone<type_element>(list: Array<type_element>): Array<type_element>;
    /** */
    function reversed<type_element>(list: Array<type_element>): Array<type_element>;
    /**
     * @todo use Array.toSorted?
     */
    function sorted<type_element>(list: Array<type_element>, options: {
        compare_element?: ((element1: type_element, element2: type_element) => boolean);
    }): Array<type_element>;
    /**
     * splits the list into chunks of equal size
     */
    function chop<type_element>(list: Array<type_element>, chunk_size: int): Array<Array<type_element>>;
    /** */
    function group<type_element>(list: Array<type_element>, collate_element: ((x: type_element, y: type_element) => boolean)): Array<Array<type_element>>;
    /** */
    function has<type_element>(list: Array<type_element>, predicate: ((element: type_element) => boolean)): boolean;
    /**
     * @deprecated use Array.includes or Array.some
     */
    function contains<type_element>(list: Array<type_element>, element: type_element, options: {
        collate_element?: ((element1: type_element, element2: type_element) => boolean);
    }): boolean;
    /**
     * retrieves the element of the list (and its index) which has the maximum value
     */
    function max<type_element, type_value>(list: Array<type_element>, target_function: ((element: type_element) => type_value), options: {
        compare_value: ((value1: type_value, value2: type_value) => boolean);
    }): type_result_max<type_element, type_value>;
    /**
     * retrieves the element of the list (and its index) which has the minimum value
     */
    function min<type_element, type_value>(list: Array<type_element>, target_function: (element: type_element) => type_value, options: {
        compare_value: ((value1: type_value, value2: type_value) => boolean);
    }): type_result_max<type_element, type_value>;
    /**
     * implements the idea of arithmetic distribution like in "(a+b)·(c+d) = (a·c)+(a·d)+(b·c)+(b·d)"
     * example: distribute([[1,2],[3],[4,5,6]]) = [[1,3,4],[1,3,5],[1,3,6],[2,3,4],[2,3,5],[2,3,6]]
     */
    function distribute<type_element>(lists: Array<Array<type_element>>): Array<Array<type_element>>;
    /** */
    function contrast<type_left, type_right>(list_left: Array<type_left>, extract_key_left: ((left: type_left) => string), list_right: Array<type_right>, extract_key_right: ((right: type_right) => string)): {
        both: Array<{
            key: string;
            left: type_left;
            right: type_right;
        }>;
        only_left: Array<{
            key: string;
            left: type_left;
        }>;
        only_right: Array<{
            key: string;
            right: type_right;
        }>;
    };
}
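/*
 * Usage sketch (assumption: illustrative only). "contrast" joins two lists by a key and reports
 * matching and one-sided entries separately:
 *
 *     const result = lib_plankton.list.contrast(
 *         [{"id": "a", "x": 1}, {"id": "b", "x": 2}], left => left.id,
 *         [{"id": "b", "y": 3}, {"id": "c", "y": 4}], right => right.id
 *     );
 *     // result.both holds the entry for "b"; result.only_left: "a"; result.only_right: "c"
 */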
declare namespace lib_plankton.conf {
    /** */
    type type_schema = ({
        enum?: Array<any>;
        default?: any;
        description?: string;
    } | {
        type: "null";
        description?: string;
    } | {
        type: "boolean";
        nullable?: boolean;
        enum?: Array<boolean>;
        default?: boolean;
        description?: string;
    } | {
        type: "integer";
        nullable?: boolean;
        enum?: Array<int>;
        default?: int;
        description?: string;
    } | {
        type: "number";
        nullable?: boolean;
        enum?: Array<number>;
        default?: number;
        description?: string;
    } | {
        type: "string";
        nullable?: boolean;
        enum?: Array<string>;
        default?: string;
        description?: string;
    } | {
        type: "array";
        nullable?: boolean;
        items: type_schema;
        enum?: Array<Array<any>>;
        default?: Array<any>;
        description?: string;
    } | {
        type: "object";
        nullable?: boolean;
        properties?: Record<string, type_schema>;
        required?: Array<string>;
        additionalProperties?: (false | type_schema);
        enum?: Array<Record<string, any>>;
        default?: Record<string, any>;
        description?: string;
    } | {
        anyOf: Array<type_schema>;
        default?: any;
    } | {
        allOf: Array<type_schema>;
    } | {
        oneOf: Array<type_schema>;
    } | {
        not: type_schema;
    });
    /** */
    type type_report = {
        incident: string;
        details: Record<string, any>;
    };
    /** */
    type type_adaption<type_result> = {
        reports: Array<type_report>;
        result: lib_plankton.pod.type_pod<type_result>;
    };
}

declare namespace lib_plankton.conf {
    /**
     * @todo versioning
     */
    function refine<type_result>(schema: type_schema, value_raw: any): type_result;
    /** */
    function load(schema: type_schema, path: (null | string)): Promise<void>;
}
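/*
 * Usage sketch (assumption: illustrative only). The schema type mirrors a subset of JSON Schema,
 * so a raw configuration value can be refined against it; the concrete schema below is hypothetical:
 *
 *     const schema: lib_plankton.conf.type_schema = {
 *         "type": "object",
 *         "properties": {
 *             "port": {"type": "integer", "default": 8080},
 *             "name": {"type": "string"},
 *         },
 *         "required": ["name"],
 *     };
 *     const settings = lib_plankton.conf.refine<{port: int; name: string;}>(schema, {"name": "demo"});
 */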
declare var plain_text_to_html: (text: string) => string;

/**
 * @desc makes a valid
 */
declare var format_sentence: (str: string, rtl?: boolean, caseSense?: boolean) => string;

declare var fill_string_template: (template_string: string, object: any, fabric: Function, delimiter: string, default_string: string, sloppy: boolean) => string;

declare var make_string_template: (_template: string, _fabrics?: Object) => (object: {
    [key: string]: string;
}) => string;

declare var make_eml_header: (object: {
    [key: string]: string;
}) => string;

declare var make_eml_body: Object;
declare namespace lib_plankton.string {
    /**
     * @author neuc,frac
     */
    function empty(str: string): boolean;
    /**
     * @desc returns a unique string
     * @param {string} prefix an optional prefix for the generated string
     * @return {string}
     * @author fenris
     */
    function generate(prefix?: string): string;
    /** @author fenris */
    function join(parts: Array<string>, glue?: string): string;
    /**
     * @desc splits a string, but returns an empty list if the string is empty
     * @param {string} chain
     * @param {string} separator
     * @return {Array<string>}
     * @author fenris
     */
    function split(chain: string, separator?: string): Array<string>;
    /**
     * @author neu3no
     */
    function explode(str: string, needle: string, max: int): Array<string>;
    /**
     * @desc concatenates a given word with itself n times
     * @param {string} word
     * @param {int} count
     * @return {string}
     * @author fenris
     */
    function repeat(word: string, count: int): string;
    /**
     * @desc lengthens a string by repeatedly appending or prepending another string
     * @param {string} word the string to pad
     * @param {int} length the length, which the result shall have
     * @param {string} symbol the string, which will be added (multiple times)
     * @param {boolean} [prepend] whether to prepend (~true) or append (~false); default: false
     * @return {string} the padded string
     * @author fenris
     */
    function pad(word: string, length: int, symbol?: string, mode?: string): string;
    /**
     * @desc checks if a given string contains a certain substring
     * @param {string} string
     * @param {string} part
     * @return {boolean}
     * @author fenris
     */
    function contains(chain: string, part: string): boolean;
    /**
     * @desc checks if a given string starts with a certain substring
     * @param {string} string
     * @param {string} part
     * @return {boolean}
     * @author fenris
     */
    function startsWith(chain: string, part: string): boolean;
    /**
     * @desc checks if a given string ends with a certain substring
     * @param {string} string
     * @param {string} part
     * @return {boolean}
     * @author fenris
     */
    function endsWith(chain: string, part: string): boolean;
    /**
     * @desc counts the occurrences of a string in a string
     * @param string haystack_string the string which should be examined
     * @param string needle_string the string which should be counted
     * @author neuc
     */
    function count_occourrences(haystack_string: string, needle_string: string, check_escape: boolean): int;
    /** @author fenris */
    function replace(str: string, replacements: Array<{
        from: string;
        to: string;
    }>, options?: {}): string;
    /**
     * @desc replaces occurrences of "{{name}}" in a string by the corresponding values of an argument object
     * @author fenris
     */
    function coin(str: string, args: {
        [id: string]: string;
    }, options?: {
        legacy?: boolean;
        open?: string;
        close?: string;
    }): string;
    /**
     * @author fenris
     * @deprecated use limit
     */
    function cut(str: string, length: int, delimiter?: string): string;
    /** */
    function limit(str: string, options?: {
        length?: int;
        indicator?: string;
    }): string;
    /** */
    function slice(str: string, size: int): Array<string>;
}
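/*
 * Usage sketch (assumption: illustrative only; the "mode" value passed to "pad" is a guess,
 * since its accepted values are not documented here):
 *
 *     const greeting = lib_plankton.string.coin("hello, {{name}}!", {"name": "world"}); // "hello, world!"
 *     const padded = lib_plankton.string.pad("7", 3, "0", "prepend");
 *     const pieces = lib_plankton.string.split("a,b,c", ","); // ["a", "b", "c"]
 */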
/**
 * @deprecated
 */
declare namespace lib_string {
    const empty: typeof lib_plankton.string.empty;
    const generate: typeof lib_plankton.string.generate;
    const split: typeof lib_plankton.string.split;
    const explode: typeof lib_plankton.string.repeat;
    const repeat: typeof lib_plankton.string.repeat;
    const pad: typeof lib_plankton.string.pad;
    const contains: typeof lib_plankton.string.contains;
    const startsWith: typeof lib_plankton.string.startsWith;
    const endsWith: typeof lib_plankton.string.endsWith;
    const count_occourrences: typeof lib_plankton.string.count_occourrences;
    const coin: typeof lib_plankton.string.coin;
    const stance: typeof lib_plankton.string.coin;
    const cut: typeof lib_plankton.string.cut;
}
declare namespace lib_plankton.string {
    /**
     * an implementation of C's sprintf
     * @param {string} string format string
     * @param {array} args arguments which should be filled in
     * @returns {string}
     */
    var sprintf: (input: string, args?: Array<any>, original?: any) => string;
    /**
     * an implementation of C's printf
     * @param {string} string format string
     * @param {array} args arguments which should be filled in
     * @returns {string}
     */
    function printf(format: any, args: any): void;
}
declare var sprintf: (input: string, args?: Array<any>, original?: any) => string;

declare var printf: typeof lib_plankton.string.printf;

declare var eml_log: any;

declare var track_exports: any;

declare var make_logger: (prefix: any, current_loglevel: any) => (obj: any, lvl: any) => void;
declare namespace lib_plankton.database {
    /** */
    type type_query = {
        template: string;
        arguments: Record<string, any>;
    };
    /** */
    enum enum_type {
        boolean = "boolean",
        integer = "integer",
        string_short = "string_short",
        string_medium = "string_medium",
        string_long = "string_long",
        float = "float"
    }
    /** */
    type type_description_create_table = {
        name: string;
        key_field?: (null | {
            name: string;
            type?: enum_type;
            comment?: (null | string);
            auto_increment?: (null | boolean);
            description?: (null | string);
        });
        data_fields?: Array<{
            name: string;
            nullable?: boolean;
            type: enum_type;
            default?: any;
            description?: (null | string);
        }>;
        constraints?: Array<{
            kind: string;
            parameters?: Record<string, any>;
        }>;
        description?: (null | string);
    };
    /** */
    type type_description_insert = {
        table_name: string;
        values: Record<string, any>;
        returning?: (null | string);
    };
    /** */
    type type_description_update = {
        table_name: string;
        values: Record<string, any>;
        condition?: (null | string);
        arguments?: (null | Record<string, any>);
    };
    /** */
    type type_description_delete = {
        table_name: string;
        condition?: (null | string);
        arguments?: (null | Record<string, any>);
    };
    /** */
    type type_description_select = {
        source: string;
        fields?: (null | Array<string>);
        condition?: (null | string);
        group_by?: (null | string);
        having?: (null | string);
        order_by?: (null | string);
        limit?: (null | int);
        arguments?: (null | Record<string, any>);
    };
    /**
     * rows
     */
    type type_result_get = Array<Record<string, any>>;
    /**
     * auto insert id
     */
    type type_result_put = (null | int);
    /**
     * number of affected rows
     */
    type type_result_set = int;
    /** @author fenris */
    type type_database = {
        wrap_name: ((name: string) => string);
        query_free_get: ((query: type_query) => Promise<type_result_get>);
        query_free_put: ((query: type_query) => Promise<type_result_put>);
        query_free_set: ((query: type_query) => Promise<type_result_set>);
        query_create_table: ((description_create_table: type_description_create_table) => Promise<void>);
        query_insert: ((description_insert: type_description_insert) => Promise<type_result_put>);
        query_update: ((description_update: type_description_update) => Promise<type_result_set>);
        query_delete: ((description_delete: type_description_delete) => Promise<type_result_set>);
        query_select: ((description_select: type_description_select) => Promise<type_result_get>);
    };
    /** */
    type interface_database = database.type_database;
}
declare namespace lib_plankton.database {
    /**
     * @todo default case?
     */
    function sql_common_value_format(value: any): string;
    /** */
    function sql_common_formulation_create_table(description_create_table: type_description_create_table, options?: {
        auto_increment_keyword?: (null | string);
        auto_increment_special?: (null | string);
        omit_comments?: boolean;
        type_map?: Record<enum_type, string>;
        wrap_name?: ((name: string) => string);
    }): type_query;
    /** */
    function sql_common_formulation_insert(description_insert: type_description_insert, options?: {
        wrap_name?: ((name: string) => string);
        set_returning?: boolean;
    }): type_query;
    /** */
    function sql_common_formulation_update(description_update: type_description_update, options?: {
        wrap_name?: ((name: string) => string);
    }): type_query;
    /** */
    function sql_common_formulation_delete(description_delete: type_description_delete, options?: {
        wrap_name?: ((name: string) => string);
    }): type_query;
    /** */
    function sql_common_formulation_select(description_select: type_description_select, options?: {
        wrap_name?: ((name: string) => string);
    }): type_query;
}
declare namespace lib_plankton.database {
    /** */
    type type_sqlite_subject = {
        path: string;
        handle: any;
    };
    /** */
    type type_sqlite_parameters = {
        path: string;
    };
    /** */
    function sqlite_make(parameters: type_sqlite_parameters): type_sqlite_subject;
    /** */
    function sqlite_wrap_name(name: string): string;
    /** @author fenris */
    function sqlite_query_free_get(subject: type_sqlite_subject, query: type_query): Promise<type_result_get>;
    /** @author fenris */
    function sqlite_query_free_put(subject: type_sqlite_subject, query: type_query): Promise<type_result_put>;
    /** @author fenris */
    function sqlite_query_free_set(subject: type_sqlite_subject, query: type_query): Promise<type_result_set>;
    /** */
    function sqlite_formulation_create_table(description_create_table: type_description_create_table): type_query;
    /** */
    function sqlite_query_create_table(subject: type_sqlite_subject, description: type_description_create_table): Promise<void>;
    /** */
    function sqlite_formulation_insert(description_insert: type_description_insert): type_query;
    /** */
    function sqlite_query_insert(subject: type_sqlite_subject, description_insert: type_description_insert): Promise<type_result_put>;
    /** */
    function sqlite_formulation_update(description_update: type_description_update): type_query;
    /** */
    function sqlite_query_update(subject: type_sqlite_subject, description_update: type_description_update): Promise<type_result_set>;
    /** */
    function sqlite_formulation_delete(description_delete: type_description_delete): type_query;
    /** */
    function sqlite_query_delete(subject: type_sqlite_subject, description_delete: type_description_delete): Promise<type_result_set>;
    /** */
    function sqlite_formulation_select(description_select: type_description_select): type_query;
    /** */
    function sqlite_query_select(subject: type_sqlite_subject, description_select: type_description_select): Promise<Array<Record<string, any>>>;
    /** */
    function sqlite_database(parameters: type_sqlite_parameters): type_database;
}
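/*
 * Usage sketch (assumption: illustrative only; the file name and table layout are hypothetical,
 * and the ":id" placeholder syntax for query arguments is a guess, since the query template
 * format is not documented here):
 *
 *     const db = lib_plankton.database.sqlite_database({"path": "data.sqlite"});
 *     await db.query_create_table({
 *         "name": "users",
 *         "key_field": {"name": "id", "auto_increment": true},
 *         "data_fields": [{"name": "name", "type": lib_plankton.database.enum_type.string_short}],
 *     });
 *     const id = await db.query_insert({"table_name": "users", "values": {"name": "alice"}});
 *     const rows = await db.query_select({"source": "users", "condition": "id = :id", "arguments": {"id": id}});
 */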
declare namespace lib_plankton.database {
    /** @author fenris */
    class class_sqlite implements interface_database {
        private subject;
        constructor(parameters: type_sqlite_parameters);
        wrap_name(name: any): string;
        query_free_get(query: any): Promise<type_result_get>;
        query_free_put(query: any): Promise<number>;
        query_free_set(query: any): Promise<number>;
        query_create_table(description_create_table: any): Promise<void>;
        query_insert(description_insert: any): Promise<number>;
        query_update(description_update: any): Promise<number>;
        query_delete(description_delete: any): Promise<number>;
        query_select(description_select: any): Promise<Record<string, any>[]>;
    }
}
declare namespace lib_plankton.database {
    /**
     * @todo
     */
    type type_postgresql_subject = {
        host: string;
        port: int;
        username: string;
        password: string;
        schema: string;
        pool: (null | any);
    };
    /**
     * @todo
     */
    type type_postgresql_parameters = {
        host: string;
        port?: int;
        username: string;
        password: string;
        schema: string;
    };
    /** */
    function postgresql_make(parameters: type_postgresql_parameters): type_postgresql_subject;
    /** */
    function postgresql_wrap_name(name: string): string;
    /**
     * @author fenris
     * @see https://node-postgres.com/apis/pool#poolquery
     */
    function postgresql_query_free_get(subject: type_postgresql_subject, query: type_query): Promise<type_result_get>;
    /**
     * @author fenris
     * @see https://node-postgres.com/apis/pool#poolquery
     */
    function postgresql_query_free_put(subject: type_postgresql_subject, query: type_query): Promise<type_result_put>;
    /**
     * @author fenris
     * @see https://node-postgres.com/apis/pool#poolquery
     */
    function postgresql_query_free_set(subject: type_postgresql_subject, query: type_query): Promise<type_result_set>;
    /** */
    function postgresql_formulation_create_table(description_create_table: type_description_create_table): type_query;
    /** */
    function postgresql_query_create_table(subject: type_postgresql_subject, description: type_description_create_table): Promise<void>;
    /** */
    function postgresql_formulation_insert(description_insert: type_description_insert): type_query;
    /** */
    function postgresql_query_insert(subject: type_postgresql_subject, description_insert: type_description_insert): Promise<type_result_put>;
    /** */
    function postgresql_formulation_update(description_update: type_description_update): type_query;
    /** */
    function postgresql_query_update(subject: type_postgresql_subject, description_update: type_description_update): Promise<type_result_set>;
    /** */
    function postgresql_formulation_delete(description_delete: type_description_delete): type_query;
    /** */
    function postgresql_query_delete(subject: type_postgresql_subject, description_delete: type_description_delete): Promise<type_result_set>;
    /** */
    function postgresql_formulation_select(description_select: type_description_select): type_query;
    /** */
    function postgresql_query_select(subject: type_postgresql_subject, description_select: type_description_select): Promise<Array<Record<string, any>>>;
    /** */
    function postgresql_database(parameters: type_postgresql_parameters): type_database;
}
declare namespace lib_plankton.database {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_postgresql implements interface_database {
|
|
private subject;
|
|
constructor(parameters: type_postgresql_parameters);
|
|
wrap_name(name: any): string;
|
|
query_free_get(query: any): Promise<type_result_get>;
|
|
query_free_put(query: any): Promise<number>;
|
|
query_free_set(query: any): Promise<number>;
|
|
query_create_table(description_create_table: any): Promise<void>;
|
|
query_insert(description_insert: any): Promise<number>;
|
|
query_update(description_update: any): Promise<number>;
|
|
query_delete(description_delete: any): Promise<number>;
|
|
query_select(description_select: any): Promise<Record<string, any>[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_mysql_subject = {
|
|
verbose: boolean;
|
|
};
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_mysql_parameters = {
|
|
verbose?: boolean;
|
|
};
|
|
/**
|
|
*/
|
|
function mysql_make(parameters: type_mysql_parameters): type_mysql_subject;
|
|
/**
|
|
*/
|
|
function mysql_wrap_name(name: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function mysql_query_free_get(subject: type_mysql_subject, query: type_query): Promise<type_result_get>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function mysql_query_free_put(subject: type_mysql_subject, query: type_query): Promise<type_result_put>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function mysql_query_free_set(subject: type_mysql_subject, query: type_query): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_create_table(description_create_table: type_description_create_table): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_create_table(subject: type_mysql_subject, description: type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_insert(description_insert: type_description_insert): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_insert(subject: type_mysql_subject, description_insert: type_description_insert): Promise<type_result_put>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_update(description_update: type_description_update): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_update(subject: type_mysql_subject, description_update: type_description_update): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_delete(description_delete: type_description_delete): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_delete(subject: type_mysql_subject, description_delete: type_description_delete): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_select(description_select: type_description_select): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_select(subject: type_mysql_subject, description_select: type_description_select): Promise<Array<Record<string, any>>>;
|
|
/**
|
|
*/
|
|
function mysql_database(parameters: type_mysql_parameters): type_database;
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_mysql implements interface_database {
|
|
private subject;
|
|
constructor(parameters: type_mysql_parameters);
|
|
wrap_name(name: any): string;
|
|
query_free_get(query: any): Promise<type_result_get>;
|
|
query_free_put(query: any): Promise<number>;
|
|
query_free_set(query: any): Promise<number>;
|
|
query_create_table(description_create_table: any): Promise<void>;
|
|
query_insert(description_insert: any): Promise<number>;
|
|
query_update(description_update: any): Promise<number>;
|
|
query_delete(description_delete: any): Promise<number>;
|
|
query_select(description_select: any): Promise<Record<string, any>[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage {
/**
* implements the idea of a database, which houses datasets (type_value) and manages access to them by ids or similar keys (type_key)
*
* @author fenris
*/
type type_store<type_key, type_value, type_setup_input, type_searchterm, type_preview> = {
/**
* shall prepare the storage instance for use
*
* @author fenris
*/
setup(input: type_setup_input): Promise<void>;
/**
* shall insert a new dataset and return its automatically assigned key
*
* @author fenris
*/
create(value: type_value): Promise<type_key>;
/**
* shall modify an existing dataset
*
* @author fenris
*/
update(key: type_key, value: type_value): Promise<void>;
/**
* shall remove an existing dataset
*
* @author fenris
*/
delete(key: type_key): Promise<void>;
/**
* shall get an existing dataset by its key
*
* @author fenris
*/
read(key: type_key): Promise<type_value>;
/**
* shall list keys and previews of existing datasets which match a certain search term
*
* @author fenris
*/
search(term?: (null | type_searchterm)): Promise<Array<{
key: type_key;
preview: type_preview;
}>>;
};
/**
* for class wrappers
*/
type interface_store<type_key, type_value, type_setup_input, type_searchterm, type_preview> = type_store<type_key, type_value, type_setup_input, type_searchterm, type_preview>;
}
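/*
 * Illustrative sketch (not part of the generated declarations): how consuming code
 * typically drives a type_store, using only the methods declared above. The concrete
 * key/value types and the store instance are assumed to come from one of the
 * implementations declared further below (e.g. sql_table_autokey_store).
 *
 *     async function demo(store : lib_plankton.storage.type_store<int, Record<string, any>, any, any, any>) : Promise<void> {
 *         const key = await store.create({"name": "Alice"});
 *         await store.update(key, {"name": "Alice", "role": "admin"});
 *         const value = await store.read(key);
 *         const hits = await store.search(null);
 *         await store.delete(key);
 *     }
 */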
declare namespace lib_plankton.storage {
|
|
/**
|
|
* implements the idea of a storage without managed keys
|
|
*
|
|
* @author fenris
|
|
*/
|
|
type type_chest<type_key, type_value, type_setup_input, type_searchterm, type_preview> = {
|
|
/**
|
|
* shall prepare the storage instance for use
|
|
*
|
|
* @author fenris
|
|
*/
|
|
setup(input: type_setup_input): Promise<void>;
|
|
/**
|
|
* shall remove all items
|
|
*/
|
|
clear(): Promise<void>;
|
|
/**
|
|
* shall insert a new dataset or modify an existing one and return whether it is new
|
|
*
|
|
* @author fenris
|
|
*/
|
|
write(key: type_key, value: type_value): Promise<boolean>;
|
|
/**
|
|
* shall remove an existing dataset
|
|
*
|
|
* @author fenris
|
|
*/
|
|
delete(key: type_key): Promise<void>;
|
|
/**
|
|
* shall get an existing dataset by its key
|
|
*
|
|
* @author fenris
|
|
*/
|
|
read(key: type_key): Promise<type_value>;
|
|
/**
|
|
* shall list keys and previews of existing datasets, which match a certain search term
|
|
*
|
|
* @author fenris
|
|
*/
|
|
search(term?: (null | type_searchterm)): Promise<Array<{
|
|
key: type_key;
|
|
preview: type_preview;
|
|
}>>;
|
|
};
|
|
/**
|
|
* for class wrappers
|
|
*/
|
|
type interface_chest<type_key, type_value, type_setup_input, type_searchterm, type_preview> = type_chest<type_key, type_value, type_setup_input, type_searchterm, type_preview>;
|
|
}
|
|
declare namespace lib_plankton.storage.memory {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_subject<type_value> = {
|
|
data: Record<string, type_value>;
|
|
};
|
|
/**
|
|
*/
|
|
type type_parameters<type_value> = {};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make<type_value>(parameters: type_parameters<type_value>): type_subject<type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function clear<type_value>(subject: type_subject<type_value>): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function write<type_value>(subject: type_subject<type_value>, key: string, value: type_value): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function delete_<type_value>(subject: type_subject<type_value>, key: string): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function read<type_value>(subject: type_subject<type_value>, key: string): type_value;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function list<type_value>(subject: type_subject<type_value>): Array<string>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function search<type_value>(subject: type_subject<type_value>, term: (null | string)): Array<{
|
|
key: string;
|
|
preview: string;
|
|
}>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function implementation_chest<type_value>(parameters: type_parameters<type_value>): type_chest<string, type_value, void, string, string>;
|
|
}
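/*
 * Illustrative sketch (not part of the generated declarations): using the in-memory
 * chest via the functional interface declared above; keys and values are arbitrary
 * example data.
 *
 *     const chest = lib_plankton.storage.memory.implementation_chest<string>({});
 *     await chest.setup(undefined);
 *     await chest.write("greeting", "hello");
 *     const value = await chest.read("greeting");
 *     const hits = await chest.search("gree");
 *     await chest.clear();
 */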
|
|
declare namespace lib_plankton.storage.memory {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_chest<type_item> implements type_chest<string, type_item, void, string, string> {
|
|
private subject;
|
|
constructor(parameters: type_parameters<type_item>);
|
|
setup(input: any): Promise<void>;
|
|
clear(): Promise<void>;
|
|
write(key: any, value: any): Promise<boolean>;
|
|
delete(key: any): Promise<void>;
|
|
read(key: any): Promise<Awaited<type_item>>;
|
|
search(term: any): Promise<{
|
|
key: string;
|
|
preview: string;
|
|
}[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage.filesystem {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_subject = {
|
|
nodemodule: any;
|
|
};
|
|
/**
|
|
*/
|
|
type type_parameters = {};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make(parameters: type_parameters): type_subject;
|
|
/**
|
|
*/
|
|
function clear(subject: type_subject): Promise<void>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function write(subject: type_subject, path: string, content: Buffer): Promise<boolean>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function delete_(subject: type_subject, path: string): Promise<void>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function read(subject: type_subject, path: string): Promise<Buffer>;
|
|
/**
|
|
*/
|
|
function implementation_chest(parameters: type_parameters): type_chest<string, Buffer, void, void, void>;
|
|
}
|
|
declare namespace lib_plankton.storage.filesystem {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_chest implements type_chest<string, Buffer, void, void, void> {
|
|
private subject;
|
|
constructor(parameters: type_parameters);
|
|
setup(input: any): Promise<void>;
|
|
clear(): Promise<void>;
|
|
write(key: any, value: any): Promise<boolean>;
|
|
delete(key: any): Promise<void>;
|
|
read(key: any): Promise<Buffer>;
|
|
search(searchterm: any): Promise<{
|
|
key: string;
|
|
preview: void;
|
|
}[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage {
|
|
/**
|
|
*/
|
|
type type_sql_table_autokey_search_term = {
|
|
expression: string;
|
|
arguments: Record<string, any>;
|
|
};
|
|
/**
|
|
*/
|
|
type type_sql_table_autokey_subject = {
|
|
database_implementation: lib_plankton.database.type_database;
|
|
table_name: string;
|
|
key_name: string;
|
|
};
|
|
/**
|
|
*/
|
|
type type_sql_table_autokey_parameters = {
|
|
database_implementation: lib_plankton.database.type_database;
|
|
table_name: string;
|
|
key_name: string;
|
|
};
|
|
/**
|
|
*/
|
|
function sql_table_autokey_make(parameters: type_sql_table_autokey_parameters): type_sql_table_autokey_subject;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_setup(subject: type_sql_table_autokey_subject, description_create_table: lib_plankton.database.type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_create(subject: type_sql_table_autokey_subject, value: Record<string, any>): Promise<int>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_update(subject: type_sql_table_autokey_subject, key: int, value: Record<string, any>): Promise<void>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_delete(subject: type_sql_table_autokey_subject, key: int): Promise<void>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_read(subject: type_sql_table_autokey_subject, key: int): Promise<Record<string, any>>;
|
|
/**
|
|
* @todo correct preview
|
|
*/
|
|
function sql_table_autokey_search(subject: type_sql_table_autokey_subject, term: (null | type_sql_table_autokey_search_term)): Promise<Array<{
|
|
key: int;
|
|
preview: Record<string, any>;
|
|
}>>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_store(parameters: type_sql_table_autokey_parameters): type_store<int, Record<string, any>, lib_plankton.database.type_description_create_table, type_sql_table_autokey_search_term, Record<string, any>>;
|
|
}
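/*
 * Illustrative sketch (not part of the generated declarations): an SQL-table-backed
 * store with an automatically assigned integer key. "db" stands for any
 * lib_plankton.database.type_database instance (e.g. from sqlite_database above); the
 * table description expected by setup is assumed to be prepared elsewhere, since
 * type_description_create_table is declared earlier in this file.
 *
 *     const store = lib_plankton.storage.sql_table_autokey_store({
 *         "database_implementation": db,
 *         "table_name": "person",
 *         "key_name": "id",
 *     });
 *     const id = await store.create({"name": "Alice"});
 *     const row = await store.read(id);
 */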
|
|
declare namespace lib_plankton.storage {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_sql_table_autokey implements type_store<int, Record<string, any>, lib_plankton.database.type_description_create_table, string, Record<string, any>> {
|
|
private subject;
|
|
constructor(parameters: type_sql_table_autokey_parameters);
|
|
setup(input: any): Promise<void>;
|
|
create(value: any): Promise<number>;
|
|
update(key: any, value: any): Promise<void>;
|
|
delete(key: any): Promise<void>;
|
|
read(key: any): Promise<Record<string, any>>;
|
|
search(term: any): Promise<{
|
|
key: number;
|
|
preview: Record<string, any>;
|
|
}[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage.sql_table_common {
|
|
/**
|
|
*/
|
|
type type_sql_table_common_search_term = {
|
|
expression: string;
|
|
arguments: Record<string, any>;
|
|
};
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
database_implementation: lib_plankton.database.type_database;
|
|
table_name: string;
|
|
key_names: Array<string>;
|
|
};
|
|
/**
|
|
*/
|
|
type type_parameters = {
|
|
database_implementation: lib_plankton.database.type_database;
|
|
table_name: string;
|
|
key_names: Array<string>;
|
|
};
|
|
/**
|
|
*/
|
|
function make(parameters: type_parameters): type_subject;
|
|
/**
|
|
*/
|
|
function setup(subject: type_subject, description_create_table: lib_plankton.database.type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function clear(subject: type_subject): Promise<void>;
|
|
/**
|
|
* @todo optimize: avoid read
|
|
*/
|
|
function write(subject: type_subject, key: Array<any>, value: Record<string, any>): Promise<boolean>;
|
|
/**
|
|
*/
|
|
function delete_(subject: type_subject, key: Array<any>): Promise<void>;
|
|
/**
|
|
*/
|
|
function read(subject: type_subject, key: Array<any>): Promise<Record<string, any>>;
|
|
/**
|
|
* @todo correct preview
|
|
*/
|
|
function search(subject: type_subject, term: (null | type_sql_table_common_search_term)): Promise<Array<{
|
|
key: Array<any>;
|
|
preview: Record<string, any>;
|
|
}>>;
|
|
/**
|
|
*/
|
|
function chest(parameters: type_parameters): type_chest<Array<any>, Record<string, any>, lib_plankton.database.type_description_create_table, type_sql_table_common_search_term, Record<string, any>>;
|
|
}
|
|
declare namespace lib_plankton.storage.sql_table_common {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_chest implements type_chest<Array<any>, Record<string, any>, lib_plankton.database.type_description_create_table, type_sql_table_common_search_term, Record<string, any>> {
|
|
private subject;
|
|
constructor(parameters: type_parameters);
|
|
setup(input: any): Promise<void>;
|
|
clear(): Promise<void>;
|
|
write(key: any, value: any): Promise<boolean>;
|
|
delete(key: any): Promise<void>;
|
|
read(key: any): Promise<Record<string, any>>;
|
|
search(term: any): Promise<{
|
|
key: any[];
|
|
preview: Record<string, any>;
|
|
}[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.cache {
/**
*/
type type_cache<type_value> = {
init: (() => Promise<void>);
clear: (() => Promise<void>);
query: ((key: string, retrieve: (() => Promise<type_value>)) => Promise<{
retrieved: boolean;
value: type_value;
}>);
};
}
declare namespace lib_plankton.cache {
/**
*/
function get<type_value>(cache: type_cache<type_value>, key: string, retrieve: (() => Promise<type_value>)): Promise<type_value>;
/**
*/
function get_complex<type_input, type_value>(cache: type_cache<type_value>, group: string, input: type_input, retrieve: ((input: type_input) => Promise<type_value>), options?: {
encode_input?: ((input: type_input) => string);
}): Promise<type_value>;
}
|
|
declare namespace lib_plankton.cache.never {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_subject<type_value> = {};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make<type_value>(): type_subject<type_value>;
|
|
/**
|
|
*/
|
|
function implementation<type_value>(subject: type_subject<type_value>): type_cache<type_value>;
|
|
}
|
|
declare namespace lib_plankton.cache.always {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_subject<type_value> = {
|
|
value: lib_plankton.pod.type_pod<type_value>;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make<type_value>(value: lib_plankton.pod.type_pod<type_value>): type_subject<type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function clear<type_value>(subject: type_subject<type_value>): Promise<void>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function query<type_value>(subject: type_subject<type_value>, key: string, retrieve: (() => Promise<type_value>)): Promise<{
|
|
retrieved: boolean;
|
|
value: type_value;
|
|
}>;
|
|
/**
|
|
*/
|
|
function implementation<type_value>(subject: type_subject<type_value>): type_cache<type_value>;
|
|
}
|
|
declare namespace lib_plankton.cache.chest {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_subject<type_value> = {
|
|
chest: lib_plankton.storage.type_chest<string, type_value, void, any, any>;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make<type_value>(options?: {
|
|
chest?: lib_plankton.storage.type_chest<string, type_value, void, any, any>;
|
|
}): type_subject<type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function init<type_value>(subject: type_subject<type_value>): Promise<void>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function clear<type_value>(subject: type_subject<type_value>): Promise<void>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function query<type_value>(subject: type_subject<type_value>, key: string, retrieve: (() => Promise<type_value>)): Promise<{
|
|
retrieved: boolean;
|
|
value: type_value;
|
|
}>;
|
|
/**
|
|
*/
|
|
function implementation<type_value>(subject: type_subject<type_value>): type_cache<type_value>;
|
|
}
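/*
 * Illustrative sketch (not part of the generated declarations): combining the chest
 * based cache with the generic get helper declared above. The retrieve callback and
 * its result are example placeholders; on a second call with the same key the cached
 * value is expected to be reused.
 *
 *     const subject = lib_plankton.cache.chest.make<string>({});
 *     const cache = lib_plankton.cache.chest.implementation<string>(subject);
 *     await cache.init();
 *     const value = await lib_plankton.cache.get<string>(
 *         cache,
 *         "expensive-thing",
 *         () => Promise.resolve("computed only once")
 *     );
 */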
|
|
declare namespace lib_plankton.shape {
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_jsonschema = any;
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_oas_schema = any;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_inspection = {
|
|
flaws: Array<string>;
|
|
sub: Array<{
|
|
position: string;
|
|
inspection: type_inspection;
|
|
}>;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function inspection_create(): type_inspection;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function inspection_add(main: type_inspection, flaw: string): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function inspection_extend(main: type_inspection, prefix: string, sub: type_inspection): void;
|
|
/**
|
|
*/
|
|
type type_shape = {
|
|
kind: string;
|
|
parameters: Record<string, any>;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_logic = {
|
|
inspect: ((sub_inspect: ((shape: type_shape, value: any) => type_inspection), value: any) => type_inspection);
|
|
show: ((sub_show: ((shape: type_shape) => string)) => string);
|
|
to_typescript_type: ((sub_to_typescript_type: ((shape: type_shape) => string)) => string);
|
|
to_jsonschema: ((sub_to_json_schema: ((shape: type_shape) => type_jsonschema)) => type_jsonschema);
|
|
to_oas_schema: ((sub_to_oas_schema: ((shape: type_shape) => type_oas_schema)) => type_oas_schema);
|
|
example: ((sub_example: ((shape: type_shape) => any)) => any);
|
|
};
|
|
/**
|
|
*/
|
|
function inspect(shape: type_shape, value: any): type_inspection;
|
|
/**
|
|
*/
|
|
function inspect_flat(shape: type_shape, value: any): Array<string>;
|
|
/**
|
|
*/
|
|
function show(shape: type_shape): string;
|
|
/**
|
|
*/
|
|
function to_typescript_type(shape: type_shape): string;
|
|
/**
|
|
*/
|
|
function to_jsonschema(shape: type_shape): type_jsonschema;
|
|
/**
|
|
*/
|
|
function to_oas_schema(shape: type_shape): type_oas_schema;
|
|
/**
|
|
*/
|
|
function example(shape: type_shape): any;
|
|
/**
|
|
*/
|
|
function register<type_parameters, type_subject>(name: string, construct: ((parameters: type_parameters) => type_subject), logic: ((subject: type_subject) => type_logic)): void;
|
|
}
|
|
declare namespace lib_plankton.shape.any {
|
|
/**
|
|
*/
|
|
type type_subject = {};
|
|
/**
|
|
*/
|
|
export function make(options?: {}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.null_ {
|
|
/**
|
|
*/
|
|
type type_subject = {};
|
|
/**
|
|
*/
|
|
export function make(options?: {}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.boolean {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: (null | boolean);
|
|
description?: string;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.integer {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
minimum: lib_plankton.pod.type_pod<int>;
|
|
maximum: lib_plankton.pod.type_pod<int>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: (null | int);
|
|
description?: string;
|
|
minimum?: int;
|
|
maximum?: int;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.float {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
minimum: lib_plankton.pod.type_pod<float>;
|
|
maximum: lib_plankton.pod.type_pod<float>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: (null | float);
|
|
description?: string;
|
|
minimum?: float;
|
|
maximum?: float;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.string {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
pattern: lib_plankton.pod.type_pod<string>;
|
|
min_length: lib_plankton.pod.type_pod<int>;
|
|
max_length: lib_plankton.pod.type_pod<int>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: (null | int);
|
|
description?: string;
|
|
pattern?: string;
|
|
min_length?: int;
|
|
max_length?: int;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.email {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
core: type_shape;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: (null | int);
|
|
description?: string;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.list_ {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
shape_element: type_shape;
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(shape_element: type_shape, options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: any;
|
|
description?: string;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.map {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
shape_key: type_shape;
|
|
shape_value: type_shape;
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(shape_key: type_shape, shape_value: type_shape, options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: any;
|
|
description?: string;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.record {
|
|
/**
|
|
*/
|
|
type type_field = {
|
|
name: string;
|
|
shape: type_shape;
|
|
required: boolean;
|
|
};
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
fields: Array<type_field>;
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(fields_raw: Array<{
|
|
name: string;
|
|
shape: type_shape;
|
|
required?: boolean;
|
|
}>, options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: any;
|
|
description?: string;
|
|
}): type_subject;
|
|
export {};
|
|
}
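/*
 * Illustrative sketch (not part of the generated declarations): validating a value
 * against a shape. The literal {"kind": ..., "parameters": ...} form of type_shape is
 * shown with an assumed kind name and an empty parameter record; the actually
 * registered kinds and their parameter layouts are defined in the implementation, so
 * treat this as a hypothetical shape value.
 *
 *     const shape : lib_plankton.shape.type_shape = {
 *         "kind": "record",       // hypothetical kind name
 *         "parameters": {}        // real parameters depend on the registered kind
 *     };
 *     const flaws = lib_plankton.shape.inspect_flat(shape, {"name": "Alice"});
 *     const schema = lib_plankton.shape.to_jsonschema(shape);
 */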
|
|
declare namespace lib_plankton.www_form {
|
|
/**
|
|
*/
|
|
type type_source = Record<string, string>;
|
|
/**
|
|
*/
|
|
type type_target = string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function encode(source: type_source): type_target;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function decode(target: type_target): type_source;
|
|
}
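/*
 * Illustrative sketch (not part of the generated declarations): round-tripping a flat
 * string map through the www-form codec declared above; field names and values are
 * arbitrary examples.
 *
 *     const encoded = lib_plankton.www_form.encode({"name": "John Doe", "age": "42"});
 *     const decoded = lib_plankton.www_form.decode(encoded);
 */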
|
|
declare namespace lib_plankton.www_form {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_www_form implements lib_plankton.code.interface_code<type_source, type_target> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor();
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
encode(source: type_source): type_target;
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
decode(target: type_target): type_source;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.url {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_url = {
|
|
scheme: (null | string);
|
|
host: (null | string);
|
|
username: (null | string);
|
|
password: (null | string);
|
|
port: (null | int);
|
|
path: (null | string);
|
|
query: (null | string);
|
|
hash: (null | string);
|
|
};
|
|
}
|
|
declare namespace lib_plankton.url {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function encode(url: type_url): string;
|
|
/**
|
|
* @author fenris
|
|
* @todo arguments
|
|
*/
|
|
function decode(url_raw: string): type_url;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function implementation_code(): lib_plankton.code.type_code<type_url, string>;
|
|
}
|
|
declare namespace lib_plankton.url {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_url implements lib_plankton.code.interface_code<type_url, string> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor();
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
encode(x: any): string;
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
decode(x: string): any;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.random {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_state = {
|
|
builtin: boolean;
|
|
seed?: int;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function state_push(state: type_state): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function state_pop(): type_state;
|
|
/**
|
|
* returns a random floating point number in the interval [0,1[
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function generate_unit(): float;
|
|
/**
|
|
* returns a random boolean value
|
|
*
|
|
* @param {float} [probability] the probability for the return-value "true"; default: 0.5
|
|
* @author fenris
|
|
*/
|
|
function generate_boolean(options?: {
|
|
probability?: float;
|
|
}): boolean;
|
|
/**
|
|
* returns a random integer number in the interval [a,b]
|
|
*
|
|
* @param {int} [minimum] the left side of the halfopen interval (i.e. the smallest included value in the range)
|
|
* @param {int} [maximum] the right side of the halfopen interval (i.e. the smallest excluded value in the range)
|
|
* @author fenris
|
|
*/
|
|
function generate_integer(options?: {
|
|
minimum?: int;
|
|
maximum?: int;
|
|
}): int;
|
|
var generate_int: typeof generate_integer;
|
|
/**
|
|
* returns a random floating point number in the given interval
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function generate_float(options?: {
|
|
minimum?: int;
|
|
maximum?: int;
|
|
}): float;
|
|
/**
|
|
* returns a random date
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function generate_date(options?: {
|
|
minimum?: Date;
|
|
maximum?: Date;
|
|
}): Date;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_hexdigit(): string;
|
|
/**
|
|
* generates a random string with an optional prefix
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function generate_string(options?: {
|
|
length?: int;
|
|
}): string;
|
|
/**
|
|
* chooses a value randomly from a list of values with weights (a higher weight means a higher probability to be chosen)
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function choose_weighted<type_value>(sets: Array<{
|
|
weight: float;
|
|
value: type_value;
|
|
}>): type_value;
|
|
/**
|
|
* chooses a value randomly from a list of values with equal probabilities
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function choose_uniformly<type_value>(values: Array<type_value>): type_value;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function shuffle<type_element>(list: Array<type_element>): Array<type_element>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_vowel(): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_halfvowel(): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_consonant(): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_letter(): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_syllable(): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_word(options?: {
|
|
syllable_count_minimum?: int;
|
|
syllable_count_maximum?: int;
|
|
}): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_text(options?: {
|
|
word_count?: int;
|
|
}): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_city(): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_street(): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_guid(options?: {
|
|
with_braces?: boolean;
|
|
}): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_url(): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_email_address(): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_telephone_number(): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_time(): any;
|
|
/**
|
|
* @author fenris
|
|
* @deprecated
|
|
* @todo remove
|
|
*/
|
|
function generate_for_shape(shape: any): any;
|
|
}
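/*
 * Illustrative sketch (not part of the generated declarations): a few of the
 * generators declared above. The exact distributions and the semantics of the state
 * stack are defined by the implementation; the values shown are example arguments.
 *
 *     const roll = lib_plankton.random.generate_integer({"minimum": 1, "maximum": 6});
 *     const color = lib_plankton.random.choose_uniformly<string>(["red", "green", "blue"]);
 *     const token = lib_plankton.random.generate_string({"length": 8});
 *     const id = lib_plankton.random.generate_guid();
 */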
|
|
declare namespace lib_plankton.session {
/**
*/
type type_session = {
key: string;
name: string;
expiry: int;
data: any;
};
/**
*/
function begin(name: string, options?: {
lifetime?: int;
data?: any;
}): Promise<string>;
/**
*/
function get(key: string): Promise<type_session>;
/**
*/
function end(key: string): Promise<void>;
/**
*/
function setup(options?: {
key_length?: int;
key_max_attempts?: int;
default_lifetime?: int;
data_chest?: lib_plankton.storage.type_chest<string, any, void, string, string>;
clear?: boolean;
}): Promise<void>;
}
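/*
 * Illustrative sketch (not part of the generated declarations): a typical session
 * lifecycle using the functions declared above. setup is called once with defaults;
 * the name and lifetime are example values.
 *
 *     await lib_plankton.session.setup({});
 *     const key = await lib_plankton.session.begin("alice", {"lifetime": 3600});
 *     const session = await lib_plankton.session.get(key);
 *     await lib_plankton.session.end(key);
 */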
|
|
declare namespace lib_plankton {
|
|
namespace order {
|
|
/**
|
|
*/
|
|
type type_order<type_value> = ((x: type_value, y: type_value) => boolean);
|
|
/**
|
|
*/
|
|
type type_comparator<type_value> = ((x: type_value, y: type_value) => int);
|
|
/**
|
|
*/
|
|
type type_sorter<type_element> = ((list: Array<type_element>) => Array<type_element>);
|
|
/**
|
|
*/
|
|
function from_comparator<type_value>(comparator: type_comparator<type_value>): type_order<type_value>;
|
|
/**
|
|
*/
|
|
function to_comparator<type_value>(order: type_order<type_value>): type_comparator<type_value>;
|
|
/**
|
|
*/
|
|
function order_default<type_value>(): type_order<type_value>;
|
|
/**
|
|
* @desc provide a total order given by a list
|
|
*/
|
|
function order_total<type_value>(list: Array<type_value>, options?: {
|
|
collation?: ((x: type_value, y: type_value) => boolean);
|
|
}): type_order<type_value>;
|
|
/**
|
|
* @desc lexicographic order
|
|
*/
|
|
function order_lexicographic_pair<type_value_first, type_value_second>(options?: {
|
|
order_first?: type_order<type_value_first>;
|
|
order_second?: type_order<type_value_second>;
|
|
}): type_order<lib_plankton.pair.type_pair<type_value_first, type_value_second>>;
|
|
/**
|
|
*/
|
|
function order_lexicographic_pair_wrapped<type_container, type_value_first, type_value_second>(extract_first: ((container: type_container) => type_value_first), extract_second: ((container: type_container) => type_value_second), options?: {
|
|
order_first?: type_order<type_value_first>;
|
|
order_second?: type_order<type_value_second>;
|
|
}): type_order<type_container>;
|
|
/**
|
|
* @desc lexicographic order
|
|
*/
|
|
function order_lexicographic_list<type_element>(options?: {
|
|
order?: type_order<type_element>;
|
|
}): type_order<Array<type_element>>;
|
|
/**
|
|
*/
|
|
function sorter_merge<type_element>(options?: {
|
|
order?: type_order<type_element>;
|
|
}): type_sorter<type_element>;
|
|
}
|
|
}
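/*
 * Illustrative sketch (not part of the generated declarations): building a merge
 * sorter from an order relation. The example order treats the boolean result as
 * "x is less than or equal to y", which is an assumption about how type_order is
 * interpreted by the implementation.
 *
 *     const order : lib_plankton.order.type_order<int> = ((x, y) => (x <= y));
 *     const sort = lib_plankton.order.sorter_merge<int>({"order": order});
 *     const sorted = sort([3, 1, 2]);
 */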
|
|
declare namespace lib_plankton.pit {
|
|
/**
|
|
*/
|
|
type type_date = {
|
|
year: int;
|
|
month: int;
|
|
day: int;
|
|
};
|
|
/**
|
|
*/
|
|
type type_ywd = {
|
|
year: int;
|
|
week: int;
|
|
day: int;
|
|
};
|
|
/**
|
|
*/
|
|
type type_time = {
|
|
hour: int;
|
|
minute: int;
|
|
second: int;
|
|
};
|
|
/**
|
|
*/
|
|
type type_datetime = {
|
|
timezone_shift: int;
|
|
date: type_date;
|
|
time: (null | type_time);
|
|
};
|
|
/**
|
|
*/
|
|
type type_pit = int;
|
|
}
|
|
declare namespace lib_plankton.pit {
|
|
/**
|
|
*/
|
|
function date_object_get_week_of_year(date: Date): int;
|
|
/**
|
|
*/
|
|
function to_unix_timestamp(pit: type_pit): int;
|
|
/**
|
|
*/
|
|
function from_unix_timestamp(unix_timestamp: int): type_pit;
|
|
/**
|
|
*/
|
|
function to_date_object(pit: type_pit): Date;
|
|
/**
|
|
* @todo timezone
|
|
*/
|
|
function to_datetime(pit: type_pit, options?: {
|
|
timezone_shift?: int;
|
|
}): type_datetime;
|
|
/**
|
|
*/
|
|
function from_datetime(datetime: type_datetime): type_pit;
|
|
/**
|
|
*/
|
|
function is_before(pit: type_pit, reference: type_pit): boolean;
|
|
/**
|
|
*/
|
|
function is_between(pit: type_pit, reference_left: type_pit, reference_right: type_pit): boolean;
|
|
/**
|
|
*/
|
|
function shift_day(pit: type_pit, increment: int): type_pit;
|
|
/**
|
|
*/
|
|
function shift_week(pit: type_pit, increment: int): type_pit;
|
|
/**
|
|
*/
|
|
function trunc_week(pit: type_pit): type_pit;
|
|
/**
|
|
*/
|
|
function now(): type_pit;
|
|
/**
|
|
* @param year year according to specified timezone shift
|
|
* @param week week according to specified timezone shift
|
|
* @return the beginning of the week (Monday, 00:00)
|
|
*/
|
|
function from_ywd(ywd: type_ywd, options?: {
|
|
timezone_shift?: int;
|
|
}): type_pit;
|
|
/**
|
|
* @todo timezone
|
|
*/
|
|
function to_ywd(pit: type_pit, options?: {
|
|
timezone_shift?: int;
|
|
}): type_ywd;
|
|
}
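/*
 * Illustrative sketch (not part of the generated declarations): working with points
 * in time via the functions declared above; the timezone shift is an example value.
 *
 *     const now = lib_plankton.pit.now();
 *     const next_week = lib_plankton.pit.shift_week(now, 1);
 *     const still_ahead = lib_plankton.pit.is_before(now, next_week);
 *     const datetime = lib_plankton.pit.to_datetime(now, {"timezone_shift": 0});
 */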
|
|
declare namespace lib_plankton.ical {
|
|
/**
|
|
*/
|
|
type type_rrule = {
|
|
freq?: string;
|
|
byday?: string;
|
|
bymonth?: string;
|
|
};
|
|
/**
|
|
*/
|
|
type type_offset = string;
|
|
/**
|
|
*/
|
|
export enum enum_class {
|
|
public = "public",
|
|
private = "private",
|
|
confidential = "confidential"
|
|
}
|
|
/**
|
|
*/
|
|
export enum enum_event_status {
|
|
tentative = "tentative",
|
|
confirmed = "confirmed",
|
|
cancelled = "cancelled"
|
|
}
|
|
/**
|
|
*/
|
|
export enum enum_transp {
|
|
opaque = "opaque",
|
|
transparent = "transparent"
|
|
}
|
|
/**
|
|
*/
|
|
type type_tzid = string;
|
|
/**
|
|
*/
|
|
export type type_date = {
|
|
year: int;
|
|
month: int;
|
|
day: int;
|
|
};
|
|
/**
|
|
*/
|
|
export type type_time = {
|
|
hour: int;
|
|
minute: int;
|
|
second: int;
|
|
utc: boolean;
|
|
};
|
|
/**
|
|
*/
|
|
export type type_datetime = {
|
|
date: type_date;
|
|
time: (null | type_time);
|
|
};
|
|
/**
|
|
*/
|
|
export type type_dt = {
|
|
tzid: type_tzid;
|
|
value: type_datetime;
|
|
};
|
|
/**
|
|
*/
|
|
type type_duration = {
|
|
negative: boolean;
|
|
weeks?: int;
|
|
days?: int;
|
|
hours?: int;
|
|
minutes?: int;
|
|
seconds?: int;
|
|
};
|
|
/**
|
|
*/
|
|
type type_vtimezone = {
|
|
tzid?: type_tzid;
|
|
standard?: {
|
|
dtstart: type_datetime;
|
|
rrule: type_rrule;
|
|
tzoffsetfrom?: type_offset;
|
|
tzoffsetto?: type_offset;
|
|
};
|
|
daylight?: {
|
|
dtstart: type_datetime;
|
|
rrule: type_rrule;
|
|
tzoffsetfrom?: type_offset;
|
|
tzoffsetto?: type_offset;
|
|
};
|
|
};
|
|
/**
|
|
* @see https://www.rfc-editor.org/rfc/rfc5545#section-3.6.1
|
|
*/
|
|
export type type_vevent = {
|
|
uid: string;
|
|
dtstamp: type_datetime;
|
|
dtstart?: type_dt;
|
|
class?: enum_class;
|
|
created?: type_datetime;
|
|
description?: string;
|
|
geo?: {
|
|
latitude: float;
|
|
longitude: float;
|
|
};
|
|
last_modified?: type_datetime;
|
|
location?: string;
|
|
organizer?: {
|
|
cn?: string;
|
|
value?: string;
|
|
};
|
|
priority?: int;
|
|
sequence?: int;
|
|
status?: enum_event_status;
|
|
summary?: string;
|
|
transp?: enum_transp;
|
|
url?: string;
|
|
recurid?: any;
|
|
rrule?: type_rrule;
|
|
dtend?: type_dt;
|
|
duration?: type_duration;
|
|
attach?: any;
|
|
attendee?: string;
|
|
categories?: Array<string>;
|
|
comment?: any;
|
|
contact?: any;
|
|
exdate?: any;
|
|
rstatus?: any;
|
|
related?: any;
|
|
resources?: any;
|
|
rdate?: any;
|
|
x_props?: Record<string, string>;
|
|
iana_props?: Record<string, string>;
|
|
};
|
|
/**
|
|
* @see https://www.rfc-editor.org/rfc/rfc5545#section-3.4
|
|
*/
|
|
export type type_vcalendar = {
|
|
version: string;
|
|
prodid: string;
|
|
vevents: Array<type_vevent>;
|
|
calscale?: string;
|
|
method?: string;
|
|
vtimezone?: type_vtimezone;
|
|
x_props?: Record<string, string>;
|
|
iana_props?: Record<string, string>;
|
|
};
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.ical {
/**
*/
function datetime_to_unixtimestamp(datetime: type_datetime): int;
/**
* @see https://www.rfc-editor.org/rfc/rfc5545
* @see https://icalendar.org/iCalendar-RFC-5545/
* @todo implement edge cases
*/
function ics_decode(ics: string, options?: {
debug?: boolean;
}): type_vcalendar;
/**
* @todo method
* @todo add missing fields
*/
function ics_encode(vcalendar: type_vcalendar): string;
}
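/*
 * Illustrative sketch (not part of the generated declarations): decoding an iCalendar
 * document and re-encoding it. "ics_raw" stands for the raw text of an .ics file
 * obtained elsewhere.
 *
 *     const vcalendar = lib_plankton.ical.ics_decode(ics_raw, {"debug": false});
 *     const summaries = vcalendar.vevents.map(vevent => vevent.summary);
 *     const ics_again = lib_plankton.ical.ics_encode(vcalendar);
 */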
|
|
declare namespace lib_plankton.http {
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
enum enum_method {
|
|
options = "options",
|
|
head = "head",
|
|
get = "get",
|
|
delete = "delete",
|
|
post = "post",
|
|
put = "put",
|
|
patch = "patch"
|
|
}
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
type type_request = {
|
|
scheme: ("http" | "https");
|
|
host: (null | string);
|
|
path: string;
|
|
version: string;
|
|
method: enum_method;
|
|
query: (null | string);
|
|
headers: Record<string, string>;
|
|
body: (null | Buffer);
|
|
};
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
type type_response = {
|
|
version: (null | string);
|
|
status_code: int;
|
|
headers: Record<string, string>;
|
|
body: Buffer;
|
|
};
|
|
}
|
|
declare namespace lib_plankton.http {
/**
* @author fenris <frass@greenscale.de>
*/
function encode_method(method: enum_method): string;
/**
* @author fenris <frass@greenscale.de>
*/
function encode_request(request: type_request): string;
/**
* @author fenris <frass@greenscale.de>
*/
function decode_request(request_raw: string): type_request;
/**
* @author fenris <frass@greenscale.de>
*/
function encode_response(response: type_response): string;
/**
* @author fenris <frass@greenscale.de>
*/
function decode_response(response_raw: string): type_response;
/**
* executes an HTTP request
*
* @todo define type_signal
*/
function call(request: type_request, options?: {
timeout?: (null | float);
follow_redirects?: boolean;
implementation?: ("fetch" | "http_module");
}): Promise<type_response>;
}
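/*
 * Illustrative sketch (not part of the generated declarations): issuing a GET request
 * with the call function declared above. The version string and the option values are
 * assumptions; host, path and headers are example data.
 *
 *     const response = await lib_plankton.http.call(
 *         {
 *             "scheme": "https",
 *             "host": "example.org",
 *             "path": "/",
 *             "version": "HTTP/1.1",
 *             "method": lib_plankton.http.enum_method.get,
 *             "query": null,
 *             "headers": {},
 *             "body": null,
 *         },
 *         {"timeout": 5.0, "follow_redirects": true}
 *     );
 */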
|
|
declare namespace lib_plankton.http {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_http_request implements lib_plankton.code.interface_code<type_request, string> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor();
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
encode(x: type_request): string;
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
decode(x: string): type_request;
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_http_response implements lib_plankton.code.interface_code<type_response, string> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor();
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
encode(x: type_response): string;
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
decode(x: string): type_response;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.markdown {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function code(content: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function paragraph(content: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function sectionhead(level: int, content: string): string;
|
|
}
|
|
declare namespace lib_plankton.api {
|
|
/**
|
|
*/
|
|
enum enum_checklevel {
|
|
none = "none",
|
|
soft = "soft",
|
|
hard = "hard"
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_action = {
|
|
name: string;
|
|
active: ((version: (null | string)) => boolean);
|
|
execution: ((version: (null | string), environment: any, input: any) => Promise<any>);
|
|
restriction: ((version: (null | string), environment: any) => Promise<boolean>);
|
|
input_shape: ((version: (null | string)) => lib_plankton.shape.type_shape);
|
|
output_shape: ((version: (null | string)) => lib_plankton.shape.type_shape);
|
|
title: (null | string);
|
|
description: (null | string);
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_api = {
|
|
title: string;
|
|
actions: Record<string, type_action>;
|
|
};
|
|
/**
|
|
*/
|
|
class class_error_permission_denied extends Error {
|
|
}
|
|
}
|
|
declare namespace lib_plankton.api {
/**
* @author fenris
*/
function make(title: string): type_api;
/**
* @throws Error if an action with the given name has already been registered
* @author fenris
*/
function register(api: type_api, name: string, options?: {
active?: ((version: (null | string)) => boolean);
execution?: ((version: (null | string), environment: any, input: any) => Promise<any>);
restriction?: ((version: (null | string), input: any) => Promise<boolean>);
input_shape?: ((version: (null | string)) => lib_plankton.shape.type_shape);
output_shape?: ((version: (null | string)) => lib_plankton.shape.type_shape);
title?: (null | string);
description?: (null | string);
}): void;
/**
* @throws Error if not found
* @author fenris
*/
function get_action(api: type_api, name: string): type_action;
/**
* @author fenris
*/
function call(api: type_api, name: string, options?: {
version?: (null | string);
input?: any;
environment?: Record<string, any>;
checklevel_restriction?: enum_checklevel;
checklevel_input?: enum_checklevel;
checklevel_output?: enum_checklevel;
}): Promise<any>;
/**
* @author fenris
*/
function generate_documentation_for_action(api: type_api, name: string, options?: {
version?: (null | string);
}): string;
/**
* @author fenris
*/
function generate_documentation(api: type_api, options?: {
version?: (null | string);
}): string;
}
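/*
 * Illustrative sketch (not part of the generated declarations): registering and
 * calling a trivial action through the functional API layer declared above. Omitted
 * options are assumed to fall back to sensible defaults.
 *
 *     const api = lib_plankton.api.make("example");
 *     lib_plankton.api.register(api, "ping", {
 *         "execution": ((version, environment, input) => Promise.resolve("pong")),
 *     });
 *     const output = await lib_plankton.api.call(api, "ping", {"input": null});
 */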
|
|
declare namespace lib_plankton.api {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_api {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private subject;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private constructor();
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static create(name: string): class_api;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
register(name: string, options?: {
|
|
active?: ((version: (null | string)) => boolean);
|
|
execution?: ((version: (null | string), environment: any, input: any) => lib_plankton.call.type_promise<any, Error>);
|
|
restriction?: ((version: (null | string), input: any) => Promise<boolean>);
|
|
input_shape?: ((version: (null | string)) => lib_plankton.shape.type_shape);
|
|
output_shape?: ((version: (null | string)) => lib_plankton.shape.type_shape);
|
|
title?: (null | string);
|
|
description?: (null | string);
|
|
}): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
call(name: string, options?: {
|
|
version?: (null | string);
|
|
input?: any;
|
|
environment?: Record<string, any>;
|
|
checklevel_restriction?: enum_checklevel;
|
|
checklevel_input?: enum_checklevel;
|
|
checklevel_output?: enum_checklevel;
|
|
}): lib_plankton.call.type_promise<any, Error>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
generate_documentation_for_action(name: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
generate_documentation(): string;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.rest {
|
|
/**
|
|
*/
|
|
type type_oas_schema = ({} | {
|
|
nullable: boolean;
|
|
} | {
|
|
type: "boolean";
|
|
nullable?: boolean;
|
|
enum?: Array<boolean>;
|
|
} | {
|
|
type: "integer";
|
|
nullable?: boolean;
|
|
enum?: Array<int>;
|
|
} | {
|
|
type: "number";
|
|
nullable?: boolean;
|
|
enum?: Array<float>;
|
|
} | {
|
|
type: "string";
|
|
nullable?: boolean;
|
|
enum?: Array<string>;
|
|
} | {
|
|
type: "array";
|
|
nullable?: boolean;
|
|
items: type_oas_schema;
|
|
} | {
|
|
type: "object";
|
|
nullable?: boolean;
|
|
additionalProperties?: (false | type_oas_schema);
|
|
properties: Record<string, type_oas_schema>;
|
|
required: Array<string>;
|
|
});
|
|
/**
|
|
*/
|
|
type type_execution<type_input, type_output> = ((stuff: {
|
|
version: (null | string);
|
|
headers: Record<string, string>;
|
|
path_parameters: Record<string, string>;
|
|
query_parameters: Record<string, string>;
|
|
input: (null | type_input);
|
|
}) => Promise<{
|
|
status_code: int;
|
|
data: type_output;
|
|
}>);
|
|
/**
|
|
*/
|
|
type type_restriction<type_input> = ((stuff: {
|
|
version: (null | string);
|
|
headers: Record<string, string>;
|
|
path_parameters: Record<string, string>;
|
|
query_parameters: Record<string, string>;
|
|
}) => Promise<boolean>);
|
|
/**
|
|
*/
|
|
type type_operation<type_input, type_output> = {
|
|
action_name: string;
|
|
query_parameters: ((version: string) => Array<{
|
|
name: string;
|
|
description: (null | string);
|
|
required: boolean;
|
|
}>);
|
|
input_schema: ((version: (null | string)) => type_oas_schema);
|
|
output_schema: ((version: (null | string)) => type_oas_schema);
|
|
request_body_mimetype: string;
|
|
request_body_decode: ((http_request_body: Buffer, http_request_header_content_type: (null | string)) => any);
|
|
response_body_mimetype: string;
|
|
response_body_encode: ((output: any) => Buffer);
|
|
};
|
|
/**
|
|
*/
|
|
type type_routenode = {
|
|
operations: Record</*lib_plankton.http.enum_method*/ string, type_operation<any, any>>;
|
|
sub_branch: Record<string, type_routenode>;
|
|
sub_wildcard: (null | {
|
|
name: string;
|
|
node: type_routenode;
|
|
});
|
|
};
|
|
/**
|
|
*/
|
|
type type_rest = {
|
|
api: lib_plankton.api.type_api;
|
|
routetree: type_routenode;
|
|
versioning_method: ("none" | "path" | "header" | "query");
|
|
versioning_header_name: (null | string);
|
|
versioning_query_key: (null | string);
|
|
header_parameters: Array<{
|
|
name: string;
|
|
description: (null | string);
|
|
required: boolean;
|
|
}>;
|
|
set_access_control_headers: boolean;
|
|
authentication: ({
|
|
kind: "none";
|
|
parameters: {};
|
|
} | {
|
|
kind: "key_header";
|
|
parameters: {
|
|
name: string;
|
|
};
|
|
});
|
|
};
|
|
}
|
|
declare namespace lib_plankton.rest {
|
|
/**
|
|
*/
|
|
function make(options?: {
|
|
title?: (null | string);
|
|
versioning_method?: ("none" | "path" | "header" | "query");
|
|
versioning_header_name?: (null | string);
|
|
versioning_query_key?: (null | string);
|
|
header_parameters?: Array<{
|
|
name: string;
|
|
description: (null | string);
|
|
required: boolean;
|
|
}>;
|
|
set_access_control_headers?: boolean;
|
|
authentication?: ({
|
|
kind: "none";
|
|
parameters: {};
|
|
} | {
|
|
kind: "key_header";
|
|
parameters: {
|
|
name: string;
|
|
};
|
|
});
|
|
actions?: Array<{
|
|
http_method: lib_plankton.http.enum_method;
|
|
path: string;
|
|
options: {
|
|
active?: ((version: string) => boolean);
|
|
restriction?: (null | type_restriction<any>);
|
|
execution?: type_execution<any, any>;
|
|
title?: (null | string);
|
|
description?: (null | string);
|
|
query_parameters?: ((version: string) => Array<{
|
|
name: string;
|
|
description: (null | string);
|
|
required: boolean;
|
|
}>);
|
|
input_schema?: ((version: string) => type_oas_schema);
|
|
output_schema?: ((version: string) => type_oas_schema);
|
|
request_body_mimetype?: string;
|
|
request_body_decode?: ((http_request_body: Buffer, http_request_header_content_type: (null | string)) => any);
|
|
response_body_mimetype?: string;
|
|
response_body_encode?: ((output: any) => Buffer);
|
|
};
|
|
}>;
|
|
}): type_rest;
|
|
/**
|
|
*/
|
|
function register<type_input, type_output>(rest: type_rest, http_method: lib_plankton.http.enum_method, path: string, options: {
|
|
active?: ((version: string) => boolean);
|
|
restriction?: (null | type_restriction<type_input>);
|
|
execution?: type_execution<type_input, type_output>;
|
|
title?: (null | string);
|
|
description?: (null | string);
|
|
query_parameters?: ((version: string) => Array<{
|
|
name: string;
|
|
description: (null | string);
|
|
required: boolean;
|
|
}>);
|
|
input_schema?: ((version: (null | string)) => type_oas_schema);
|
|
output_schema?: ((version: (null | string)) => type_oas_schema);
|
|
request_body_mimetype?: string;
|
|
request_body_decode?: ((http_request_body: Buffer, http_request_header_content_type: (null | string)) => any);
|
|
response_body_mimetype?: string;
|
|
response_body_encode?: ((output: any) => Buffer);
|
|
}): void;
|
|
/**
|
|
* @todo check request body mimetype?
|
|
* @todo check query paramater validity
|
|
*/
|
|
function call(rest: type_rest, http_request: lib_plankton.http.type_request, options?: {
|
|
checklevel_restriction?: lib_plankton.api.enum_checklevel;
|
|
checklevel_input?: lib_plankton.api.enum_checklevel;
|
|
checklevel_output?: lib_plankton.api.enum_checklevel;
|
|
}): Promise<lib_plankton.http.type_response>;
|
|
/**
|
|
* @see https://swagger.io/specification/#openapi-object
|
|
*/
|
|
function to_oas(rest: type_rest, options?: {
|
|
version?: (null | string);
|
|
servers?: Array<string>;
|
|
}): any;
|
|
}
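/*
 * Illustrative sketch (not part of the generated declarations): a minimal REST setup
 * with a single GET route, dispatched with an incoming lib_plankton.http.type_request
 * ("http_request" here stands for a request decoded elsewhere). Defaults for the
 * omitted options are assumed.
 *
 *     const rest = lib_plankton.rest.make({
 *         "title": "example",
 *         "actions": [
 *             {
 *                 "http_method": lib_plankton.http.enum_method.get,
 *                 "path": "/ping",
 *                 "options": {
 *                     "execution": (stuff => Promise.resolve({"status_code": 200, "data": "pong"})),
 *                 },
 *             },
 *         ],
 *     });
 *     const http_response = await lib_plankton.rest.call(rest, http_request);
 *     const oas_document = lib_plankton.rest.to_oas(rest, {});
 */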
|
|
declare namespace lib_plankton.server {
/**
* @author fenris
*/
type type_metadata = {
ip_address: string;
};
/**
* @author fenris
*/
type type_subject = {
host: string;
port: int;
threshold: (null | float);
handle: ((input: Buffer, metadata?: type_metadata) => Promise<string>);
serverobj: any;
};
/**
* @author fenris
*/
function make(handle: ((input: string, metadata?: type_metadata) => Promise<string>), options?: {
host?: string;
port?: int;
threshold?: (null | float);
}): type_subject;
/**
* @author fenris
* @deprecated
*/
function make_old(port: int, handle: ((input: string, metadata?: type_metadata) => Promise<string>)): type_subject;
/**
* @author fenris
* @see https://nodejs.org/api/net.html#serverlistenport-host-backlog-callback
*/
function start(subject: type_subject): Promise<void>;
/**
* @author fenris
*/
function kill(subject: type_subject): void;
}
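/*
 * Illustrative sketch (not part of the generated declarations): a plain TCP echo
 * server using the functions declared above; host and port are example values.
 *
 *     const subject = lib_plankton.server.make(
 *         ((input, metadata) => Promise.resolve(input)),
 *         {"host": "127.0.0.1", "port": 12345}
 *     );
 *     await lib_plankton.server.start(subject);
 *     // ... later:
 *     lib_plankton.server.kill(subject);
 */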
|
|
declare namespace lib_plankton.server {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_server {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
protected subject: type_subject;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor(handle: ((input: Buffer, metadata?: type_metadata) => Promise<string>), options?: {
|
|
host?: string;
|
|
port?: int;
|
|
});
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
start(): Promise<void>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
kill(): void;
|
|
}
|
|
}
|
|
declare var lib_server: typeof lib_plankton.server;
|
|
declare namespace lib_plankton.args {
|
|
/**
|
|
*/
|
|
enum enum_environment {
|
|
cli = "cli",
|
|
url = "url"
|
|
}
|
|
/**
|
|
*/
|
|
enum enum_kind {
|
|
positional = "positional",
|
|
volatile = "volatile"
|
|
}
|
|
/**
|
|
*/
|
|
enum enum_type {
|
|
boolean = "boolean",
|
|
integer = "int",
|
|
float = "float",
|
|
string = "string"
|
|
}
|
|
/**
|
|
*/
|
|
enum enum_mode {
|
|
replace = "replace",
|
|
accumulate = "accumulate"
|
|
}
|
|
}
declare namespace lib_plankton.args {
    /**
     * @author fenris
     */
    class class_argument {
        /**
         * @author fenris
         */
        protected name: string;
        /**
         * @author fenris
         */
        protected kind: enum_kind;
        /**
         * @author fenris
         */
        protected type: enum_type;
        /**
         * @author fenris
         */
        protected mode: enum_mode;
        /**
         * @author fenris
         */
        protected default_: any;
        /**
         * @author fenris
         */
        protected info: string;
        /**
         * @author fenris
         */
        protected parameters: Object;
        /**
         * @author fenris
         */
        protected hidden: boolean;
        /**
         * @author fenris
         */
        constructor({ "name": name, "type": type, "kind": kind, "mode": mode, "default": default_, "info": info, "parameters": parameters, "hidden": hidden, }: {
            name: string;
            type?: enum_type;
            kind?: enum_kind;
            mode?: enum_mode;
            default?: any;
            info?: string;
            parameters?: Object;
            hidden?: boolean;
        });
        /**
         * @author fenris
         */
        static positional({ "name": name, "type": type, "mode": mode, "default": default_, "info": info, "hidden": hidden, "index": index, }: {
            name: string;
            type?: enum_type;
            mode?: enum_mode;
            default?: any;
            info?: string;
            hidden?: boolean;
            index: int;
        }): class_argument;
        /**
         * @author fenris
         */
        static volatile({ "name": name, "type": type, "mode": mode, "default": default_, "info": info, "hidden": hidden, "indicators_short": indicators_short, "indicators_long": indicators_long, }: {
            name: string;
            type?: enum_type;
            mode?: enum_mode;
            default?: any;
            info?: string;
            hidden?: boolean;
            indicators_short: Array<string>;
            indicators_long: Array<string>;
        }): class_argument;
        /**
         * @author fenris
         */
        check(): boolean;
        /**
         * @author fenris
         */
        name_get(): string;
        /**
         * @author fenris
         */
        kind_get(): enum_kind;
        /**
         * @author fenris
         */
        type_get(): enum_type;
        /**
         * @author fenris
         */
        mode_get(): enum_mode;
        /**
         * @author fenris
         */
        default_get(): any;
        /**
         * @author fenris
         */
        parameters_get(): Object;
        /**
         * @author fenris
         */
        hidden_get(): boolean;
        /**
         * @author fenris
         */
        toString(): string;
        /**
         * @author fenris
         */
        indicator_main(): string;
        /**
         * @author fenris
         */
        pattern_value(): string;
        /**
         * @author fenris
         */
        extract(raw: string): any;
        /**
         * @author fenris
         */
        assign(data: Object, target: string, raw: string): void;
        /**
         * @author fenris
         */
        make(data: Object, target: string): string;
        /**
         * @author fenris
         */
        generate_help(): string;
    }
}
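/*
 * Usage sketch (illustrative only; the argument names and values are invented for
 * this example, the API calls follow the declarations above):
 *
 *     const argument_output = lib_plankton.args.class_argument.volatile({
 *         "name": "output",
 *         "type": lib_plankton.args.enum_type.string,
 *         "default": "-",
 *         "info": "path of the output file ('-' for stdout)",
 *         "indicators_short": ["o"],
 *         "indicators_long": ["output"],
 *     });
 *     const argument_input = lib_plankton.args.class_argument.positional({
 *         "name": "input",
 *         "type": lib_plankton.args.enum_type.string,
 *         "index": 0,
 *     });
 */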
declare namespace lib_plankton.args {
    /**
     * @author fenris
     */
    var verbosity: int;
    /**
     * @author fenris
     * @todo check validity
     */
    class class_handler {
        /**
         * @author fenris
         */
        protected arguments_: {
            [name: string]: class_argument;
        };
        /**
         * @author fenris
         */
        constructor(arguments_: {
            [name: string]: class_argument;
        });
        /**
         * @author fenris
         */
        filter(kind: enum_kind): {
            [name: string]: class_argument;
        };
        /**
         * @author fenris
         */
        read(environment: enum_environment, input: string, data?: {
            [name: string]: any;
        }): {
            [name: string]: any;
        };
        /**
         * @author fenris
         * @todo handle the case that the data object lacks a required field or holds a value of the wrong type
         */
        write(environment: enum_environment, data: {
            [name: string]: any;
        }): string;
        /**
         * @desc manpage-like info-sheet
         * @author fenris
         */
        generate_help({ "programname": programname, "author": author, "description": description, "executable": executable, }: {
            programname?: string;
            author?: string;
            description?: string;
            executable?: string;
        }): string;
    }
}
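/*
 * Usage sketch (illustrative only; builds on the two example arguments sketched
 * above and assumes a Node.js style process.argv; nothing here is part of the
 * generated declarations):
 *
 *     const handler = new lib_plankton.args.class_handler({
 *         "output": argument_output,
 *         "input": argument_input,
 *     });
 *     const data = handler.read(
 *         lib_plankton.args.enum_environment.cli,
 *         process.argv.slice(2).join(" ")
 *     );
 *     console.log(data["input"], data["output"]);
 *     console.log(handler.generate_help({"programname": "example"}));
 */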
declare namespace lib_plankton.bcrypt {
    /**
     * computes the bcrypt image (hash) of the given input
     */
    function compute(input: string, options?: {
        rounds?: int;
        salt?: (null | string);
    }): Promise<string>;
    /**
     * checks whether the given plain password matches the stored password image
     */
    function compare(password_shall_image: string, password_is: string): Promise<boolean>;
}
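/*
 * Usage sketch (illustrative only, inside an async context, assuming the runtime
 * implementation matches the declarations above):
 *
 *     const image = await lib_plankton.bcrypt.compute("secret", {"rounds": 10});
 *     const valid = await lib_plankton.bcrypt.compare(image, "secret");
 *     // valid === true for the matching password, false otherwise
 */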
declare namespace lib_plankton.map {
    /**
     */
    type type_pair<type_key, type_value> = {
        key: type_key;
        value: type_value;
    };
    /**
     * @author fenris
     */
    type type_map<type_key, type_value> = {
        size: (() => int);
        has: ((key: type_key) => boolean);
        get: ((key: type_key, fallback?: lib_plankton.pod.type_pod<type_value>) => type_value);
        set: ((key: type_key, value: type_value) => void);
        delete: ((key: type_key) => void);
        iterate: ((procedure: ((value: type_value, key?: type_key) => void)) => void);
    };
}
declare namespace lib_plankton.map {
    /**
     */
    function clear<type_key, type_value>(map: type_map<type_key, type_value>): void;
    /**
     */
    function keys<type_key, type_value>(map: type_map<type_key, type_value>): Array<type_key>;
    /**
     */
    function values<type_key, type_value>(map: type_map<type_key, type_value>): Array<type_value>;
    /**
     */
    function dump<type_key, type_value>(map: type_map<type_key, type_value>): Array<type_pair<type_key, type_value>>;
    /**
     */
    function show<type_key, type_value>(map: type_map<type_key, type_value>, options?: {
        show_key?: ((key: type_key) => string);
        show_value?: ((value: type_value) => string);
    }): string;
}
declare namespace lib_plankton.map.simplemap {
    /**
     */
    type type_subject<type_value> = {
        data: Record<string, type_value>;
    };
    /**
     */
    function make<type_value>(options?: {
        data?: Record<string, type_value>;
    }): type_subject<type_value>;
    /**
     */
    function size<type_value>(subject: type_subject<type_value>): int;
    /**
     */
    function has<type_value>(subject: type_subject<type_value>, key: string): boolean;
    /**
     */
    function get_safe<type_value>(subject: type_subject<type_value>, key: string): lib_plankton.pod.type_pod<type_value>;
    /**
     */
    function get<type_value>(subject: type_subject<type_value>, key: string, fallback?: lib_plankton.pod.type_pod<type_value>): type_value;
    /**
     */
    function set<type_value>(subject: type_subject<type_value>, key: string, value: type_value): void;
    /**
     */
    function delete_<type_value>(subject: type_subject<type_value>, key: string): void;
    /**
     */
    function iterate<type_value>(subject: type_subject<type_value>, procedure: ((value?: type_value, key?: string) => void)): void;
    /**
     */
    function implementation_map<type_value>(subject: type_subject<type_value>): type_map<string, type_value>;
}
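/*
 * Usage sketch (illustrative only; keys and values are invented for the example).
 * It also shows how the generic lib_plankton.map functions above operate on the
 * type_map returned by implementation_map:
 *
 *     const subject = lib_plankton.map.simplemap.make<number>({"data": {"a": 1}});
 *     lib_plankton.map.simplemap.set(subject, "b", 2);
 *     const map = lib_plankton.map.simplemap.implementation_map(subject);
 *     console.log(map.has("a"), map.get("b"));    // true 2
 *     console.log(lib_plankton.map.keys(map));    // ["a", "b"]
 */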
declare namespace lib_plankton.map.hashmap {
    /**
     * the hashmap is based on a simplemap whose keys are the hashes and whose values are the key/value pairs
     */
    type type_subject<type_key, type_value> = {
        hashing: ((key: type_key) => string);
        core: lib_plankton.map.simplemap.type_subject<type_pair<type_key, type_value>>;
    };
    /**
     */
    function make<type_key, type_value>(hashing: ((key: type_key) => string), options?: {
        pairs?: Array<type_pair<type_key, type_value>>;
    }): type_subject<type_key, type_value>;
    /**
     */
    function size<type_key, type_value>(subject: type_subject<type_key, type_value>): int;
    /**
     */
    function has<type_key, type_value>(subject: type_subject<type_key, type_value>, key: type_key): boolean;
    /**
     */
    function get<type_key, type_value>(subject: type_subject<type_key, type_value>, key: type_key, fallback?: lib_plankton.pod.type_pod<type_value>): type_value;
    /**
     */
    function set<type_key, type_value>(subject: type_subject<type_key, type_value>, key: type_key, value: type_value): void;
    /**
     */
    function delete_<type_key, type_value>(subject: type_subject<type_key, type_value>, key: type_key): void;
    /**
     */
    function iterate<type_key, type_value>(subject: type_subject<type_key, type_value>, procedure: ((value?: type_value, key?: type_key) => void)): void;
    /**
     */
    function implementation_map<type_key, type_value>(subject: type_subject<type_key, type_value>): type_map<type_key, type_value>;
}
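/*
 * Usage sketch (illustrative only; the key shape {id: number} and the hashing
 * function are invented for the example):
 *
 *     type key = {id: number};
 *     const subject = lib_plankton.map.hashmap.make<key, string>(
 *         (key) => String(key.id)
 *     );
 *     lib_plankton.map.hashmap.set(subject, {"id": 1}, "one");
 *     console.log(lib_plankton.map.hashmap.get(subject, {"id": 1}));   // "one"
 */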
declare namespace lib_plankton.map.collatemap {
    /**
     */
    type type_collation<type_key> = ((key1: type_key, key2: type_key) => boolean);
    /**
     */
    export type type_subject<type_key, type_value> = {
        collation: type_collation<type_key>;
        pairs: Array<type_pair<type_key, type_value>>;
    };
    /**
     */
    export function make<type_key, type_value>(collation: type_collation<type_key>, options?: {
        pairs?: Array<type_pair<type_key, type_value>>;
    }): type_subject<type_key, type_value>;
    /**
     */
    export function size<type_key, type_value>(subject: type_subject<type_key, type_value>): int;
    /**
     */
    export function has<type_key, type_value>(subject: type_subject<type_key, type_value>, key: type_key): boolean;
    /**
     * @todo use .find
     */
    export function get<type_key, type_value>(subject: type_subject<type_key, type_value>, key: type_key, fallback?: lib_plankton.pod.type_pod<type_value>): type_value;
    /**
     */
    export function set<type_key, type_value>(subject: type_subject<type_key, type_value>, key: type_key, value: type_value): void;
    /**
     */
    export function delete_<type_key, type_value>(subject: type_subject<type_key, type_value>, key: type_key): void;
    /**
     */
    export function iterate<type_key, type_value>(subject: type_subject<type_key, type_value>, function_: ((value?: type_value, key?: type_key) => void)): void;
    /**
     */
    export function implementation_map<type_key, type_value>(subject: type_subject<type_key, type_value>): type_map<type_key, type_value>;
    export {};
}
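/*
 * Usage sketch (illustrative only; the case-insensitive string collation is
 * invented for the example):
 *
 *     const subject = lib_plankton.map.collatemap.make<string, number>(
 *         (key1, key2) => (key1.toLowerCase() === key2.toLowerCase())
 *     );
 *     lib_plankton.map.collatemap.set(subject, "Foo", 1);
 *     console.log(lib_plankton.map.collatemap.has(subject, "foo"));   // true
 */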
declare namespace lib_plankton.base64 {
    /**
     * @author fenris
     */
    type type_source = string;
    /**
     * @author fenris
     */
    type type_target = string;
    /**
     * @author fenris
     */
    export function encode(source: type_source): type_target;
    /**
     * @author fenris
     */
    export function decode(target: type_target): type_source;
    /**
     * @author fenris
     */
    export function implementation_code(): lib_plankton.code.type_code<type_source, type_target>;
    export {};
}
declare namespace lib_plankton.base64 {
    /**
     * @author fenris
     */
    class class_base64 implements lib_plankton.code.interface_code<string, string> {
        /**
         * @author fenris
         */
        constructor();
        /**
         * @implementation
         * @author fenris
         */
        encode(x: string): string;
        /**
         * @implementation
         * @author fenris
         */
        decode(x: string): string;
    }
}
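/*
 * Usage sketch (illustrative only, assuming the runtime implementation matches the
 * declarations above):
 *
 *     const encoded = lib_plankton.base64.encode("hello");
 *     const decoded = lib_plankton.base64.decode(encoded);   // "hello"
 *
 *     // equivalent, via the object oriented wrapper:
 *     const codec = new lib_plankton.base64.class_base64();
 *     codec.decode(codec.encode("hello"));                   // "hello"
 */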
declare namespace lib_plankton.auth {
    /**
     */
    type type_auth<type_preparation, type_execute_input, type_control_input> = {
        login_prepare: (() => Promise<type_preparation>);
        login_execute: ((input: type_execute_input) => Promise<string>);
        login_control: ((input: type_control_input) => Promise<void>);
    };
}
declare namespace lib_plankton.auth {
}
declare namespace lib_plankton.auth.internal {
    /**
     */
    type type_parameters = {
        password_image_chest: lib_plankton.storage.type_chest<string, string, void, string, string>;
        check_password: ((image: string, input: string) => Promise<boolean>);
    };
    /**
     */
    type type_preparation = null;
    /**
     */
    type type_execute_input = {
        name: string;
        password: string;
    };
    /**
     */
    type type_control_input = void;
    /**
     */
    export function implementation_auth(parameters: type_parameters): type_auth<type_preparation, type_execute_input, type_control_input>;
    export {};
}
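/*
 * Usage sketch (illustrative only, inside an async context; `some_chest` stands for
 * any lib_plankton.storage.type_chest<string, string, void, string, string> holding
 * the stored password images and is not defined here):
 *
 *     const auth = lib_plankton.auth.internal.implementation_auth({
 *         "password_image_chest": some_chest,
 *         "check_password": (image, input) => lib_plankton.bcrypt.compare(image, input),
 *     });
 *     const result = await auth.login_execute({"name": "alice", "password": "secret"});
 */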
declare namespace lib_plankton.auth.oidc {
    /**
     */
    type type_token = string;
    /**
     */
    type type_userinfo = {
        name: (null | string);
        email: (null | string);
    };
    /**
     */
    export type type_parameters_raw = {
        url_authorization: string;
        url_token: string;
        url_userinfo: string;
        client_id: string;
        client_secret: string;
        url_redirect: string;
        scopes?: (null | Array<string>);
        label?: string;
    };
    /**
     */
    export type type_parameters = {
        url_authorization: string;
        url_token: string;
        url_userinfo: string;
        client_id: string;
        client_secret: string;
        url_redirect: string;
        scopes: Array<string>;
        label: string;
    };
    /**
     */
    export type type_subject = {
        parameters: type_parameters;
        state: {};
    };
    /**
     */
    export function make(parameters_raw: type_parameters_raw): type_subject;
    /**
     * @see https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest
     */
    export function authorization_url(subject: type_subject): string;
    /**
     */
    export function handle_authorization_callback(subject: type_subject, cookie: (null | string), stuff: Record<string, string>): Promise<{
        token: type_token;
        userinfo: type_userinfo;
    }>;
    export {};
}
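/*
 * Usage sketch (illustrative only, inside an async context; the provider URLs,
 * client credentials and the callback variables are invented for the example):
 *
 *     const subject = lib_plankton.auth.oidc.make({
 *         "url_authorization": "https://idp.example.org/authorize",
 *         "url_token": "https://idp.example.org/token",
 *         "url_userinfo": "https://idp.example.org/userinfo",
 *         "client_id": "my-client",
 *         "client_secret": "my-secret",
 *         "url_redirect": "https://app.example.org/callback",
 *     });
 *     // step 1: send the user agent to the authorization endpoint
 *     const url = lib_plankton.auth.oidc.authorization_url(subject);
 *     // step 2: on the redirect back, pass the cookie header and query parameters along
 *     const result = await lib_plankton.auth.oidc.handle_authorization_callback(
 *         subject,
 *         request_cookie_header,       // (null | string), taken from the incoming request
 *         request_query_parameters     // Record<string, string>, e.g. {"code": "...", "state": "..."}
 *     );
 *     console.log(result.token, result.userinfo.email);
 */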