/**
 * @author fenris
 */
type int = number;
/**
 * @author fenris
 */
type float = number;
declare var process: any;
declare var require: any;
declare class Buffer {
    constructor(x: string, modifier?: string);
    static from(x: string, encoding?: string): any;
    toString(modifier?: string): string;
}
declare namespace lib_plankton.base {
    /**
     * @author fenris
     */
    function environment(): string;
}
/**
 * @author fenris
 */
type type_pseudopointer<type_value> = {
    value: type_value;
};
/**
 * @author fenris
 */
declare function pseudopointer_null<type_value>(): type_pseudopointer<type_value>;
/**
 * @author fenris
 */
declare function pseudopointer_make<type_value>(value: type_value): type_pseudopointer<type_value>;
/**
 * @author fenris
 */
declare function pseudopointer_isset<type_value>(pseudopointer: type_pseudopointer<type_value>): boolean;
/**
 * @author fenris
 */
declare function pseudopointer_read<type_value>(pseudopointer: type_pseudopointer<type_value>): type_value;
/**
 * @author fenris
 */
declare function pseudopointer_write<type_value>(pseudopointer: type_pseudopointer<type_value>, value: type_value): void;
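/*
 * Usage sketch (illustrative only, not part of the declarations): how the
 * pseudopointer helpers are expected to combine, assuming the obvious
 * read/write semantics of the single "value" field declared above.
 *
 *     const counter = pseudopointer_make<int>(0);
 *     if (pseudopointer_isset(counter)) {
 *         pseudopointer_write(counter, pseudopointer_read(counter) + 1);
 *     }
 *     // counter.value is now 1
 */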
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare var instance_verbosity: int;
|
|
/**
|
|
* @desc the ability to check for equality with another element of the same domain
|
|
* @author fenris
|
|
*/
|
|
interface interface_collatable<type_value> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
_collate(value: type_value): boolean;
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function instance_collate<type_value>(value1: (type_value & {
|
|
_collate?: ((value: type_value) => boolean);
|
|
}), value2: type_value): boolean;
|
|
/**
 * @desc the ability to compare with another element of the same domain, to determine whether the first is "smaller than or equal to" the latter
 * @author fenris
 */
|
|
interface interface_comparable<type_value> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
_compare(value: type_value): boolean;
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function instance_compare<type_value>(value1: (type_value & {
|
|
_compare: ((value: type_value) => boolean);
|
|
}), value2: type_value): boolean;
|
|
/**
|
|
* @desc the ability to create an exact copy
|
|
* @author fenris
|
|
*/
|
|
interface interface_cloneable<type_value> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
_clone(): type_value;
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function instance_clone<type_value>(value: (type_value & {
|
|
_clone?: (() => type_value);
|
|
})): type_value;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
interface interface_hashable {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
_hash(): string;
|
|
}
|
|
/**
|
|
* @desc the ability to generate a string out of the element, which identifies it to a high degree
|
|
* @author fenris
|
|
*/
|
|
declare function instance_hash<type_value>(value: (type_value & {
|
|
_hash?: (() => string);
|
|
})): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
interface interface_showable {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
_show(): string;
|
|
}
|
|
/**
|
|
* @desc the ability to map the element to a textual representation (most likely not injective)
|
|
* @author fenris
|
|
*/
|
|
declare function instance_show<type_value>(value: (type_value & {
|
|
_show?: (() => string);
|
|
})): string;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
interface interface_decorator<type_core> {
|
|
/**
|
|
* @author frac
|
|
*/
|
|
core: type_core;
|
|
}
|
|
/**
|
|
* @author frac
|
|
*/
|
|
declare class class_observer {
|
|
/**
|
|
* @author frac
|
|
*/
|
|
protected counter: int;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
protected actions: {
|
|
[id: string]: (information: Object) => void;
|
|
};
|
|
/**
|
|
* @author frac
|
|
*/
|
|
protected buffer: Array<Object>;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
constructor();
|
|
/**
|
|
* @author frac
|
|
*/
|
|
empty(): boolean;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
flush(): void;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
set(id: string, action: (information: Object) => void): void;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
del(id: string): void;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
add(action: (information: Object) => void): void;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
notify(information?: Object, delayed?: boolean): void;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
rollout(): void;
|
|
}
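/*
 * Usage sketch (illustrative only): registering actions on a class_observer
 * and notifying them; the dispatch order is an assumption, only the declared
 * method signatures above are given.
 *
 *     const observer = new class_observer();
 *     observer.set("logger", (information) => console.log("event", information));
 *     observer.add((information) => handle(information)); // "handle" is a hypothetical callback
 *     observer.notify({"source": "example"});
 *     observer.del("logger");
 */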
|
|
/**
 * @author frac
 */
|
|
declare class class_error extends Error {
|
|
/**
|
|
* @author frac
|
|
*/
|
|
protected suberrors: Array<Error>;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
protected mess: string;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
constructor(message: string, suberrors?: Array<Error>);
|
|
/**
|
|
* @override
|
|
* @author frac
|
|
*/
|
|
toString(): string;
|
|
}
|
|
declare namespace lib_plankton.base {
    /**
     * returns the current UNIX timestamp
     *
     * @author fenris
     */
    function get_current_timestamp(rounded?: boolean): float;
    /**
     */
    function object_merge(core: Record<string, any>, mantle: Record<string, any>): Record<string, any>;
    /**
     */
    function buffer_show(buffer: Buffer, { "block_size": option_block_size, "break_char": option_break_char, }?: {
        block_size?: int;
        break_char?: string;
    }): string;
}
|
|
declare module lib_plankton.pod {
    /**
     * @author fenris
     */
    type type_pod<type_value> = {
        kind: ("empty" | "filled");
        value?: type_value;
    };
    /**
     * @author fenris
     */
    function make_empty<type_value>(): type_pod<type_value>;
    /**
     * @author fenris
     */
    function make_filled<type_value>(value: type_value): type_pod<type_value>;
    /**
     * whether the pod is filled
     *
     * @author fenris
     */
    function is_filled<type_value>(pod: type_pod<type_value>): boolean;
    /**
     * returns the value stored in the pod wrapper
     *
     * @author fenris
     */
    function cull<type_value>(pod: type_pod<type_value>): type_value;
    /**
     * to pass on an empty pod or to use a filled pod
     *
     * @author fenris
     */
    function propagate<type_value, type_value_>(pod: type_pod<type_value>, function_: ((value: type_value) => type_value_)): type_pod<type_value_>;
    /**
     * @author fenris
     */
    function distinguish<type_value, type_result>(pod: type_pod<type_value>, function_empty: (() => type_result), function_filled: ((value: type_value) => type_result)): type_result;
    /**
     */
    function show<type_value>(pod: type_pod<type_value>, options?: {
        show_value?: ((value: type_value) => string);
    }): string;
}
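/*
 * Usage sketch (illustrative only): the pod type works like an option/maybe
 * value; this assumes only the functions declared above, with propagate
 * applying the given function to a filled pod.
 *
 *     const pod_a = lib_plankton.pod.make_filled<int>(42);
 *     const pod_b = lib_plankton.pod.propagate(pod_a, (value) => (value + 1));
 *     const text = lib_plankton.pod.distinguish(
 *         pod_b,
 *         () => "nothing",
 *         (value) => ("got " + value.toFixed(0))
 *     );
 *     // text === "got 43"
 */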
|
|
declare module lib_plankton.pod {
|
|
/**
|
|
*/
|
|
class class_pod<type_value> {
|
|
private subject;
|
|
constructor(subject: type_pod<type_value>);
|
|
tear(): type_pod<type_value>;
|
|
static empty<type_value>(): class_pod<type_value>;
|
|
static filled<type_value>(value: type_value): class_pod<type_value>;
|
|
is_empty(): boolean;
|
|
is_filled(): boolean;
|
|
cull(): type_value;
|
|
show(show_value?: any): string;
|
|
toString(): string;
|
|
propagate<type_value_>(function_: ((value: type_value) => type_value_)): class_pod<type_value_>;
|
|
distinguish<type_result>(function_empty: (() => type_result), function_filled: ((value: type_value) => type_result)): type_result;
|
|
}
|
|
}
|
|
/**
|
|
* might be completely obsolete
|
|
*/
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_promise<type_result, type_reason> = Promise<type_result>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_reject<type_result, type_reason>(reason: type_reason): type_promise<type_result, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_resolve<type_result, type_reason>(result: type_result): type_promise<type_result, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_make<type_result, type_reason>(executor: (resolve: ((result?: type_result) => void), reject: ((reason?: type_reason) => void)) => void): type_promise<type_result, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_then_close<type_result, type_reason>(promise: type_promise<type_result, type_reason>, resolver: ((result: type_result) => void), rejector: ((reason: type_reason) => void)): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_then_append<type_result, type_reason, type_result_>(promise: type_promise<type_result, type_reason>, resolver: ((result: type_result) => type_promise<type_result_, type_reason>), rejector?: ((reason: type_reason) => type_promise<type_result_, type_reason>)): type_promise<type_result_, type_result>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_all<type_result, type_reason>(promises: Array<type_promise<type_result, type_reason>>): type_promise<Array<type_result>, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_chain<type_result, type_reason>(promises: (Array<(input: type_result) => type_promise<type_result, type_reason>>), start?: type_result): type_promise<type_result, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_condense<type_element, type_reason>(promises: Array<() => type_promise<type_element, type_reason>>): type_promise<Array<type_element>, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_group<type_reason>(promises: Record<string, (() => type_promise<any, type_reason>)>, options?: {
|
|
serial?: boolean;
|
|
}): type_promise<Record<string, any>, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_wrap<type_result_inner, type_result_outer, type_reason>(promise: type_promise<type_result_inner, type_reason>, transformator_result: ((reason: type_result_inner) => type_result_outer), transformator_reason?: ((reason: type_reason) => type_reason)): type_promise<type_result_outer, type_reason>;
|
|
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_attach<type_reason>(state: Record<string, any>, promise: type_promise<any, type_reason>, name: string): type_promise<Record<string, any>, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_delay<type_result, type_reason>(promise: type_promise<type_result, type_reason>, delay: int): type_promise<type_result, type_reason>;
|
|
}
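/*
 * Usage sketch (illustrative only): promise_condense runs the given promise
 * factories and collects their results; whether execution is serial is an
 * assumption not stated in the declarations above.
 *
 *     lib_plankton.call.promise_condense<string, Error>([
 *         () => Promise.resolve("first"),
 *         () => Promise.resolve("second"),
 *     ])
 *         .then((results) => console.log(results)); // ["first", "second"]
 */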
|
|
/**
|
|
* initializer might be obsolete, since promises are reusable after having been resolved or rejected
|
|
*/
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
enum enum_initializer_state {
|
|
initial = 0,
|
|
waiting = 1,
|
|
successful = 2,
|
|
failed = 3
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_initializer<type_result, type_reason> = {
|
|
fetcher: (() => type_promise<type_result, type_reason>);
|
|
state?: enum_initializer_state;
|
|
queue: Array<{
|
|
resolve: ((result?: type_result) => void);
|
|
reject: ((reason?: type_reason) => void);
|
|
}>;
|
|
result?: type_result;
|
|
reason?: type_reason;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function initializer_make<type_result, type_reason>(fetcher: (() => type_promise<type_result, type_reason>)): type_initializer<type_result, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function initializer_reset<type_result, type_reason>(subject: type_initializer<type_result, type_reason>): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function initializer_state<type_result, type_reason>(subject: type_initializer<type_result, type_reason>): enum_initializer_state;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function initializer_get<type_result, type_reason>(subject: type_initializer<type_result, type_reason>): type_promise<type_result, type_reason>;
|
|
}
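/*
 * Usage sketch (illustrative only): an initializer memoizes a one-time fetch;
 * that repeated calls to initializer_get reuse the first result is an
 * assumption based on the comments and types above.
 *
 *     const initializer = lib_plankton.call.initializer_make<string, Error>(
 *         () => Promise.resolve("expensive value")
 *     );
 *     lib_plankton.call.initializer_get(initializer).then((value) => console.log(value));
 *     lib_plankton.call.initializer_get(initializer).then((value) => console.log(value)); // no second fetch expected
 */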
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_deferral<type_input, type_output> = {
|
|
representation: (input: type_input) => Promise<type_output>;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
* @desc activates the deferral and handles its output according to a given procedure
|
|
* @param {(value : type_value)=>void} procedure a function which receives the output of the deferral as argument
|
|
*/
|
|
function deferral_use<type_input, type_output>(deferral: type_deferral<type_input, type_output>, input: type_input, procedure: (output: type_output) => void): void;
|
|
/**
|
|
* @author fenris
|
|
* @desc creates a deferral-subject (similar to "new Promise", where "convey" reflects "resolve"/"reject")
|
|
*/
|
|
function deferral_make<type_input, type_output>(handler: (input: type_input, convey: (output: type_output) => void) => void): type_deferral<type_input, type_output>;
|
|
/**
|
|
* @author fenris
|
|
* @desc wraps a simple function into a deferral (similar to "Promise.resolve"/"Promise.reject")
|
|
*/
|
|
function deferral_wrap<type_input, type_output>(function_: (input: type_input) => type_output): type_deferral<type_input, type_output>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function deferral_id<type_value>(): type_deferral<type_value, type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function deferral_const<type_value>(value: type_value): type_deferral<type_value, type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function deferral_delay<type_output>(output: type_output, delay: int): type_deferral<any, type_output>;
|
|
/**
|
|
* @author fenris
|
|
* @desc connects two deferrals to form a new one; the output of the first is taken as input for the second
|
|
* (similar to "Promise.then" when passing a function which returns a new promise)
|
|
* @param {type_deferral<type_value1>} first a simple deferral
|
|
* @param {(value1 : type_value1)=>type_deferral<type_value2>} second a function depending from a value returning a deferral
|
|
*/
|
|
function deferral_compose_serial<type_input, type_between, type_output>(first: type_deferral<type_input, type_between>, second: type_deferral<type_between, type_output>): type_deferral<type_input, type_output>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function deferral_compose_parallel<type_input, type_output_left, type_output_right>({ "left": deferral_left, "right": deferral_right, }: {
|
|
left: type_deferral<type_input, type_output_left>;
|
|
right: type_deferral<type_input, type_output_right>;
|
|
}): type_deferral<type_input, {
|
|
left: type_output_left;
|
|
right: type_output_right;
|
|
}>;
|
|
/**
|
|
* @author fenris
|
|
* @desc repeatedly applied serial composition
|
|
*/
|
|
function deferral_chain<type_value>(members: Array<type_deferral<type_value, type_value>>): type_deferral<type_value, type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
}
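/*
 * Usage sketch (illustrative only): building and composing deferrals, mirroring
 * the "new Promise"/"then" analogy given in the comments above.
 *
 *     const parse = lib_plankton.call.deferral_make<string, int>(
 *         (input, convey) => convey(parseInt(input, 10))
 *     );
 *     const double = lib_plankton.call.deferral_wrap<int, int>((x) => (x * 2));
 *     const combined = lib_plankton.call.deferral_compose_serial(parse, double);
 *     lib_plankton.call.deferral_use(combined, "21", (output) => console.log(output)); // 42
 */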
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_deferral<type_input, type_output> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private subject;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private constructor();
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private static _cram;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private static _tear;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static make<type_input, type_output>(handler: (input: type_input, convey: (value: type_output) => void) => void): class_deferral<type_input, type_output>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
use(input: type_input, procedure: (value: type_output) => void): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
compose_serial<type_output_>(second: class_deferral<type_output, type_output_>): class_deferral<type_input, type_output_>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static chain<type_value>(members: Array<class_deferral<type_value, type_value>>): class_deferral<type_value, type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static wrap<type_input, type_output>(function_: (input: type_input) => type_output): class_deferral<type_input, type_output>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static const_<type_value>(value: type_value): class_deferral<type_value, type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static delay<type_output>(output: type_output, delay: int): class_deferral<any, type_output>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
* converts the "arguments"-map into an array
|
|
*
|
|
* @param {Object} args
|
|
* @author fenris
|
|
*/
|
|
export function args2list(args: any): Array<any>;
|
|
/**
|
|
* just the empty function; useful for some callbacks etc.
|
|
*
|
|
* @author fenris
|
|
*/
|
|
export function nothing(): void;
|
|
/**
|
|
* just the identity; useful for some callbacks etc.; defined as function instead of const for using type parameters
|
|
*
|
|
* @author fenris
|
|
*/
|
|
export function id<type_value>(x: type_value): type_value;
|
|
/**
 * the constant function; returns a function which ignores its argument and always yields x; useful for some callbacks etc.
 *
 * @author fenris
 */
|
|
export function const_<type_value>(x: type_value): ((y: any) => type_value);
|
|
/**
 * composes two functions (i.e. returns a function that yields the result of the successive execution of both input functions)
 *
 * @param {function} function_f
 * @param {function} function_g
 * @author fenris
 */
|
|
export function compose<type_x, type_y, type_z>(function_f: ((type_x: any) => type_y), function_g: ((type_y: any) => type_z)): ((value: type_x) => type_z);
|
|
/**
 * transforms a function with sequential input into a function with leveled input; example: add(2,3) = curryfy(add)(2)(3)
 *
 * @param {function} f
 * @return {function} the curried version of the input function
 * @author fenris
 */
|
|
export function curryfy(f: Function): Function;
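/*
 * Usage sketch (illustrative only), matching the add(2,3) example from the
 * comment above:
 *
 *     const add = (x: number, y: number) => (x + y);
 *     const add_curried = lib_plankton.call.curryfy(add);
 *     add_curried(2)(3); // 5, same as add(2, 3)
 */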
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
export function convey(value: any, functions: Array<Function>): any;
|
|
/**
|
|
*/
|
|
class class_value_wrapper<type_value> {
|
|
/**
|
|
*/
|
|
private value;
|
|
/**
|
|
*/
|
|
constructor(value: type_value);
|
|
/**
|
|
*/
|
|
convey<type_value_result>(function_: ((value: type_value) => type_value_result)): class_value_wrapper<type_value_result>;
|
|
/**
|
|
*/
|
|
cull(): type_value;
|
|
}
|
|
/**
|
|
*/
|
|
export function wrap<type_value>(value: type_value): class_value_wrapper<type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
export function timeout(procedure: (() => void), delay_in_seconds: float): int;
|
|
/**
|
|
* Promise version of "setTimeout"
|
|
*
|
|
* @author fenris
|
|
*/
|
|
export function defer<type_result>(seconds: float, action: (() => type_result)): Promise<type_result>;
|
|
/**
|
|
* a definition for a value being "defined"
|
|
*
|
|
* @author neuc
|
|
*/
|
|
export function is_def<type_value>(obj: type_value, options?: {
|
|
null_is_valid?: boolean;
|
|
}): boolean;
|
|
/**
 * returns the value if it is set (and, when a type is specified, if the type matches); otherwise returns default_value
 *
 * @author neuc
 */
|
|
export function def_val(value: any, default_value: any, options?: {
|
|
type?: (null | string);
|
|
null_is_valid?: boolean;
|
|
}): any;
|
|
/**
|
|
* provides the call for an attribute of a class as a regular function; useful for processing lists of objects
|
|
*
|
|
* @param {string} name the name of the attribute
|
|
* @return {function}
|
|
* @author fenris
|
|
*/
|
|
export function attribute<type_object, type_attribute>(name: string): ((object: type_object) => type_attribute);
|
|
/**
|
|
* provides a method of a class as a regular function; useful for processing lists of objects
|
|
*
|
|
* @param {string} name the name of the method
|
|
* @return {function}
|
|
* @author fenris
|
|
*/
|
|
export function method<type_object, type_output>(name: string): ((object: type_object) => type_output);
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
export type type_coproduct = {
|
|
kind: string;
|
|
data?: any;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
export function distinguish<type_output>(coproduct: type_coproduct, handlers: Record<string, ((data?: any) => type_output)>, options?: {
|
|
fallback?: (null | ((coproduct?: type_coproduct) => type_output));
|
|
}): type_output;
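/*
 * Usage sketch (illustrative only): dispatching on a coproduct's "kind"; the
 * kind names used here ("ok"/"error") are made up for the example, only the
 * declared signature above is given.
 *
 *     const result: type_coproduct = {"kind": "ok", "data": 42};
 *     const message = distinguish<string>(
 *         result,
 *         {
 *             "ok": (data) => ("value: " + data),
 *             "error": (data) => ("failed: " + data),
 *         },
 *         {"fallback": () => "unknown kind"}
 *     );
 */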
|
|
/**
|
|
*/
|
|
export function try_catch_wrap<type_value>(get_value: (() => type_value)): {
|
|
value: (null | type_value);
|
|
error: (null | any);
|
|
};
|
|
/**
|
|
*/
|
|
export function try_catch_wrap_async<type_value>(get_value: (() => Promise<type_value>)): Promise<{
|
|
value: (null | type_value);
|
|
error: (null | any);
|
|
}>;
|
|
/**
|
|
*/
|
|
export function sleep(seconds: float): Promise<void>;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.email {
|
|
/**
|
|
*/
|
|
function send(smtp_credentials: {
|
|
host: string;
|
|
port: int;
|
|
username: string;
|
|
password: string;
|
|
}, sender: string, receivers: Array<string>, subject: string, content: string): Promise<void>;
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
*/
|
|
enum enum_level {
|
|
debug = 0,
|
|
info = 1,
|
|
notice = 2,
|
|
warning = 3,
|
|
error = 4
|
|
}
|
|
/**
|
|
*/
|
|
type type_entry = {
|
|
level: enum_level;
|
|
incident: string;
|
|
tags: Array<string>;
|
|
details: any;
|
|
};
|
|
/**
|
|
*/
|
|
type type_channel_description = lib_plankton.call.type_coproduct;
|
|
/**
|
|
*/
|
|
type type_channel_logic = {
|
|
send: ((entry: type_entry) => void);
|
|
};
|
|
/**
|
|
*/
|
|
type type_logger_data = Array<type_channel_description>;
|
|
/**
|
|
*/
|
|
type type_logger_logic = Array<type_channel_logic>;
|
|
/**
|
|
*/
|
|
type type_format_definition = ({
|
|
kind: "jsonl";
|
|
data: {
|
|
structured: boolean;
|
|
};
|
|
} | {
|
|
kind: "human_readable";
|
|
data: {};
|
|
});
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
*/
|
|
function level_order(level1: enum_level, level2: enum_level): boolean;
|
|
/**
|
|
*/
|
|
function level_show(level: enum_level, { "abbreviated": option_abbreviated, }?: {
|
|
abbreviated?: boolean;
|
|
}): string;
|
|
/**
|
|
*/
|
|
function level_decode(level_string: string): enum_level;
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
* @todo use label
|
|
*/
|
|
function get_logger_logic(logger_data: type_logger_data): type_logger_logic;
|
|
/**
|
|
*/
|
|
function format_entry(format_definition: type_format_definition, entry: type_entry): string;
|
|
/**
|
|
*/
|
|
function parse_format_definition(format_definition_raw: any): type_format_definition;
|
|
}
|
|
declare namespace lib_plankton.log.channel.filtered {
|
|
/**
|
|
*/
|
|
type type_predicate = ((entry: type_entry) => boolean);
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
/**
|
|
* @todo check if it has to be logic
|
|
*/
|
|
core: type_channel_logic;
|
|
predicate: type_predicate;
|
|
};
|
|
/**
|
|
*/
|
|
function predicate_incident(substring: string): type_predicate;
|
|
/**
|
|
*/
|
|
function predicate_level(threshold: enum_level): type_predicate;
|
|
/**
|
|
*/
|
|
function predicate_tag(tag: string): type_predicate;
|
|
/**
|
|
* combines other predicates in disjunctive normal form
|
|
*/
|
|
function predicate_complex(definition: Array<Array<{
|
|
mode: boolean;
|
|
item: type_predicate;
|
|
}>>): type_predicate;
|
|
/**
|
|
*/
|
|
function send(subject: type_subject, entry: type_entry): void;
|
|
/**
|
|
*/
|
|
function logic(subject: type_subject): type_channel_logic;
|
|
}
|
|
declare namespace lib_plankton.log.channel.minlevel {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
/**
|
|
* @todo check if it has to be logic
|
|
*/
|
|
core: type_channel_logic;
|
|
threshold: enum_level;
|
|
};
|
|
/**
|
|
*/
|
|
function send(subject: type_subject, entry: type_entry): void;
|
|
/**
|
|
*/
|
|
function logic(subject: type_subject): type_channel_logic;
|
|
}
|
|
declare namespace lib_plankton.log.channel.std {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
target: ("stdout" | "stderr");
|
|
format: type_format_definition;
|
|
};
|
|
/**
|
|
*/
|
|
function send(subject: type_subject, entry: type_entry): void;
|
|
/**
|
|
*/
|
|
function logic(subject: type_subject): type_channel_logic;
|
|
}
|
|
declare namespace lib_plankton.log.channel.file {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
path: string;
|
|
format: type_format_definition;
|
|
};
|
|
/**
|
|
*/
|
|
function send(subject: type_subject, entry: type_entry): void;
|
|
/**
|
|
*/
|
|
function logic(subject: type_subject): type_channel_logic;
|
|
}
|
|
declare namespace lib_plankton.log.channel.notify {
|
|
/**
|
|
*/
|
|
type type_subject = {};
|
|
/**
|
|
* @todo tags
|
|
*/
|
|
function send(subject: type_subject, entry: type_entry): void;
|
|
/**
|
|
*/
|
|
function logic(subject: type_subject): type_channel_logic;
|
|
}
|
|
declare namespace lib_plankton.log.channel.email {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
smtp_credentials: {
|
|
host: string;
|
|
port: int;
|
|
username: string;
|
|
password: string;
|
|
};
|
|
sender: string;
|
|
receivers: Array<string>;
|
|
};
|
|
/**
|
|
* @todo tags
|
|
*/
|
|
function send(subject: type_subject, entry: type_entry): void;
|
|
/**
|
|
*/
|
|
function logic(subject: type_subject): type_channel_logic;
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
*/
|
|
function get_channel_logic(channel_description: type_channel_description): type_channel_logic;
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
*/
|
|
function default_logger(): type_logger_data;
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
*/
|
|
function set_main_logger(logger_data: type_logger_data): void;
|
|
/**
|
|
* consumes a log entry, i.e. sends it to all channels
|
|
*/
|
|
function send_(logger: type_logger_logic, entry: type_entry): void;
|
|
/**
|
|
* [convenience]
|
|
*
|
|
* @todo rename to "send"
|
|
*/
|
|
function debug_(logger: type_logger_logic, incident: string, { "tags": option_tags, "details": option_details, }?: {
|
|
tags?: Array<string>;
|
|
details?: any;
|
|
}): void;
|
|
/**
|
|
* [convenience]
|
|
*
|
|
* @todo rename to "info"
|
|
*/
|
|
function info_(logger: type_logger_logic, incident: string, { "tags": option_tags, "details": option_details, }?: {
|
|
tags?: Array<string>;
|
|
details?: any;
|
|
}): void;
|
|
/**
|
|
* [convenience]
|
|
*
|
|
* @todo rename to "notice"
|
|
*/
|
|
function notice_(logger: type_logger_logic, incident: string, { "tags": option_tags, "details": option_details, }?: {
|
|
tags?: Array<string>;
|
|
details?: any;
|
|
}): void;
|
|
/**
|
|
* [convenience]
|
|
*
|
|
* @todo rename to "warning"
|
|
*/
|
|
function warning_(logger: type_logger_logic, incident: string, { "tags": option_tags, "details": option_details, }?: {
|
|
tags?: Array<string>;
|
|
details?: any;
|
|
}): void;
|
|
/**
|
|
* [convenience]
|
|
*
|
|
* @todo rename to "error"
|
|
*/
|
|
function error_(logger: type_logger_logic, incident: string, { "tags": option_tags, "details": option_details, }?: {
|
|
tags?: Array<string>;
|
|
details?: any;
|
|
}): void;
|
|
/**
|
|
* [convenience]
|
|
*/
|
|
function _send(entry: type_entry): void;
|
|
/**
|
|
* [convenience]
|
|
*/
|
|
function _debug(incident: string, { "tags": option_tags, "details": option_details, }?: {
|
|
tags?: Array<string>;
|
|
details?: any;
|
|
}): void;
|
|
/**
|
|
* [convenience]
|
|
*/
|
|
function _info(incident: string, { "tags": option_tags, "details": option_details, }?: {
|
|
tags?: Array<string>;
|
|
details?: any;
|
|
}): void;
|
|
/**
|
|
* [convenience]
|
|
*/
|
|
function _notice(incident: string, { "tags": option_tags, "details": option_details, }?: {
|
|
tags?: Array<string>;
|
|
details?: any;
|
|
}): void;
|
|
/**
|
|
* [convenience]
|
|
*/
|
|
function _warning(incident: string, { "tags": option_tags, "details": option_details, }?: {
|
|
tags?: Array<string>;
|
|
details?: any;
|
|
}): void;
|
|
/**
|
|
* [convenience]
|
|
*/
|
|
function _error(incident: string, { "tags": option_tags, "details": option_details, }?: {
|
|
tags?: Array<string>;
|
|
details?: any;
|
|
}): void;
|
|
/**
|
|
* [convenience]
|
|
*
|
|
* @deprecated use ._debug instead!
|
|
*/
|
|
function debug(incident: string, details?: any, tags?: Array<string>): void;
|
|
/**
|
|
* [convenience]
|
|
*
|
|
* @deprecated use ._info instead!
|
|
*/
|
|
function info(incident: string, details?: any, tags?: Array<string>): void;
|
|
/**
|
|
* [convenience]
|
|
*
|
|
* @deprecated use ._notice instead!
|
|
*/
|
|
function notice(incident: string, details?: any, tags?: Array<string>): void;
|
|
/**
|
|
* [convenience]
|
|
*
|
|
* @deprecated use ._warning instead!
|
|
*/
|
|
function warning(incident: string, details?: any, tags?: Array<string>): void;
|
|
/**
|
|
* [convenience]
|
|
*
|
|
* @deprecated use ._error instead!
|
|
*/
|
|
function error(incident: string, details?: any, tags?: Array<string>): void;
|
|
}
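/*
 * Configuration sketch (illustrative only): a logger is a list of channel
 * descriptions (coproducts). The "kind" strings and data shapes below are
 * assumptions inferred from the channel namespaces above; only the declared
 * types and function signatures are given.
 *
 *     lib_plankton.log.set_main_logger([
 *         {
 *             "kind": "std",
 *             "data": {
 *                 "target": "stdout",
 *                 "format": {"kind": "human_readable", "data": {}},
 *             },
 *         },
 *     ]);
 *     lib_plankton.log._info("server_started", {"tags": ["startup"], "details": {"port": 8080}});
 */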
|
|
declare var plain_text_to_html: (text: string) => string;
|
|
/**
 * @desc makes a valid sentence out of the given string
 */
|
|
declare var format_sentence: (str: string, rtl?: boolean, caseSense?: boolean) => string;
|
|
declare var fill_string_template: (template_string: string, object: any, fabric: Function, delimiter: string, default_string: string, sloppy: boolean) => string;
|
|
declare var make_string_template: (_template: string, _fabrics?: Object) => (object: {
|
|
[key: string]: string;
|
|
}) => string;
|
|
declare var make_eml_header: (object: {
|
|
[key: string]: string;
|
|
}) => string;
|
|
declare var make_eml_body: Object;
|
|
declare namespace lib_plankton.string {
|
|
/**
|
|
* @author neuc,frac
|
|
*/
|
|
function empty(str: string): boolean;
|
|
/**
|
|
* @desc returns a unique string
|
|
* @param {string} prefix an optional prefix for the generated string
|
|
* @return {string}
|
|
* @author fenris
|
|
*/
|
|
function generate(prefix?: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function join(parts: Array<string>, glue?: string): string;
|
|
/**
 * @desc splits a string, but returns an empty list if the string is empty
 * @param {string} chain
 * @param {string} separator
 * @return {Array<string>}
 * @author fenris
 */
|
|
function split(chain: string, separator: string): Array<string>;
|
|
/**
|
|
* @author neu3no
|
|
*/
|
|
function explode(str: string, needle: string, max: int): Array<string>;
|
|
/**
 * @desc concatenates a given word with itself n times
 * @param {string} word
 * @param {int} count
 * @return {string}
 * @author fenris
 */
|
|
function repeat(word: string, count: int): string;
|
|
/**
|
|
* @desc lengthens a string by repeatedly appending or prepending another string
|
|
* @param {string} word the string to pad
|
|
* @param {int} length the length, which the result shall have
|
|
* @param {string} symbol the string, which will be added (multiple times)
|
|
* @param {boolean} [prepend]; whether to prepend (~true) or append (~false); default: false
|
|
* @return {string} the padded string
|
|
* @author fenris
|
|
*/
|
|
function pad(word: string, length: int, symbol?: string, mode?: string): string;
|
|
/**
 * @desc checks if a given string contains a certain substring
 * @param {string} string
 * @param {string} part
 * @return {boolean}
 * @author fenris
 */
|
|
function contains(chain: string, part: string): boolean;
|
|
/**
|
|
* @desc checks if a given string starts with a certain substring
|
|
* @param {string} string
|
|
* @param {string} part
|
|
* @return {boolean}
|
|
* @author fenris
|
|
*/
|
|
function startsWith(chain: string, part: string): boolean;
|
|
/**
|
|
* @desc checks if a given string ends with a certain substring
|
|
* @param {string} string
|
|
* @param {string} part
|
|
* @return {boolean}
|
|
* @author fenris
|
|
*/
|
|
function endsWith(chain: string, part: string): boolean;
|
|
/**
 * @desc counts the occurrences of a string in a string
 * @param string haystack_string the string which should be examined
 * @param string needle_string the string which should be counted
 * @author neuc
 */
|
|
function count_occourrences(haystack_string: string, needle_string: string, check_escape: boolean): int;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function replace(str: string, replacements: Array<{
|
|
from: string;
|
|
to: string;
|
|
}>, options?: {}): string;
|
|
/**
 * @desc replaces occurrences of "{{name}}" in a string by the corresponding values of an argument object
 * @author fenris
 */
|
|
function coin(str: string, args: {
|
|
[id: string]: string;
|
|
}, options?: {
|
|
legacy?: boolean;
|
|
open?: string;
|
|
close?: string;
|
|
}): string;
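/*
 * Usage sketch (illustrative only), based on the "{{name}}" convention stated
 * in the comment above:
 *
 *     coin(
 *         "hello {{name}}, welcome to {{place}}",
 *         {"name": "fenris", "place": "plankton"}
 *     );
 *     // expected: "hello fenris, welcome to plankton"
 */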
|
|
/**
|
|
* @author fenris
|
|
* @deprecated use limit
|
|
*/
|
|
function cut(str: string, length: int, delimiter?: string): string;
|
|
/**
|
|
*/
|
|
function limit(str: string, options?: {
|
|
length?: int;
|
|
indicator?: string;
|
|
}): string;
|
|
/**
|
|
*/
|
|
function slice(str: string, size: int): Array<string>;
|
|
/**
|
|
*/
|
|
function capitalize(str: string): string;
|
|
}
|
|
/**
|
|
* @deprecated
|
|
*/
|
|
declare namespace lib_string {
|
|
const empty: typeof lib_plankton.string.empty;
|
|
const generate: typeof lib_plankton.string.generate;
|
|
const split: typeof lib_plankton.string.split;
|
|
const explode: typeof lib_plankton.string.repeat;
|
|
const repeat: typeof lib_plankton.string.repeat;
|
|
const pad: typeof lib_plankton.string.pad;
|
|
const contains: typeof lib_plankton.string.contains;
|
|
const startsWith: typeof lib_plankton.string.startsWith;
|
|
const endsWith: typeof lib_plankton.string.endsWith;
|
|
const count_occourrences: typeof lib_plankton.string.count_occourrences;
|
|
const coin: typeof lib_plankton.string.coin;
|
|
const stance: typeof lib_plankton.string.coin;
|
|
const cut: typeof lib_plankton.string.cut;
|
|
}
|
|
declare namespace lib_plankton.string {
|
|
/**
 * an implementation of C's sprintf
 * @param {string} string format string
 * @param {array} args arguments which should be filled in
 * @returns {string}
 */
|
|
var sprintf: (input: string, args?: Array<any>, original?: any) => string;
|
|
/**
 * an implementation of C's printf
 * @param {string} string format string
 * @param {array} args arguments which should be filled in
 * @returns {string}
 */
|
|
function printf(format: any, args: any): void;
|
|
}
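/*
 * Usage sketch (illustrative only): the supported conversion specifiers are an
 * assumption (standard C-style "%s"/"%d" placeholders); only the signature is
 * declared above.
 *
 *     lib_plankton.string.sprintf("%s has %d entries", ["table_users", 42]);
 *     // expected: "table_users has 42 entries"
 */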
|
|
declare var sprintf: (input: string, args?: Array<any>, original?: any) => string;
|
|
declare var printf: typeof lib_plankton.string.printf;
|
|
declare var eml_log: any;
|
|
declare var track_exports: any;
|
|
declare var make_logger: (prefix: any, current_loglevel: any) => (obj: any, lvl: any) => void;
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
*/
|
|
type type_query = {
|
|
template: string;
|
|
arguments: Record<string, any>;
|
|
};
|
|
/**
|
|
*/
|
|
enum enum_type {
|
|
boolean = "boolean",
|
|
integer = "integer",
|
|
string_short = "string_short",
|
|
string_medium = "string_medium",
|
|
string_long = "string_long",
|
|
float = "float"
|
|
}
|
|
/**
|
|
*/
|
|
type type_description_create_table = {
|
|
name: string;
|
|
key_field?: (null | {
|
|
name: string;
|
|
type?: enum_type;
|
|
comment?: (null | string);
|
|
auto_increment?: (null | boolean);
|
|
description?: (null | string);
|
|
});
|
|
data_fields?: Array<{
|
|
name: string;
|
|
nullable?: boolean;
|
|
type: enum_type;
|
|
default?: any;
|
|
description?: (null | string);
|
|
}>;
|
|
constraints?: Array<{
|
|
kind: string;
|
|
parameters?: Record<string, any>;
|
|
}>;
|
|
description?: (null | string);
|
|
};
|
|
/**
|
|
*/
|
|
type type_description_insert = {
|
|
table_name: string;
|
|
values: Record<string, any>;
|
|
returning?: (null | string);
|
|
};
|
|
/**
|
|
*/
|
|
type type_description_update = {
|
|
table_name: string;
|
|
values: Record<string, any>;
|
|
condition?: (null | string);
|
|
arguments?: (null | Record<string, any>);
|
|
};
|
|
/**
|
|
*/
|
|
type type_description_delete = {
|
|
table_name: string;
|
|
condition?: (null | string);
|
|
arguments?: (null | Record<string, any>);
|
|
};
|
|
/**
|
|
*/
|
|
type type_description_select = {
|
|
source: string;
|
|
fields?: (null | Array<string>);
|
|
condition?: (null | string);
|
|
group_by?: (null | string);
|
|
having?: (null | string);
|
|
order_by?: (null | string);
|
|
limit?: (null | int);
|
|
arguments?: (null | Record<string, any>);
|
|
};
|
|
/**
|
|
* rows
|
|
*/
|
|
type type_result_get = Array<Record<string, any>>;
|
|
/**
|
|
* auto insert id
|
|
*/
|
|
type type_result_put = (null | int);
|
|
/**
|
|
* number of affected rows
|
|
*/
|
|
type type_result_set = int;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_database = {
|
|
wrap_name: ((name: string) => string);
|
|
query_free_get: ((query: type_query) => Promise<type_result_get>);
|
|
query_free_put: ((query: type_query) => Promise<type_result_put>);
|
|
query_free_set: ((query: type_query) => Promise<type_result_set>);
|
|
query_create_table: ((description_create_table: type_description_create_table) => Promise<void>);
|
|
query_insert: ((description_insert: type_description_insert) => Promise<type_result_put>);
|
|
query_update: ((description_update: type_description_update) => Promise<type_result_set>);
|
|
query_delete: ((description_delete: type_description_delete) => Promise<type_result_set>);
|
|
query_select: ((description_select: type_description_select) => Promise<type_result_get>);
|
|
};
|
|
/**
|
|
*/
|
|
type interface_database = database.type_database;
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @todo default case?
|
|
*/
|
|
function sql_common_value_format(value: any): string;
|
|
/**
|
|
*/
|
|
function sql_common_formulation_create_table(description_create_table: type_description_create_table, options?: {
|
|
auto_increment_keyword?: (null | string);
|
|
auto_increment_special?: (null | string);
|
|
omit_comments?: boolean;
|
|
type_map?: Record<enum_type, string>;
|
|
wrap_name?: ((name: string) => string);
|
|
}): type_query;
|
|
/**
|
|
*/
|
|
function sql_common_formulation_insert(description_insert: type_description_insert, options?: {
|
|
wrap_name?: ((name: string) => string);
|
|
set_returning?: boolean;
|
|
}): type_query;
|
|
/**
|
|
*/
|
|
function sql_common_formulation_update(description_update: type_description_update, options?: {
|
|
wrap_name?: ((name: string) => string);
|
|
}): type_query;
|
|
/**
|
|
*/
|
|
function sql_common_formulation_delete(description_delete: type_description_delete, options?: {
|
|
wrap_name?: ((name: string) => string);
|
|
}): type_query;
|
|
/**
|
|
*/
|
|
function sql_common_formulation_select(description_select: type_description_select, options?: {
|
|
wrap_name?: ((name: string) => string);
|
|
}): type_query;
|
|
}
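/*
 * Usage sketch (illustrative only): building a SELECT query description and
 * turning it into a parameterized query; the ":id" placeholder syntax is an
 * assumption, only the shapes of the involved types are declared above.
 *
 *     const description: lib_plankton.database.type_description_select = {
 *         "source": "users",
 *         "fields": ["id", "name"],
 *         "condition": "id = :id",
 *         "arguments": {"id": 42},
 *     };
 *     const query = lib_plankton.database.sql_common_formulation_select(description);
 *     // query.template and query.arguments can then be passed to a query_free_get implementation
 */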
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
*/
|
|
type type_sqlite_subject = {
|
|
path: string;
|
|
handle: any;
|
|
};
|
|
/**
|
|
*/
|
|
type type_sqlite_parameters = {
|
|
path: string;
|
|
};
|
|
/**
|
|
*/
|
|
function sqlite_make(parameters: type_sqlite_parameters): type_sqlite_subject;
|
|
/**
|
|
*/
|
|
function sqlite_wrap_name(name: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function sqlite_query_free_get(subject: type_sqlite_subject, query: type_query): Promise<type_result_get>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function sqlite_query_free_put(subject: type_sqlite_subject, query: type_query): Promise<type_result_put>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function sqlite_query_free_set(subject: type_sqlite_subject, query: type_query): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function sqlite_formulation_create_table(description_create_table: type_description_create_table): type_query;
|
|
/**
|
|
*/
|
|
function sqlite_query_create_table(subject: type_sqlite_subject, description: type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function sqlite_formulation_insert(description_insert: type_description_insert): type_query;
|
|
/**
|
|
*/
|
|
function sqlite_query_insert(subject: type_sqlite_subject, description_insert: type_description_insert): Promise<type_result_put>;
|
|
/**
|
|
*/
|
|
function sqlite_formulation_update(description_update: type_description_update): type_query;
|
|
/**
|
|
*/
|
|
function sqlite_query_update(subject: type_sqlite_subject, description_update: type_description_update): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function sqlite_formulation_delete(description_delete: type_description_delete): type_query;
|
|
/**
|
|
*/
|
|
function sqlite_query_delete(subject: type_sqlite_subject, description_delete: type_description_delete): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function sqlite_formulation_select(description_select: type_description_select): type_query;
|
|
/**
|
|
*/
|
|
function sqlite_query_select(subject: type_sqlite_subject, description_select: type_description_select): Promise<Array<Record<string, any>>>;
|
|
/**
|
|
*/
|
|
function sqlite_database(parameters: type_sqlite_parameters): type_database;
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_sqlite implements interface_database {
|
|
private subject;
|
|
constructor(parameters: type_sqlite_parameters);
|
|
wrap_name(name: any): string;
|
|
query_free_get(query: any): Promise<type_result_get>;
|
|
query_free_put(query: any): Promise<number>;
|
|
query_free_set(query: any): Promise<number>;
|
|
query_create_table(description_create_table: any): Promise<void>;
|
|
query_insert(description_insert: any): Promise<number>;
|
|
query_update(description_update: any): Promise<number>;
|
|
query_delete(description_delete: any): Promise<number>;
|
|
query_select(description_select: any): Promise<Record<string, any>[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_postgresql_subject = {
|
|
host: string;
|
|
port: int;
|
|
username: string;
|
|
password: string;
|
|
schema: string;
|
|
pool: (null | any);
|
|
};
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_postgresql_parameters = {
|
|
host: string;
|
|
port?: int;
|
|
username: string;
|
|
password: string;
|
|
schema: string;
|
|
};
|
|
/**
|
|
*/
|
|
function postgresql_make(parameters: type_postgresql_parameters): type_postgresql_subject;
|
|
/**
|
|
*/
|
|
function postgresql_wrap_name(name: string): string;
|
|
/**
|
|
* @author fenris
|
|
* @see https://node-postgres.com/apis/pool#poolquery
|
|
*/
|
|
function postgresql_query_free_get(subject: type_postgresql_subject, query: type_query): Promise<type_result_get>;
|
|
/**
|
|
* @author fenris
|
|
* @see https://node-postgres.com/apis/pool#poolquery
|
|
*/
|
|
function postgresql_query_free_put(subject: type_postgresql_subject, query: type_query): Promise<type_result_put>;
|
|
/**
|
|
* @author fenris
|
|
* @see https://node-postgres.com/apis/pool#poolquery
|
|
*/
|
|
function postgresql_query_free_set(subject: type_postgresql_subject, query: type_query): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function postgresql_formulation_create_table(description_create_table: type_description_create_table): type_query;
|
|
/**
|
|
*/
|
|
function postgresql_query_create_table(subject: type_postgresql_subject, description: type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function postgresql_formulation_insert(description_insert: type_description_insert): type_query;
|
|
/**
|
|
*/
|
|
function postgresql_query_insert(subject: type_postgresql_subject, description_insert: type_description_insert): Promise<type_result_put>;
|
|
/**
|
|
*/
|
|
function postgresql_formulation_update(description_update: type_description_update): type_query;
|
|
/**
|
|
*/
|
|
function postgresql_query_update(subject: type_postgresql_subject, description_update: type_description_update): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function postgresql_formulation_delete(description_delete: type_description_delete): type_query;
|
|
/**
|
|
*/
|
|
function postgresql_query_delete(subject: type_postgresql_subject, description_delete: type_description_delete): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function postgresql_formulation_select(description_select: type_description_select): type_query;
|
|
/**
|
|
*/
|
|
function postgresql_query_select(subject: type_postgresql_subject, description_select: type_description_select): Promise<Array<Record<string, any>>>;
|
|
/**
|
|
*/
|
|
function postgresql_database(parameters: type_postgresql_parameters): type_database;
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_postgresql implements interface_database {
|
|
private subject;
|
|
constructor(parameters: type_postgresql_parameters);
|
|
wrap_name(name: any): string;
|
|
query_free_get(query: any): Promise<type_result_get>;
|
|
query_free_put(query: any): Promise<number>;
|
|
query_free_set(query: any): Promise<number>;
|
|
query_create_table(description_create_table: any): Promise<void>;
|
|
query_insert(description_insert: any): Promise<number>;
|
|
query_update(description_update: any): Promise<number>;
|
|
query_delete(description_delete: any): Promise<number>;
|
|
query_select(description_select: any): Promise<Record<string, any>[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_mysql_subject = {
|
|
verbose: boolean;
|
|
};
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_mysql_parameters = {
|
|
verbose?: boolean;
|
|
};
|
|
/**
|
|
*/
|
|
function mysql_make(parameters: type_mysql_parameters): type_mysql_subject;
|
|
/**
|
|
*/
|
|
function mysql_wrap_name(name: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function mysql_query_free_get(subject: type_mysql_subject, query: type_query): Promise<type_result_get>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function mysql_query_free_put(subject: type_mysql_subject, query: type_query): Promise<type_result_put>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function mysql_query_free_set(subject: type_mysql_subject, query: type_query): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_create_table(description_create_table: type_description_create_table): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_create_table(subject: type_mysql_subject, description: type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_insert(description_insert: type_description_insert): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_insert(subject: type_mysql_subject, description_insert: type_description_insert): Promise<type_result_put>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_update(description_update: type_description_update): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_update(subject: type_mysql_subject, description_update: type_description_update): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_delete(description_delete: type_description_delete): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_delete(subject: type_mysql_subject, description_delete: type_description_delete): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_select(description_select: type_description_select): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_select(subject: type_mysql_subject, description_select: type_description_select): Promise<Array<Record<string, any>>>;
|
|
/**
|
|
*/
|
|
function mysql_database(parameters: type_mysql_parameters): type_database;
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_mysql implements interface_database {
|
|
private subject;
|
|
constructor(parameters: type_mysql_parameters);
|
|
wrap_name(name: any): string;
|
|
query_free_get(query: any): Promise<type_result_get>;
|
|
query_free_put(query: any): Promise<number>;
|
|
query_free_set(query: any): Promise<number>;
|
|
query_create_table(description_create_table: any): Promise<void>;
|
|
query_insert(description_insert: any): Promise<number>;
|
|
query_update(description_update: any): Promise<number>;
|
|
query_delete(description_delete: any): Promise<number>;
|
|
query_select(description_select: any): Promise<Record<string, any>[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage {
|
|
/**
 * implements the idea of a database, which houses datasets (type_value) and manages their access by ids or something similar (type_key)
 *
 * @author fenris
 */
|
|
type type_store<type_key, type_value, type_setup_input, type_searchterm, type_preview> = {
|
|
/**
|
|
* shall prepare the storage instance for use
|
|
*
|
|
* @author fenris
|
|
*/
|
|
setup(input: type_setup_input): Promise<void>;
|
|
/**
 * shall insert a new dataset and return its automatically assigned key
 *
 * @author fenris
 */
|
|
create(value: type_value): Promise<type_key>;
|
|
/**
|
|
* shall modify an existing dataset
|
|
*
|
|
* @author fenris
|
|
*/
|
|
update(key: type_key, value: type_value): Promise<void>;
|
|
/**
|
|
* shall remove an existing dataset
|
|
*
|
|
* @author fenris
|
|
*/
|
|
delete(key: type_key): Promise<void>;
|
|
/**
|
|
* shall get an existing dataset by its key
|
|
*
|
|
* @author fenris
|
|
*/
|
|
read(key: type_key): Promise<type_value>;
|
|
/**
|
|
* shall list keys and previews of existing datasets, which match a certain search term
|
|
*
|
|
* @author fenris
|
|
*/
|
|
search(term?: (null | type_searchterm)): Promise<Array<{
|
|
key: type_key;
|
|
preview: type_preview;
|
|
}>>;
|
|
};
|
|
/**
|
|
* for class wrappers
|
|
*/
|
|
type interface_store<type_key, type_value, type_setup_input, type_searchterm, type_preview> = type_store<type_key, type_value, type_setup_input, type_searchterm, type_preview>;
|
|
}
|
|
declare namespace lib_plankton.storage {
|
|
/**
|
|
* implements the idea of a storage without managed keys
|
|
*
|
|
* @author fenris
|
|
*/
|
|
type type_chest<type_key, type_value, type_setup_input, type_searchterm, type_preview> = {
|
|
/**
|
|
* shall prepare the storage instance for use
|
|
*
|
|
* @author fenris
|
|
*/
|
|
setup(input: type_setup_input): Promise<void>;
|
|
/**
|
|
* shall remove all items
|
|
*/
|
|
clear(): Promise<void>;
|
|
/**
|
|
* shall insert a new or modify an existing dataset and return whether it is new
|
|
*
|
|
* @author fenris
|
|
*/
|
|
write(key: type_key, value: type_value): Promise<boolean>;
|
|
/**
|
|
* shall remove an existing dataset
|
|
*
|
|
* @author fenris
|
|
*/
|
|
delete(key: type_key): Promise<void>;
|
|
/**
|
|
* shall get an existing dataset by its key
|
|
*
|
|
* @author fenris
|
|
*/
|
|
read(key: type_key): Promise<type_value>;
|
|
/**
|
|
* shall list keys and previews of existing datasets, which match a certain search term
|
|
*
|
|
* @author fenris
|
|
*/
|
|
search(term?: (null | type_searchterm)): Promise<Array<{
|
|
key: type_key;
|
|
preview: type_preview;
|
|
}>>;
|
|
};
|
|
/**
|
|
* for class wrappers
|
|
*/
|
|
type interface_chest<type_key, type_value, type_setup_input, type_searchterm, type_preview> = type_chest<type_key, type_value, type_setup_input, type_searchterm, type_preview>;
|
|
}
|
|
declare namespace lib_plankton.storage.memory {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_subject<type_value> = {
|
|
data: Record<string, type_value>;
|
|
};
|
|
/**
|
|
*/
|
|
type type_parameters<type_value> = {};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make<type_value>(parameters: type_parameters<type_value>): type_subject<type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function clear<type_value>(subject: type_subject<type_value>): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function write<type_value>(subject: type_subject<type_value>, key: string, value: type_value): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function delete_<type_value>(subject: type_subject<type_value>, key: string): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function read<type_value>(subject: type_subject<type_value>, key: string): type_value;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function list<type_value>(subject: type_subject<type_value>): Array<string>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function search<type_value>(subject: type_subject<type_value>, term: (null | string)): Array<{
|
|
key: string;
|
|
preview: string;
|
|
}>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function implementation_chest<type_value>(parameters: type_parameters<type_value>): type_chest<string, type_value, void, string, string>;
|
|
}
|
|
declare namespace lib_plankton.storage.memory {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_chest<type_item> implements type_chest<string, type_item, void, string, string> {
|
|
private subject;
|
|
constructor(parameters: type_parameters<type_item>);
|
|
setup(input: any): Promise<void>;
|
|
clear(): Promise<void>;
|
|
write(key: any, value: any): Promise<boolean>;
|
|
delete(key: any): Promise<void>;
|
|
read(key: any): Promise<type_item>;
|
|
search(term: any): Promise<{
|
|
key: string;
|
|
preview: string;
|
|
}[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage.filesystem {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_subject = {
|
|
nodemodule: any;
|
|
};
|
|
/**
|
|
*/
|
|
type type_parameters = {};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make(parameters: type_parameters): type_subject;
|
|
/**
|
|
*/
|
|
function clear(subject: type_subject): Promise<void>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function write(subject: type_subject, path: string, content: Buffer): Promise<boolean>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function delete_(subject: type_subject, path: string): Promise<void>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function read(subject: type_subject, path: string): Promise<Buffer>;
|
|
/**
|
|
*/
|
|
function implementation_chest(parameters: type_parameters): type_chest<string, Buffer, void, void, void>;
|
|
}
|
|
declare namespace lib_plankton.storage.filesystem {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_chest implements type_chest<string, Buffer, void, void, void> {
|
|
private subject;
|
|
constructor(parameters: type_parameters);
|
|
setup(input: any): Promise<void>;
|
|
clear(): Promise<void>;
|
|
write(key: any, value: any): Promise<boolean>;
|
|
delete(key: any): Promise<void>;
|
|
read(key: any): Promise<Buffer>;
|
|
search(searchterm: any): Promise<{
|
|
key: string;
|
|
preview: void;
|
|
}[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage {
|
|
/**
|
|
*/
|
|
type type_sql_table_autokey_search_term = {
|
|
expression: string;
|
|
arguments: Record<string, any>;
|
|
};
|
|
/**
|
|
*/
|
|
type type_sql_table_autokey_subject = {
|
|
database_implementation: lib_plankton.database.type_database;
|
|
table_name: string;
|
|
key_name: string;
|
|
};
|
|
/**
|
|
*/
|
|
type type_sql_table_autokey_parameters = {
|
|
database_implementation: lib_plankton.database.type_database;
|
|
table_name: string;
|
|
key_name: string;
|
|
};
|
|
/**
|
|
*/
|
|
function sql_table_autokey_make(parameters: type_sql_table_autokey_parameters): type_sql_table_autokey_subject;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_setup(subject: type_sql_table_autokey_subject, description_create_table: lib_plankton.database.type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_create(subject: type_sql_table_autokey_subject, value: Record<string, any>): Promise<int>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_update(subject: type_sql_table_autokey_subject, key: int, value: Record<string, any>): Promise<void>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_delete(subject: type_sql_table_autokey_subject, key: int): Promise<void>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_read(subject: type_sql_table_autokey_subject, key: int): Promise<Record<string, any>>;
|
|
/**
|
|
* @todo correct preview
|
|
*/
|
|
function sql_table_autokey_search(subject: type_sql_table_autokey_subject, term: (null | type_sql_table_autokey_search_term)): Promise<Array<{
|
|
key: int;
|
|
preview: Record<string, any>;
|
|
}>>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_store(parameters: type_sql_table_autokey_parameters): type_store<int, Record<string, any>, lib_plankton.database.type_description_create_table, type_sql_table_autokey_search_term, Record<string, any>>;
|
|
}
declare namespace lib_plankton.storage {
    /**
     * @author fenris
     */
    class class_sql_table_autokey implements type_store<int, Record<string, any>, lib_plankton.database.type_description_create_table, string, Record<string, any>> {
        private subject;
        constructor(parameters: type_sql_table_autokey_parameters);
        setup(input: any): Promise<void>;
        create(value: any): Promise<number>;
        update(key: any, value: any): Promise<void>;
        delete(key: any): Promise<void>;
        read(key: any): Promise<Record<string, any>>;
        search(term: any): Promise<{
            key: number;
            preview: Record<string, any>;
        }[]>;
    }
}
declare namespace lib_plankton.storage.sql_table_common {
    /**
     */
    type type_sql_table_common_search_term = {
        expression: string;
        arguments: Record<string, any>;
    };
    /**
     */
    type type_subject = {
        database_implementation: lib_plankton.database.type_database;
        table_name: string;
        key_names: Array<string>;
    };
    /**
     */
    type type_parameters = {
        database_implementation: lib_plankton.database.type_database;
        table_name: string;
        key_names: Array<string>;
    };
    /**
     */
    function make(parameters: type_parameters): type_subject;
    /**
     */
    function setup(subject: type_subject, description_create_table: lib_plankton.database.type_description_create_table): Promise<void>;
    /**
     */
    function clear(subject: type_subject): Promise<void>;
    /**
     * @todo optimize: avoid read
     */
    function write(subject: type_subject, key: Array<any>, value: Record<string, any>): Promise<boolean>;
    /**
     */
    function delete_(subject: type_subject, key: Array<any>): Promise<void>;
    /**
     */
    function read(subject: type_subject, key: Array<any>): Promise<Record<string, any>>;
    /**
     * @todo correct preview
     */
    function search(subject: type_subject, term: (null | type_sql_table_common_search_term)): Promise<Array<{
        key: Array<any>;
        preview: Record<string, any>;
    }>>;
    /**
     */
    function chest(parameters: type_parameters): type_chest<Array<any>, Record<string, any>, lib_plankton.database.type_description_create_table, type_sql_table_common_search_term, Record<string, any>>;
}
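/*
 * Usage sketch (illustrative): the same pattern for tables whose key is
 * supplied by the caller and may be composite (`key_names`). As above,
 * `database` and `table_description` are assumed inputs from the database
 * module, and the exact split between key columns and value columns is an
 * implementation detail of the library.
 *
 *     const subject = lib_plankton.storage.sql_table_common.make({
 *         "database_implementation": database,
 *         "table_name": "memberships",
 *         "key_names": ["person_id", "group_id"],
 *     });
 *     await lib_plankton.storage.sql_table_common.setup(subject, table_description);
 *     await lib_plankton.storage.sql_table_common.write(subject, [42, 7], {"role": "admin"});
 *     const row = await lib_plankton.storage.sql_table_common.read(subject, [42, 7]);
 *     await lib_plankton.storage.sql_table_common.delete_(subject, [42, 7]);
 */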
declare namespace lib_plankton.storage.sql_table_common {
    /**
     * @author fenris
     */
    class class_chest implements type_chest<Array<any>, Record<string, any>, lib_plankton.database.type_description_create_table, type_sql_table_common_search_term, Record<string, any>> {
        private subject;
        constructor(parameters: type_parameters);
        setup(input: any): Promise<void>;
        clear(): Promise<void>;
        write(key: any, value: any): Promise<boolean>;
        delete(key: any): Promise<void>;
        read(key: any): Promise<Record<string, any>>;
        search(term: any): Promise<{
            key: any[];
            preview: Record<string, any>;
        }[]>;
    }
}
declare namespace lib_plankton.cache {
    /**
     */
    type type_result<type_value> = {
        retrieved: boolean;
        value: type_value;
    };
    /**
     */
    type type_entry<type_value> = {
        value: type_value;
        expiry: (null | float);
    };
    /**
     */
    type type_subject<type_value> = lib_plankton.storage.type_chest<string, type_entry<type_value>, void, any, any>;
}
declare namespace lib_plankton.cache {
    /**
     */
    function make<type_value>({ "chest": chest, }?: {
        chest?: lib_plankton.storage.type_chest<string, type_entry<type_value>, void, any, any>;
    }): type_subject<type_value>;
    /**
     */
    function init<type_value>(subject: type_subject<type_value>): Promise<void>;
    /**
     */
    function clear<type_value>(subject: type_subject<type_value>): Promise<void>;
    /**
     */
    function invalidate<type_value>(subject: type_subject<type_value>, key: string): Promise<void>;
    /**
     */
    function query<type_value>(subject: type_subject<type_value>, key: string, lifetime: (null | float), retrieve: (() => Promise<type_value>)): Promise<type_result<type_value>>;
    /**
     * syntactic sugar for when it is irrelevant whether the value had to be freshly retrieved
     */
    function get<type_value>(subject: type_subject<type_value>, key: string, lifetime: (null | float), retrieve: (() => Promise<type_value>)): Promise<type_value>;
    /**
     */
    function get_complex<type_input, type_value>(cache: type_subject<type_value>, group: string, input: type_input, lifetime: (null | float), retrieve: ((input: type_input) => Promise<type_value>), { "encode_input": encode_input, }?: {
        encode_input?: ((input: type_input) => string);
    }): Promise<type_value>;
}
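/*
 * Usage sketch (illustrative): memoising an expensive asynchronous lookup.
 * `fetch_user` is a hypothetical function returning a Promise<string>, used
 * only for the example; no chest is passed to `make`, so whatever default the
 * library provides is used. The lifetime value (here 60) is presumed to be in
 * seconds.
 *
 *     const cache = lib_plankton.cache.make<string>({});
 *     await lib_plankton.cache.init(cache);
 *     // retrieved on the first call; afterwards served from the cache until the lifetime expires
 *     const name = await lib_plankton.cache.get<string>(
 *         cache,
 *         "user:42",
 *         60,
 *         () => fetch_user(42)
 *     );
 *     await lib_plankton.cache.invalidate(cache, "user:42");
 */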
declare namespace lib_plankton.file {
    /**
     * @author fenris
     */
    function exists(path: string): Promise<boolean>;
    /**
     * @author fenris
     */
    function read(path: string): Promise<string>;
    /**
     * @author fenris
     */
    function read_buffer(path: string): Promise<Buffer>;
    /**
     * @author fenris
     */
    function read_stdin(): Promise<string>;
    /**
     * @author fenris
     */
    function write(path: string, content: string, options?: {
        encoding?: string;
    }): Promise<void>;
    /**
     * @author fenris
     */
    function write_buffer(path: string, content: Buffer, options?: {}): Promise<void>;
    /**
     */
    function delete_(path: string): Promise<void>;
}
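/*
 * Usage sketch (illustrative): a simple round trip through the file helpers.
 * The path is just an example value.
 *
 *     const path: string = "/tmp/example.txt";
 *     await lib_plankton.file.write(path, "hello\n");
 *     if (await lib_plankton.file.exists(path)) {
 *         const text: string = await lib_plankton.file.read(path);
 *         const raw: Buffer = await lib_plankton.file.read_buffer(path);
 *         await lib_plankton.file.delete_(path);
 *     }
 */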
declare namespace lib_plankton.code {
    /**
     * @author fenris
     */
    interface interface_code<type_from, type_to> {
        /**
         * @author fenris
         */
        encode(x: type_from): type_to;
        /**
         * @author fenris
         */
        decode(x: type_to): type_from;
    }
}
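/*
 * Illustrative sketch (not part of the library): a minimal codec implementing
 * `interface_code`, mapping integers to their decimal string form. It relies
 * only on the two declared members, `encode` and `decode`.
 *
 *     class class_code_decimal implements lib_plankton.code.interface_code<int, string> {
 *         encode(x: int): string {
 *             return x.toFixed(0);
 *         }
 *         decode(x: string): int {
 *             return parseInt(x, 10);
 *         }
 *     }
 */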
declare namespace lib_plankton.code {
    /**
     * @author fenris
     */
    type type_code<type_from, type_to> = {
        /**
         * @author fenris
         */
        encode: (x: type_from) => type_to;
        /**
         * @author fenris
         */
        decode: (x: type_to) => type_from;
    };
}
declare namespace lib_plankton.code {
    /**
     * @author fenris
     */
    function inverse_encode<type_from, type_to>(decode: (to: type_to) => type_from, to: type_to): type_from;
    /**
     * @author fenris
     */
    function inverse_decode<type_from, type_to>(encode: (from: type_from) => type_to, from: type_from): type_to;
}
declare namespace lib_plankton.code {
    /**
     * @author fenris
     */
    class class_code_inverse<type_from, type_to> implements interface_code<type_to, type_from> {
        /**
         * @author fenris
         */
        protected subject: interface_code<type_from, type_to>;
        /**
         * @author fenris
         */
        constructor(subject: interface_code<type_from, type_to>);
        /**
         * @implementation
         * @author fenris
         */
        encode(to: type_to): type_from;
        /**
         * @implementation
         * @author fenris
         */
        decode(from: type_from): type_to;
    }
}
declare namespace lib_plankton.code {
    /**
     * @author fenris
     */
    function pair_encode<type_from, type_between, type_to>(encode_first: (from: type_from) => type_between, encode_second: (between: type_between) => type_to, from: type_from): type_to;
    /**
     * @author fenris
     */
    function pair_decode<type_from, type_between, type_to>(decode_first: (between: type_between) => type_from, decode_second: (to: type_to) => type_between, to: type_to): type_from;
}
declare namespace lib_plankton.code {
    /**
     * @author fenris
     */
    class class_code_pair<type_from, type_between, type_to> implements interface_code<type_from, type_to> {
        /**
         * @author fenris
         */
        protected first: interface_code<type_from, type_between>;
        /**
         * @author fenris
         */
        protected second: interface_code<type_between, type_to>;
        /**
         * @author fenris
         */
        constructor(first: interface_code<type_from, type_between>, second: interface_code<type_between, type_to>);
        /**
         * @implementation
         * @author fenris
         */
        encode(from: type_from): type_to;
        /**
         * @implementation
         * @author fenris
         */
        decode(to: type_to): type_from;
    }
}
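/*
 * Illustrative sketch: composing codecs. `class_code_pair` chains a codec from
 * A to B with one from B to C into a single codec from A to C, and
 * `class_code_inverse` flips a codec's direction. `codec_json` refers to
 * `lib_plankton.json.class_json`, declared further below; `codec_base64` is a
 * hypothetical `interface_code<string, string>` used only for this example.
 *
 *     const codec_json = new lib_plankton.json.class_json();
 *     const packer = new lib_plankton.code.class_code_pair<any, string, string>(
 *         codec_json,   // any    -> string (JSON text)
 *         codec_base64  // string -> string (hypothetical transport encoding)
 *     );
 *     const packed: string = packer.encode({"x": 1});
 *     const unpacked = packer.decode(packed);
 *     const unpacker = new lib_plankton.code.class_code_inverse(packer);
 */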
declare namespace lib_plankton.code {
    /**
     * @author fenris
     */
    function chain_encode(encode_links: Array<(from: any) => any>, from: any): any;
    /**
     * @author fenris
     */
    function chain_decode(decode_links: Array<(to: any) => any>, to: any): any;
}
declare namespace lib_plankton.code {
    /**
     * @author fenris
     */
    class class_code_chain implements interface_code<any, any> {
        /**
         * @author fenris
         */
        protected links: Array<interface_code<any, any>>;
        /**
         * @author fenris
         */
        constructor(links: Array<interface_code<any, any>>);
        /**
         * @implementation
         * @author fenris
         */
        encode(from: any): any;
        /**
         * @implementation
         * @author fenris
         */
        decode(to: any): any;
    }
}
declare namespace lib_plankton.code {
    /**
     * @author Christian Fraß <frass@greenscale.de>
     */
    type type_flatten_from = Array<{
        [name: string]: any;
    }>;
    /**
     * @author Christian Fraß <frass@greenscale.de>
     */
    type type_flatten_to = {
        keys: Array<string>;
        data: Array<Array<any>>;
    };
    /**
     * @author Christian Fraß <frass@greenscale.de>
     */
    function flatten_encode(from: type_flatten_from, keys?: Array<string>): type_flatten_to;
    /**
     * @author Christian Fraß <frass@greenscale.de>
     */
    function flatten_decode(to: type_flatten_to): type_flatten_from;
}
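/*
 * Illustrative sketch: `flatten_encode` turns a list of records into a compact
 * "column names plus rows" structure and `flatten_decode` reverses it. The
 * concrete ordering of keys and rows is an implementation detail; only the
 * shape described by the declared types is shown here.
 *
 *     const flat = lib_plankton.code.flatten_encode(
 *         [
 *             {"id": 1, "name": "alice"},
 *             {"id": 2, "name": "bob"},
 *         ],
 *         ["id", "name"]
 *     );
 *     // flat has the form {"keys": [...], "data": [[...], [...]]}
 *     const records = lib_plankton.code.flatten_decode(flat);
 */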
declare namespace lib_plankton.code {
    /**
     * @author fenris
     */
    class class_code_flatten implements interface_code<type_flatten_from, type_flatten_to> {
        /**
         * @author fenris
         */
        constructor();
        /**
         * @implementation
         * @author fenris
         */
        encode(x: type_flatten_from): type_flatten_to;
        /**
         * @implementation
         * @author fenris
         */
        decode(x: type_flatten_to): type_flatten_from;
    }
}
declare namespace lib_plankton.json {
    /**
     */
    type type_source = any;
    /**
     */
    type type_target = string;
    /**
     * @author fenris
     */
    export function encode(source: type_source, options?: {
        formatted?: boolean;
    }): type_target;
    /**
     * @author fenris
     */
    export function decode(target: type_target): type_source;
    /**
     * @author fenris
     */
    export function implementation_code(): lib_plankton.code.type_code<type_source, type_target>;
    export {};
}
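/*
 * Usage sketch (illustrative): encoding and decoding with the JSON helpers,
 * including the optional pretty-printing flag.
 *
 *     const text: string = lib_plankton.json.encode({"answer": 42}, {"formatted": true});
 *     const value = lib_plankton.json.decode(text);
 *     // as a codec value, e.g. for the composition helpers in lib_plankton.code:
 *     const codec = lib_plankton.json.implementation_code();
 *     const again: string = codec.encode(value);
 */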
declare namespace lib_plankton.json {
    /**
     * @author fenris
     */
    class class_json implements lib_plankton.code.interface_code<any, string> {
        /**
         * @author fenris
         */
        constructor();
        /**
         * @implementation
         * @author fenris
         */
        encode(x: any): string;
        /**
         * @implementation
         * @author fenris
         */
        decode(x: string): any;
    }
}
declare namespace lib_plankton.markdown {
    /**
     * @author fenris
     */
    function code(content: string): string;
    /**
     * @author fenris
     */
    function paragraph(content: string): string;
    /**
     * @author fenris
     */
    function sectionhead(level: int, content: string): string;
}
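/*
 * Usage sketch (illustrative): assembling a small markdown document. The exact
 * markdown text each helper emits (heading markers, fencing, trailing
 * newlines) is an implementation detail of the library; only the way the
 * helpers combine is shown here.
 *
 *     const report: string = [
 *         lib_plankton.markdown.sectionhead(1, "Report"),
 *         lib_plankton.markdown.paragraph("All checks passed."),
 *         lib_plankton.markdown.code("make test"),
 *     ].join("");
 */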
declare namespace lib_plankton.args {
    /**
     */
    enum enum_environment {
        cli = "cli",
        url = "url"
    }
    /**
     */
    enum enum_kind {
        positional = "positional",
        volatile = "volatile"
    }
    /**
     */
    enum enum_type {
        boolean = "boolean",
        integer = "int",
        float = "float",
        string = "string"
    }
    /**
     */
    enum enum_mode {
        replace = "replace",
        accumulate = "accumulate"
    }
}
declare namespace lib_plankton.args {
    /**
     * @author fenris
     */
    class class_argument {
        /**
         * @author fenris
         */
        protected name: string;
        /**
         * @author fenris
         */
        protected kind: enum_kind;
        /**
         * @author fenris
         */
        protected type: enum_type;
        /**
         * @author fenris
         */
        protected mode: enum_mode;
        /**
         * @author fenris
         */
        protected default_: any;
        /**
         * @author fenris
         */
        protected info: string;
        /**
         * @author fenris
         */
        protected parameters: Object;
        /**
         * @author fenris
         */
        protected hidden: boolean;
        /**
         * @author fenris
         */
        constructor({ "name": name, "type": type, "kind": kind, "mode": mode, "default": default_, "info": info, "parameters": parameters, "hidden": hidden, }: {
            name: string;
            type?: enum_type;
            kind?: enum_kind;
            mode?: enum_mode;
            default?: any;
            info?: string;
            parameters?: Object;
            hidden?: boolean;
        });
        /**
         * @author fenris
         */
        static positional({ "name": name, "type": type, "mode": mode, "default": default_, "info": info, "hidden": hidden, "index": index, }: {
            name: string;
            type?: enum_type;
            mode?: enum_mode;
            default?: any;
            info?: string;
            hidden?: boolean;
            index: int;
        }): class_argument;
        /**
         * @author fenris
         */
        static volatile({ "name": name, "type": type, "mode": mode, "default": default_, "info": info, "hidden": hidden, "indicators_short": indicators_short, "indicators_long": indicators_long, }: {
            name: string;
            type?: enum_type;
            mode?: enum_mode;
            default?: any;
            info?: string;
            hidden?: boolean;
            indicators_short: Array<string>;
            indicators_long: Array<string>;
        }): class_argument;
        /**
         * @author fenris
         */
        check(): boolean;
        /**
         * @author fenris
         */
        name_get(): string;
        /**
         * @author fenris
         */
        kind_get(): enum_kind;
        /**
         * @author fenris
         */
        type_get(): enum_type;
        /**
         * @author fenris
         */
        mode_get(): enum_mode;
        /**
         * @author fenris
         */
        default_get(): any;
        /**
         * @author fenris
         */
        parameters_get(): Object;
        /**
         * @author fenris
         */
        hidden_get(): boolean;
        /**
         * @author fenris
         */
        toString(): string;
        /**
         * @author fenris
         */
        indicator_main(): string;
        /**
         * @author fenris
         */
        pattern_value(): string;
        /**
         * @author fenris
         */
        extract(raw: string): any;
        /**
         * @author fenris
         */
        assign(data: Object, target: string, raw: string): void;
        /**
         * @author fenris
         */
        make(data: Object, target: string): string;
        /**
         * @author fenris
         */
        generate_help(): string;
    }
}
declare namespace lib_plankton.args {
    /**
     * @author fenris
     */
    var verbosity: int;
    /**
     * @author fenris
     * @todo check validity
     */
    class class_handler {
        /**
         * @author fenris
         */
        protected arguments_: {
            [name: string]: class_argument;
        };
        /**
         * @author fenris
         */
        constructor(arguments_: {
            [name: string]: class_argument;
        });
        /**
         * @author fenris
         */
        filter(kind: enum_kind): {
            [name: string]: class_argument;
        };
        /**
         * @author fenris
         */
        read(environment: enum_environment, input: string, data?: {
            [name: string]: any;
        }): {
            [name: string]: any;
        };
        /**
         * @author fenris
         * @todo handle cases where the data object lacks a required field or a value has the wrong type
         */
        write(environment: enum_environment, data: {
            [name: string]: any;
        }): string;
        /**
         * @desc manpage-like info-sheet
         * @author fenris
         */
        generate_help({ "programname": programname, "author": author, "description": description, "executable": executable, }: {
            programname?: string;
            author?: string;
            description?: string;
            executable?: string;
        }): string;
    }
}
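/*
 * Usage sketch (illustrative): declaring one positional and one volatile
 * argument, parsing a CLI string and rendering the help text. Whether `input`
 * is the raw joined argument string shown here is an assumption; it follows
 * only from the `read` signature above.
 *
 *     const handler = new lib_plankton.args.class_handler({
 *         "action": lib_plankton.args.class_argument.positional({
 *             "name": "action",
 *             "index": 0,
 *             "info": "what to do",
 *         }),
 *         "verbose": lib_plankton.args.class_argument.volatile({
 *             "name": "verbose",
 *             "type": lib_plankton.args.enum_type.boolean,
 *             "default": false,
 *             "indicators_short": ["v"],
 *             "indicators_long": ["verbose"],
 *             "info": "more output",
 *         }),
 *     });
 *     const data = handler.read(lib_plankton.args.enum_environment.cli, "run --verbose");
 *     const help: string = handler.generate_help({"programname": "example"});
 */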