// plankton library — ambient TypeScript type declarations
/**
|
|
* @author fenris
|
|
*/
|
|
type int = number;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type float = number;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_date = {
|
|
year: int;
|
|
month: int;
|
|
day: int;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_time = {
|
|
hour: int;
|
|
minute: int;
|
|
second: int;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_datetimeobject = {
|
|
date: type_date;
|
|
time: type_time;
|
|
};
|
|
declare var process: any;
|
|
declare var require: any;
|
|
declare class Buffer {
|
|
constructor(x: string, modifier?: string);
|
|
static from(x: string, encoding?: string): any;
|
|
toString(modifier?: string): string;
|
|
}
|
|
declare namespace lib_plankton.base {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function environment(): string;
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_pseudopointer<type_value> = {
|
|
value: type_value;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function pseudopointer_null<type_value>(): type_pseudopointer<type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function pseudopointer_make<type_value>(value: type_value): type_pseudopointer<type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function pseudopointer_isset<type_value>(pseudopointer: type_pseudopointer<type_value>): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function pseudopointer_read<type_value>(pseudopointer: type_pseudopointer<type_value>): type_value;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function pseudopointer_write<type_value>(pseudopointer: type_pseudopointer<type_value>, value: type_value): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare var instance_verbosity: int;
|
|
/**
|
|
* @desc the ability to check for equality with another element of the same domain
|
|
* @author fenris
|
|
*/
|
|
interface interface_collatable<type_value> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
_collate(value: type_value): boolean;
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function instance_collate<type_value>(value1: (type_value & {
|
|
_collate?: ((value: type_value) => boolean);
|
|
}), value2: type_value): boolean;
|
|
/**
|
|
* @desc the ability to compare with another element of the same domain for determining if the first is "smaller than or equal to" the latter
|
|
* @author fenris
|
|
*/
|
|
interface interface_comparable<type_value> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
_compare(value: type_value): boolean;
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function instance_compare<type_value>(value1: (type_value & {
|
|
_compare: ((value: type_value) => boolean);
|
|
}), value2: type_value): boolean;
|
|
/**
|
|
* @desc the ability to create an exact copy
|
|
* @author fenris
|
|
*/
|
|
interface interface_cloneable<type_value> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
_clone(): type_value;
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
declare function instance_clone<type_value>(value: (type_value & {
|
|
_clone?: (() => type_value);
|
|
})): type_value;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
interface interface_hashable {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
_hash(): string;
|
|
}
|
|
/**
|
|
* @desc the ability to generate a string out of the element, which identifies it to a high degree
|
|
* @author fenris
|
|
*/
|
|
declare function instance_hash<type_value>(value: (type_value & {
|
|
_hash?: (() => string);
|
|
})): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
interface interface_showable {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
_show(): string;
|
|
}
|
|
/**
|
|
* @desc the ability to map the element to a textual representation (most likely not injective)
|
|
* @author fenris
|
|
*/
|
|
declare function instance_show<type_value>(value: (type_value & {
|
|
_show?: (() => string);
|
|
})): string;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
interface interface_decorator<type_core> {
|
|
/**
|
|
* @author frac
|
|
*/
|
|
core: type_core;
|
|
}
|
|
/**
|
|
* @author frac
|
|
*/
|
|
declare class class_observer {
|
|
/**
|
|
* @author frac
|
|
*/
|
|
protected counter: int;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
protected actions: {
|
|
[id: string]: (information: Object) => void;
|
|
};
|
|
/**
|
|
* @author frac
|
|
*/
|
|
protected buffer: Array<Object>;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
constructor();
|
|
/**
|
|
* @author frac
|
|
*/
|
|
empty(): boolean;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
flush(): void;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
set(id: string, action: (information: Object) => void): void;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
del(id: string): void;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
add(action: (information: Object) => void): void;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
notify(information?: Object, delayed?: boolean): void;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
rollout(): void;
|
|
}
|
|
/**
|
|
* @author frac
|
|
*/
|
|
/**
|
|
* @author frac
|
|
*/
|
|
/**
|
|
* @author frac
|
|
*/
|
|
declare class class_error extends Error {
|
|
/**
|
|
* @author frac
|
|
*/
|
|
protected suberrors: Array<Error>;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
protected mess: string;
|
|
/**
|
|
* @author frac
|
|
*/
|
|
constructor(message: string, suberrors?: Array<Error>);
|
|
/**
|
|
* @override
|
|
* @author frac
|
|
*/
|
|
toString(): string;
|
|
}
|
|
declare namespace lib_plankton.base {
|
|
/**
|
|
* returns the current UNIX timestamp
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function get_current_timestamp(rounded?: boolean): float;
|
|
/**
|
|
*/
|
|
function object_merge(core: Record<string, any>, mantle: Record<string, any>): Record<string, any>;
|
|
}
|
|
declare module lib_plankton.pod {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_pod<type_value> = {
|
|
kind: ("empty" | "filled");
|
|
value?: type_value;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make_empty<type_value>(): type_pod<type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make_filled<type_value>(value: type_value): type_pod<type_value>;
|
|
/**
|
|
* whether the pod is filled
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function is_filled<type_value>(pod: type_pod<type_value>): boolean;
|
|
/**
|
|
* return the value, stored in the pod-wrapper
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function cull<type_value>(pod: type_pod<type_value>): type_value;
|
|
/**
|
|
* to pass on a empty-pod or to use a filled-pod
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function propagate<type_value, type_value_>(pod: type_pod<type_value>, function_: ((value: type_value) => type_value_)): type_pod<type_value_>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function distinguish<type_value, type_result>(pod: type_pod<type_value>, function_empty: (() => type_result), function_filled: ((value: type_value) => type_result)): type_result;
|
|
/**
|
|
*/
|
|
function show<type_value>(pod: type_pod<type_value>, options?: {
|
|
show_value?: ((value: type_value) => string);
|
|
}): string;
|
|
}
|
|
declare module lib_plankton.pod {
|
|
/**
|
|
*/
|
|
class class_pod<type_value> {
|
|
private subject;
|
|
constructor(subject: type_pod<type_value>);
|
|
tear(): type_pod<type_value>;
|
|
static empty<type_value>(): class_pod<type_value>;
|
|
static filled<type_value>(value: type_value): class_pod<type_value>;
|
|
is_empty(): boolean;
|
|
is_filled(): boolean;
|
|
cull(): type_value;
|
|
show(show_value?: any): string;
|
|
toString(): string;
|
|
propagate<type_value_>(function_: ((value: type_value) => type_value_)): class_pod<type_value_>;
|
|
distinguish<type_result>(function_empty: (() => type_result), function_filled: ((value: type_value) => type_result)): type_result;
|
|
}
|
|
}
|
|
/**
|
|
* might be completely obsolete
|
|
*/
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_promise<type_result, type_reason> = Promise<type_result>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_reject<type_result, type_reason>(reason: type_reason): type_promise<type_result, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_resolve<type_result, type_reason>(result: type_result): type_promise<type_result, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_make<type_result, type_reason>(executor: (resolve: ((result?: type_result) => void), reject: ((reason?: type_reason) => void)) => void): type_promise<type_result, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_then_close<type_result, type_reason>(promise: type_promise<type_result, type_reason>, resolver: ((result: type_result) => void), rejector: ((reason: type_reason) => void)): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_then_append<type_result, type_reason, type_result_>(promise: type_promise<type_result, type_reason>, resolver: ((result: type_result) => type_promise<type_result_, type_reason>), rejector?: ((reason: type_reason) => type_promise<type_result_, type_reason>)): type_promise<type_result_, type_result>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_all<type_result, type_reason>(promises: Array<type_promise<type_result, type_reason>>): type_promise<Array<type_result>, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_chain<type_result, type_reason>(promises: (Array<(input: type_result) => type_promise<type_result, type_reason>>), start?: type_result): type_promise<type_result, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_condense<type_element, type_reason>(promises: Array<() => type_promise<type_element, type_reason>>): type_promise<Array<type_element>, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_group<type_reason>(promises: Record<string, (() => type_promise<any, type_reason>)>, options?: {
|
|
serial?: boolean;
|
|
}): type_promise<Record<string, any>, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_wrap<type_result_inner, type_result_outer, type_reason>(promise: type_promise<type_result_inner, type_reason>, transformator_result: ((reason: type_result_inner) => type_result_outer), transformator_reason?: ((reason: type_reason) => type_reason)): type_promise<type_result_outer, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_attach<type_reason>(state: Record<string, any>, promise: type_promise<any, type_reason>, name: string): type_promise<Record<string, any>, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function promise_delay<type_result, type_reason>(promise: type_promise<type_result, type_reason>, delay: int): type_promise<type_result, type_reason>;
|
|
}
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
*/
|
|
class CancellablePromise<type_result> extends Promise<type_result> {
|
|
/**
|
|
*/
|
|
private cancelled;
|
|
/**
|
|
*/
|
|
private interval;
|
|
/**
|
|
*/
|
|
private subject;
|
|
/**
|
|
*/
|
|
constructor(executor: ((resolve: any, reject: any) => void));
|
|
/**
|
|
*/
|
|
private clear;
|
|
/**
|
|
*/
|
|
then<type_next_resolved, type_next_rejected>(onfulfilled?: ((value: type_result) => (type_next_resolved | PromiseLike<type_next_resolved>)), onrejected?: ((reason: any) => (type_next_rejected | PromiseLike<type_next_rejected>))): Promise<type_next_resolved | type_next_rejected>;
|
|
/**
|
|
*/
|
|
catch(x: any): Promise<type_result>;
|
|
/**
|
|
*/
|
|
cancel(): void;
|
|
}
|
|
}
|
|
/**
|
|
* initializer might be obsolete, since promises are reusable after having been resolved or rejected
|
|
*/
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
enum enum_initializer_state {
|
|
initial = 0,
|
|
waiting = 1,
|
|
successful = 2,
|
|
failed = 3
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_initializer<type_result, type_reason> = {
|
|
fetcher: (() => type_promise<type_result, type_reason>);
|
|
state?: enum_initializer_state;
|
|
queue: Array<{
|
|
resolve: ((result?: type_result) => void);
|
|
reject: ((reason?: type_reason) => void);
|
|
}>;
|
|
result?: type_result;
|
|
reason?: type_reason;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function initializer_make<type_result, type_reason>(fetcher: (() => type_promise<type_result, type_reason>)): type_initializer<type_result, type_reason>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function initializer_reset<type_result, type_reason>(subject: type_initializer<type_result, type_reason>): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function initializer_state<type_result, type_reason>(subject: type_initializer<type_result, type_reason>): enum_initializer_state;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function initializer_get<type_result, type_reason>(subject: type_initializer<type_result, type_reason>): type_promise<type_result, type_reason>;
|
|
}
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_deferral<type_input, type_output> = {
|
|
representation: (input: type_input) => Promise<type_output>;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
* @desc activates the deferral and handles its output according to a given procedure
|
|
* @param {(value : type_value)=>void} procedure a function which receives the output of the deferral as argument
|
|
*/
|
|
function deferral_use<type_input, type_output>(deferral: type_deferral<type_input, type_output>, input: type_input, procedure: (output: type_output) => void): void;
|
|
/**
|
|
* @author fenris
|
|
* @desc creates a deferral-subject (similar to "new Promise", where "convey" reflects "resolve"/"reject")
|
|
*/
|
|
function deferral_make<type_input, type_output>(handler: (input: type_input, convey: (output: type_output) => void) => void): type_deferral<type_input, type_output>;
|
|
/**
|
|
* @author fenris
|
|
* @desc wraps a simple function into a deferral (similar to "Promise.resolve"/"Promise.reject")
|
|
*/
|
|
function deferral_wrap<type_input, type_output>(function_: (input: type_input) => type_output): type_deferral<type_input, type_output>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function deferral_id<type_value>(): type_deferral<type_value, type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function deferral_const<type_value>(value: type_value): type_deferral<type_value, type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function deferral_delay<type_output>(output: type_output, delay: int): type_deferral<any, type_output>;
|
|
/**
|
|
* @author fenris
|
|
* @desc connects two deferrals to form a new one; the output of the first is taken as input for the second
|
|
* (similar to "Promise.then" when passing a function which returns a new promise)
|
|
* @param {type_deferral<type_value1>} first a simple deferral
|
|
* @param {(value1 : type_value1)=>type_deferral<type_value2>} second a function depending from a value returning a deferral
|
|
*/
|
|
function deferral_compose_serial<type_input, type_between, type_output>(first: type_deferral<type_input, type_between>, second: type_deferral<type_between, type_output>): type_deferral<type_input, type_output>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function deferral_compose_parallel<type_input, type_output_left, type_output_right>({ "left": deferral_left, "right": deferral_right, }: {
|
|
left: type_deferral<type_input, type_output_left>;
|
|
right: type_deferral<type_input, type_output_right>;
|
|
}): type_deferral<type_input, {
|
|
left: type_output_left;
|
|
right: type_output_right;
|
|
}>;
|
|
/**
|
|
* @author fenris
|
|
* @desc repeatedly applied serial composition
|
|
*/
|
|
function deferral_chain<type_value>(members: Array<type_deferral<type_value, type_value>>): type_deferral<type_value, type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
}
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_deferral<type_input, type_output> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private subject;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private constructor();
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private static _cram;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private static _tear;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static make<type_input, type_output>(handler: (input: type_input, convey: (value: type_output) => void) => void): class_deferral<type_input, type_output>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
use(input: type_input, procedure: (value: type_output) => void): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
compose_serial<type_output_>(second: class_deferral<type_output, type_output_>): class_deferral<type_input, type_output_>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static chain<type_value>(members: Array<class_deferral<type_value, type_value>>): class_deferral<type_value, type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static wrap<type_input, type_output>(function_: (input: type_input) => type_output): class_deferral<type_input, type_output>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static const_<type_value>(value: type_value): class_deferral<type_value, type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static delay<type_output>(output: type_output, delay: int): class_deferral<any, type_output>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.call {
|
|
/**
|
|
* converts the "arguments"-map into an array
|
|
*
|
|
* @param {Object} args
|
|
* @author fenris
|
|
*/
|
|
function args2list(args: any): Array<any>;
|
|
/**
|
|
* just the empty function; useful for some callbacks etc.
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function nothing(): void;
|
|
/**
|
|
* just the identity; useful for some callbacks etc.; defined as function instead of const for using type parameters
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function id<type_value>(x: type_value): type_value;
|
|
/**
|
|
* just the identity; useful for some callbacks etc.
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function const_<type_value>(x: type_value): ((y: any) => type_value);
|
|
/**
|
|
* composes two functions (i.e. returns a function that return the result of the successive execution of both input-functions)
|
|
*
|
|
* @param {function} function_f
|
|
* @param {function} function_g
|
|
* @author fenris
|
|
*/
|
|
function compose<type_x, type_y, type_z>(function_f: ((type_x: any) => type_y), function_g: ((type_y: any) => type_z)): ((value: type_x) => type_z);
|
|
/**
|
|
* transforms a function with sequential input to a function with leveled input; example: add(2,3) = curryfy(add)(2)(3)
|
|
*
|
|
* @param {function} f
|
|
* @return {function} the currified version of the in put function
|
|
* @author fenris
|
|
*/
|
|
function curryfy(f: Function): Function;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function convey(value: any, functions: Array<Function>): any;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function timeout(procedure: (() => void), delay_in_seconds: float): int;
|
|
/**
|
|
* Promise version of "setTimeout"
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function defer<type_result>(seconds: float, action: (() => type_result)): Promise<type_result>;
|
|
/**
|
|
* a definition for a value being "defined"
|
|
*
|
|
* @author neuc
|
|
*/
|
|
function is_def<type_value>(obj: type_value, options?: {
|
|
null_is_valid?: boolean;
|
|
}): boolean;
|
|
/**
|
|
* returns the value if set and, when a type is specified, if the type is correct, if not return default_value
|
|
*
|
|
* @author neuc
|
|
*/
|
|
function def_val(value: any, default_value: any, options?: {
|
|
type?: (null | string);
|
|
null_is_valid?: boolean;
|
|
}): any;
|
|
/**
|
|
* provides the call for an attribute of a class as a regular function; useful for processing lists of objects
|
|
*
|
|
* @param {string} name the name of the attribute
|
|
* @return {function}
|
|
* @author fenris
|
|
*/
|
|
function attribute<type_object, type_attribute>(name: string): ((object: type_object) => type_attribute);
|
|
/**
|
|
* provides a method of a class as a regular function; useful for processing lists of objects
|
|
*
|
|
* @param {string} name the name of the method
|
|
* @return {function}
|
|
* @author fenris
|
|
*/
|
|
function method<type_object, type_output>(name: string): ((object: type_object) => type_output);
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_coproduct = {
|
|
kind: string;
|
|
data?: any;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function distinguish<type_output>(coproduct: type_coproduct, handlers: Record<string, ((data?: any) => type_output)>, options?: {
|
|
fallback?: (null | ((coproduct?: type_coproduct) => type_output));
|
|
}): type_output;
|
|
/**
|
|
* for rate_limit_check
|
|
*
|
|
* @author fenris
|
|
*/
|
|
type type_mana_snapshot = {
|
|
timestamp: float;
|
|
value: float;
|
|
};
|
|
/**
|
|
* rate limiting algorithm, based on the idea of mana (magic power) in video games:
|
|
* - an actor has a fixed mana capacity, i.e. the maximum amount of available power
|
|
* - an actor has a fixed rate of mana regeneration, i.e. how fast the power is filled up (linear growth)
|
|
* - an action has a defined mana heft, i.e. how much power is required and deducted in order to execute it
|
|
* - mana states are represented by snapshots, i.e. the amount of power at a certain point in time
|
|
*
|
|
* @author fenris
|
|
*/
|
|
function rate_limit_check(setup: {
|
|
capacity: float;
|
|
regeneration_rate: float;
|
|
get_snapshot: (() => Promise<(null | type_mana_snapshot)>);
|
|
set_snapshot: ((snapshot: type_mana_snapshot) => Promise<void>);
|
|
update_snapshot: ((timestamp: float, value_increment: float) => Promise<void>);
|
|
}, heft: float): Promise<{
|
|
granted: boolean;
|
|
seconds: (null | float);
|
|
}>;
|
|
}
|
|
declare namespace lib_plankton.email {
|
|
/**
|
|
*/
|
|
function send(smtp_credentials: {
|
|
host: string;
|
|
port: int;
|
|
username: string;
|
|
password: string;
|
|
}, sender: string, receivers: Array<string>, subject: string, content: string): Promise<void>;
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
*/
|
|
enum enum_level {
|
|
debug = 0,
|
|
info = 1,
|
|
notice = 2,
|
|
warning = 3,
|
|
error = 4
|
|
}
|
|
/**
|
|
*/
|
|
function level_order(level1: enum_level, level2: enum_level): boolean;
|
|
/**
|
|
*/
|
|
function level_show(level: enum_level): string;
|
|
/**
|
|
*/
|
|
type type_entry = {
|
|
level: enum_level;
|
|
incident: string;
|
|
details: Record<string, any>;
|
|
};
|
|
}
|
|
/**
|
|
* @deprecated
|
|
* @todo remove
|
|
*/
|
|
declare namespace lib_plankton.log {
|
|
function level_push(level: int): void;
|
|
function level_pop(): void;
|
|
function indent_push(indent: int): void;
|
|
function indent_pop(): void;
|
|
function indent_inc(): void;
|
|
function indent_dec(): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function write({ "message": message, "type": type, "prefix": prefix, "level": level, "indent": indent, }: {
|
|
message?: string;
|
|
type?: string;
|
|
prefix?: string;
|
|
level?: int;
|
|
indent?: int;
|
|
}): void;
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
*/
|
|
abstract class class_channel {
|
|
/**
|
|
*/
|
|
abstract add(entry: type_entry): void;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
* output for writing log entries to stdout
|
|
*/
|
|
class class_channel_stdout extends class_channel {
|
|
/**
|
|
*/
|
|
add(entry: type_entry): void;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
*/
|
|
class class_channel_file extends class_channel {
|
|
/**
|
|
* the path of the log file
|
|
*/
|
|
private path;
|
|
/**
|
|
*/
|
|
private human_readable;
|
|
/**
|
|
* [constructor]
|
|
*/
|
|
constructor(path: string, human_readable: boolean);
|
|
/**
|
|
*/
|
|
add(entry: type_entry): void;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
*/
|
|
class class_channel_email extends class_channel {
|
|
/**
|
|
*/
|
|
private smtp_credentials;
|
|
/**
|
|
*/
|
|
private sender;
|
|
/**
|
|
*/
|
|
private receivers;
|
|
/**
|
|
* [constructor]
|
|
*/
|
|
constructor(smtp_credentials: {
|
|
host: string;
|
|
port: int;
|
|
username: string;
|
|
password: string;
|
|
}, sender: string, receivers: Array<string>);
|
|
/**
|
|
*/
|
|
add(entry: type_entry): void;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
* output for desktop notifications via "libnotify"
|
|
*/
|
|
class class_channel_notify extends class_channel {
|
|
/**
|
|
*/
|
|
add(entry: type_entry): void;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
* decorator for filtering out log entries below a certain level threshold
|
|
*/
|
|
class class_channel_minlevel extends class_channel {
|
|
/**
|
|
*/
|
|
private core;
|
|
/**
|
|
*/
|
|
private threshold;
|
|
/**
|
|
*/
|
|
constructor(core: class_channel, threshold: enum_level);
|
|
/**
|
|
*/
|
|
add(entry: type_entry): void;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
*/
|
|
function channel_make(description: {
|
|
kind: string;
|
|
data?: {
|
|
[key: string]: any;
|
|
};
|
|
}): class_channel;
|
|
/**
|
|
*/
|
|
type type_configuration = Array<class_channel>;
|
|
/**
|
|
*/
|
|
function conf_default(): type_configuration;
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
/**
|
|
* pushes a new configuration on the stack and activates it
|
|
*/
|
|
function conf_push(channels: type_configuration): void;
|
|
/**
|
|
* pops the current active configuration from the stack
|
|
*/
|
|
function conf_pop(): void;
|
|
/**
|
|
* consumes a log entry, i.e. sends it to the currently active outputs
|
|
*/
|
|
function add(entry: type_entry): void;
|
|
/**
|
|
*/
|
|
function debug(incident: string, details?: Record<string, any>): void;
|
|
/**
|
|
*/
|
|
function info(incident: string, details?: Record<string, any>): void;
|
|
/**
|
|
*/
|
|
function notice(incident: string, details?: Record<string, any>): void;
|
|
/**
|
|
*/
|
|
function warning(incident: string, details?: Record<string, any>): void;
|
|
/**
|
|
*/
|
|
function error(incident: string, details?: Record<string, any>): void;
|
|
}
|
|
declare namespace lib_plankton.log {
|
|
}
|
|
declare var plain_text_to_html: (text: string) => string;
|
|
/**
|
|
* @desc makes a valid
|
|
*/
|
|
declare var format_sentence: (str: string, rtl?: boolean, caseSense?: boolean) => string;
|
|
declare var fill_string_template: (template_string: string, object: any, fabric: Function, delimiter: string, default_string: string, sloppy: boolean) => string;
|
|
declare var make_string_template: (_template: string, _fabrics?: Object) => (object: {
|
|
[key: string]: string;
|
|
}) => string;
|
|
declare var make_eml_header: (object: {
|
|
[key: string]: string;
|
|
}) => string;
|
|
declare var make_eml_body: Object;
|
|
declare namespace lib_plankton.string {
|
|
/**
|
|
* @author neuc,frac
|
|
*/
|
|
function empty(str: string): boolean;
|
|
/**
|
|
* @desc returns a unique string
|
|
* @param {string} prefix an optional prefix for the generated string
|
|
* @return {string}
|
|
* @author fenris
|
|
*/
|
|
function generate(prefix?: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function join(parts: Array<string>, glue?: string): string;
|
|
/**
|
|
* @desc splits a string, but returns an empty list, if the string is empty
|
|
* @param {string} chain
|
|
* @param {string} separator
|
|
* @return {Array<string>}
|
|
* @author fenris
|
|
*/
|
|
function split(chain: string, separator?: string): Array<string>;
|
|
/**
|
|
* @author neu3no
|
|
*/
|
|
function explode(str: string, needle: string, max: int): Array<string>;
|
|
/**
|
|
* @desc concats a given word with itself n times
|
|
* @param {string} word
|
|
* @param {int}
|
|
* @return {string}
|
|
* @author fenris
|
|
*/
|
|
function repeat(word: string, count: int): string;
|
|
/**
|
|
* @desc lengthens a string by repeatedly appending or prepending another string
|
|
* @param {string} word the string to pad
|
|
* @param {int} length the length, which the result shall have
|
|
* @param {string} symbol the string, which will be added (multiple times)
|
|
* @param {boolean} [prepend]; whether to prepend (~true) or append (~false); default: false
|
|
* @return {string} the padded string
|
|
* @author fenris
|
|
*/
|
|
function pad(word: string, length: int, symbol?: string, mode?: string): string;
|
|
/**
|
|
* @desc checks if a given string conttains a certain substring
|
|
* @param {string} string
|
|
* @param {string} part
|
|
* @return {boolean}
|
|
* @author fenris
|
|
*/
|
|
function contains(chain: string, part: string): boolean;
|
|
/**
|
|
* @desc checks if a given string starts with a certain substring
|
|
* @param {string} string
|
|
* @param {string} part
|
|
* @return {boolean}
|
|
* @author fenris
|
|
*/
|
|
function startsWith(chain: string, part: string): boolean;
|
|
/**
|
|
* @desc checks if a given string ends with a certain substring
|
|
* @param {string} string
|
|
* @param {string} part
|
|
* @return {boolean}
|
|
* @author fenris
|
|
*/
|
|
function endsWith(chain: string, part: string): boolean;
|
|
/**
|
|
* @desc count the occourrences of a string in a string
|
|
* @param string haystack_string the string wich should be examined
|
|
* @param string needle_string the string which should be counted
|
|
* @author neuc
|
|
*/
|
|
function count_occourrences(haystack_string: string, needle_string: string, check_escape: boolean): int;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function replace(str: string, replacements: Array<{
|
|
from: string;
|
|
to: string;
|
|
}>, options?: {}): string;
|
|
/**
|
|
* @desc replaces occurences of "{{name}}" in a string by the corresponding values of an argument object
|
|
* @author fenris
|
|
*/
|
|
function coin(str: string, args: {
|
|
[id: string]: string;
|
|
}, options?: {
|
|
legacy?: boolean;
|
|
open?: string;
|
|
close?: string;
|
|
}): string;
|
|
/**
|
|
* @author fenris
|
|
* @deprecated use limit
|
|
*/
|
|
function cut(str: string, length: int, delimiter?: string): string;
|
|
/**
|
|
*/
|
|
function limit(str: string, options?: {
|
|
length?: int;
|
|
indicator?: string;
|
|
}): string;
|
|
/**
|
|
*/
|
|
function slice(str: string, size: int): Array<string>;
|
|
}
|
|
/**
|
|
* @deprecated
|
|
*/
|
|
declare namespace lib_string {
|
|
const empty: typeof lib_plankton.string.empty;
|
|
const generate: typeof lib_plankton.string.generate;
|
|
const split: typeof lib_plankton.string.split;
|
|
const explode: typeof lib_plankton.string.repeat;
|
|
const repeat: typeof lib_plankton.string.repeat;
|
|
const pad: typeof lib_plankton.string.pad;
|
|
const contains: typeof lib_plankton.string.contains;
|
|
const startsWith: typeof lib_plankton.string.startsWith;
|
|
const endsWith: typeof lib_plankton.string.endsWith;
|
|
const count_occourrences: typeof lib_plankton.string.count_occourrences;
|
|
const coin: typeof lib_plankton.string.coin;
|
|
const stance: typeof lib_plankton.string.coin;
|
|
const cut: typeof lib_plankton.string.cut;
|
|
}
|
|
declare namespace lib_plankton.string {
|
|
/**
|
|
* an implementation of c sprintf
|
|
* @param {string} string format string
|
|
* @param {array} args arguments which should be filled into
|
|
* @returns {string}
|
|
*/
|
|
var sprintf: (input: string, args?: Array<any>, original?: any) => string;
|
|
/**
|
|
* an implementation of c printf
|
|
* @param {string} string format string
|
|
* @param {array} args arguments which should be filled into
|
|
* @returns {string}
|
|
*/
|
|
function printf(format: any, args: any): void;
|
|
}
|
|
declare var sprintf: (input: string, args?: Array<any>, original?: any) => string;
|
|
declare var printf: typeof lib_plankton.string.printf;
|
|
declare var eml_log: any;
|
|
declare var track_exports: any;
|
|
declare var make_logger: (prefix: any, current_loglevel: any) => (obj: any, lvl: any) => void;
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
*/
|
|
type type_query = {
|
|
template: string;
|
|
arguments: Record<string, any>;
|
|
};
|
|
/**
|
|
*/
|
|
enum enum_type {
|
|
boolean = "boolean",
|
|
integer = "integer",
|
|
string_short = "string_short",
|
|
string_medium = "string_medium",
|
|
string_long = "string_long",
|
|
float = "float"
|
|
}
|
|
/**
|
|
*/
|
|
type type_description_create_table = {
|
|
name: string;
|
|
key_field?: (null | {
|
|
name: string;
|
|
type?: enum_type;
|
|
comment?: (null | string);
|
|
auto_increment?: (null | boolean);
|
|
description?: (null | string);
|
|
});
|
|
data_fields?: Array<{
|
|
name: string;
|
|
nullable?: boolean;
|
|
type: enum_type;
|
|
default?: any;
|
|
description?: (null | string);
|
|
}>;
|
|
constraints?: Array<{
|
|
kind: string;
|
|
parameters?: Record<string, any>;
|
|
}>;
|
|
description?: (null | string);
|
|
};
|
|
/**
|
|
*/
|
|
type type_description_insert = {
|
|
table_name: string;
|
|
values: Record<string, any>;
|
|
returning?: (null | string);
|
|
};
|
|
/**
|
|
*/
|
|
type type_description_update = {
|
|
table_name: string;
|
|
values: Record<string, any>;
|
|
condition?: (null | string);
|
|
arguments?: (null | Record<string, any>);
|
|
};
|
|
/**
|
|
*/
|
|
type type_description_delete = {
|
|
table_name: string;
|
|
condition?: (null | string);
|
|
arguments?: (null | Record<string, any>);
|
|
};
|
|
/**
|
|
*/
|
|
type type_description_select = {
|
|
source: string;
|
|
fields?: (null | Array<string>);
|
|
condition?: (null | string);
|
|
group_by?: (null | string);
|
|
having?: (null | string);
|
|
order_by?: (null | string);
|
|
limit?: (null | int);
|
|
arguments?: (null | Record<string, any>);
|
|
};
|
|
/**
|
|
* rows
|
|
*/
|
|
type type_result_get = Array<Record<string, any>>;
|
|
/**
|
|
* auto insert id
|
|
*/
|
|
type type_result_put = (null | int);
|
|
/**
|
|
* number of affected rows
|
|
*/
|
|
type type_result_set = int;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_database = {
|
|
wrap_name: ((name: string) => string);
|
|
query_free_get: ((query: type_query) => Promise<type_result_get>);
|
|
query_free_put: ((query: type_query) => Promise<type_result_put>);
|
|
query_free_set: ((query: type_query) => Promise<type_result_set>);
|
|
query_create_table: ((description_create_table: type_description_create_table) => Promise<void>);
|
|
query_insert: ((description_insert: type_description_insert) => Promise<type_result_put>);
|
|
query_update: ((description_update: type_description_update) => Promise<type_result_set>);
|
|
query_delete: ((description_delete: type_description_delete) => Promise<type_result_set>);
|
|
query_select: ((description_select: type_description_select) => Promise<type_result_get>);
|
|
};
|
|
/**
|
|
*/
|
|
type interface_database = database.type_database;
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @todo default case?
|
|
*/
|
|
function sql_common_value_format(value: any): string;
|
|
/**
|
|
*/
|
|
function sql_common_formulation_create_table(description_create_table: type_description_create_table, options?: {
|
|
auto_increment_keyword?: (null | string);
|
|
auto_increment_special?: (null | string);
|
|
omit_comments?: boolean;
|
|
type_map?: Record<enum_type, string>;
|
|
wrap_name?: ((name: string) => string);
|
|
}): type_query;
|
|
/**
|
|
*/
|
|
function sql_common_formulation_insert(description_insert: type_description_insert, options?: {
|
|
wrap_name?: ((name: string) => string);
|
|
set_returning?: boolean;
|
|
}): type_query;
|
|
/**
|
|
*/
|
|
function sql_common_formulation_update(description_update: type_description_update, options?: {
|
|
wrap_name?: ((name: string) => string);
|
|
}): type_query;
|
|
/**
|
|
*/
|
|
function sql_common_formulation_delete(description_delete: type_description_delete, options?: {
|
|
wrap_name?: ((name: string) => string);
|
|
}): type_query;
|
|
/**
|
|
*/
|
|
function sql_common_formulation_select(description_select: type_description_select, options?: {
|
|
wrap_name?: ((name: string) => string);
|
|
}): type_query;
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
*/
|
|
type type_sqlite_subject = {
|
|
path: string;
|
|
handle: any;
|
|
};
|
|
/**
|
|
*/
|
|
type type_sqlite_parameters = {
|
|
path: string;
|
|
};
|
|
/**
|
|
*/
|
|
function sqlite_make(parameters: type_sqlite_parameters): type_sqlite_subject;
|
|
/**
|
|
*/
|
|
function sqlite_wrap_name(name: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function sqlite_query_free_get(subject: type_sqlite_subject, query: type_query): Promise<type_result_get>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function sqlite_query_free_put(subject: type_sqlite_subject, query: type_query): Promise<type_result_put>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function sqlite_query_free_set(subject: type_sqlite_subject, query: type_query): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function sqlite_formulation_create_table(description_create_table: type_description_create_table): type_query;
|
|
/**
|
|
*/
|
|
function sqlite_query_create_table(subject: type_sqlite_subject, description: type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function sqlite_formulation_insert(description_insert: type_description_insert): type_query;
|
|
/**
|
|
*/
|
|
function sqlite_query_insert(subject: type_sqlite_subject, description_insert: type_description_insert): Promise<type_result_put>;
|
|
/**
|
|
*/
|
|
function sqlite_formulation_update(description_update: type_description_update): type_query;
|
|
/**
|
|
*/
|
|
function sqlite_query_update(subject: type_sqlite_subject, description_update: type_description_update): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function sqlite_formulation_delete(description_delete: type_description_delete): type_query;
|
|
/**
|
|
*/
|
|
function sqlite_query_delete(subject: type_sqlite_subject, description_delete: type_description_delete): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function sqlite_formulation_select(description_select: type_description_select): type_query;
|
|
/**
|
|
*/
|
|
function sqlite_query_select(subject: type_sqlite_subject, description_select: type_description_select): Promise<Array<Record<string, any>>>;
|
|
/**
|
|
*/
|
|
function sqlite_database(parameters: type_sqlite_parameters): type_database;
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_sqlite implements interface_database {
|
|
private subject;
|
|
constructor(parameters: type_sqlite_parameters);
|
|
wrap_name(name: any): string;
|
|
query_free_get(query: any): Promise<type_result_get>;
|
|
query_free_put(query: any): Promise<number>;
|
|
query_free_set(query: any): Promise<number>;
|
|
query_create_table(description_create_table: any): Promise<void>;
|
|
query_insert(description_insert: any): Promise<number>;
|
|
query_update(description_update: any): Promise<number>;
|
|
query_delete(description_delete: any): Promise<number>;
|
|
query_select(description_select: any): Promise<Record<string, any>[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_postgresql_subject = {
|
|
host: string;
|
|
port: int;
|
|
username: string;
|
|
password: string;
|
|
schema: string;
|
|
pool: (null | any);
|
|
};
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_postgresql_parameters = {
|
|
host: string;
|
|
port?: int;
|
|
username: string;
|
|
password: string;
|
|
schema: string;
|
|
};
|
|
/**
|
|
*/
|
|
function postgresql_make(parameters: type_postgresql_parameters): type_postgresql_subject;
|
|
/**
|
|
*/
|
|
function postgresql_wrap_name(name: string): string;
|
|
/**
|
|
* @author fenris
|
|
* @see https://node-postgres.com/apis/pool#poolquery
|
|
*/
|
|
function postgresql_query_free_get(subject: type_postgresql_subject, query: type_query): Promise<type_result_get>;
|
|
/**
|
|
* @author fenris
|
|
* @see https://node-postgres.com/apis/pool#poolquery
|
|
*/
|
|
function postgresql_query_free_put(subject: type_postgresql_subject, query: type_query): Promise<type_result_put>;
|
|
/**
|
|
* @author fenris
|
|
* @see https://node-postgres.com/apis/pool#poolquery
|
|
*/
|
|
function postgresql_query_free_set(subject: type_postgresql_subject, query: type_query): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function postgresql_formulation_create_table(description_create_table: type_description_create_table): type_query;
|
|
/**
|
|
*/
|
|
function postgresql_query_create_table(subject: type_postgresql_subject, description: type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function postgresql_formulation_insert(description_insert: type_description_insert): type_query;
|
|
/**
|
|
*/
|
|
function postgresql_query_insert(subject: type_postgresql_subject, description_insert: type_description_insert): Promise<type_result_put>;
|
|
/**
|
|
*/
|
|
function postgresql_formulation_update(description_update: type_description_update): type_query;
|
|
/**
|
|
*/
|
|
function postgresql_query_update(subject: type_postgresql_subject, description_update: type_description_update): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function postgresql_formulation_delete(description_delete: type_description_delete): type_query;
|
|
/**
|
|
*/
|
|
function postgresql_query_delete(subject: type_postgresql_subject, description_delete: type_description_delete): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function postgresql_formulation_select(description_select: type_description_select): type_query;
|
|
/**
|
|
*/
|
|
function postgresql_query_select(subject: type_postgresql_subject, description_select: type_description_select): Promise<Array<Record<string, any>>>;
|
|
/**
|
|
*/
|
|
function postgresql_database(parameters: type_postgresql_parameters): type_database;
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_postgresql implements interface_database {
|
|
private subject;
|
|
constructor(parameters: type_postgresql_parameters);
|
|
wrap_name(name: any): string;
|
|
query_free_get(query: any): Promise<type_result_get>;
|
|
query_free_put(query: any): Promise<number>;
|
|
query_free_set(query: any): Promise<number>;
|
|
query_create_table(description_create_table: any): Promise<void>;
|
|
query_insert(description_insert: any): Promise<number>;
|
|
query_update(description_update: any): Promise<number>;
|
|
query_delete(description_delete: any): Promise<number>;
|
|
query_select(description_select: any): Promise<Record<string, any>[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_mysql_subject = {
|
|
verbose: boolean;
|
|
};
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_mysql_parameters = {
|
|
verbose?: boolean;
|
|
};
|
|
/**
|
|
*/
|
|
function mysql_make(parameters: type_mysql_parameters): type_mysql_subject;
|
|
/**
|
|
*/
|
|
function mysql_wrap_name(name: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function mysql_query_free_get(subject: type_mysql_subject, query: type_query): Promise<type_result_get>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function mysql_query_free_put(subject: type_mysql_subject, query: type_query): Promise<type_result_put>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function mysql_query_free_set(subject: type_mysql_subject, query: type_query): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_create_table(description_create_table: type_description_create_table): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_create_table(subject: type_mysql_subject, description: type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_insert(description_insert: type_description_insert): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_insert(subject: type_mysql_subject, description_insert: type_description_insert): Promise<type_result_put>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_update(description_update: type_description_update): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_update(subject: type_mysql_subject, description_update: type_description_update): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_delete(description_delete: type_description_delete): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_delete(subject: type_mysql_subject, description_delete: type_description_delete): Promise<type_result_set>;
|
|
/**
|
|
*/
|
|
function mysql_formulation_select(description_select: type_description_select): type_query;
|
|
/**
|
|
*/
|
|
function mysql_query_select(subject: type_mysql_subject, description_select: type_description_select): Promise<Array<Record<string, any>>>;
|
|
/**
|
|
*/
|
|
function mysql_database(parameters: type_mysql_parameters): type_database;
|
|
}
|
|
declare namespace lib_plankton.database {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_mysql implements interface_database {
|
|
private subject;
|
|
constructor(parameters: type_mysql_parameters);
|
|
wrap_name(name: any): string;
|
|
query_free_get(query: any): Promise<type_result_get>;
|
|
query_free_put(query: any): Promise<number>;
|
|
query_free_set(query: any): Promise<number>;
|
|
query_create_table(description_create_table: any): Promise<void>;
|
|
query_insert(description_insert: any): Promise<number>;
|
|
query_update(description_update: any): Promise<number>;
|
|
query_delete(description_delete: any): Promise<number>;
|
|
query_select(description_select: any): Promise<Record<string, any>[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage {
|
|
/**
|
|
* implements the idea of a database, which houses datasets (type_value) and manages their access by ids or sth. similar (type_key)
|
|
*
|
|
* @author fenris
|
|
*/
|
|
type type_store<type_key, type_value, type_setup_input, type_searchterm, type_preview> = {
|
|
/**
|
|
* shall prepare the storage instance for use
|
|
*
|
|
* @author fenris
|
|
*/
|
|
setup(input: type_setup_input): Promise<void>;
|
|
/**
|
|
* shall insert a new dataset and return its automatically assign key
|
|
*
|
|
* @author fenris
|
|
*/
|
|
create(value: type_value): Promise<type_key>;
|
|
/**
|
|
* shall modify an existing dataset
|
|
*
|
|
* @author fenris
|
|
*/
|
|
update(key: type_key, value: type_value): Promise<void>;
|
|
/**
|
|
* shall remove an existing dataset
|
|
*
|
|
* @author fenris
|
|
*/
|
|
delete(key: type_key): Promise<void>;
|
|
/**
|
|
* shall get an existing dataset by its key
|
|
*
|
|
* @author fenris
|
|
*/
|
|
read(key: type_key): Promise<type_value>;
|
|
/**
|
|
* shall list keys and previews of existing datasets, which match a certain search term
|
|
*
|
|
* @author fenris
|
|
*/
|
|
search(term?: (null | type_searchterm)): Promise<Array<{
|
|
key: type_key;
|
|
preview: type_preview;
|
|
}>>;
|
|
};
|
|
/**
|
|
* for class wrappers
|
|
*/
|
|
type interface_store<type_key, type_value, type_setup_input, type_searchterm, type_preview> = type_store<type_key, type_value, type_setup_input, type_searchterm, type_preview>;
|
|
}
|
|
declare namespace lib_plankton.storage {
|
|
/**
|
|
* implements the idea of a storage without managed keys
|
|
*
|
|
* @author fenris
|
|
*/
|
|
type type_chest<type_key, type_value, type_setup_input, type_searchterm, type_preview> = {
|
|
/**
|
|
* shall prepare the storage instance for use
|
|
*
|
|
* @author fenris
|
|
*/
|
|
setup(input: type_setup_input): Promise<void>;
|
|
/**
|
|
* shall remove all items
|
|
*/
|
|
clear(): Promise<void>;
|
|
/**
|
|
* shall insert a new or modify an existing dataset and return whether it is new
|
|
*
|
|
* @author fenris
|
|
*/
|
|
write(key: type_key, value: type_value): Promise<boolean>;
|
|
/**
|
|
* shall remove an existing dataset
|
|
*
|
|
* @author fenris
|
|
*/
|
|
delete(key: type_key): Promise<void>;
|
|
/**
|
|
* shall get an existing dataset by its key
|
|
*
|
|
* @author fenris
|
|
*/
|
|
read(key: type_key): Promise<type_value>;
|
|
/**
|
|
* shall list keys and previews of existing datasets, which match a certain search term
|
|
*
|
|
* @author fenris
|
|
*/
|
|
search(term?: (null | type_searchterm)): Promise<Array<{
|
|
key: type_key;
|
|
preview: type_preview;
|
|
}>>;
|
|
};
|
|
/**
|
|
* for class wrappers
|
|
*/
|
|
type interface_chest<type_key, type_value, type_setup_input, type_searchterm, type_preview> = type_chest<type_key, type_value, type_setup_input, type_searchterm, type_preview>;
|
|
}
|
|
declare namespace lib_plankton.storage.memory {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_subject<type_value> = {
|
|
data: Record<string, type_value>;
|
|
};
|
|
/**
|
|
*/
|
|
type type_parameters<type_value> = {};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make<type_value>(parameters: type_parameters<type_value>): type_subject<type_value>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function clear<type_value>(subject: type_subject<type_value>): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function write<type_value>(subject: type_subject<type_value>, key: string, value: type_value): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function delete_<type_value>(subject: type_subject<type_value>, key: string): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function read<type_value>(subject: type_subject<type_value>, key: string): type_value;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function list<type_value>(subject: type_subject<type_value>): Array<string>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function search<type_value>(subject: type_subject<type_value>, term: (null | string)): Array<{
|
|
key: string;
|
|
preview: string;
|
|
}>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function implementation_chest<type_value>(parameters: type_parameters<type_value>): type_chest<string, type_value, void, string, string>;
|
|
}
|
|
declare namespace lib_plankton.storage.memory {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_chest<type_item> implements type_chest<string, type_item, void, string, string> {
|
|
private subject;
|
|
constructor(parameters: type_parameters<type_item>);
|
|
setup(input: any): Promise<void>;
|
|
clear(): Promise<void>;
|
|
write(key: any, value: any): Promise<boolean>;
|
|
delete(key: any): Promise<void>;
|
|
read(key: any): Promise<Awaited<type_item>>;
|
|
search(term: any): Promise<{
|
|
key: string;
|
|
preview: string;
|
|
}[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage.filesystem {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_subject = {
|
|
nodemodule: any;
|
|
};
|
|
/**
|
|
*/
|
|
type type_parameters = {};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make(parameters: type_parameters): type_subject;
|
|
/**
|
|
*/
|
|
function clear(subject: type_subject): Promise<void>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function write(subject: type_subject, path: string, content: Buffer): Promise<boolean>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function delete_(subject: type_subject, path: string): Promise<void>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function read(subject: type_subject, path: string): Promise<Buffer>;
|
|
/**
|
|
*/
|
|
function implementation_chest(parameters: type_parameters): type_chest<string, Buffer, void, void, void>;
|
|
}
|
|
declare namespace lib_plankton.storage.filesystem {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_chest implements type_chest<string, Buffer, void, void, void> {
|
|
private subject;
|
|
constructor(parameters: type_parameters);
|
|
setup(input: any): Promise<void>;
|
|
clear(): Promise<void>;
|
|
write(key: any, value: any): Promise<boolean>;
|
|
delete(key: any): Promise<void>;
|
|
read(key: any): Promise<Buffer>;
|
|
search(searchterm: any): Promise<{
|
|
key: string;
|
|
preview: void;
|
|
}[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage {
|
|
/**
|
|
*/
|
|
type type_sql_table_autokey_search_term = {
|
|
expression: string;
|
|
arguments: Record<string, any>;
|
|
};
|
|
/**
|
|
*/
|
|
type type_sql_table_autokey_subject = {
|
|
database_implementation: lib_plankton.database.type_database;
|
|
table_name: string;
|
|
key_name: string;
|
|
};
|
|
/**
|
|
*/
|
|
type type_sql_table_autokey_parameters = {
|
|
database_implementation: lib_plankton.database.type_database;
|
|
table_name: string;
|
|
key_name: string;
|
|
};
|
|
/**
|
|
*/
|
|
function sql_table_autokey_make(parameters: type_sql_table_autokey_parameters): type_sql_table_autokey_subject;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_setup(subject: type_sql_table_autokey_subject, description_create_table: lib_plankton.database.type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_create(subject: type_sql_table_autokey_subject, value: Record<string, any>): Promise<int>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_update(subject: type_sql_table_autokey_subject, key: int, value: Record<string, any>): Promise<void>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_delete(subject: type_sql_table_autokey_subject, key: int): Promise<void>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_read(subject: type_sql_table_autokey_subject, key: int): Promise<Record<string, any>>;
|
|
/**
|
|
* @todo correct preview
|
|
*/
|
|
function sql_table_autokey_search(subject: type_sql_table_autokey_subject, term: (null | type_sql_table_autokey_search_term)): Promise<Array<{
|
|
key: int;
|
|
preview: Record<string, any>;
|
|
}>>;
|
|
/**
|
|
*/
|
|
function sql_table_autokey_store(parameters: type_sql_table_autokey_parameters): type_store<int, Record<string, any>, lib_plankton.database.type_description_create_table, type_sql_table_autokey_search_term, Record<string, any>>;
|
|
}
|
|
declare namespace lib_plankton.storage {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_sql_table_autokey implements type_store<int, Record<string, any>, lib_plankton.database.type_description_create_table, string, Record<string, any>> {
|
|
private subject;
|
|
constructor(parameters: type_sql_table_autokey_parameters);
|
|
setup(input: any): Promise<void>;
|
|
create(value: any): Promise<number>;
|
|
update(key: any, value: any): Promise<void>;
|
|
delete(key: any): Promise<void>;
|
|
read(key: any): Promise<Record<string, any>>;
|
|
search(term: any): Promise<{
|
|
key: number;
|
|
preview: Record<string, any>;
|
|
}[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.storage.sql_table_common {
|
|
/**
|
|
*/
|
|
type type_sql_table_common_search_term = {
|
|
expression: string;
|
|
arguments: Record<string, any>;
|
|
};
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
database_implementation: lib_plankton.database.type_database;
|
|
table_name: string;
|
|
key_names: Array<string>;
|
|
};
|
|
/**
|
|
*/
|
|
type type_parameters = {
|
|
database_implementation: lib_plankton.database.type_database;
|
|
table_name: string;
|
|
key_names: Array<string>;
|
|
};
|
|
/**
|
|
*/
|
|
function make(parameters: type_parameters): type_subject;
|
|
/**
|
|
*/
|
|
function setup(subject: type_subject, description_create_table: lib_plankton.database.type_description_create_table): Promise<void>;
|
|
/**
|
|
*/
|
|
function clear(subject: type_subject): Promise<void>;
|
|
/**
|
|
* @todo optimize: avoid read
|
|
*/
|
|
function write(subject: type_subject, key: Array<any>, value: Record<string, any>): Promise<boolean>;
|
|
/**
|
|
*/
|
|
function delete_(subject: type_subject, key: Array<any>): Promise<void>;
|
|
/**
|
|
*/
|
|
function read(subject: type_subject, key: Array<any>): Promise<Record<string, any>>;
|
|
/**
|
|
* @todo correct preview
|
|
*/
|
|
function search(subject: type_subject, term: (null | type_sql_table_common_search_term)): Promise<Array<{
|
|
key: Array<any>;
|
|
preview: Record<string, any>;
|
|
}>>;
|
|
/**
|
|
*/
|
|
function chest(parameters: type_parameters): type_chest<Array<any>, Record<string, any>, lib_plankton.database.type_description_create_table, type_sql_table_common_search_term, Record<string, any>>;
|
|
}
|
|
declare namespace lib_plankton.storage.sql_table_common {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_chest implements type_chest<Array<any>, Record<string, any>, lib_plankton.database.type_description_create_table, type_sql_table_common_search_term, Record<string, any>> {
|
|
private subject;
|
|
constructor(parameters: type_parameters);
|
|
setup(input: any): Promise<void>;
|
|
clear(): Promise<void>;
|
|
write(key: any, value: any): Promise<boolean>;
|
|
delete(key: any): Promise<void>;
|
|
read(key: any): Promise<Record<string, any>>;
|
|
search(term: any): Promise<{
|
|
key: any[];
|
|
preview: Record<string, any>;
|
|
}[]>;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.shape {
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_jsonschema = any;
|
|
/**
|
|
* @todo
|
|
*/
|
|
type type_oas_schema = any;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_inspection = {
|
|
flaws: Array<string>;
|
|
sub: Array<{
|
|
position: string;
|
|
inspection: type_inspection;
|
|
}>;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function inspection_create(): type_inspection;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function inspection_add(main: type_inspection, flaw: string): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function inspection_extend(main: type_inspection, prefix: string, sub: type_inspection): void;
|
|
/**
|
|
*/
|
|
type type_shape = {
|
|
kind: string;
|
|
parameters: Record<string, any>;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_logic = {
|
|
inspect: ((sub_inspect: ((shape: type_shape, value: any) => type_inspection), value: any) => type_inspection);
|
|
show: ((sub_show: ((shape: type_shape) => string)) => string);
|
|
to_typescript_type: ((sub_to_typescript_type: ((shape: type_shape) => string)) => string);
|
|
to_jsonschema: ((sub_to_json_schema: ((shape: type_shape) => type_jsonschema)) => type_jsonschema);
|
|
to_oas_schema: ((sub_to_oas_schema: ((shape: type_shape) => type_oas_schema)) => type_oas_schema);
|
|
example: ((sub_example: ((shape: type_shape) => any)) => any);
|
|
};
|
|
/**
|
|
*/
|
|
function inspect(shape: type_shape, value: any): type_inspection;
|
|
/**
|
|
*/
|
|
function inspect_flat(shape: type_shape, value: any): Array<string>;
|
|
/**
|
|
*/
|
|
function show(shape: type_shape): string;
|
|
/**
|
|
*/
|
|
function to_typescript_type(shape: type_shape): string;
|
|
/**
|
|
*/
|
|
function to_jsonschema(shape: type_shape): type_jsonschema;
|
|
/**
|
|
*/
|
|
function to_oas_schema(shape: type_shape): type_oas_schema;
|
|
/**
|
|
*/
|
|
function example(shape: type_shape): any;
|
|
/**
|
|
*/
|
|
function register<type_parameters, type_subject>(name: string, construct: ((parameters: type_parameters) => type_subject), logic: ((subject: type_subject) => type_logic)): void;
|
|
}
|
|
declare namespace lib_plankton.shape.any {
|
|
/**
|
|
*/
|
|
type type_subject = {};
|
|
/**
|
|
*/
|
|
export function make(options?: {}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.null_ {
|
|
/**
|
|
*/
|
|
type type_subject = {};
|
|
/**
|
|
*/
|
|
export function make(options?: {}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.boolean {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: (null | boolean);
|
|
description?: string;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.integer {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
minimum: lib_plankton.pod.type_pod<int>;
|
|
maximum: lib_plankton.pod.type_pod<int>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: (null | int);
|
|
description?: string;
|
|
minimum?: int;
|
|
maximum?: int;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.float {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
minimum: lib_plankton.pod.type_pod<float>;
|
|
maximum: lib_plankton.pod.type_pod<float>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: (null | float);
|
|
description?: string;
|
|
minimum?: float;
|
|
maximum?: float;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.string {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
pattern: lib_plankton.pod.type_pod<string>;
|
|
min_length: lib_plankton.pod.type_pod<int>;
|
|
max_length: lib_plankton.pod.type_pod<int>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: (null | int);
|
|
description?: string;
|
|
pattern?: string;
|
|
min_length?: int;
|
|
max_length?: int;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.email {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
core: type_shape;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: (null | int);
|
|
description?: string;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.list_ {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
shape_element: type_shape;
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(shape_element: type_shape, options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: any;
|
|
description?: string;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.map {
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
shape_key: type_shape;
|
|
shape_value: type_shape;
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(shape_key: type_shape, shape_value: type_shape, options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: any;
|
|
description?: string;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.shape.record {
|
|
/**
|
|
*/
|
|
type type_field = {
|
|
name: string;
|
|
shape: type_shape;
|
|
required: boolean;
|
|
};
|
|
/**
|
|
*/
|
|
type type_subject = {
|
|
fields: Array<type_field>;
|
|
soft: boolean;
|
|
defaultvalue: lib_plankton.pod.type_pod<any>;
|
|
description: lib_plankton.pod.type_pod<string>;
|
|
};
|
|
/**
|
|
*/
|
|
export function make(fields_raw: Array<{
|
|
name: string;
|
|
shape: type_shape;
|
|
required?: boolean;
|
|
}>, options?: {
|
|
soft?: boolean;
|
|
defaultvalue?: any;
|
|
description?: string;
|
|
}): type_subject;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.code {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
interface interface_code<type_from, type_to> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
encode(x: type_from): type_to;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
decode(x: type_to): type_from;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.code {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_code<type_from, type_to> = {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
encode: (x: type_from) => type_to;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
decode: (x: type_to) => type_from;
|
|
};
|
|
}
|
|
declare namespace lib_plankton.code {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function inverse_encode<type_from, type_to>(decode: (to: type_to) => type_from, to: type_to): type_from;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function inverse_decode<type_from, type_to>(encode: (from: type_from) => type_to, from: type_from): type_to;
|
|
}
|
|
declare namespace lib_plankton.code {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_code_inverse<type_from, type_to> implements interface_code<type_to, type_from> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
protected subject: interface_code<type_from, type_to>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor(subject: interface_code<type_from, type_to>);
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
encode(to: type_to): type_from;
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
decode(from: type_from): type_to;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.code {
    /**
     * Encodes by composing two encode steps: first "from → between", then "between → to".
     *
     * @author fenris
     */
    function pair_encode<type_from, type_between, type_to>(encode_first: (from: type_from) => type_between, encode_second: (between: type_between) => type_to, from: type_from): type_to;
    /**
     * Decodes by composing two decode steps in reverse order: first "to → between", then "between → from".
     *
     * @author fenris
     */
    function pair_decode<type_from, type_between, type_to>(decode_first: (between: type_between) => type_from, decode_second: (to: type_to) => type_between, to: type_to): type_from;
}
|
|
declare namespace lib_plankton.code {
    /**
     * A codec formed by chaining two codecs through an intermediate representation:
     * encode runs first then second; decode runs second then first.
     *
     * @author fenris
     */
    class class_code_pair<type_from, type_between, type_to> implements interface_code<type_from, type_to> {
        /**
         * Codec for the "from ↔ between" leg.
         *
         * @author fenris
         */
        protected first: interface_code<type_from, type_between>;
        /**
         * Codec for the "between ↔ to" leg.
         *
         * @author fenris
         */
        protected second: interface_code<type_between, type_to>;
        /**
         * @author fenris
         */
        constructor(first: interface_code<type_from, type_between>, second: interface_code<type_between, type_to>);
        /**
         * Encodes through both legs in order (first, then second).
         *
         * @implementation
         * @author fenris
         */
        encode(from: type_from): type_to;
        /**
         * Decodes through both legs in reverse order (second, then first).
         *
         * @implementation
         * @author fenris
         */
        decode(to: type_to): type_from;
    }
}
|
|
declare namespace lib_plankton.code {
    /**
     * Encodes by applying a list of encode functions in sequence.
     * Untyped (any) since the intermediate types vary per link.
     *
     * @author fenris
     */
    function chain_encode(encode_links: Array<(from: any) => any>, from: any): any;
    /**
     * Decodes by applying a list of decode functions; counterpart to chain_encode.
     *
     * @author fenris
     */
    function chain_decode(decode_links: Array<(to: any) => any>, to: any): any;
}
|
|
declare namespace lib_plankton.code {
    /**
     * A codec composed of an arbitrary number of chained codecs ("links").
     * Untyped (any) since the intermediate types vary per link.
     *
     * @author fenris
     */
    class class_code_chain implements interface_code<any, any> {
        /**
         * The chained codecs, applied in order for encoding.
         *
         * @author fenris
         */
        protected links: Array<interface_code<any, any>>;
        /**
         * @author fenris
         */
        constructor(links: Array<interface_code<any, any>>);
        /**
         * Encodes by running the value through all links in order.
         *
         * @implementation
         * @author fenris
         */
        encode(from: any): any;
        /**
         * Decodes by running the value through all links in reverse order.
         *
         * @implementation
         * @author fenris
         */
        decode(to: any): any;
    }
}
|
|
declare namespace lib_plankton.code {
    /**
     * Source shape for flattening: a list of records (name → value).
     *
     * @author Christian Fraß <frass@greenscale.de>
     */
    type type_flatten_from = Array<{
        [name: string]: any;
    }>;
    /**
     * Target shape for flattening: a table with a shared key list and one row of values per record.
     *
     * @author Christian Fraß <frass@greenscale.de>
     */
    type type_flatten_to = {
        keys: Array<string>;
        data: Array<Array<any>>;
    };
    /**
     * Converts a list of records into table form (keys + rows).
     *
     * @param keys optional explicit key list/order; presumably derived from the records when omitted — TODO confirm
     * @author Christian Fraß <frass@greenscale.de>
     */
    function flatten_encode(from: type_flatten_from, keys?: Array<string>): type_flatten_to;
    /**
     * Converts table form (keys + rows) back into a list of records.
     *
     * @author Christian Fraß <frass@greenscale.de>
     */
    function flatten_decode(to: type_flatten_to): type_flatten_from;
}
|
|
declare namespace lib_plankton.code {
    /**
     * Codec wrapper around flatten_encode/flatten_decode
     * (record list ↔ keys + rows table).
     *
     * @author fenris
     */
    class class_code_flatten implements interface_code<type_flatten_from, type_flatten_to> {
        /**
         * @author fenris
         */
        constructor();
        /**
         * Converts a list of records into table form.
         *
         * @implementation
         * @author fenris
         */
        encode(x: type_flatten_from): type_flatten_to;
        /**
         * Converts table form back into a list of records.
         *
         * @implementation
         * @author fenris
         */
        decode(x: type_flatten_to): type_flatten_from;
    }
}
|
|
declare namespace lib_plankton.www_form {
    /**
     * Key/value pairs to be serialized as form data.
     */
    type type_source = Record<string, string>;
    /**
     * URL-encoded form string (application/x-www-form-urlencoded style — TODO confirm exact encoding).
     */
    type type_target = string;
    /**
     * Serializes key/value pairs into a form string.
     *
     * @author fenris
     */
    function encode(source: type_source): type_target;
    /**
     * Parses a form string back into key/value pairs.
     *
     * @author fenris
     */
    function decode(target: type_target): type_source;
}
|
|
declare namespace lib_plankton.www_form {
    /**
     * Codec wrapper around www_form encode/decode
     * (key/value record ↔ form string).
     *
     * @author fenris
     */
    class class_www_form implements lib_plankton.code.interface_code<type_source, type_target> {
        /**
         * @author fenris
         */
        constructor();
        /**
         * Serializes key/value pairs into a form string.
         *
         * @implementation
         * @author fenris
         */
        encode(source: type_source): type_target;
        /**
         * Parses a form string back into key/value pairs.
         *
         * @implementation
         * @author fenris
         */
        decode(target: type_target): type_source;
    }
}
|
|
declare namespace lib_plankton.url {
    /**
     * Structured representation of a URL; each component is null when absent.
     *
     * @author fenris
     */
    type type_url = {
        scheme: (null | string);
        host: (null | string);
        username: (null | string);
        password: (null | string);
        port: (null | int);
        path: (null | string);
        query: (null | string);
        hash: (null | string);
    };
}
|
|
declare namespace lib_plankton.url {
    /**
     * Renders a structured URL into its string form.
     *
     * @author fenris
     */
    function encode(url: type_url): string;
    /**
     * Parses a URL string into its structured form.
     *
     * @author fenris
     * @todo arguments
     */
    function decode(url_raw: string): type_url;
    /**
     * Returns the encode/decode pair as a record-based codec object.
     *
     * @author fenris
     */
    function implementation_code(): lib_plankton.code.type_code<type_url, string>;
}
|
|
declare namespace lib_plankton.url {
    /**
     * Codec wrapper around url encode/decode (structured URL ↔ string).
     *
     * @author fenris
     */
    class class_url implements lib_plankton.code.interface_code<type_url, string> {
        /**
         * @author fenris
         */
        constructor();
        /**
         * Renders a structured URL into its string form.
         * NOTE(review): parameter is typed `any` although the interface expects type_url.
         *
         * @implementation
         * @author fenris
         */
        encode(x: any): string;
        /**
         * Parses a URL string into its structured form.
         * NOTE(review): return is typed `any` although the interface expects type_url.
         *
         * @implementation
         * @author fenris
         */
        decode(x: string): any;
    }
}
|
|
declare namespace lib_plankton.random {
    /**
     * State of the random generator: either the builtin PRNG or a seeded one.
     *
     * @author fenris
     */
    type type_state = {
        builtin: boolean;
        seed?: int;
    };
    /**
     * Pushes a generator state, making it the active one.
     *
     * @author fenris
     */
    function state_push(state: type_state): void;
    /**
     * Removes the active generator state and returns it.
     *
     * @author fenris
     */
    function state_pop(): type_state;
    /**
     * returns a random floating point number in the interval [0,1[
     *
     * @author fenris
     */
    function generate_unit(): float;
    /**
     * returns a random boolean value
     *
     * @param {float} [probability] the probability for the return-value "true"; default: 0.5
     * @author fenris
     */
    function generate_boolean(options?: {
        probability?: float;
    }): boolean;
    /**
     * returns a random integer number in the interval [minimum,maximum]
     *
     * @param {int} [minimum] the lower bound of the interval (i.e. the smallest value that may be returned)
     * @param {int} [maximum] the upper bound of the interval (i.e. the largest value that may be returned)
     * @author fenris
     */
    function generate_integer(options?: {
        minimum?: int;
        maximum?: int;
    }): int;
    /**
     * alias for generate_integer
     */
    var generate_int: typeof generate_integer;
    /**
     * returns a random floating point number in the given interval
     *
     * @author fenris
     */
    function generate_float(options?: {
        minimum?: int;
        maximum?: int;
    }): float;
    /**
     * returns a random date between the given bounds
     *
     * @author fenris
     */
    function generate_date(options?: {
        minimum?: Date;
        maximum?: Date;
    }): Date;
    /**
     * returns a single random hexadecimal digit as a string
     *
     * @author fenris
     */
    function generate_hexdigit(): string;
    /**
     * generates a random string
     *
     * @param {int} [length] the desired string length
     * @author fenris
     */
    function generate_string(options?: {
        length?: int;
    }): string;
    /**
     * chooses a value randomly from a list of values with weights (a higher weight means a higher probability to be chosen)
     *
     * @author fenris
     */
    function choose_weighted<type_value>(sets: Array<{
        weight: float;
        value: type_value;
    }>): type_value;
    /**
     * chooses a value randomly from a list of values with equal probabilities
     *
     * @author fenris
     */
    function choose_uniformly<type_value>(values: Array<type_value>): type_value;
    /**
     * returns a randomly reordered version of the given list
     *
     * @author fenris
     */
    function shuffle<type_element>(list: Array<type_element>): Array<type_element>;
    /**
     * returns a random vowel character
     *
     * @author fenris
     */
    function generate_vowel(): string;
    /**
     * returns a random half-vowel character
     *
     * @author fenris
     */
    function generate_halfvowel(): string;
    /**
     * returns a random consonant character
     *
     * @author fenris
     */
    function generate_consonant(): string;
    /**
     * returns a random letter
     *
     * @author fenris
     */
    function generate_letter(): string;
    /**
     * returns a random (pronounceable) syllable
     *
     * @author fenris
     */
    function generate_syllable(): string;
    /**
     * returns a random word composed of syllables
     *
     * @author fenris
     */
    function generate_word(options?: {
        syllable_count_minimum?: int;
        syllable_count_maximum?: int;
    }): string;
    /**
     * returns a random text composed of words
     *
     * @author fenris
     */
    function generate_text(options?: {
        word_count?: int;
    }): string;
    /**
     * returns a random (fictional) city name
     *
     * @author fenris
     */
    function generate_city(): string;
    /**
     * returns a random (fictional) street name
     *
     * @author fenris
     */
    function generate_street(): string;
    /**
     * returns a random GUID string
     *
     * @param [with_braces] whether to wrap the GUID in curly braces
     * @author fenris
     */
    function generate_guid(options?: {
        with_braces?: boolean;
    }): string;
    /**
     * returns a random URL string
     *
     * @author fenris
     */
    function generate_url(): string;
    /**
     * returns a random email address string
     *
     * @author fenris
     */
    function generate_email_address(): string;
    /**
     * returns a random telephone number string
     *
     * @author fenris
     */
    function generate_telephone_number(): string;
    /**
     * returns a random time value
     *
     * @author fenris
     */
    function generate_time(): any;
    /**
     * generates a random value matching the given shape description
     *
     * @author fenris
     * @deprecated
     * @todo remove
     */
    function generate_for_shape(shape: any): any;
}
|
|
declare namespace lib_plankton.session {
    /**
     * A session record: lookup key, owner name, expiry timestamp and attached payload.
     */
    type type_session = {
        key: string;
        name: string;
        expiry: int;
        data: any;
    };
    /**
     * Starts a new session and resolves with its key.
     *
     * @param name the name the session is associated with
     * @param [options.lifetime] session lifetime; falls back to the configured default_lifetime — TODO confirm
     * @param [options.data] arbitrary payload to attach to the session
     */
    function begin(name: string, options?: {
        lifetime?: int;
        data?: any;
    }): Promise<string>;
    /**
     * Resolves with the session stored under the given key.
     */
    function get(key: string): Promise<type_session>;
    /**
     * Terminates the session stored under the given key.
     */
    function end(key: string): Promise<void>;
    /**
     * Configures the session subsystem (key generation, default lifetime, backing storage).
     *
     * @param [options.key_length] length of generated session keys
     * @param [options.key_max_attempts] how often key generation is retried on collision — presumably; verify against implementation
     * @param [options.default_lifetime] lifetime applied when begin() gets none
     * @param [options.data_chest] storage backend for session data
     * @param [options.clear] whether to clear existing sessions on setup — TODO confirm
     */
    function setup(options?: {
        key_length?: int;
        key_max_attempts?: int;
        default_lifetime?: int;
        data_chest?: lib_plankton.storage.type_chest<string, any, void, string, string>;
        clear?: boolean;
    }): Promise<void>;
}
|
|
declare namespace lib_plankton.file {
    /**
     * Resolves with whether a file exists at the given path.
     *
     * @author fenris
     */
    function exists(path: string): Promise<boolean>;
    /**
     * Reads a file as text.
     *
     * @author fenris
     */
    function read(path: string): Promise<string>;
    /**
     * Reads a file as a raw byte buffer.
     *
     * @author fenris
     */
    function read_buffer(path: string): Promise<Buffer>;
    /**
     * Reads all of standard input as text.
     *
     * @author fenris
     */
    function read_stdin(): Promise<string>;
    /**
     * Writes text content to a file.
     *
     * @param [options.encoding] text encoding to use for writing
     * @author fenris
     */
    function write(path: string, content: string, options?: {
        encoding?: string;
    }): Promise<void>;
    /**
     * Writes a raw byte buffer to a file.
     *
     * @author fenris
     */
    function write_buffer(path: string, content: Buffer, options?: {}): Promise<void>;
    /**
     * Deletes the file at the given path ("delete" is a reserved word, hence the underscore).
     */
    function delete_(path: string): Promise<void>;
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Removes all entries of a map, given its forEach and delete operations.
     *
     * @author fenris
     */
    function map_clear<type_key, type_value>(map_forEach: (procedure: (value?: type_value, key?: type_key) => void) => void, map_delete: (key: type_key) => void): void;
    /**
     * Collects all keys of a map, given its forEach operation.
     *
     * @author fenris
     */
    function map_keys<type_key, type_value>(map_forEach: (procedure: (value?: type_value, key?: type_key) => void) => void): Array<type_key>;
    /**
     * Collects all values of a map, given its forEach operation.
     *
     * @author fenris
     */
    function map_values<type_key, type_value>(map_forEach: (procedure: (value?: type_value, key?: type_key) => void) => void): Array<type_value>;
    /**
     * Collects all key/value pairs of a map, given its forEach operation.
     *
     * @author fenris
     */
    function map_pairs<type_key, type_value>(map_forEach: (procedure: (value?: type_value, key?: type_key) => void) => void): Array<{
        key: type_key;
        value: type_value;
    }>;
    /**
     * Renders a map as a string, given its forEach operation and optional key/value formatters.
     *
     * @author fenris
     */
    function map_toString<type_key, type_value>(map_forEach: (procedure: (value?: type_value, key?: type_key) => void) => void, show_key?: (key: type_key) => string, show_value?: (value: type_value) => string): string;
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Minimal contract for a key/value map.
     *
     * @author fenris
     */
    interface interface_map<type_key, type_value> {
        /**
         * Whether an entry exists for the key.
         *
         * @desc [accessor]
         */
        has(key: type_key): boolean;
        /**
         * Returns the value for the key; the fallback is used when the key is absent.
         *
         * @desc [accessor]
         */
        get(key: type_key, fallback?: lib_plankton.pod.class_pod<type_value>): type_value;
        /**
         * Stores a value under the key.
         *
         * @desc [mutator]
         */
        set(key: type_key, value: type_value): void;
        /**
         * Removes the entry for the key.
         *
         * @desc [mutator]
         */
        delete(key: type_key): void;
        /**
         * Invokes the procedure for every entry.
         *
         * @desc [accessor]
         */
        forEach(procedure: ((value?: type_value, key?: type_key) => void)): void;
    }
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Abstract base class for maps; derives convenience operations
     * (clear, keys, values, pairs, toString, safe get) from the abstract core operations.
     *
     * @author fenris
     */
    abstract class class_mapbase<type_key, type_value> implements interface_map<type_key, type_value> {
        /**
         * @desc [constructor]
         */
        constructor();
        /**
         * Whether an entry exists for the key.
         *
         * @implementation
         */
        abstract has(key: type_key): boolean;
        /**
         * Returns the value for the key; the fallback is used when the key is absent.
         *
         * @implementation
         */
        abstract get(key: type_key, fallback?: lib_plankton.pod.class_pod<type_value>): type_value;
        /**
         * Stores a value under the key.
         *
         * @implementation
         */
        abstract set(key: type_key, value: type_value): void;
        /**
         * Removes the entry for the key.
         *
         * @implementation
         */
        abstract delete(key: type_key): void;
        /**
         * Invokes the procedure for every entry.
         *
         * @implementation
         */
        abstract forEach(procedure: ((value?: type_value, key?: type_key) => void)): void;
        /**
         * Removes all entries.
         *
         * @desc [mutator]
         */
        clear(): void;
        /**
         * Returns the value for the key wrapped in a pod, so absence is representable without a fallback.
         */
        get_safe(key: type_key): lib_plankton.pod.class_pod<type_value>;
        /**
         * Collects all keys.
         *
         * @desc [accessor]
         */
        keys(): Array<type_key>;
        /**
         * Collects all values.
         *
         * @desc [accessor]
         */
        values(): Array<type_value>;
        /**
         * Collects all key/value pairs.
         *
         * @desc [accessor]
         */
        pairs(): Array<{
            key: type_key;
            value: type_value;
        }>;
        /**
         * Renders the map as a string with optional key/value formatters.
         *
         * @desc [accessor]
         */
        toString(show_key?: ((key: type_key) => string), show_value?: ((value: type_value) => string)): string;
    }
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Contract for element containers with a single insertion/removal discipline
     * (e.g. stack, queue).
     *
     * @author fenris
     */
    interface interface_memory<type_element> {
        /**
         * @desc [accessor] the number of elements
         */
        size(): int;
        /**
         * @desc [accessor] reads the takeable element
         */
        scan(): type_element;
        /**
         * @desc [mutator] inserts an element
         */
        give(element: type_element): void;
        /**
         * @desc [mutator] removes an element and returns it
         */
        take(): type_element;
    }
}
|
|
declare namespace lib_plankton.structures {
    /**
     * An ordered pair of two (possibly differently typed) values,
     * supporting cloning, collation, hashing and string rendering.
     *
     * @author fenris
     */
    class class_pair<type_first, type_second> implements interface_cloneable<class_pair<type_first, type_second>>, interface_collatable<class_pair<type_first, type_second>>, interface_hashable, interface_showable {
        /**
         * The first component.
         *
         * @author fenris
         */
        protected first: type_first;
        /**
         * The second component.
         *
         * @author fenris
         */
        protected second: type_second;
        /**
         * @author fenris
         */
        constructor(first: type_first, second: type_second);
        /**
         * Returns the first component.
         *
         * @desc [accessor] [getter]
         * @author fenris
         */
        first_get(): type_first;
        /**
         * Returns the second component.
         *
         * @desc [accessor] [getter]
         * @author fenris
         */
        second_get(): type_second;
        /**
         * Replaces the first component.
         *
         * @desc [mutator] [setter]
         * @author fenris
         */
        first_set(first: type_first): void;
        /**
         * Replaces the second component.
         *
         * @desc [mutator] [setter]
         * @author fenris
         */
        second_set(second: type_second): void;
        /**
         * Returns a new pair with the components exchanged.
         *
         * @desc [accessor]
         * @author fenris
         */
        swap(): class_pair<type_second, type_first>;
        /**
         * Returns a new pair with both components transformed by the given functions.
         *
         * @desc [accessor]
         * @author fenris
         */
        transform<type_first_, type_second_>(transform_first: (first: type_first) => type_first_, transform_second: (second: type_second) => type_second_): class_pair<type_first_, type_second_>;
        /**
         * Returns a copy of this pair.
         *
         * @desc [accessor] [implementation]
         * @author fenris
         */
        _clone(): class_pair<type_first, type_second>;
        /**
         * Returns a hash string for this pair.
         *
         * @desc [accessor] [implementation]
         * @author fenris
         */
        _hash(): string;
        /**
         * Compares this pair with another for equality.
         *
         * @desc [accessor] [implementation]
         * @author fenris
         */
        _collate(pair: class_pair<type_first, type_second>): boolean;
        /**
         * Renders this pair as a string.
         *
         * @desc [accessor] [implementation]
         * @author fenris
         */
        _show(): string;
    }
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Equality predicate used to compare elements.
     */
    type type_collation<type_element> = ((x: type_element, y: type_element) => boolean);
    /**
     * A mathematical set, represented by a list of elements;
     * uniqueness is enforced via an element collation (equality) function.
     *
     * @author fenris
     */
    export type type_set<type_element> = {
        elements: Array<type_element>;
    };
    /**
     * Creates a set, deduplicating the given elements via the collation.
     *
     * @author fenris
     */
    export function set_construct<type_element>(collation: type_collation<type_element>, elements?: Array<type_element>): type_set<type_element>;
    /**
     * Returns the number of elements.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_size<type_element>(subject: type_set<type_element>): int;
    /**
     * Whether the element is contained in the set (per the collation).
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_has<type_element>(collation: type_collation<type_element>, subject: type_set<type_element>, element: type_element): boolean;
    /**
     * Inserts the element unless an equal one (per the collation) is already present.
     *
     * @desc [mutator]
     * @author fenris
     */
    export function set_add<type_element>(collation: type_collation<type_element>, subject: type_set<type_element>, element: type_element): void;
    /**
     * Removes and returns some element, wrapped in a pod; empty pod when the set is empty — TODO confirm.
     *
     * @desc [mutator]
     * @author fenris
     */
    export function set_pop<type_element>(subject: type_set<type_element>): lib_plankton.pod.type_pod<type_element>;
    /**
     * Invokes the function for every element.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_forEach<type_element>(subject: type_set<type_element>, function_: ((element: type_element) => void)): void;
    /**
     * Returns a new set with every element transformed; the result uses the given collation.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_map<type_element_from, type_element_to>(collation: type_collation<type_element_to>, subject: type_set<type_element_from>, transformator: ((element: type_element_from) => type_element_to)): type_set<type_element_to>;
    /**
     * Returns a new set containing only the elements matching the predicate.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_filter<type_element>(subject: type_set<type_element>, predicate: ((element: type_element) => boolean)): type_set<type_element>;
    /**
     * Returns the elements as a plain array.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_dump<type_element>(subject: type_set<type_element>): Array<type_element>;
    /**
     * Whether the subject is a subset of the object.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_subset<type_element>(collation: type_collation<type_element>, subject: type_set<type_element>, object: type_set<type_element>): boolean;
    /**
     * Whether the subject is a superset of the object.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_superset<type_element>(collation: type_collation<type_element>, subject: type_set<type_element>, object: type_set<type_element>): boolean;
    /**
     * Whether both sets contain exactly the same elements.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_equals<type_element>(collation: type_collation<type_element>, subject: type_set<type_element>, object: type_set<type_element>): boolean;
    /**
     * Renders the set as a string using the given element formatter.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_toString<type_element>(show_element: ((element: type_element) => string), subject: type_set<type_element>): string;
    /**
     * Whether the set has no elements.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_empty<type_element>(subject: type_set<type_element>): boolean;
    /**
     * Returns the union of both sets.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_union<type_element>(collation: type_collation<type_element>, subject: type_set<type_element>, object: type_set<type_element>): type_set<type_element>;
    /**
     * Returns the intersection of both sets.
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_intersection<type_element>(collation: type_collation<type_element>, subject: type_set<type_element>, object: type_set<type_element>): type_set<type_element>;
    /**
     * Returns the difference (elements of subject not in object).
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_difference<type_element>(collation: type_collation<type_element>, subject: type_set<type_element>, object: type_set<type_element>): type_set<type_element>;
    /**
     * Returns the symmetric difference (elements in exactly one of the two sets).
     *
     * @desc [accessor]
     * @author fenris
     */
    export function set_symmetric_difference<type_element>(collation: type_collation<type_element>, subject: type_set<type_element>, object: type_set<type_element>): type_set<type_element>;
    /**
     * Returns the union of all given sets.
     *
     * @author fenris
     */
    export function set_union_all<type_element>(collation: type_collation<type_element>, sets: Array<type_set<type_element>>): type_set<type_element>;
    /**
     * Returns the intersection of all given sets.
     *
     * @author fenris
     */
    export function set_intersection_all<type_element>(collation: type_collation<type_element>, sets: Array<type_set<type_element>>): type_set<type_element>;
    export {};
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Object-oriented wrapper around the set_* functions:
     * a set with a configurable element equality.
     *
     * @author fenris
     */
    class class_set<type_element> implements interface_collatable<class_set<type_element>>, interface_showable {
        /**
         * Underlying functional set representation.
         *
         * @author fenris
         */
        protected subject: type_set<type_element>;
        /**
         * Element equality used for all set operations.
         *
         * @author fenris
         */
        protected equality: (element1: type_element, element2: type_element) => boolean;
        /**
         * @author fenris
         */
        constructor(elements?: Array<type_element>, equality?: (element1: type_element, element2: type_element) => boolean);
        /**
         * Wraps an existing functional set representation in a class_set.
         *
         * @author fenris
         */
        protected static from_subject<type_element>(equality: (element1: type_element, element2: type_element) => boolean, subject: type_set<type_element>): class_set<type_element>;
        /**
         * Returns the number of elements.
         *
         * @desc [accessor]
         * @author fenris
         */
        size(): int;
        /**
         * Whether the element is contained in the set.
         *
         * @desc [accessor]
         * @author fenris
         */
        has(element: type_element): boolean;
        /**
         * Inserts the element unless an equal one is already present.
         *
         * @desc [mutator]
         * @author fenris
         */
        add(element: type_element): void;
        /**
         * Removes and returns some element, wrapped in a pod.
         *
         * @desc [mutator]
         * @author fenris
         */
        pop(): lib_plankton.pod.class_pod<type_element>;
        /**
         * Invokes the function for every element.
         *
         * @desc [accessor]
         * @author fenris
         */
        forEach(function_: (element: type_element) => void): void;
        /**
         * Returns a new set with every element transformed; optional equality for the result type.
         *
         * @desc [accessor]
         * @author fenris
         */
        map<type_element_>(transformator: (element: type_element) => type_element_, equality?: (x: type_element_, y: type_element_) => boolean): class_set<type_element_>;
        /**
         * Returns a new set containing only the elements matching the predicate.
         *
         * @desc [accessor]
         * @author fenris
         */
        filter(predicate: (element: type_element) => boolean): class_set<type_element>;
        /**
         * Returns the elements as a plain array.
         *
         * @desc [accessor]
         * @author fenris
         */
        dump(): Array<type_element>;
        /**
         * Whether this set is a subset of the given one.
         *
         * @desc [accessor]
         * @author fenris
         */
        subset(set: class_set<type_element>): boolean;
        /**
         * Whether this set is a superset of the given one.
         *
         * @desc [accessor]
         * @author fenris
         */
        superset(set: class_set<type_element>): boolean;
        /**
         * Whether both sets contain exactly the same elements.
         *
         * @desc [accessor]
         * @author fenris
         */
        equals(set: class_set<type_element>): boolean;
        /**
         * Renders the set as a string.
         *
         * @desc [accessor]
         * @author fenris
         */
        toString(): string;
        /**
         * Whether the set has no elements.
         *
         * @desc [accessor]
         * @author fenris
         */
        empty(): boolean;
        /**
         * Returns the union of this set and the given one.
         *
         * @desc [accessor]
         * @author fenris
         */
        union(set: class_set<type_element>): class_set<type_element>;
        /**
         * Returns the intersection of this set and the given one.
         *
         * @desc [accessor]
         * @author fenris
         */
        intersection(set: class_set<type_element>): class_set<type_element>;
        /**
         * Returns the difference (elements of this set not in the given one).
         *
         * @desc [accessor]
         * @author fenris
         */
        difference(set: class_set<type_element>): class_set<type_element>;
        /**
         * Returns the symmetric difference (elements in exactly one of the two sets).
         *
         * @desc [accessor]
         * @author fenris
         */
        symmetric_difference(set: class_set<type_element>): class_set<type_element>;
        /**
         * Equality check against another set (collatable contract).
         *
         * @desc [accessor] [implementation]
         * @author fenris
         */
        _collate(set: class_set<type_element>): boolean;
        /**
         * String rendering (showable contract).
         *
         * @desc [accessor] [implementation]
         * @author fenris
         */
        _show(): string;
        /**
         * Returns the union of all given sets.
         *
         * @author fenris
         */
        static union_all<type_element>(sets: Array<class_set<type_element>>): class_set<type_element>;
        /**
         * Returns the intersection of all given sets.
         *
         * @author fenris
         */
        static intersection_all<type_element>(sets: Array<class_set<type_element>>): class_set<type_element>;
    }
}
|
|
declare namespace lib_plankton.structures {
    /**
     * A LIFO container represented by a list of elements.
     *
     * @author fenris
     */
    type type_stack<type_element> = {
        elements: Array<type_element>;
    };
    /**
     * Creates an empty stack.
     *
     * @author fenris
     */
    function stack_construct<type_element>(): type_stack<type_element>;
    /**
     * Returns the number of elements.
     *
     * @author fenris
     */
    function stack_size<type_element>(subject: type_stack<type_element>): int;
    /**
     * Reads the top element without removing it.
     *
     * @author fenris
     */
    function stack_scan<type_element>(subject: type_stack<type_element>): type_element;
    /**
     * Removes the top element and returns it.
     *
     * @author fenris
     */
    function stack_take<type_element>(subject: type_stack<type_element>): type_element;
    /**
     * Pushes an element on top.
     *
     * @author fenris
     */
    function stack_give<type_element>(subject: type_stack<type_element>, element: type_element): void;
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Object-oriented wrapper around the stack_* functions (LIFO memory).
     *
     * @author fenris
     */
    abstract class class_stack<type_element> implements interface_memory<type_element> {
        /**
         * Underlying functional stack representation.
         *
         * @author fenris
         */
        protected subject: type_stack<type_element>;
        /**
         * @author fenris
         */
        constructor();
        /**
         * Returns the number of elements.
         *
         * @override
         * @author fenris
         */
        size(): int;
        /**
         * Reads the top element without removing it.
         *
         * @override
         * @author fenris
         */
        scan(): type_element;
        /**
         * Removes the top element and returns it.
         *
         * @override
         * @author fenris
         */
        take(): type_element;
        /**
         * Pushes an element on top.
         *
         * @override
         * @author fenris
         */
        give(element: type_element): void;
    }
}
|
|
declare namespace lib_plankton.structures {
    /**
     * A FIFO container represented by a list of elements.
     *
     * @author fenris
     */
    type type_queue<type_element> = {
        elements: Array<type_element>;
    };
    /**
     * Creates an empty queue.
     *
     * @author fenris
     */
    function queue_construct<type_element>(): type_queue<type_element>;
    /**
     * Returns the number of elements.
     *
     * @author fenris
     */
    function queue_size<type_element>(subject: type_queue<type_element>): int;
    /**
     * Reads the front element without removing it.
     *
     * @author fenris
     */
    function queue_scan<type_element>(subject: type_queue<type_element>): type_element;
    /**
     * Removes the front element and returns it.
     *
     * @author fenris
     */
    function queue_take<type_element>(subject: type_queue<type_element>): type_element;
    /**
     * Appends an element at the back.
     *
     * @author fenris
     */
    function queue_give<type_element>(subject: type_queue<type_element>, element: type_element): void;
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Object-oriented wrapper around the queue_* functions (FIFO memory).
     *
     * @author fenris
     */
    abstract class class_queue<type_element> implements interface_memory<type_element> {
        /**
         * Underlying functional queue representation.
         *
         * @author fenris
         */
        protected subject: type_queue<type_element>;
        /**
         * @author fenris
         */
        constructor();
        /**
         * Returns the number of elements.
         *
         * @override
         * @author fenris
         */
        size(): int;
        /**
         * Reads the front element without removing it.
         *
         * @override
         * @author fenris
         */
        scan(): type_element;
        /**
         * Removes the front element and returns it.
         *
         * @override
         * @author fenris
         */
        take(): type_element;
        /**
         * Appends an element at the back.
         *
         * @override
         * @author fenris
         */
        give(element: type_element): void;
    }
}
|
|
declare namespace lib_plankton.structures {
    /**
     * A map with string keys, backed by a plain object.
     *
     * @author fenris
     */
    type type_simplemap<type_value> = {
        memory: {
            [key: string]: type_value;
        };
    };
    /**
     * Creates an empty simplemap.
     *
     * @author fenris
     */
    function simplemap_construct<type_value>(): type_simplemap<type_value>;
    /**
     * Whether an entry exists for the key.
     *
     * @author fenris
     */
    function simplemap_has<type_value>(subject: type_simplemap<type_value>, key: string): boolean;
    /**
     * Returns the value for the key wrapped in a pod, so absence is representable.
     *
     * @author fenris
     */
    function simplemap_get_safe<type_value>(subject: type_simplemap<type_value>, key: string): lib_plankton.pod.type_pod<type_value>;
    /**
     * Returns the value for the key; the fallback is used when the key is absent.
     *
     * @author fenris
     */
    function simplemap_get<type_value>(subject: type_simplemap<type_value>, key: string, fallback?: lib_plankton.pod.type_pod<type_value>): type_value;
    /**
     * Stores a value under the key.
     *
     * @author fenris
     */
    function simplemap_set<type_value>(subject: type_simplemap<type_value>, key: string, value: type_value): void;
    /**
     * Removes the entry for the key.
     *
     * @author fenris
     */
    function simplemap_delete<type_value>(subject: type_simplemap<type_value>, key: string): void;
    /**
     * Removes all entries.
     *
     * @author fenris
     */
    function simplemap_clear<type_value>(subject: type_simplemap<type_value>): void;
    /**
     * Invokes the function for every entry.
     *
     * @author fenris
     */
    function simplemap_forEach<type_value>(subject: type_simplemap<type_value>, function_: (value?: type_value, key?: string) => void): void;
    /**
     * Builds a simplemap from a plain object.
     *
     * @author fenris
     */
    function simplemap_from_object<type_value>(object: {
        [key: string]: type_value;
    }): type_simplemap<type_value>;
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Object-oriented wrapper around the simplemap_* functions
     * (string-keyed map backed by a plain object).
     *
     * @author fenris
     */
    class class_simplemap<type_value> extends class_mapbase<string, type_value> implements interface_map<string, type_value> {
        /**
         * Underlying functional simplemap representation.
         *
         * @author fenris
         * @desc [attribute]
         */
        protected subject: type_simplemap<type_value>;
        /**
         * Creates an empty map (the constructor is protected; use this factory).
         *
         * @author fenris
         */
        static make<type_value>(): class_simplemap<type_value>;
        /**
         * Creates a map from a plain object.
         *
         * @author fenris
         */
        static from_object<type_value>(object: {
            [key: string]: type_value;
        }): class_simplemap<type_value>;
        /**
         * @author fenris
         * @desc [constructor]
         */
        protected constructor(subject?: type_simplemap<type_value>);
        /**
         * Whether an entry exists for the key.
         *
         * @author fenris
         * @implementation
         */
        has(key: string): boolean;
        /**
         * Returns the value for the key; the fallback is used when the key is absent.
         *
         * @author fenris
         * @implementation
         */
        get(key: string, fallback?: lib_plankton.pod.class_pod<type_value>): type_value;
        /**
         * Stores a value under the key.
         *
         * @author fenris
         * @implementation
         */
        set(key: string, value: type_value): void;
        /**
         * Removes the entry for the key.
         *
         * @author fenris
         * @implementation
         */
        delete(key: string): void;
        /**
         * Invokes the procedure for every entry.
         *
         * @author fenris
         * @implementation
         */
        forEach(procedure: (value?: type_value, key?: string) => void): void;
    }
}
|
|
declare namespace lib_plankton.structures {
    /**
     * A key/value pair.
     */
    type type_pair<type_key, type_value> = {
        key: type_key;
        value: type_value;
    };
    /**
     * @author fenris
     * @desc we base the hashmap on a simplemap, whos keys are the hashes and whos values are the key/value-pairs
     */
    export type type_hashmap<type_key, type_value> = {
        core: type_simplemap<type_pair<type_key, type_value>>;
        hashing: ((key: type_key) => string);
    };
    /**
     * Creates a hashmap with the given key hashing function and initial pairs.
     *
     * @author fenris
     */
    export function hashmap_construct<type_key, type_value>(hashing: ((key: type_key) => string), pairs: Array<{
        key: type_key;
        value: type_value;
    }>): type_hashmap<type_key, type_value>;
    /**
     * Whether an entry exists for the key (compared by hash).
     *
     * @author fenris
     */
    export function hashmap_has<type_key, type_value>(subject: type_hashmap<type_key, type_value>, key: type_key): boolean;
    /**
     * Returns the value for the key; the fallback is used when the key is absent.
     *
     * @author fenris
     */
    export function hashmap_get<type_key, type_value>(subject: type_hashmap<type_key, type_value>, key: type_key, fallback?: lib_plankton.pod.type_pod<type_value>): type_value;
    /**
     * Stores a value under the key.
     *
     * @author fenris
     */
    export function hashmap_set<type_key, type_value>(subject: type_hashmap<type_key, type_value>, key: type_key, value: type_value): void;
    /**
     * Removes the entry for the key.
     *
     * @author fenris
     */
    export function hashmap_delete<type_key, type_value>(subject: type_hashmap<type_key, type_value>, key: type_key): void;
    /**
     * Removes all entries.
     *
     * @author fenris
     */
    export function hashmap_clear<type_key, type_value>(subject: type_hashmap<type_key, type_value>): void;
    /**
     * Invokes the procedure for every entry.
     *
     * @author fenris
     */
    export function hashmap_forEach<type_key, type_value>(subject: type_hashmap<type_key, type_value>, procedure: ((value?: type_value, key?: type_key) => void)): void;
    /**
     * Returns all key/value pairs as an array.
     *
     * @author fenris
     */
    export function hashmap_dump<type_key, type_value>(subject: type_hashmap<type_key, type_value>): Array<type_pair<type_key, type_value>>;
    export {};
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Object-oriented wrapper around the hashmap_* functions
     * (map with arbitrary key type, compared via a hashing function).
     *
     * @author fenris
     */
    class class_hashmap<type_key, type_value> extends class_mapbase<type_key, type_value> implements interface_map<type_key, type_value> {
        /**
         * Underlying functional hashmap representation.
         *
         * @author fenris
         * @desc [attribute]
         */
        protected subject: type_hashmap<type_key, type_value>;
        /**
         * @param hashing key hashing function; presumably defaults to a generic instance-hash — TODO confirm
         * @param pairs initial entries
         * @author fenris
         * @desc [constructor]
         */
        constructor(hashing?: ((key: type_key) => string), pairs?: Array<{
            key: type_key;
            value: type_value;
        }>);
        /**
         * Whether an entry exists for the key.
         *
         * @author fenris
         * @implementation
         */
        has(key: type_key): boolean;
        /**
         * Returns the value for the key; the fallback is used when the key is absent.
         *
         * @author fenris
         * @implementation
         */
        get(key: type_key, fallback?: lib_plankton.pod.class_pod<type_value>): type_value;
        /**
         * Stores a value under the key.
         *
         * @author fenris
         * @implementation
         */
        set(key: type_key, value: type_value): void;
        /**
         * Removes the entry for the key.
         *
         * @author fenris
         * @implementation
         */
        delete(key: type_key): void;
        /**
         * Invokes the procedure for every entry.
         *
         * @author fenris
         * @implementation
         */
        forEach(procedure: ((value?: type_value, key?: type_key) => void)): void;
    }
}
|
|
declare namespace lib_plankton.structures {
    /**
     * Equality predicate used to compare keys.
     */
    type type_collation<type_key> = ((key1: type_key, key2: type_key) => boolean);
    /**
     * A map represented by a pair list; keys are compared with a collation (equality) function.
     */
    export type type_collatemap<type_key, type_value> = {
        pairs: Array<{
            key: type_key;
            value: type_value;
        }>;
    };
    /**
     * Creates an empty collatemap.
     */
    export function collatemap_construct<type_key, type_value>(): type_collatemap<type_key, type_value>;
    /**
     * Whether an entry exists for the key (per the collation).
     */
    export function collatemap_has<type_key, type_value>(collation: type_collation<type_key>, subject: type_collatemap<type_key, type_value>, key: type_key): boolean;
    /**
     * Returns the value for the key; the fallback is used when the key is absent.
     *
     * @todo use .find
     */
    export function collatemap_get<type_key, type_value>(collation: type_collation<type_key>, subject: type_collatemap<type_key, type_value>, key: type_key, fallback?: lib_plankton.pod.type_pod<type_value>): type_value;
    /**
     * Stores a value under the key, replacing the value of an equal key if present.
     */
    export function collatemap_set<type_key, type_value>(collation: type_collation<type_key>, subject: type_collatemap<type_key, type_value>, key: type_key, value: type_value): void;
    /**
     * Removes the entry for the key (per the collation).
     */
    export function collatemap_delete<type_key, type_value>(collation: type_collation<type_key>, subject: type_collatemap<type_key, type_value>, key: type_key): void;
    /**
     * Invokes the function for every entry.
     */
    export function collatemap_forEach<type_key, type_value>(subject: type_collatemap<type_key, type_value>, function_: ((value?: type_value, key?: type_key) => void)): void;
    export {};
}
|
|
declare namespace lib_plankton.structures {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_collatemap<type_key, type_value> extends class_mapbase<type_key, type_value> implements interface_map<type_key, type_value> {
|
|
/**
|
|
* @author fenris
|
|
* @desc [attribute]
|
|
*/
|
|
protected collation: (key1: type_key, key2: type_key) => boolean;
|
|
/**
|
|
* @author fenris
|
|
* @desc [attribute]
|
|
*/
|
|
protected subject: type_collatemap<type_key, type_value>;
|
|
/**
|
|
* @author fenris
|
|
* @desc [constructor]
|
|
*/
|
|
constructor(collation?: (key1: type_key, key2: type_key) => boolean);
|
|
/**
|
|
* @author fenris
|
|
* @implementation
|
|
*/
|
|
has(key: type_key): boolean;
|
|
/**
|
|
* @author fenris
|
|
* @implementation
|
|
*/
|
|
get(key: type_key, fallback?: lib_plankton.pod.class_pod<type_value>): type_value;
|
|
/**
|
|
* @author fenris
|
|
* @implementation
|
|
*/
|
|
set(key: type_key, value: type_value): void;
|
|
/**
|
|
* @author fenris
|
|
* @implementation
|
|
*/
|
|
delete(key: type_key): void;
|
|
/**
|
|
* @author fenris
|
|
* @implementation
|
|
*/
|
|
forEach(procedure: (value?: type_value, key?: type_key) => void): void;
|
|
}
|
|
/**
|
|
* @author fenris
|
|
* @deprecated
|
|
*/
|
|
var class_map: typeof class_collatemap;
|
|
}
|
|
declare namespace lib_plankton.structures {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_edge<type_node> = {
|
|
from: type_node;
|
|
to: type_node;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_graph<type_node> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
protected equality: (node1: type_node, node2: type_node) => boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
protected nodes: Array<type_node>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
protected edges: Array<type_edge<type_node>>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor(equality?: (node1: type_node, node2: type_node) => boolean, nodes?: Array<type_node>, edges?: Array<type_edge<type_node>>);
|
|
/**
|
|
* @desc [accessor] [getter]
|
|
* @author fenris
|
|
*/
|
|
nodes_get(): Array<type_node>;
|
|
/**
|
|
* @desc [mutator]
|
|
* @author fenris
|
|
*/
|
|
add_node(node: type_node): void;
|
|
/**
|
|
* @desc [accessor] [getter]
|
|
* @author fenris
|
|
*/
|
|
edges_get(): Array<type_edge<type_node>>;
|
|
/**
|
|
* @desc [mutator]
|
|
* @author fenris
|
|
*/
|
|
add_edge(edge: type_edge<type_node>): void;
|
|
/**
|
|
* @desc [accessor]
|
|
* @author fenris
|
|
*/
|
|
has(node: type_node): boolean;
|
|
/**
|
|
* @desc [accessor]
|
|
* @author fenris
|
|
*/
|
|
outgoing(node: type_node): Array<type_edge<type_node>>;
|
|
/**
|
|
* @desc [accessor]
|
|
* @author fenris
|
|
*/
|
|
incoming(node: type_node): Array<type_edge<type_node>>;
|
|
/**
|
|
* @desc [accessor]
|
|
* @author fenris
|
|
*/
|
|
without(pivot: type_node): class_graph<type_node>;
|
|
/**
|
|
* @desc [accessor] returns the topologic sorting of the nodes (if it exists)
|
|
* @author fenris
|
|
*/
|
|
topsort(): Array<type_node>;
|
|
/**
|
|
* @desc [accessor] returns the reduced version of a graph representing an order relation (implicit transitivity)
|
|
* @author fenris
|
|
*/
|
|
hasse(): class_graph<type_node>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
output_dot({ "extract_id": extract_id, "extract_label": extract_label, "rotate": rotate, }?: {
|
|
extract_id?: (node: type_node) => string;
|
|
extract_label?: (node: type_node) => string;
|
|
rotate?: boolean;
|
|
}): Object;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.structures {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
const relation_le: string;
|
|
const relation_ge: string;
|
|
const relation_lt: string;
|
|
const relation_gt: string;
|
|
const relation_eq: string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_binnode<type_data> = {
|
|
data: type_data;
|
|
left: type_binnode<type_data>;
|
|
right: type_binnode<type_data>;
|
|
depth: int;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_bintree<type_data> = {
|
|
root: type_binnode<type_data>;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function bintree_construct<type_data>(): type_bintree<type_data>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function bintree_depth<type_data>(subject: type_bintree<type_data>): int;
|
|
/**
|
|
* @author fenris
|
|
* @todo remove later on
|
|
*/
|
|
function bintree_check_depths<type_data>(subject: type_bintree<type_data>): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function bintree_insert<type_data>(compare: (x: type_data, y: type_data) => boolean, subject: type_bintree<type_data>, data: type_data, rebalance?: boolean): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function bintree_search<type_data>(compare: (x: type_data, y: type_data) => boolean, subject: type_bintree<type_data>, data: type_data, relation?: string): Array<type_data>;
|
|
/**
|
|
* @author fenris
|
|
* @deprecated only used for AVL-Tree-Index atm.
|
|
*/
|
|
function bintree_find<type_data>(compare: (x: type_data, y: type_data) => boolean, subject: type_bintree<type_data>, data: type_data): type_data;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function bintree_traverse<type_data>(subject: type_bintree<type_data>): Array<type_data>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function bintree_show<type_data>(show_data: (data: type_data) => string, subject: type_bintree<type_data>): string;
|
|
/**
|
|
* @author fenris
|
|
* @todo tidy up or remove
|
|
*/
|
|
function bintree_to_graph<type_data>(subject: type_bintree<type_data>): class_graph<type_data>;
|
|
}
|
|
declare namespace lib_plankton.structures {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_bintree<type_data> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
protected subject: type_bintree<type_data>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
protected compare: (x: type_data, y: type_data) => boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor(compare?: (x: type_data, y: type_data) => boolean);
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
depth(): int;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
insert(data: type_data, rebalance?: boolean): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
delete(data: type_data): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
search(relation: string, data: type_data): Array<type_data>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
traverse(): Array<type_data>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
show(): string;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.json {
|
|
/**
|
|
*/
|
|
type type_source = any;
|
|
/**
|
|
*/
|
|
type type_target = string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
export function encode(source: type_source, options?: {
|
|
formatted?: boolean;
|
|
}): type_target;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
export function decode(target: type_target): type_source;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
export function implementation_code(): lib_plankton.code.type_code<type_source, type_target>;
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.json {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_json implements lib_plankton.code.interface_code<any, string> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor();
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
encode(x: any): string;
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
decode(x: string): any;
|
|
}
|
|
}
|
|
declare module lib_et {
|
|
/**
|
|
* @desc type of extended timestamp
|
|
* @author fenris
|
|
*/
|
|
type type_et = {
|
|
era: int;
|
|
stamp: int;
|
|
};
|
|
/**
|
|
* @desc type of UNIX timestamp
|
|
* @author fenris
|
|
*/
|
|
type type_timestamp = int;
|
|
/**
|
|
* @desc type of Javascript Date object
|
|
* @author fenris
|
|
*/
|
|
type type_jsdate = Date;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_components = {
|
|
year: int;
|
|
month: int;
|
|
day: int;
|
|
hour: int;
|
|
minute: int;
|
|
second: int;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_ywd = {
|
|
year: int;
|
|
week: int;
|
|
day: int;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function part(et1: type_et, et2: type_et): type_et;
|
|
/**
|
|
* @desc less
|
|
* @author fenris
|
|
*/
|
|
function before(reference: type_et, et: type_et): boolean;
|
|
/**
|
|
* @desc greater
|
|
* @author fenris
|
|
*/
|
|
function after(reference: type_et, et: type_et): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function between(begin: type_et, end: type_et, et: type_et): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function intersect(begin1: type_et, end1: type_et, begin2: type_et, end2: type_et): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function move(base: type_et, span: type_et): type_et;
|
|
/**
|
|
* @desc currified version of "move"
|
|
* @author fenris
|
|
*/
|
|
function move_(span: type_et): (base: type_et) => type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function from_timestamp(timestamp: type_timestamp): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function to_timestamp(et: type_et): type_timestamp;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function from_jsdate(jsdate: type_jsdate): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function to_jsdate(et: type_et): type_jsdate;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function from_components(components: type_components): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function to_components(et: type_et): type_components;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function now(): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function to_string(et: type_et): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function to_string_ywd(et: type_et): string;
|
|
/**
|
|
* @desc retrieve week of year
|
|
* @author fenris
|
|
*/
|
|
function get_woy(et: type_et): int;
|
|
/**
|
|
* @desc retrieve day of week
|
|
* @author fenris
|
|
*/
|
|
function get_dow(et: type_et): int;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function trunc_minute(et?: type_et): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function trunc_hour(et?: type_et): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function trunc_day(et?: type_et): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function trunc_month(et?: type_et): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function trunc_year(et?: type_et): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function trunc_week(et?: type_et): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function span_second(seconds?: int): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function span_minute(minutes?: int): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function span_hour(hours?: int): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function span_day(days?: int): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function span_week(weeks?: int): type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function span_year(years?: int): type_et;
|
|
}
|
|
declare module lib_et {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_et {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
protected subject: type_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor(subject: type_et);
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
move(et: class_et): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
before(et: class_et): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
after(et: class_et): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
between(et1: class_et, et2: class_et): boolean;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
trunc_minute(): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
trunc_hour(): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
trunc_day(): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
trunc_month(): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
trunc_year(): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
trunc_week(): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static now(): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static span_second(count?: int): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static span_minute(count?: int): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static span_hour(count?: int): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static span_day(count?: int): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static span_week(count?: int): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static span_year(count?: int): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static from_timestamp(timestamp: type_timestamp): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
to_timestamp(): type_timestamp;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static from_jsdate(jsdate: type_jsdate): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
to_jsdate(): type_jsdate;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static from_components(components: type_components): class_et;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
to_components(): type_components;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
get_woy(): int;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
get_dow(): int;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
to_string(): string;
|
|
}
|
|
}
|
|
declare var global_config: any;
|
|
/**
|
|
* @author neuc
|
|
*/
|
|
declare namespace lib_plankton.date {
|
|
/**
|
|
* @author neu3no, fenris
|
|
*/
|
|
function set_days(day_names: Array<string>): void;
|
|
/**
|
|
* @author neu3no, fenris
|
|
*/
|
|
function set_months(month_names: Array<string>): void;
|
|
/**
|
|
* @desc week of year
|
|
* @param {Date} date
|
|
* @return {int}
|
|
* @author fenris
|
|
*/
|
|
function get_week(date: Date): int;
|
|
/**
|
|
* @author neu3no, fenris
|
|
*/
|
|
function set_currentDate(date: Date): void;
|
|
/**
|
|
* @author neu3no, fenris
|
|
*/
|
|
function parse(format: string, date?: Date): string;
|
|
/**
|
|
* @author neu3no, fenris
|
|
*/
|
|
function locale_date(date?: Date, ignore_error?: boolean): string;
|
|
/**
|
|
*/
|
|
type type_unixtimestamp = int;
|
|
/**
|
|
*/
|
|
type type_components = {
|
|
timezone_offset: int;
|
|
year: int;
|
|
month: int;
|
|
day: int;
|
|
hour: int;
|
|
minute: int;
|
|
second: int;
|
|
};
|
|
/**
|
|
*/
|
|
function now(): type_unixtimestamp;
|
|
/**
|
|
*/
|
|
function from_components(components: type_components): type_unixtimestamp;
|
|
/**
|
|
*/
|
|
function to_components(unixtimestamp: type_unixtimestamp): type_components;
|
|
/**
|
|
*/
|
|
function get_timestamp_from_year_and_week_and_day(year: int, week: int, day: int): type_unixtimestamp;
|
|
}
|
|
declare var strftime: typeof lib_plankton.date;
|
|
declare namespace lib_plankton.ical {
|
|
/**
|
|
*/
|
|
type type_rrule = {
|
|
freq?: string;
|
|
byday?: string;
|
|
bymonth?: string;
|
|
};
|
|
/**
|
|
*/
|
|
type type_offset = string;
|
|
/**
|
|
*/
|
|
/**
|
|
*/
|
|
export enum enum_class {
|
|
public = "public",
|
|
private = "private",
|
|
confidential = "confidential"
|
|
}
|
|
/**
|
|
*/
|
|
export enum enum_event_status {
|
|
tentative = "tentative",
|
|
confirmed = "confirmed",
|
|
cancelled = "cancelled"
|
|
}
|
|
/**
|
|
*/
|
|
export enum enum_transp {
|
|
opaque = "opaque",
|
|
transparent = "transparent"
|
|
}
|
|
/**
|
|
*/
|
|
type type_tzid = string;
|
|
/**
|
|
*/
|
|
export type type_date = {
|
|
year: int;
|
|
month: int;
|
|
day: int;
|
|
};
|
|
/**
|
|
*/
|
|
export type type_time = {
|
|
hour: int;
|
|
minute: int;
|
|
second: int;
|
|
utc: boolean;
|
|
};
|
|
/**
|
|
*/
|
|
export type type_datetime = {
|
|
date: type_date;
|
|
time: (null | type_time);
|
|
};
|
|
/**
|
|
*/
|
|
export type type_dt = {
|
|
tzid: type_tzid;
|
|
value: type_datetime;
|
|
};
|
|
/**
|
|
*/
|
|
type type_duration = {
|
|
negative: boolean;
|
|
weeks?: int;
|
|
days?: int;
|
|
hours?: int;
|
|
minutes?: int;
|
|
seconds?: int;
|
|
};
|
|
/**
|
|
*/
|
|
type type_vtimezone = {
|
|
tzid?: type_tzid;
|
|
standard?: {
|
|
dtstart: type_datetime;
|
|
rrule: type_rrule;
|
|
tzoffsetfrom?: type_offset;
|
|
tzoffsetto?: type_offset;
|
|
};
|
|
daylight?: {
|
|
dtstart: type_datetime;
|
|
rrule: type_rrule;
|
|
tzoffsetfrom?: type_offset;
|
|
tzoffsetto?: type_offset;
|
|
};
|
|
};
|
|
/**
|
|
* @see https://www.rfc-editor.org/rfc/rfc5545#section-3.6.1
|
|
*/
|
|
export type type_vevent = {
|
|
uid: string;
|
|
dtstamp: type_datetime;
|
|
dtstart?: type_dt;
|
|
class?: enum_class;
|
|
created?: type_datetime;
|
|
description?: string;
|
|
geo?: {
|
|
latitude: float;
|
|
longitude: float;
|
|
};
|
|
last_modified?: type_datetime;
|
|
location?: string;
|
|
organizer?: {
|
|
cn?: string;
|
|
value?: string;
|
|
};
|
|
priority?: int;
|
|
sequence?: int;
|
|
status?: enum_event_status;
|
|
summary?: string;
|
|
transp?: enum_transp;
|
|
url?: string;
|
|
recurid?: any;
|
|
rrule?: type_rrule;
|
|
dtend?: type_dt;
|
|
duration?: type_duration;
|
|
attach?: any;
|
|
attendee?: string;
|
|
categories?: Array<string>;
|
|
comment?: any;
|
|
contact?: any;
|
|
exdate?: any;
|
|
rstatus?: any;
|
|
related?: any;
|
|
resources?: any;
|
|
rdate?: any;
|
|
x_props?: Record<string, string>;
|
|
iana_props?: Record<string, string>;
|
|
};
|
|
/**
|
|
* @see https://www.rfc-editor.org/rfc/rfc5545#section-3.4
|
|
*/
|
|
export type type_vcalendar = {
|
|
version: string;
|
|
prodid: string;
|
|
vevents: Array<type_vevent>;
|
|
calscale?: string;
|
|
method?: string;
|
|
vtimezone?: type_vtimezone;
|
|
x_props?: Record<string, string>;
|
|
iana_props?: Record<string, string>;
|
|
};
|
|
export {};
|
|
}
|
|
declare namespace lib_plankton.ical {
|
|
/**
|
|
*/
|
|
function datetime_to_unixtimestamp(datetime: type_datetime): lib_plankton.date.type_unixtimestamp;
|
|
/**
|
|
* @see https://www.rfc-editor.org/rfc/rfc5545
|
|
* @see https://icalendar.org/iCalendar-RFC-5545/
|
|
* @todo implement edge cases
|
|
*/
|
|
function ics_decode(ics: string, options?: {
|
|
debug?: boolean;
|
|
}): type_vcalendar;
|
|
/**
|
|
* @todo method
|
|
* @todo add missing fields
|
|
*/
|
|
function ics_encode(vcalendar: type_vcalendar): string;
|
|
}
|
|
declare namespace lib_plankton.http {
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
enum enum_method {
|
|
options = "options",
|
|
head = "head",
|
|
get = "get",
|
|
delete = "delete",
|
|
post = "post",
|
|
put = "put",
|
|
patch = "patch"
|
|
}
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
type type_request = {
|
|
scheme: ("http" | "https");
|
|
host: (null | string);
|
|
path: string;
|
|
version: string;
|
|
method: enum_method;
|
|
query: (null | string);
|
|
headers: Record<string, string>;
|
|
body: (null | Buffer);
|
|
};
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
type type_response = {
|
|
version: (null | string);
|
|
status_code: int;
|
|
headers: Record<string, string>;
|
|
body: Buffer;
|
|
};
|
|
}
|
|
declare namespace lib_plankton.http {
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
function encode_method(method: enum_method): string;
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
function encode_request(request: type_request): string;
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
function decode_request(request_raw: string): type_request;
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
function encode_response(response: type_response): string;
|
|
/**
|
|
* @author fenris <frass@greenscale.de>
|
|
*/
|
|
function decode_response(response_raw: string): type_response;
|
|
/**
|
|
* executes an HTTP request
|
|
*
|
|
* @todo define type_signal
|
|
*/
|
|
function call(request: type_request, options?: {
|
|
timeout?: (null | float);
|
|
follow_redirects?: boolean;
|
|
implementation?: ("fetch" | "http_module");
|
|
}): Promise<type_response>;
|
|
}
|
|
declare namespace lib_plankton.http {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_http_request implements lib_plankton.code.interface_code<type_request, string> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor();
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
encode(x: type_request): string;
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
decode(x: string): type_request;
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_http_response implements lib_plankton.code.interface_code<type_response, string> {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
constructor();
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
encode(x: type_response): string;
|
|
/**
|
|
* @implementation
|
|
* @author fenris
|
|
*/
|
|
decode(x: string): type_response;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.object {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function fetch<type_value>(object: Object, fieldname: string, fallback?: type_value, escalation?: int): type_value;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function map<type_from, type_to>(object_from: {
|
|
[key: string]: type_from;
|
|
}, transformator: (value_from: type_from, key?: string) => type_to): {
|
|
[key: string]: type_to;
|
|
};
|
|
/**
|
|
* @desc gibt ein Objekt mit bestimmten Einträgen des Eingabe-Objekts zurück
|
|
* @author fenris
|
|
*/
|
|
function filter<type_value>(object_from: {
|
|
[key: string]: type_value;
|
|
}, predicate: (value_from: type_value, key?: string) => boolean): {
|
|
[key: string]: type_value;
|
|
};
|
|
/**
|
|
* @desc wandelt ein Array mit Einträgen der Form {key,value} in ein entsprechendes Objekt um
|
|
* @author fenris
|
|
*/
|
|
function from_array<type_value>(array: Array<{
|
|
key: string;
|
|
value: type_value;
|
|
}>): {
|
|
[key: string]: type_value;
|
|
};
|
|
/**
|
|
* @desc wandelt ein Objekt in ein entsprechendes Array mit Einträgen der Form {key,value} um
|
|
* @author fenris
|
|
*/
|
|
function to_array<type_value>(object: {
|
|
[key: string]: type_value;
|
|
}): Array<{
|
|
key: string;
|
|
value: type_value;
|
|
}>;
|
|
/**
|
|
* @desc gibt eine Liste von Schlüsseln eines Objekts zurück
|
|
* @author fenris
|
|
*/
|
|
function keys(object: {
|
|
[key: string]: any;
|
|
}): Array<string>;
|
|
/**
|
|
* @desc gibt eine Liste von Werten eines Objekts zurück
|
|
* @author fenris
|
|
*/
|
|
function values<type_value>(object: {
|
|
[key: string]: type_value;
|
|
}): Array<type_value>;
|
|
/**
|
|
* @desc liest ein Baum-artiges Objekt an einer bestimmten Stelle aus
|
|
* @author fenris
|
|
*/
|
|
function path_read<type_value>(object: Object, path: string, fallback?: type_value, escalation?: int): type_value;
|
|
/**
|
|
* @desc schreibt einen Wert an eine bestimmte Stelle in einem Baum-artigen Objekt
|
|
* @author fenris
|
|
*/
|
|
function path_write<type_value>(object: Object, path: string, value: type_value, construct?: boolean): void;
|
|
/**
|
|
* @desc prüft ob ein Objekt einem bestimmten Muster entspricht
|
|
* @param {Object} object das zu prüfende Objekt
|
|
* @param {Object} pattern das einzuhaltende Muster
|
|
* @param {Function} connlate eine Funktion zum Feststellen der Gleichheit von Einzelwerten
|
|
* @author fenris
|
|
*/
|
|
function matches(object: Object, pattern: Object, collate?: typeof instance_collate): boolean;
|
|
/**
|
|
* @desc erzeugt eine Projektion eines Baum-artigen Objekts in ein Listen-artiges Objekt
|
|
* @param {string} [separator] welches Zeichen als Trenner zwischen zwei Pfad-Schritten verwendet werden soll
|
|
* @author fenris
|
|
*/
|
|
function flatten(value: any, separator?: string, key_for_element?: (index: int) => string): Object;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function clash(x: {
|
|
[key: string]: any;
|
|
}, y: {
|
|
[key: string]: any;
|
|
}, { "overwrite": overwrite, "hooks": { "existing": hook_existing, }, }?: {
|
|
overwrite?: boolean;
|
|
hooks?: {
|
|
existing?: (key?: string, value_old?: any, value_new?: any) => void;
|
|
};
|
|
}): {
|
|
[key: string]: any;
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function patch(core: Object, mantle: Object, deep?: boolean, path?: string): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function patched(core: Object, mantle: Object, deep?: boolean): Object;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function attached(object: Object, key: string, value: any): Object;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function copy(object: Object): Object;
|
|
}
|
|
declare namespace lib_plankton.markdown {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function code(content: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function paragraph(content: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function sectionhead(level: int, content: string): string;
|
|
}
|
|
declare namespace lib_plankton.api {
|
|
/**
|
|
*/
|
|
enum enum_checklevel {
|
|
none = "none",
|
|
soft = "soft",
|
|
hard = "hard"
|
|
}
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_action = {
|
|
name: string;
|
|
active: ((version: (null | string)) => boolean);
|
|
execution: ((version: (null | string), environment: any, input: any) => Promise<any>);
|
|
restriction: ((version: (null | string), environment: any) => Promise<boolean>);
|
|
input_shape: ((version: (null | string)) => lib_plankton.shape.type_shape);
|
|
output_shape: ((version: (null | string)) => lib_plankton.shape.type_shape);
|
|
title: (null | string);
|
|
description: (null | string);
|
|
};
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
type type_api = {
|
|
title: string;
|
|
actions: Record<string, type_action>;
|
|
};
|
|
/**
|
|
*/
|
|
class class_error_permission_denied extends Error {
|
|
}
|
|
}
|
|
declare namespace lib_plankton.api {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function make(title: string): type_api;
|
|
/**
|
|
* @throws Error if a action with the given name has already been registered
|
|
* @author fenris
|
|
*/
|
|
function register(api: type_api, name: string, options?: {
|
|
active?: ((version: (null | string)) => boolean);
|
|
execution?: ((version: (null | string), environment: any, input: any) => Promise<any>);
|
|
restriction?: ((version: (null | string), input: any) => Promise<boolean>);
|
|
input_shape?: ((version: (null | string)) => lib_plankton.shape.type_shape);
|
|
output_shape?: ((version: (null | string)) => lib_plankton.shape.type_shape);
|
|
title?: (null | string);
|
|
description?: (null | string);
|
|
}): void;
|
|
/**
|
|
* @throws Error if not found
|
|
* @author fenris
|
|
*/
|
|
function get_action(api: type_api, name: string): type_action;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function call(api: type_api, name: string, options?: {
|
|
version?: (null | string);
|
|
input?: any;
|
|
environment?: Record<string, any>;
|
|
checklevel_restriction?: enum_checklevel;
|
|
checklevel_input?: enum_checklevel;
|
|
checklevel_output?: enum_checklevel;
|
|
}): Promise<any>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_documentation_for_action(api: type_api, name: string, options?: {
|
|
version?: (null | string);
|
|
}): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
function generate_documentation(api: type_api, options?: {
|
|
version?: (null | string);
|
|
}): string;
|
|
}
|
|
declare namespace lib_plankton.api {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
class class_api {
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private subject;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
private constructor();
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
static create(name: string): class_api;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
register(name: string, options?: {
|
|
active?: ((version: (null | string)) => boolean);
|
|
execution?: ((version: (null | string), environment: any, input: any) => lib_plankton.call.type_promise<any, Error>);
|
|
restriction?: ((version: (null | string), input: any) => Promise<boolean>);
|
|
input_shape?: ((version: (null | string)) => lib_plankton.shape.type_shape);
|
|
output_shape?: ((version: (null | string)) => lib_plankton.shape.type_shape);
|
|
title?: (null | string);
|
|
description?: (null | string);
|
|
}): void;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
call(name: string, options?: {
|
|
version?: (null | string);
|
|
input?: any;
|
|
environment?: Record<string, any>;
|
|
checklevel_restriction?: enum_checklevel;
|
|
checklevel_input?: enum_checklevel;
|
|
checklevel_output?: enum_checklevel;
|
|
}): lib_plankton.call.type_promise<any, Error>;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
generate_documentation_for_action(name: string): string;
|
|
/**
|
|
* @author fenris
|
|
*/
|
|
generate_documentation(): string;
|
|
}
|
|
}
|
|
declare namespace lib_plankton.rest {
|
|
/**
|
|
*/
|
|
type type_oas_schema = ({} | {
|
|
nullable: boolean;
|
|
} | {
|
|
type: "boolean";
|
|
nullable?: boolean;
|
|
enum?: Array<boolean>;
|
|
} | {
|
|
type: "integer";
|
|
nullable?: boolean;
|
|
enum?: Array<int>;
|
|
} | {
|
|
type: "number";
|
|
nullable?: boolean;
|
|
enum?: Array<float>;
|
|
} | {
|
|
type: "string";
|
|
nullable?: boolean;
|
|
enum?: Array<string>;
|
|
} | {
|
|
type: "array";
|
|
nullable?: boolean;
|
|
items: type_oas_schema;
|
|
} | {
|
|
type: "object";
|
|
nullable?: boolean;
|
|
additionalProperties?: (false | type_oas_schema);
|
|
properties: Record<string, type_oas_schema>;
|
|
required: Array<string>;
|
|
});
|
|
/**
|
|
*/
|
|
type type_execution<type_input, type_output> = ((stuff: {
|
|
version: (null | string);
|
|
headers: Record<string, string>;
|
|
path_parameters: Record<string, string>;
|
|
query_parameters: Record<string, string>;
|
|
input: (null | type_input);
|
|
}) => Promise<{
|
|
status_code: int;
|
|
data: type_output;
|
|
}>);
|
|
/**
|
|
*/
|
|
type type_restriction<type_input> = ((stuff: {
|
|
version: (null | string);
|
|
headers: Record<string, string>;
|
|
path_parameters: Record<string, string>;
|
|
query_parameters: Record<string, string>;
|
|
}) => Promise<boolean>);
|
|
/**
|
|
*/
|
|
type type_operation<type_input, type_output> = {
|
|
action_name: string;
|
|
query_parameters: Array<{
|
|
name: string;
|
|
description: (null | string);
|
|
required: boolean;
|
|
}>;
|
|
request_body_mimetype: string;
|
|
request_body_decode: ((http_request_body: Buffer, http_request_header_content_type: (null | string)) => any);
|
|
response_body_mimetype: string;
|
|
response_body_encode: ((output: any) => Buffer);
|
|
input_schema: ((version: (null | string)) => type_oas_schema);
|
|
output_schema: ((version: (null | string)) => type_oas_schema);
|
|
};
|
|
/**
|
|
*/
|
|
type type_routenode = {
|
|
operations: Record</*lib_plankton.http.enum_method*/ string, type_operation<any, any>>;
|
|
sub_branch: Record<string, type_routenode>;
|
|
sub_wildcard: (null | {
|
|
name: string;
|
|
node: type_routenode;
|
|
});
|
|
};
|
|
/**
|
|
*/
|
|
type type_rest = {
|
|
api: lib_plankton.api.type_api;
|
|
routetree: type_routenode;
|
|
versioning_method: ("none" | "path" | "header" | "query");
|
|
versioning_header_name: (null | string);
|
|
versioning_query_key: (null | string);
|
|
header_parameters: Array<{
|
|
name: string;
|
|
description: (null | string);
|
|
required: boolean;
|
|
}>;
|
|
set_access_control_headers: boolean;
|
|
authentication: ({
|
|
kind: "none";
|
|
parameters: {};
|
|
} | {
|
|
kind: "key_header";
|
|
parameters: {
|
|
name: string;
|
|
};
|
|
});
|
|
};
|
|
}
|
|
declare namespace lib_plankton.rest {
    /**
     * creates a REST facade; all options fall back to defaults when
     * omitted, and the listed actions are registered immediately.
     */
    function make(options?: {
        title?: (null | string);
        versioning_method?: ("none" | "path" | "header" | "query");
        versioning_header_name?: (null | string);
        versioning_query_key?: (null | string);
        header_parameters?: Array<{
            name: string;
            description: (null | string);
            required: boolean;
        }>;
        set_access_control_headers?: boolean;
        authentication?: ({
            kind: "none";
            parameters: {};
        } | {
            kind: "key_header";
            parameters: {
                name: string;
            };
        });
        /** actions to register up front; same options as for "register" */
        actions?: Array<{
            http_method: lib_plankton.http.enum_method;
            path: string;
            options: {
                active?: ((version: string) => boolean);
                restriction?: (null | type_restriction<any>);
                execution?: type_execution<any, any>;
                title?: (null | string);
                description?: (null | string);
                query_parameters?: Array<{
                    name: string;
                    description: (null | string);
                    required: boolean;
                }>;
                input_schema?: ((version: string) => type_oas_schema);
                output_schema?: ((version: string) => type_oas_schema);
                request_body_mimetype?: string;
                request_body_decode?: ((http_request_body: Buffer, http_request_header_content_type: (null | string)) => any);
                response_body_mimetype?: string;
                response_body_encode?: ((output: any) => Buffer);
            };
        }>;
    }): type_rest;
    /**
     * registers a single operation under the given HTTP method and path
     * (mutates the routing tree of the given REST facade).
     */
    function register<type_input, type_output>(rest: type_rest, http_method: lib_plankton.http.enum_method, path: string, options: {
        /** per-version activation switch */
        active?: ((version: string) => boolean);
        /** access guard; null presumably means unrestricted — confirm */
        restriction?: (null | type_restriction<type_input>);
        /** the actual handler for the operation */
        execution?: type_execution<type_input, type_output>;
        title?: (null | string);
        description?: (null | string);
        query_parameters?: Array<{
            name: string;
            description: (null | string);
            required: boolean;
        }>;
        input_schema?: ((version: (null | string)) => type_oas_schema);
        output_schema?: ((version: (null | string)) => type_oas_schema);
        request_body_mimetype?: string;
        request_body_decode?: ((http_request_body: Buffer, http_request_header_content_type: (null | string)) => any);
        response_body_mimetype?: string;
        response_body_encode?: ((output: any) => Buffer);
    }): void;
    /**
     * dispatches an HTTP request through the routing tree and produces
     * an HTTP response; check levels control how strictly restriction,
     * input and output are verified.
     *
     * @todo check request body mimetype?
     * @todo check query parameter validity
     */
    function call(rest: type_rest, http_request: lib_plankton.http.type_request, options?: {
        checklevel_restriction?: lib_plankton.api.enum_checklevel;
        checklevel_input?: lib_plankton.api.enum_checklevel;
        checklevel_output?: lib_plankton.api.enum_checklevel;
    }): Promise<lib_plankton.http.type_response>;
    /**
     * renders an OpenAPI specification document for the REST facade
     *
     * @see https://swagger.io/specification/#openapi-object
     */
    function to_oas(rest: type_rest, options?: {
        version?: (null | string);
        servers?: Array<string>;
    }): any;
}
declare namespace lib_plankton.server {
    /**
     * metadata about an incoming connection
     *
     * @author fenris
     */
    type type_metadata = {
        /** IP address of the connected peer */
        ip_address: string;
    };
    /**
     * state of a server instance
     *
     * @author fenris
     */
    type type_subject = {
        host: string;
        port: int;
        /** presumably a limit (size or time); null = none — TODO confirm */
        threshold: (null | float);
        /** processes one incoming message and yields the reply */
        handle: ((input: Buffer, metadata?: type_metadata) => Promise<string>);
        /** underlying server object (opaque here; see the implementation) */
        serverobj: any;
    };
    /**
     * creates a server subject (does not start listening; see "start")
     *
     * NOTE(review): "handle" takes a string here, whereas
     * type_subject.handle takes a Buffer — confirm which one the
     * implementation actually passes.
     *
     * @author fenris
     */
    function make(handle: ((input: string, metadata?: type_metadata) => Promise<string>), options?: {
        host?: string;
        port?: int;
        threshold?: (null | float);
    }): type_subject;
    /**
     * legacy constructor taking the port positionally
     *
     * @author fenris
     * @deprecated
     */
    function make_old(port: int, handle: ((input: string, metadata?: type_metadata) => Promise<string>)): type_subject;
    /**
     * starts listening; resolves once the server is up
     *
     * @author fenris
     * @see https://nodejs.org/api/net.html#serverlistenport-host-backlog-callback
     */
    function start(subject: type_subject): Promise<void>;
    /**
     * stops the server
     *
     * @author fenris
     */
    function kill(subject: type_subject): void;
}
declare namespace lib_plankton.server {
    /**
     * object-oriented wrapper around the functional server interface
     * of this namespace (make/start/kill)
     *
     * @author fenris
     */
    class class_server {
        /**
         * the wrapped functional server state
         *
         * @author fenris
         */
        protected subject: type_subject;
        /**
         * @author fenris
         */
        constructor(handle: ((input: Buffer, metadata?: type_metadata) => Promise<string>), options?: {
            host?: string;
            port?: int;
        });
        /**
         * starts listening; resolves once the server is up
         *
         * @author fenris
         */
        start(): Promise<void>;
        /**
         * stops the server
         *
         * @author fenris
         */
        kill(): void;
    }
}
/** global short alias for the lib_plankton.server namespace */
declare var lib_server: typeof lib_plankton.server;
declare namespace lib_plankton.args {
    /**
     * where the arguments come from: a command line or a URL
     */
    enum enum_environment {
        cli = "cli",
        url = "url"
    }
    /**
     * how an argument is addressed: by position, or by indicator
     * flags ("volatile")
     */
    enum enum_kind {
        positional = "positional",
        volatile = "volatile"
    }
    /**
     * value type of an argument
     * (note: the "integer" member is backed by the string "int")
     */
    enum enum_type {
        boolean = "boolean",
        integer = "int",
        float = "float",
        string = "string"
    }
    /**
     * behaviour on repeated occurrence: keep only the last value, or
     * collect all of them
     */
    enum enum_mode {
        replace = "replace",
        accumulate = "accumulate"
    }
}
declare namespace lib_plankton.args {
    /**
     * definition of a single argument (positional or flag-style)
     *
     * @author fenris
     */
    class class_argument {
        /**
         * identifier under which the parsed value is stored
         *
         * @author fenris
         */
        protected name: string;
        /**
         * positional or volatile (flag-style); see enum_kind
         *
         * @author fenris
         */
        protected kind: enum_kind;
        /**
         * value type used when extracting the raw string; see enum_type
         *
         * @author fenris
         */
        protected type: enum_type;
        /**
         * replace or accumulate on repeated occurrence; see enum_mode
         *
         * @author fenris
         */
        protected mode: enum_mode;
        /**
         * fallback value used when the argument is absent
         *
         * @author fenris
         */
        protected default_: any;
        /**
         * human-readable description used in the generated help
         *
         * @author fenris
         */
        protected info: string;
        /**
         * kind-specific extra data (presumably index for positional,
         * indicators for volatile — confirm in the implementation)
         *
         * @author fenris
         */
        protected parameters: Object;
        /**
         * when true, the argument is omitted from the generated help
         *
         * @author fenris
         */
        protected hidden: boolean;
        /**
         * @author fenris
         */
        constructor({ "name": name, "type": type, "kind": kind, "mode": mode, "default": default_, "info": info, "parameters": parameters, "hidden": hidden, }: {
            name: string;
            type?: enum_type;
            kind?: enum_kind;
            mode?: enum_mode;
            default?: any;
            info?: string;
            parameters?: Object;
            hidden?: boolean;
        });
        /**
         * convenience factory for a positional argument at the given index
         *
         * @author fenris
         */
        static positional({ "name": name, "type": type, "mode": mode, "default": default_, "info": info, "hidden": hidden, "index": index, }: {
            name: string;
            type?: enum_type;
            mode?: enum_mode;
            default?: any;
            info?: string;
            hidden?: boolean;
            index: int;
        }): class_argument;
        /**
         * convenience factory for a flag-style argument with short
         * (e.g. "-x") and long (e.g. "--xyz") indicators
         *
         * @author fenris
         */
        static volatile({ "name": name, "type": type, "mode": mode, "default": default_, "info": info, "hidden": hidden, "indicators_short": indicators_short, "indicators_long": indicators_long, }: {
            name: string;
            type?: enum_type;
            mode?: enum_mode;
            default?: any;
            info?: string;
            hidden?: boolean;
            indicators_short: Array<string>;
            indicators_long: Array<string>;
        }): class_argument;
        /**
         * validates the definition itself (not a parsed value)
         *
         * @author fenris
         */
        check(): boolean;
        /**
         * @author fenris
         */
        name_get(): string;
        /**
         * @author fenris
         */
        kind_get(): enum_kind;
        /**
         * @author fenris
         */
        type_get(): enum_type;
        /**
         * @author fenris
         */
        mode_get(): enum_mode;
        /**
         * @author fenris
         */
        default_get(): any;
        /**
         * @author fenris
         */
        parameters_get(): Object;
        /**
         * @author fenris
         */
        hidden_get(): boolean;
        /**
         * @author fenris
         */
        toString(): string;
        /**
         * primary indicator used to reference this argument
         *
         * @author fenris
         */
        indicator_main(): string;
        /**
         * pattern describing a value occurrence (used for matching —
         * confirm exact format in the implementation)
         *
         * @author fenris
         */
        pattern_value(): string;
        /**
         * converts a raw string occurrence into a typed value
         *
         * @author fenris
         */
        extract(raw: string): any;
        /**
         * extracts "raw" and stores the result in data[target]
         *
         * @author fenris
         */
        assign(data: Object, target: string, raw: string): void;
        /**
         * inverse of "assign": renders the value in data[target] back
         * into its textual form
         *
         * @author fenris
         */
        make(data: Object, target: string): string;
        /**
         * help text for this single argument
         *
         * @author fenris
         */
        generate_help(): string;
    }
}
declare namespace lib_plankton.args {
    /**
     * verbosity level for this module
     *
     * @author fenris
     */
    var verbosity: int;
    /**
     * parses and serializes a set of argument definitions against a
     * command line or URL
     *
     * @author fenris
     * @todo check validity
     */
    class class_handler {
        /**
         * the managed argument definitions, keyed by name
         *
         * @author fenris
         */
        protected arguments_: {
            [name: string]: class_argument;
        };
        /**
         * @author fenris
         */
        constructor(arguments_: {
            [name: string]: class_argument;
        });
        /**
         * the subset of managed arguments with the given kind
         *
         * @author fenris
         */
        filter(kind: enum_kind): {
            [name: string]: class_argument;
        };
        /**
         * parses the input string into a name→value map; "data" may
         * provide pre-set values (presumably merged — confirm)
         *
         * @author fenris
         */
        read(environment: enum_environment, input: string, data?: {
            [name: string]: any;
        }): {
            [name: string]: any;
        };
        /**
         * inverse of "read": serializes a name→value map back into a
         * command line / URL string
         *
         * @author fenris
         * @todo handle if the data object doesn't have the required field or the type is wrong or sth.
         */
        write(environment: enum_environment, data: {
            [name: string]: any;
        }): string;
        /**
         * @desc manpage-like info-sheet
         * @author fenris
         */
        generate_help({ "programname": programname, "author": author, "description": description, "executable": executable, }: {
            programname?: string;
            author?: string;
            description?: string;
            executable?: string;
        }): string;
    }
}