update interface button styles

This commit is contained in:
2023-07-28 02:15:00 +02:00
parent aff5e643ac
commit 12c07c83b3
2901 changed files with 1369600 additions and 41 deletions

45
node_modules/@lezer/lr/dist/constants.d.ts generated vendored Normal file
View File

@ -0,0 +1,45 @@
/**
 * Bit flags and masks packed into a single numeric parser action code.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 * RepeatFlag and GotoFlag share the value 131072 (1 << 17); presumably they
 * are interpreted in disjoint contexts — confirm against the lezer sources.
 */
export declare const enum Action {
  ReduceFlag = 65536,      // 1 << 16 — marks the action as a reduce
  ValueMask = 65535,       // 0xFFFF — low 16 bits carry the action's value
  ReduceDepthShift = 19,   // bits >= 19 presumably encode reduce depth — confirm
  RepeatFlag = 131072,     // 1 << 17
  GotoFlag = 131072,       // same bit as RepeatFlag (context-dependent meaning)
  StayFlag = 262144        // 1 << 18
}
/**
 * Per-parse-state bit flags.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare const enum StateFlag {
  Skipped = 1,    // bit 0
  Accepting = 2   // bit 1
}
/**
 * Discriminates the two kinds of token specialization tables.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare const enum Specialize {
  Specialize = 0,   // replace the base token with the specialized one
  Extend = 1        // keep the base token as an alternative — confirm semantics
}
/**
 * Well-known term (grammar symbol) IDs.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare const enum Term {
  Err = 0   // term ID 0 is reserved for the error term
}
/**
 * Sentinel values used when scanning encoded number sequences.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare const enum Seq {
  End = 65535,   // 0xFFFF — terminator value for a sequence
  Done = 0,
  Next = 1,
  Other = 2
}
/**
 * Field offsets into a flat parse-state record: each state occupies
 * `ParseState.Size` (6) consecutive slots, addressed by these offsets.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare const enum ParseState {
  Flags = 0,           // see StateFlag
  Actions = 1,
  Skip = 2,
  TokenizerMask = 3,
  DefaultReduce = 4,
  ForcedReduce = 5,
  Size = 6             // number of slots per state record
}
/**
 * Constants for the string encoding of number arrays (see decode.d.ts).
 * Values presumably chosen to avoid characters needing escapes in JS string
 * literals (34 = '"', 92 = '\\') — confirm against the encoder.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare const enum Encode {
  BigValCode = 126,   // '~' — escape code introducing an oversized value
  BigVal = 65535,     // 0xFFFF — threshold/sentinel for big values
  Start = 32,         // ' ' — first usable character code
  Gap1 = 34,          // '"' — skipped character
  Gap2 = 92,          // '\' — skipped character
  Base = 46           // number of usable codes per digit
}
/**
 * Serialized parser-file metadata.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare const enum File {
  Version = 14   // format version expected by LRParser.deserialize
}

5
node_modules/@lezer/lr/dist/decode.d.ts generated vendored Normal file
View File

@ -0,0 +1,5 @@
/**
 * Decodes a string-encoded number sequence into an array-like container of
 * numbers (defaults to `Uint16Array`). When `input` is already a `T`,
 * presumably it is returned unchanged — confirm against the implementation.
 *
 * @param input Encoded string, or an already-decoded array.
 * @param Type  Constructor used to allocate the output, called with a length.
 * @returns The decoded (or passed-through) array.
 */
export declare function decodeArray<T extends {
  [i: number]: number;
} = Uint16Array>(input: string | T, Type?: {
  new (n: number): T;
}): T;

1740
node_modules/@lezer/lr/dist/index.cjs generated vendored Normal file

File diff suppressed because it is too large Load Diff

3
node_modules/@lezer/lr/dist/index.d.ts generated vendored Normal file
View File

@ -0,0 +1,3 @@
// Public entry point of @lezer/lr: re-exports the parser, tokenizer, and
// stack APIs from their implementation modules.
export { LRParser, ParserConfig, ContextTracker } from "./parse";
export { InputStream, ExternalTokenizer, LocalTokenGroup } from "./token";
export { Stack } from "./stack";

1731
node_modules/@lezer/lr/dist/index.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

110
node_modules/@lezer/lr/dist/parse.d.ts generated vendored Normal file
View File

@ -0,0 +1,110 @@
import { Tree, TreeFragment, NodeSet, NodeType, NodePropSource, Input, PartialParse, Parser, ParseWrapper } from "@lezer/common";
import { Stack } from "./stack";
import { Tokenizer, ExternalTokenizer, CachedToken, InputStream } from "./token";
/**
 * Cursor over a list of reusable `TreeFragment`s from a previous parse,
 * used to look up nodes that can be reused at a given position.
 * Not exported — internal to the parse module.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
declare class FragmentCursor {
  readonly fragments: readonly TreeFragment[];
  readonly nodeSet: NodeSet;
  // Index of the current fragment within `fragments`.
  i: number;
  fragment: TreeFragment | null;
  // Range inside the current fragment that is safe to reuse — confirm.
  safeFrom: number;
  safeTo: number;
  // Parallel stacks tracking the descent into the current fragment's tree.
  trees: Tree[];
  start: number[];
  index: number[];
  nextStart: number;
  constructor(fragments: readonly TreeFragment[], nodeSet: NodeSet);
  // Advances to the next fragment, updating the safe range.
  nextFragment(): void;
  // Returns a reusable tree node at `pos`, or null when none applies.
  nodeAt(pos: number): Tree | null;
}
/**
 * Caches tokens read from the input so multiple parse stacks at the same
 * position don't re-tokenize. Not exported — internal to the parse module.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
declare class TokenCache {
  readonly stream: InputStream;
  tokens: CachedToken[];
  mainToken: CachedToken | null;
  // Flat list of pending actions — layout presumably (action, token, end)
  // triples, matching putAction's parameters; confirm.
  actions: number[];
  constructor(parser: LRParser, stream: InputStream);
  // Returns the actions applicable for `stack` at its current position.
  getActions(stack: Stack): number[];
  getMainToken(stack: Stack): CachedToken;
  // Re-runs `tokenizer` to refresh `token` for the given stack state.
  updateCachedToken(token: CachedToken, tokenizer: Tokenizer, stack: Stack): void;
  // Both return the updated write index into `actions`.
  putAction(action: number, token: number, end: number, index: number): number;
  addActions(stack: Stack, token: number, end: number, index: number): number;
}
/**
 * An in-progress LR parse over `input`, implementing `PartialParse` from
 * @lezer/common. Maintains a set of concurrent GLR stacks and advances them
 * until a finished tree is produced.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare class Parse implements PartialParse {
  readonly parser: LRParser;
  readonly input: Input;
  // Document ranges covered by this parse.
  readonly ranges: readonly {
    from: number;
    to: number;
  }[];
  // Active parse stacks (multiple during ambiguity/recovery).
  stacks: Stack[];
  // Nonzero while in error-recovery mode — confirm exact semantics.
  recovering: number;
  // Cursor over reusable fragments from a previous parse, if any.
  fragments: FragmentCursor | null;
  nextStackID: number;
  // Lowest position among the active stacks.
  minStackPos: number;
  // Tree nodes reused from fragments (referenced by the output buffer).
  reused: Tree[];
  stream: InputStream;
  tokens: TokenCache;
  // Term ID of the top rule being parsed.
  topTerm: number;
  // Position set by stopAt(), or null to parse to the end.
  stoppedAt: null | number;
  // Bookkeeping for detecting runaway large reductions — confirm.
  lastBigReductionStart: number;
  lastBigReductionSize: number;
  bigReductionCount: number;
  constructor(parser: LRParser, input: Input, fragments: readonly TreeFragment[], ranges: readonly {
    from: number;
    to: number;
  }[]);
  get parsedPos(): number;
  // Performs one unit of work; per PartialParse, presumably returns the
  // finished tree or null while unfinished — declared return is Tree.
  advance(): Tree;
  // Requests that parsing stop once `pos` is reached.
  stopAt(pos: number): void;
  private advanceStack;
  private advanceFully;
  private runRecovery;
  // Converts a (finished) stack into its syntax tree.
  stackToTree(stack: Stack): Tree;
  private stackID;
}
/**
 * A resolved dialect configuration: which dialect flags are on and which
 * terms are disabled as a result.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare class Dialect {
  readonly source: string | undefined;
  // One boolean per dialect, indexed by dialect ID — confirm indexing.
  readonly flags: readonly boolean[];
  // Per-term disabled bitmap, or null when nothing is disabled.
  readonly disabled: null | Uint8Array;
  constructor(source: string | undefined, flags: readonly boolean[], disabled: null | Uint8Array);
  // True when `term` is allowed (not disabled) under this dialect.
  allows(term: number): boolean;
}
/**
 * Tracks a user-defined context value of type `T` alongside the parse,
 * updated via the callbacks given in `spec` as the parser shifts, reduces,
 * and reuses nodes.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare class ContextTracker<T> {
  constructor(spec: {
    // Initial context value.
    start: T;
    // Called on shift/reduce of `term`; returns the new context.
    shift?(context: T, term: number, stack: Stack, input: InputStream): T;
    reduce?(context: T, term: number, stack: Stack, input: InputStream): T;
    // Called when a tree node is reused from a previous parse.
    reuse?(context: T, node: Tree, stack: Stack, input: InputStream): T;
    // Hash of a context value — presumably used to compare stacks; confirm.
    hash?(context: T): number;
    strict?: boolean;
  });
}
/**
 * Options accepted by `LRParser.configure` to derive a reconfigured parser.
 * All fields are optional overrides of the base parser's settings.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export interface ParserConfig {
  // Extra node props to attach to the parser's node set.
  props?: readonly NodePropSource[];
  // Name of the top rule to parse with.
  top?: string;
  // Dialect(s) to enable — format of the string is defined by the grammar.
  dialect?: string;
  // Replace tokenizer `from` with `to`.
  tokenizers?: {
    from: ExternalTokenizer;
    to: ExternalTokenizer;
  }[];
  // Replace specializer function `from` with `to`.
  specializers?: {
    from: (value: string, stack: Stack) => number;
    to: (value: string, stack: Stack) => number;
  }[];
  contextTracker?: ContextTracker<any>;
  // When true, presumably disables error recovery — confirm.
  strict?: boolean;
  wrap?: ParseWrapper;
  bufferLength?: number;
}
/**
 * A compiled LR parser, extending the abstract `Parser` from @lezer/common.
 * Instances are produced by `deserialize` (from lezer-generator output) and
 * can be non-destructively reconfigured via `configure`.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare class LRParser extends Parser {
  readonly nodeSet: NodeSet;
  // Parser interface: starts an incremental parse over the given ranges.
  createParse(input: Input, fragments: readonly TreeFragment[], ranges: readonly {
    from: number;
    to: number;
  }[]): PartialParse;
  // Returns a new parser with `config` applied; the receiver is unchanged.
  configure(config: ParserConfig): LRParser;
  hasWrappers(): boolean;
  // Human-readable name for a term ID.
  getName(term: number): string;
  // Node type of the current top rule.
  get topNode(): NodeType;
  // Builds a parser from generator output; `spec` shape is generator-defined.
  static deserialize(spec: any): LRParser;
}
export {};

35
node_modules/@lezer/lr/dist/stack.d.ts generated vendored Normal file
View File

@ -0,0 +1,35 @@
import { BufferCursor } from "@lezer/common";
/**
 * A single GLR parse stack. Only the members declared here are public API;
 * the implementation holds additional internal state.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare class Stack {
  // Input position this stack has reached.
  pos: number;
  // Current context-tracker value (any — tracker type is erased here).
  get context(): any;
  // True when `term` can be shifted in the current state (possibly after
  // forced reductions — confirm).
  canShift(term: number): boolean;
  get parser(): import("./parse").LRParser;
  dialectEnabled(dialectID: number): boolean;
  private shiftContext;
  private reduceContext;
  private updateContext;
}
/**
 * Tuning constants for error recovery: cost scores for recovery actions and
 * limits that dampen pathological recovery behavior.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare const enum Recover {
  Insert = 200,                  // cost of inserting a token
  Delete = 190,                  // cost of deleting a token
  Reduce = 100,                  // cost of a forced reduction
  MaxNext = 4,
  MaxInsertStackDepth = 300,
  DampenInsertStackDepth = 120,
  MinBigReduction = 2000         // size threshold for "big" reductions — confirm
}
/**
 * A `BufferCursor` (from @lezer/common) that walks a stack's node buffer
 * backwards, following parent stacks when one buffer is exhausted
 * (see `maybeNext`) — confirm traversal direction against the implementation.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare class StackBufferCursor implements BufferCursor {
  stack: Stack;
  // Position in the buffer; `index` is the slot offset within it.
  pos: number;
  index: number;
  buffer: number[];
  constructor(stack: Stack, pos: number, index: number);
  // Creates a cursor for `stack`, defaulting `pos` to the buffer's end.
  static create(stack: Stack, pos?: number): StackBufferCursor;
  // Moves to the parent stack's buffer when the current one is exhausted.
  maybeNext(): void;
  // BufferCursor interface: fields of the node record at the cursor.
  get id(): number;
  get start(): number;
  get end(): number;
  get size(): number;
  next(): void;
  fork(): StackBufferCursor;
}

45
node_modules/@lezer/lr/dist/token.d.ts generated vendored Normal file
View File

@ -0,0 +1,45 @@
import { Stack } from "./stack";
/**
 * A token read from the input and cached for reuse across parse stacks
 * (see TokenCache in parse.d.ts).
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare class CachedToken {
  start: number;
  // Term ID of the token (-1 presumably means none — confirm).
  value: number;
  end: number;
  // Extended/specialized term ID, when specialization applies — confirm.
  extended: number;
  lookAhead: number;
  // Tokenizer mask and context hash this token was read under.
  mask: number;
  context: number;
}
/**
 * Character-level input view handed to tokenizers. Exposes the current
 * character (`next`), position (`pos`), lookahead (`peek`), advancing, and
 * token acceptance.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare class InputStream {
  private chunk2;
  private chunk2Pos;
  // Code of the character at `pos` (presumably -1 at end of input — confirm).
  next: number;
  pos: number;
  private rangeIndex;
  private range;
  // Looks at the character code `offset` positions from `pos` without moving.
  peek(offset: number): any;
  // Records `token` as matched, ending `endOffset` from the current position.
  acceptToken(token: number, endOffset?: number): void;
  private getChunk;
  private readNext;
  // Moves forward `n` characters (default presumably 1 — confirm) and
  // returns the new `next`.
  advance(n?: number): number;
  private setDone;
}
// Marker interface for tokenizers; the concrete shape lives in the
// implementation (empty here, so any object is assignable — structural typing).
export interface Tokenizer {
}
/**
 * A table-driven tokenizer for local token groups, backed by an encoded
 * state-machine table (`data`). The string constructor form is presumably
 * decoded via decodeArray — confirm.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare class LocalTokenGroup implements Tokenizer {
  // Offset of this group's precedence table within `data` — confirm.
  readonly precTable: number;
  // Token to produce when nothing else matches, if any.
  readonly elseToken?: number;
  contextual: boolean;
  fallback: boolean;
  extend: boolean;
  readonly data: Readonly<Uint16Array>;
  constructor(data: Readonly<Uint16Array> | string, precTable: number, elseToken?: number);
  // Runs the tokenizer; matches are reported via input.acceptToken.
  token(input: InputStream, stack: Stack): void;
}
/**
 * Options for ExternalTokenizer. Not exported (see `export {}` below).
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
interface ExternalOptions {
  // Token depends on parse context (disables some caching — confirm).
  contextual?: boolean;
  // Only tried when no other token matches — confirm.
  fallback?: boolean;
  extend?: boolean;
}
/**
 * Wraps a user-supplied tokenize function for use by the parser. The
 * function reports matches through `input.acceptToken` rather than a
 * return value.
 * NOTE(review): generated vendored declaration (@lezer/lr) — do not edit by hand.
 */
export declare class ExternalTokenizer {
  constructor(token: (input: InputStream, stack: Stack) => void, options?: ExternalOptions);
}
export {};