update interface button styles

2023-07-28 02:15:00 +02:00
parent aff5e643ac
commit 12c07c83b3
2901 changed files with 1369600 additions and 41 deletions

21
node_modules/@lezer/lr/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
MIT License

Copyright (C) 2018 by Marijn Haverbeke <marijn@haverbeke.berlin> and others

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

25
node_modules/@lezer/lr/README.md generated vendored Normal file

@@ -0,0 +1,25 @@
# @lezer/lr

[ [**WEBSITE**](http://lezer.codemirror.net) | [**ISSUES**](https://github.com/lezer-parser/lezer/issues) | [**FORUM**](https://discuss.codemirror.net/c/lezer) | [**CHANGELOG**](https://github.com/lezer-parser/lr/blob/master/CHANGELOG.md) ]

Lezer ("reader" in Dutch, pronounced pretty much like laser) is an
incremental GLR parser intended for use in an editor or similar
system, which needs to keep a representation of the program current
during changes and in the face of syntax errors.

It prioritizes speed and compactness (both of parser table files and
of syntax tree) over having a highly usable parse tree—tree nodes are
just blobs with a start, end, tag, and set of child nodes, with no
further labeling of child nodes or extra metadata.

This package contains the run-time LR parser library. It consumes
parsers generated by
[@lezer/generator](https://github.com/lezer-parser/generator).

The parser programming interface is documented on [the
website](https://lezer.codemirror.net/docs/ref/#lr).

The code is licensed under an MIT license.

This project was hugely inspired by
[tree-sitter](http://tree-sitter.github.io/tree-sitter/).
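
As a rough usage sketch (not part of this README): any grammar compiled with @lezer/generator exposes the LRParser interface declared in this package. The @lezer/javascript grammar used below is an assumption; substitute whatever generated grammar module you actually have.

```ts
// Minimal sketch, assuming the separately installed @lezer/javascript grammar.
import {parser} from "@lezer/javascript";

// Parse a snippet; the result is a compact @lezer/common Tree.
const tree = parser.parse("let answer = 42;");

// Tree nodes are lightweight blobs; a cursor exposes their name and extent.
const cursor = tree.cursor();
do {
  console.log(`${cursor.name} [${cursor.from}..${cursor.to}]`);
} while (cursor.next());
```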

45
node_modules/@lezer/lr/dist/constants.d.ts generated vendored Normal file

@@ -0,0 +1,45 @@
export declare const enum Action {
    ReduceFlag = 65536,
    ValueMask = 65535,
    ReduceDepthShift = 19,
    RepeatFlag = 131072,
    GotoFlag = 131072,
    StayFlag = 262144
}
export declare const enum StateFlag {
    Skipped = 1,
    Accepting = 2
}
export declare const enum Specialize {
    Specialize = 0,
    Extend = 1
}
export declare const enum Term {
    Err = 0
}
export declare const enum Seq {
    End = 65535,
    Done = 0,
    Next = 1,
    Other = 2
}
export declare const enum ParseState {
    Flags = 0,
    Actions = 1,
    Skip = 2,
    TokenizerMask = 3,
    DefaultReduce = 4,
    ForcedReduce = 5,
    Size = 6
}
export declare const enum Encode {
    BigValCode = 126,
    BigVal = 65535,
    Start = 32,
    Gap1 = 34,
    Gap2 = 92,
    Base = 46
}
export declare const enum File {
    Version = 14
}

5
node_modules/@lezer/lr/dist/decode.d.ts generated vendored Normal file

@@ -0,0 +1,5 @@
export declare function decodeArray<T extends {
    [i: number]: number;
} = Uint16Array>(input: string | T, Type?: {
    new (n: number): T;
}): T;

1740
node_modules/@lezer/lr/dist/index.cjs generated vendored Normal file

File diff suppressed because it is too large

3
node_modules/@lezer/lr/dist/index.d.ts generated vendored Normal file

@@ -0,0 +1,3 @@
export { LRParser, ParserConfig, ContextTracker } from "./parse";
export { InputStream, ExternalTokenizer, LocalTokenGroup } from "./token";
export { Stack } from "./stack";

1731
node_modules/@lezer/lr/dist/index.js generated vendored Normal file

File diff suppressed because it is too large

110
node_modules/@lezer/lr/dist/parse.d.ts generated vendored Normal file

@@ -0,0 +1,110 @@
import { Tree, TreeFragment, NodeSet, NodeType, NodePropSource, Input, PartialParse, Parser, ParseWrapper } from "@lezer/common";
import { Stack } from "./stack";
import { Tokenizer, ExternalTokenizer, CachedToken, InputStream } from "./token";
declare class FragmentCursor {
    readonly fragments: readonly TreeFragment[];
    readonly nodeSet: NodeSet;
    i: number;
    fragment: TreeFragment | null;
    safeFrom: number;
    safeTo: number;
    trees: Tree[];
    start: number[];
    index: number[];
    nextStart: number;
    constructor(fragments: readonly TreeFragment[], nodeSet: NodeSet);
    nextFragment(): void;
    nodeAt(pos: number): Tree | null;
}
declare class TokenCache {
    readonly stream: InputStream;
    tokens: CachedToken[];
    mainToken: CachedToken | null;
    actions: number[];
    constructor(parser: LRParser, stream: InputStream);
    getActions(stack: Stack): number[];
    getMainToken(stack: Stack): CachedToken;
    updateCachedToken(token: CachedToken, tokenizer: Tokenizer, stack: Stack): void;
    putAction(action: number, token: number, end: number, index: number): number;
    addActions(stack: Stack, token: number, end: number, index: number): number;
}
export declare class Parse implements PartialParse {
    readonly parser: LRParser;
    readonly input: Input;
    readonly ranges: readonly {
        from: number;
        to: number;
    }[];
    stacks: Stack[];
    recovering: number;
    fragments: FragmentCursor | null;
    nextStackID: number;
    minStackPos: number;
    reused: Tree[];
    stream: InputStream;
    tokens: TokenCache;
    topTerm: number;
    stoppedAt: null | number;
    lastBigReductionStart: number;
    lastBigReductionSize: number;
    bigReductionCount: number;
    constructor(parser: LRParser, input: Input, fragments: readonly TreeFragment[], ranges: readonly {
        from: number;
        to: number;
    }[]);
    get parsedPos(): number;
    advance(): Tree;
    stopAt(pos: number): void;
    private advanceStack;
    private advanceFully;
    private runRecovery;
    stackToTree(stack: Stack): Tree;
    private stackID;
}
export declare class Dialect {
    readonly source: string | undefined;
    readonly flags: readonly boolean[];
    readonly disabled: null | Uint8Array;
    constructor(source: string | undefined, flags: readonly boolean[], disabled: null | Uint8Array);
    allows(term: number): boolean;
}
export declare class ContextTracker<T> {
    constructor(spec: {
        start: T;
        shift?(context: T, term: number, stack: Stack, input: InputStream): T;
        reduce?(context: T, term: number, stack: Stack, input: InputStream): T;
        reuse?(context: T, node: Tree, stack: Stack, input: InputStream): T;
        hash?(context: T): number;
        strict?: boolean;
    });
}
export interface ParserConfig {
    props?: readonly NodePropSource[];
    top?: string;
    dialect?: string;
    tokenizers?: {
        from: ExternalTokenizer;
        to: ExternalTokenizer;
    }[];
    specializers?: {
        from: (value: string, stack: Stack) => number;
        to: (value: string, stack: Stack) => number;
    }[];
    contextTracker?: ContextTracker<any>;
    strict?: boolean;
    wrap?: ParseWrapper;
    bufferLength?: number;
}
export declare class LRParser extends Parser {
    readonly nodeSet: NodeSet;
    createParse(input: Input, fragments: readonly TreeFragment[], ranges: readonly {
        from: number;
        to: number;
    }[]): PartialParse;
    configure(config: ParserConfig): LRParser;
    hasWrappers(): boolean;
    getName(term: number): string;
    get topNode(): NodeType;
    static deserialize(spec: any): LRParser;
}
export {};
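
A minimal configuration sketch against the declarations above; the dialect name and the stand-in `parser` constant are hypothetical, and a real instance would come from a grammar compiled with @lezer/generator:

```ts
import {LRParser, ContextTracker} from "@lezer/lr";

// Toy ContextTracker: keeps the last shifted term id as the per-stack context,
// hashing it to itself so equal contexts allow node reuse.
const lastTerm = new ContextTracker<number>({
  start: -1,
  shift: (_context, term) => term,
  hash: context => context,
});

declare const parser: LRParser; // stand-in for a generated parser module

// configure() returns a new parser; the original is left untouched.
const configured = parser.configure({
  dialect: "ts",            // hypothetical dialect defined by the grammar
  contextTracker: lastTerm,
  bufferLength: 1024,
});
```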

35
node_modules/@lezer/lr/dist/stack.d.ts generated vendored Normal file

@@ -0,0 +1,35 @@
import { BufferCursor } from "@lezer/common";
export declare class Stack {
    pos: number;
    get context(): any;
    canShift(term: number): boolean;
    get parser(): import("./parse").LRParser;
    dialectEnabled(dialectID: number): boolean;
    private shiftContext;
    private reduceContext;
    private updateContext;
}
export declare const enum Recover {
    Insert = 200,
    Delete = 190,
    Reduce = 100,
    MaxNext = 4,
    MaxInsertStackDepth = 300,
    DampenInsertStackDepth = 120,
    MinBigReduction = 2000
}
export declare class StackBufferCursor implements BufferCursor {
    stack: Stack;
    pos: number;
    index: number;
    buffer: number[];
    constructor(stack: Stack, pos: number, index: number);
    static create(stack: Stack, pos?: number): StackBufferCursor;
    maybeNext(): void;
    get id(): number;
    get start(): number;
    get end(): number;
    get size(): number;
    next(): void;
    fork(): StackBufferCursor;
}

45
node_modules/@lezer/lr/dist/token.d.ts generated vendored Normal file

@@ -0,0 +1,45 @@
import { Stack } from "./stack";
export declare class CachedToken {
    start: number;
    value: number;
    end: number;
    extended: number;
    lookAhead: number;
    mask: number;
    context: number;
}
export declare class InputStream {
    private chunk2;
    private chunk2Pos;
    next: number;
    pos: number;
    private rangeIndex;
    private range;
    peek(offset: number): any;
    acceptToken(token: number, endOffset?: number): void;
    private getChunk;
    private readNext;
    advance(n?: number): number;
    private setDone;
}
export interface Tokenizer {
}
export declare class LocalTokenGroup implements Tokenizer {
    readonly precTable: number;
    readonly elseToken?: number;
    contextual: boolean;
    fallback: boolean;
    extend: boolean;
    readonly data: Readonly<Uint16Array>;
    constructor(data: Readonly<Uint16Array> | string, precTable: number, elseToken?: number);
    token(input: InputStream, stack: Stack): void;
}
interface ExternalOptions {
    contextual?: boolean;
    fallback?: boolean;
    extend?: boolean;
}
export declare class ExternalTokenizer {
    constructor(token: (input: InputStream, stack: Stack) => void, options?: ExternalOptions);
}
export {};
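
For illustration, a hand-written tokenizer matching the ExternalTokenizer shape declared above; the term id is a hypothetical placeholder that would normally be imported from the generated grammar's terms module:

```ts
import {ExternalTokenizer, InputStream, Stack} from "@lezer/lr";

const arrowToken = 1; // hypothetical term id exported by a generated grammar

// Recognizes "=>" wherever the grammar asks this tokenizer for a token.
export const arrow = new ExternalTokenizer((input: InputStream, _stack: Stack) => {
  // input.next is the code point at the current position (-1 at end of input).
  if (input.next == 61 /* '=' */ && input.peek(1) == 62 /* '>' */) {
    input.advance(2);              // consume both characters
    input.acceptToken(arrowToken); // token ends at the current position
  }
}, {contextual: false});
```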

35
node_modules/@lezer/lr/package.json generated vendored Normal file

@@ -0,0 +1,35 @@
{
  "name": "@lezer/lr",
  "version": "1.3.9",
  "description": "Incremental parser",
  "main": "dist/index.cjs",
  "type": "module",
  "exports": {
    "import": "./dist/index.js",
    "require": "./dist/index.cjs"
  },
  "module": "dist/index.js",
  "types": "dist/index.d.ts",
  "author": "Marijn Haverbeke <marijn@haverbeke.berlin>",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/lezer-parser/lr.git"
  },
  "devDependencies": {
    "rollup": "^2.52.2",
    "@rollup/plugin-commonjs": "^15.1.0",
    "@rollup/plugin-node-resolve": "^9.0.0",
    "rollup-plugin-typescript2": "^0.34.1",
    "typescript": "^4.3.4"
  },
  "dependencies": {
    "@lezer/common": "^1.0.0"
  },
  "files": ["dist"],
  "scripts": {
    "test": "echo 'Tests are in @lezer/generator'",
    "watch": "rollup -w -c rollup.config.js",
    "prepare": "rollup -c rollup.config.js"
  }
}
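
For reference, a sketch of how the "exports" map above resolves for consumers; the import specifiers assume the published package name:

```ts
// ESM consumers (and bundlers honoring "module"/"import") resolve to dist/index.js.
import {LRParser} from "@lezer/lr";

console.log(typeof LRParser); // "function"

// CommonJS consumers resolve to dist/index.cjs via the "require" condition:
// const {LRParser} = require("@lezer/lr");
```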