class Tokenizer implements IterableIterator<Token>

import { Tokenizer } from "https://deno.land/x/tokenizer@0.1.0/tokenizer.ts";

Tokenizes a given source string into tokens.

Constructors

  new Tokenizer(rules: Rule[])
    Constructs a new Tokenizer.
  new Tokenizer(source: string, rules: Rule[])
  new Tokenizer(sourceOrRules: string | Rule[], rulesOrNothing?: Rule[])

Properties

  private _index: number
  readonly done: boolean
    Checks whether the Tokenizer is done scanning the source string.
  readonly index: number
    The current index of the Tokenizer in the source string.
  readonly rules: Rule[]
    The rules that tell the Tokenizer what patterns to look for.
  readonly source: string
    The string that will be scanned.
  unexpectedCharacterError: () => void

Methods

  private match(text: string, pattern: Pattern): { match: string; groups: string[]; }
  private scan(): Token | undefined
  next(): IteratorResult<Token>
    Returns the next scanned Token.
  reset(): void
    Resets the index of the Tokenizer.
  tokenize()
    Tokenizes the given string (default is the lexer input) into a Token array.
  tokenize(source: string)
  tokenize(source: string, callback: (token: Token) => void)
  tokenize(callback: (token: Token) => void)
  [Symbol.iterator](): IterableIterator<Token>