import { type Deep } from "https://deno.land/x/rimbu@0.13.1/deep/index.ts";
// Tokenize is a type, so it is referenced in type positions rather than destructured as a value:
type Tokenize<P extends string, Token extends string = "", Res extends string[] = []> = Deep.Path.Result.Tokenize<P, Token, Res>;
Converts a path string into an array of its separate string tokens.
definition:
P extends ""
  ? Path.Internal.AppendIfNotEmpty<Res, Token>
  : P extends `[${infer Index}]${infer Rest}`
    ? Tokenize<Rest, "", [...Path.Internal.AppendIfNotEmpty<Res, Token>, Index]>
    : P extends `?.${infer Rest}`
      ? Tokenize<Rest, "", [...Path.Internal.AppendIfNotEmpty<Res, Token>, "?."]>
      : P extends `.${infer Rest}`
        ? Tokenize<Rest, "", [...Path.Internal.AppendIfNotEmpty<Res, Token>, "."]>
        : P extends `${infer First}${infer Rest}`
          ? Tokenize<Rest, `${Token}${First}`, Res>
          : never
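As an illustration of how the definition walks a path string, the sketch below applies Tokenize to two sample paths. Token and Res are passed explicitly as "" and [] because their defaults are not shown in this snippet; the sample paths, alias names, and expected results are worked out from the definition above rather than taken from the Rimbu documentation.

import { type Deep } from "https://deno.land/x/rimbu@0.13.1/deep/index.ts";

// Plain characters accumulate into the current token; ".", "?." and "[index]"
// close that token and emit a separator (or the index) as its own element.
type StreetTokens = Deep.Path.Result.Tokenize<"address.street[1]?.name", "", []>;
// expected, tracing the definition: ["address", ".", "street", "1", "?.", "name"]

type SingleToken = Deep.Path.Result.Tokenize<"count", "", []>;
// expected, tracing the definition: ["count"]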