File: /var/www/linde-ai/html/node_modules/strtok3/lib/AbstractTokenizer.d.ts
import { ITokenizer, IFileInfo, IReadChunkOptions } from './types';
import { IGetToken, IToken } from '@tokenizer/token';
interface INormalizedReadChunkOptions extends IReadChunkOptions {
    offset: number;
    length: number;
    position: number;
    mayBeLess?: boolean;
}
/**
 * Core tokenizer
 */
export declare abstract class AbstractTokenizer implements ITokenizer {
    fileInfo: IFileInfo;
    protected constructor(fileInfo?: IFileInfo);
    /**
     * Tokenizer-stream position
     */
    position: number;
    private numBuffer;
    /**
     * Read buffer from tokenizer
     * @param buffer - Target buffer to fill with data read from the tokenizer-stream
     * @param options - Additional read options
     * @returns Promise with number of bytes read
     */
    abstract readBuffer(buffer: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param uint8Array - Target buffer to fill with data peeked from the tokenizer-stream
     * @param options - Peek behaviour options
     * @returns Promise with number of bytes read
     */
    abstract peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Read a token from the tokenizer-stream
     * @param token - The token to read
     * @param position - If provided, the desired position in the tokenizer-stream
     * @returns Promise with token data
     */
    readToken<Value>(token: IGetToken<Value>, position?: number): Promise<Value>;
    /**
     * Peek a token from the tokenizer-stream.
     * @param token - Token to peek from the tokenizer-stream.
     * @param position - Offset at which to begin reading within the file. If position is not provided, data is read from the current file position.
     * @returns Promise with token data
     */
    peekToken<Value>(token: IGetToken<Value>, position?: number): Promise<Value>;
    /**
     * Read a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    readNumber(token: IToken<number>): Promise<number>;
    /**
     * Peek a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    peekNumber(token: IToken<number>): Promise<number>;
    /**
     * Ignore a number of bytes, advancing the pointer in the tokenizer-stream.
     * @param length - Number of bytes to ignore
     * @returns Promise with the number of bytes ignored: equal to length if that many bytes are available, otherwise the number of bytes available
     */
    ignore(length: number): Promise<number>;
    close(): Promise<void>;
    protected normalizeOptions(uint8Array: Uint8Array, options?: IReadChunkOptions): INormalizedReadChunkOptions;
}
export {};
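
Below is a minimal usage sketch, not part of the dumped file, showing how the methods declared above are typically driven through the strtok3 entry points. It assumes the companion packages strtok3 and token-types are installed; fromBuffer, UINT32_LE and StringType come from those packages, and the byte values are made up for illustration.

import { fromBuffer } from 'strtok3';
import * as Token from 'token-types';

async function demo(): Promise<void> {
    // "RIFF" tag followed by a 32-bit little-endian chunk size, as at the start of a WAVE file.
    const data = Uint8Array.from([0x52, 0x49, 0x46, 0x46, 0x10, 0x00, 0x00, 0x00]);
    const tokenizer = fromBuffer(data);

    // peekToken inspects data without advancing tokenizer.position; readToken/readNumber advance it.
    const tag = await tokenizer.peekToken(new Token.StringType(4, 'latin1')); // "RIFF"
    await tokenizer.ignore(4);                                 // skip the 4 bytes just peeked
    const size = await tokenizer.readNumber(Token.UINT32_LE);  // 16
    console.log(tag, size, tokenizer.position);                // RIFF 16 8
}

demo().catch(console.error);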
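
Because readBuffer and peekBuffer are abstract, each concrete tokenizer supplies the raw byte access while AbstractTokenizer layers the token and number helpers on top. The sketch below is an assumption rather than the library's own implementation: the deep import paths mirror where this declaration file lives and may differ between strtok3 versions, and the option defaulting is a hand-rolled stand-in for the protected normalizeOptions helper.

import { AbstractTokenizer } from 'strtok3/lib/AbstractTokenizer';
import { IReadChunkOptions } from 'strtok3/lib/types';

class MemoryTokenizer extends AbstractTokenizer {
    public constructor(private data: Uint8Array) {
        super({ size: data.length }); // IFileInfo is assumed to carry an optional size
    }

    public async peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number> {
        // Resolve defaults by hand; the library does this via normalizeOptions.
        const offset = options?.offset ?? 0;
        const length = options?.length ?? uint8Array.length - offset;
        const position = options?.position ?? this.position;
        const bytes = Math.min(length, Math.max(0, this.data.length - position));
        if (bytes < length && !options?.mayBeLess) {
            throw new Error('End-Of-Stream'); // stand-in for the library's end-of-stream error
        }
        uint8Array.set(this.data.subarray(position, position + bytes), offset);
        return bytes;
    }

    public async readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number> {
        const position = options?.position ?? this.position;
        const bytes = await this.peekBuffer(uint8Array, { ...options, position });
        this.position = position + bytes; // reading, unlike peeking, advances the stream position
        return bytes;
    }
}

A tokenizer built this way can then be driven exactly like the fromBuffer example above, since readToken, peekToken, readNumber, peekNumber and ignore are all implemented by the base class in terms of these two methods.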