mirror of https://github.com/gosticks/DefinitelyTyped.git (synced 2025-10-16 12:05:41 +00:00)
[elasticlunr] new typings (#38717)
This commit is contained in:
parent c9e2ebd317
commit fd3952ba93
types/elasticlunr/elasticlunr-tests.ts (149 lines, Normal file)
@@ -0,0 +1,149 @@
import elasticlunr = require('elasticlunr');

function assertType<T>(value: T): T {
    return value;
}

interface TestDocument {
    id: string;
    field: string;
}

elasticlunr<TestDocument>();

elasticlunr<TestDocument>(function() {
    this.addField('field');
    this.setRef('id');
    this.saveDocument(true);
});

const index = elasticlunr<TestDocument>(ctx => {
    ctx.addField('field');
    ctx.setRef('id');
    ctx.saveDocument(true);
});

const testDoc: TestDocument = { id: '1', field: 'ok' };

// Index

index.addDoc(testDoc);
index.addDoc(testDoc, true);

index.addField('field');

index.coordNorm({ doc: 1 }, { doc: ['token'] }, 1);

index.fieldSearch(['ok'], 'field', { field: { boost: 10 } });
index.fieldSearch(['ok'], 'field', { field: { boost: 10, bool: 'OR' } });
index.fieldSearch(['ok'], 'field', { field: { boost: 10, bool: 'AND' } });
index.fieldSearch(['ok'], 'field', { field: { boost: 10, expand: true } });

index.fieldSearchStats({ doc: ['ok'] }, 'ok', { doc: testDoc });

index.getFields();

index.idf('term', 'field');

index.mergeScores({ doc: 1 }, { doc: 1 }, 'AND');
index.mergeScores({ doc: 1 }, { doc: 1 }, 'OR');

index.off('add', () => {});
index.off('update', () => {});
index.off('remove', () => {});

index.on('add', () => {});
index.on('update', () => {});
index.on('remove', () => {});
index.on('add', 'update', () => {});
index.on('add', 'update', 'remove', () => {});

index.removeDoc(testDoc);
index.removeDoc(testDoc, true);

index.removeDocByRef('1');
index.removeDocByRef('1', true);

index.saveDocument(true);

assertType<elasticlunr.SearchResults[]>(index.search('query'));
index.search('query', {
    fields: {
        field: { boost: 2 },
    },
});

index.toJSON();

index.updateDoc(testDoc);
index.updateDoc(testDoc, true);

index.use(() => {});
index.use(() => {}, 1);

elasticlunr.Index.load<TestDocument>({
    version: 'version',
    fields: ['field'],
    ref: 'id',
    documentStore: new elasticlunr.DocumentStore<TestDocument>().toJSON(),
    pipeline: ['trimmer', 'stopWordFilter', 'stemmer'],
    index: {
        field: { df: 1, docs: {} },
    },
});

// Pipeline

index.pipeline.add(() => '');

index.pipeline.after(() => '', () => '');

index.pipeline.before(() => '', () => '');

assertType<elasticlunr.PipelineFunction[]>(index.pipeline.get());

index.pipeline.remove(() => '');

assertType<string[]>(index.pipeline.run(['']));

assertType<elasticlunr.SerialisedPipeline>(index.pipeline.toJSON());

assertType<elasticlunr.Pipeline>(elasticlunr.Pipeline.load(index.pipeline.toJSON()));

elasticlunr.Pipeline.registerFunction(() => '', 'fn');
elasticlunr.Pipeline.getRegisteredFunction('fn');
elasticlunr.Pipeline.warnIfFunctionNotRegistered(() => '');

// DocumentStore

index.documentStore.addDoc('1', { id: '1', field: '1' });

index.documentStore.addFieldLength('1', 'field', 1);

index.documentStore.getDoc('1');

index.documentStore.getFieldLength('1', 'field');

index.documentStore.hasDoc('1');

index.documentStore.isDocStored();

index.documentStore.removeDoc('1');

index.documentStore.toJSON();

index.documentStore.updateFieldLength('1', 'field', 1);

elasticlunr.DocumentStore.load(index.documentStore.toJSON());

// EventEmitter

index.eventEmitter.addListener('add', () => {});

index.eventEmitter.removeListener('add', () => {});

index.eventEmitter.hasHandler('add');

index.eventEmitter.emit('add');

index.eventEmitter.emit('add', 1, 2, 3);
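For orientation, here is a short end-to-end sketch (not part of the commit) of how these typings compose in a consumer project; the Article interface and the sample documents are illustrative only:

import elasticlunr = require('elasticlunr');

interface Article {
    id: string;
    title: string;
    body: string;
}

// Field names and the ref are checked against keyof Article.
const articles = elasticlunr<Article>(function() {
    this.addField('title');
    this.addField('body');
    this.setRef('id');
});

articles.addDoc({ id: '1', title: 'TypeScript', body: 'Typed JavaScript at scale.' });
articles.addDoc({ id: '2', title: 'Search', body: 'Lightweight full-text search.' });

// search() returns SearchResults[]; each hit's ref resolves back to the
// stored document because document saving is on by default in elasticlunr.
const hits = articles.search('search', { fields: { title: { boost: 2 }, body: { boost: 1 } } });
for (const hit of hits) {
    const doc = articles.documentStore.getDoc(hit.ref);
    console.log(hit.score, doc.title);
}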
types/elasticlunr/index.d.ts (384 lines, vendored, Normal file)
@@ -0,0 +1,384 @@
// Type definitions for elasticlunr 0.9
// Project: http://weixsong.github.io
// Definitions by: Luis Rodrigues <https://github.com/goblindegook>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.1

export = elasticlunr;

declare function elasticlunr<T>(
    config?: (this: elasticlunr.Index<T>, idx: elasticlunr.Index<T>) => void,
): elasticlunr.Index<T>;

declare namespace elasticlunr {
    class Configuration<T> {
        constructor(config: string, fields: Array<keyof T>);

        addAllFields2UserConfig(bool: Bool, expand: boolean, fields: Array<keyof T>): void;

        buildDefaultConfig(fields: Array<keyof T>): void;

        buildUserConfig(config: SearchConfig<T>, fields: Array<keyof T>): void;

        get(): SearchConfig<T>;

        reset(): void;
    }

    type DocumentReference = string | number;

    class DocumentStore<T> {
        constructor(save?: boolean);

        addDoc(docRef: DocumentReference, doc: T): void;

        addFieldLength(docRef: DocumentReference, fieldName: keyof T, length: number): void;

        getDoc(docRef: DocumentReference): T;

        getFieldLength(docRef: DocumentReference, fieldName: keyof T): number;

        hasDoc(docRef: DocumentReference): boolean;

        isDocStored(): boolean;

        removeDoc(docRef: DocumentReference): void;

        toJSON(): SerialisedDocumentStore<T>;

        updateFieldLength(docRef: DocumentReference, fieldName: keyof T, length: number): void;

        static load<T>(serialisedData: SerialisedDocumentStore<T>): DocumentStore<T>;
    }

    type EventType = 'add' | 'update' | 'remove';
    type EventHandler = (...args: any[]) => void;

    class EventEmitter {
        constructor();

        addListener(e1: EventType, fn: EventHandler): void;
        addListener(e1: EventType, e2: EventType, fn: EventHandler): void;
        addListener(e1: EventType, e2: EventType, e3: EventType, fn: EventHandler): void;

        emit(name: EventType, ...args: any[]): void;

        hasHandler(name: EventType): boolean;

        removeListener(name: EventType, fn: EventHandler): void;
    }

    interface SearchScores {
        [key: string]: number;
    }

    interface SearchResults {
        ref: string;
        score: number;
    }

    interface IndexTokens {
        [key: string]: string[];
    }

    interface IndexDocuments<T> {
        [key: string]: T;
    }

    type Bool = 'OR' | 'AND';

    type FieldSearchConfig<T extends {}> = {
        [K in keyof T]?: {
            bool?: Bool;
            boost?: number;
            expand?: boolean;
        };
    };

    interface SearchConfig<T> {
        fields?: FieldSearchConfig<T>;
    }

    interface SerialisedInvertedIndex {
        root: InvertedIndexNode;
    }

    interface SerialisedDocumentStore<T> {
        docInfo: {
            [docRef: string]: {
                [field in keyof T]: number;
            };
        };
        docs: {
            [docRef: string]: T;
        };
    }

    interface SerialisedIndexData<T> {
        version: string;
        fields: Array<keyof T>;
        ref: keyof T;
        pipeline: SerialisedPipeline;
        documentStore: SerialisedDocumentStore<T>;
        index: { [K in keyof T]?: InvertedIndexNode };
    }

    class Index<T> {
        constructor();

        documentStore: DocumentStore<T>;

        eventEmitter: EventEmitter;

        index: { [K in keyof T]?: InvertedIndexNode };

        pipeline: Pipeline;

        addDoc(doc: T, emitEvent?: boolean): void;

        addField(fieldName: keyof T): Index<T>;

        coordNorm(scores: SearchScores, docTokens: IndexTokens, n: number): SearchScores;

        fieldSearch(queryTokens: string[], fieldName: keyof T, config: FieldSearchConfig<T>): SearchScores;

        fieldSearchStats(docTokens: IndexTokens, token: string, docs: IndexDocuments<T>): void;

        getFields(): Array<keyof T>;

        idf(term: string, field: keyof T): number;

        mergeScores(accumScores: SearchScores | null, scores: SearchScores, op: 'AND' | 'OR'): SearchScores;

        off(name: EventType, fn: EventHandler): void;

        on(e1: EventType, fn: EventHandler): void;
        on(e1: EventType, e2: EventType, fn: EventHandler): void;
        on(e1: EventType, e2: EventType, e3: EventType, fn: EventHandler): void;

        removeDoc(doc: T, emitEvent?: boolean): void;

        removeDocByRef(docRef: DocumentReference, emitEvent?: boolean): void;

        saveDocument(save: boolean): Index<T>;

        search(query: string, userConfig?: SearchConfig<T>): SearchResults[];

        setRef(refName: keyof T): Index<T>;

        toJSON(): SerialisedIndexData<T>;

        updateDoc(doc: T, emitEvent?: boolean): void;

        use(plugin: (...args: any[]) => any, ...args: any[]): void;

        static load<T>(serialisedData: SerialisedIndexData<T>): Index<T>;
    }

    interface TokenInfo {
        ref: number | string;
        tf: number;
    }

    interface InvertedIndexCharNode {
        a?: InvertedIndexNode;
        b?: InvertedIndexNode;
        c?: InvertedIndexNode;
        d?: InvertedIndexNode;
        e?: InvertedIndexNode;
        f?: InvertedIndexNode;
        g?: InvertedIndexNode;
        h?: InvertedIndexNode;
        i?: InvertedIndexNode;
        j?: InvertedIndexNode;
        k?: InvertedIndexNode;
        l?: InvertedIndexNode;
        m?: InvertedIndexNode;
        n?: InvertedIndexNode;
        o?: InvertedIndexNode;
        p?: InvertedIndexNode;
        q?: InvertedIndexNode;
        r?: InvertedIndexNode;
        s?: InvertedIndexNode;
        t?: InvertedIndexNode;
        u?: InvertedIndexNode;
        v?: InvertedIndexNode;
        w?: InvertedIndexNode;
        x?: InvertedIndexNode;
        y?: InvertedIndexNode;
        z?: InvertedIndexNode;
    }

    interface InvertedIndexDocs {
        [key: string]: {
            tf?: number;
        };
    }

    type InvertedIndexNode = InvertedIndexCharNode & {
        df: number;
        docs: InvertedIndexDocs;
    };

    class InvertedIndex {
        constructor();

        addToken(token: string, tokenInfo: TokenInfo, root?: InvertedIndexNode): void;

        expandToken(token: string, memo?: string[], root?: InvertedIndexNode): string[];

        getDocFreq(token: string): number;

        getDocs(token: string): InvertedIndexDocs;

        getNode(token: string): InvertedIndexNode;

        getTermFrequency(token: string, docRef: string | number): number;

        hasToken(token: string): boolean;

        removeToken(token: string, ref: number | string): void;

        toJSON(): SerialisedInvertedIndex;

        static load(serialisedData: SerialisedInvertedIndex): InvertedIndex;
    }

    type SerialisedPipeline = string[];

    type PipelineFunction = (token: string, i: number, tokens: string[]) => string | undefined | null | void;

    class Pipeline {
        constructor();

        add(...functions: PipelineFunction[]): void;

        after(existingFn: PipelineFunction, newFn: PipelineFunction): void;

        before(existingFn: PipelineFunction, newFn: PipelineFunction): void;

        get(): PipelineFunction[];

        remove(fn: PipelineFunction): void;

        reset(): void;

        run(tokens: string[]): string[];

        toJSON(): SerialisedPipeline;

        static getRegisteredFunction(label: string): PipelineFunction;

        static load(serialised: SerialisedPipeline): Pipeline;

        static registerFunction(fn: PipelineFunction, label: string): void;

        static warnIfFunctionNotRegistered(fn: PipelineFunction): void;
    }

    class SortedSet<T> {
        constructor();

        add(...args: any[]): void;

        clone(): SortedSet<T>;

        forEach(fn: (element: T, index: number, collection: T[]) => void, ctx: {}): void;

        indexOf(elem: {}): number;

        intersect(otherSet: SortedSet<T>): SortedSet<T>;

        locationFor(elem: T): number;

        map(fn: (element: T, index: number, collection: T[]) => T, ctx?: {}): T[];

        toArray(): T[];

        toJSON(): T[];

        union(otherSet: SortedSet<T>): SortedSet<T>;

        static load<T>(serialisedData: T[]): SortedSet<T>;
    }

    const defaultStopWords: {
        [key: string]: boolean;
    };

    const version: string;

    function addStopWords(words: string[]): void;

    function clearStopWords(): void;

    function resetStopWords(): void;

    function stemmer(w: string): string;

    function stopWordFilter(token: string): string;

    function tokenizer(str?: string): string[];

    function trimmer(token: string): string;

    namespace Pipeline {
        namespace registeredFunctions {
            function stemmer(w: string): string;

            function stopWordFilter(token: string): string;

            function trimmer(token: string): string;

            namespace stemmer {
                const label: string;
            }

            namespace stopWordFilter {
                const label: string;

                const stopWords: {
                    [key: string]: boolean;
                };
            }

            namespace trimmer {
                const label: string;
            }
        }
    }

    namespace stemmer {
        const label: string;
    }

    namespace stopWordFilter {
        const label: string;

        const stopWords: {
            [key: string]: boolean;
        };
    }

    namespace tokenizer {
        const defaultSeperator: RegExp;

        const seperator: RegExp;

        function getSeperator(): RegExp;

        function resetSeperator(): void;

        function setSeperator(sep: RegExp): void;
    }

    namespace trimmer {
        const label: string;
    }

    namespace utils {
        function toString(obj: {}): string;

        function warn(message: string): void;
    }
}
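One pattern these declarations are written to support is serialising an index ahead of time and rehydrating it later (for example, indexing at build time and shipping JSON to the browser). A minimal sketch, continuing the illustrative Article index from the earlier example:

// toJSON() yields SerialisedIndexData<Article>, which is plain data and can
// be persisted or sent over the wire.
const payload = JSON.stringify(articles.toJSON());

// Index.load() rebuilds a fully typed Index<Article> from the parsed data.
const restored = elasticlunr.Index.load<Article>(JSON.parse(payload));
restored.search('typescript');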
types/elasticlunr/tsconfig.json (16 lines, Normal file)
@@ -0,0 +1,16 @@
{
    "compilerOptions": {
        "module": "commonjs",
        "lib": ["es6"],
        "noImplicitAny": true,
        "noImplicitThis": true,
        "strictFunctionTypes": true,
        "strictNullChecks": true,
        "baseUrl": "../",
        "typeRoots": ["../"],
        "types": [],
        "noEmit": true,
        "forceConsistentCasingInFileNames": true
    },
    "files": ["index.d.ts", "elasticlunr-tests.ts"]
}
types/elasticlunr/tslint.json (1 line, Normal file)
@@ -0,0 +1 @@
{ "extends": "dtslint/dt.json" }