Commit With Frontend and Backend in MERN

This commit is contained in:
sanikapendurkar
2025-02-10 14:24:56 +05:30
commit 0f4e1a3183
2518 changed files with 448667 additions and 0 deletions

File diff suppressed because it is too large. [Load Diff]

View File

@@ -0,0 +1,243 @@
import type { Document } from '../bson';
import { MongoAPIError } from '../error';
import {
Explain,
ExplainableCursor,
type ExplainCommandOptions,
type ExplainVerbosityLike,
validateExplainTimeoutOptions
} from '../explain';
import type { MongoClient } from '../mongo_client';
import { AggregateOperation, type AggregateOptions } from '../operations/aggregate';
import { executeOperation } from '../operations/execute_operation';
import type { ClientSession } from '../sessions';
import type { Sort } from '../sort';
import { mergeOptions, type MongoDBNamespace } from '../utils';
import {
type AbstractCursorOptions,
CursorTimeoutMode,
type InitialCursorResponse
} from './abstract_cursor';
/**
 * Options accepted when constructing an AggregationCursor: the union of the
 * generic cursor options and the aggregate-command options.
 * @public
 */
export interface AggregationCursorOptions extends AbstractCursorOptions, AggregateOptions {}
/**
 * The **AggregationCursor** class is an internal class that embodies an aggregation cursor on MongoDB
 * allowing for iteration over the results returned from the underlying query. It supports
 * one by one document iteration, conversion to an array or can be iterated as a Node 4.X
 * or higher stream
 * @public
 */
export class AggregationCursor<TSchema = any> extends ExplainableCursor<TSchema> {
  // The pipeline stages to run; mutated in place by addStage() and the stage helpers below.
  public readonly pipeline: Document[];
  /** @internal */
  private aggregateOptions: AggregateOptions;
  /** @internal */
  constructor(
    client: MongoClient,
    namespace: MongoDBNamespace,
    pipeline: Document[] = [],
    options: AggregateOptions = {}
  ) {
    super(client, namespace, options);
    this.pipeline = pipeline;
    this.aggregateOptions = options;
    // Reject a per-iteration timeout when the pipeline ends in a write stage
    // ($out/$merge) — the same guard is applied again in addStage().
    const lastStage: Document | undefined = this.pipeline[this.pipeline.length - 1];
    if (
      this.cursorOptions.timeoutMS != null &&
      this.cursorOptions.timeoutMode === CursorTimeoutMode.ITERATION &&
      (lastStage?.$merge != null || lastStage?.$out != null)
    )
      throw new MongoAPIError('Cannot use $out or $merge stage with ITERATION timeoutMode');
  }
  /** Returns a new uninitialized cursor over the same pipeline; the session is not carried over. */
  clone(): AggregationCursor<TSchema> {
    const clonedOptions = mergeOptions({}, this.aggregateOptions);
    delete clonedOptions.session;
    return new AggregationCursor(this.client, this.namespace, this.pipeline, {
      ...clonedOptions
    });
  }
  /** Re-types the documents yielded by this cursor; does not create a new cursor instance. */
  override map<T>(transform: (doc: TSchema) => T): AggregationCursor<T> {
    return super.map(transform) as AggregationCursor<T>;
  }
  /** @internal Runs the aggregate command and returns the server/session/first-response triple. */
  async _initialize(session: ClientSession): Promise<InitialCursorResponse> {
    const options = {
      ...this.aggregateOptions,
      ...this.cursorOptions,
      session
    };
    if (options.explain) {
      // validateExplainTimeoutOptions throws when timeoutMS is combined with explain;
      // re-throw with a message specific to aggregate options.
      try {
        validateExplainTimeoutOptions(options, Explain.fromOptions(options));
      } catch {
        throw new MongoAPIError(
          'timeoutMS cannot be used with explain when explain is specified in aggregateOptions'
        );
      }
    }
    const aggregateOperation = new AggregateOperation(this.namespace, this.pipeline, options);
    const response = await executeOperation(this.client, aggregateOperation, this.timeoutContext);
    return { server: aggregateOperation.server, session, response };
  }
  /** Execute the explain for the cursor */
  async explain(): Promise<Document>;
  async explain(verbosity: ExplainVerbosityLike | ExplainCommandOptions): Promise<Document>;
  async explain(options: { timeoutMS?: number }): Promise<Document>;
  async explain(
    verbosity: ExplainVerbosityLike | ExplainCommandOptions,
    options: { timeoutMS?: number }
  ): Promise<Document>;
  async explain(
    verbosity?: ExplainVerbosityLike | ExplainCommandOptions | { timeoutMS?: number },
    options?: { timeoutMS?: number }
  ): Promise<Document> {
    const { explain, timeout } = this.resolveExplainTimeoutOptions(verbosity, options);
    // Runs a fresh AggregateOperation with explain enabled rather than iterating this cursor.
    return (
      await executeOperation(
        this.client,
        new AggregateOperation(this.namespace, this.pipeline, {
          ...this.aggregateOptions, // NOTE: order matters here, we may need to refine this
          ...this.cursorOptions,
          ...timeout,
          explain: explain ?? true
        })
      )
    ).shift(this.deserializationOptions);
  }
  /** Add a stage to the aggregation pipeline
   * @example
   * ```
   * const documents = await users.aggregate().addStage({ $match: { name: /Mike/ } }).toArray();
   * ```
   * @example
   * ```
   * const documents = await users.aggregate()
   *   .addStage<{ name: string }>({ $project: { name: true } })
   *   .toArray(); // type of documents is { name: string }[]
   * ```
   */
  addStage(stage: Document): this;
  addStage<T = Document>(stage: Document): AggregationCursor<T>;
  addStage<T = Document>(stage: Document): AggregationCursor<T> {
    this.throwIfInitialized();
    // Same $out/$merge vs ITERATION-timeout guard as the constructor.
    if (
      this.cursorOptions.timeoutMS != null &&
      this.cursorOptions.timeoutMode === CursorTimeoutMode.ITERATION &&
      (stage.$out != null || stage.$merge != null)
    ) {
      throw new MongoAPIError('Cannot use $out or $merge stage with ITERATION timeoutMode');
    }
    this.pipeline.push(stage);
    return this as unknown as AggregationCursor<T>;
  }
  /** Add a group stage to the aggregation pipeline */
  group<T = TSchema>($group: Document): AggregationCursor<T>;
  group($group: Document): this {
    return this.addStage({ $group });
  }
  /** Add a limit stage to the aggregation pipeline */
  limit($limit: number): this {
    return this.addStage({ $limit });
  }
  /** Add a match stage to the aggregation pipeline */
  match($match: Document): this {
    return this.addStage({ $match });
  }
  /** Add an out stage to the aggregation pipeline */
  out($out: { db: string; coll: string } | string): this {
    return this.addStage({ $out });
  }
  /**
   * Add a project stage to the aggregation pipeline
   *
   * @remarks
   * In order to strictly type this function you must provide an interface
   * that represents the effect of your projection on the result documents.
   *
   * By default chaining a projection to your cursor changes the returned type to the generic {@link Document} type.
   * You should specify a parameterized type to have assertions on your final results.
   *
   * @example
   * ```typescript
   * // Best way
   * const docs: AggregationCursor<{ a: number }> = cursor.project<{ a: number }>({ _id: 0, a: true });
   * // Flexible way
   * const docs: AggregationCursor<Document> = cursor.project({ _id: 0, a: true });
   * ```
   *
   * @remarks
   * In order to strictly type this function you must provide an interface
   * that represents the effect of your projection on the result documents.
   *
   * **Note for Typescript Users:** adding a transform changes the return type of the iteration of this cursor,
   * it **does not** return a new instance of a cursor. This means when calling project,
   * you should always assign the result to a new variable in order to get a correctly typed cursor variable.
   * Take note of the following example:
   *
   * @example
   * ```typescript
   * const cursor: AggregationCursor<{ a: number; b: string }> = coll.aggregate([]);
   * const projectCursor = cursor.project<{ a: number }>({ _id: 0, a: true });
   * const aPropOnlyArray: {a: number}[] = await projectCursor.toArray();
   *
   * // or always use chaining and save the final cursor
   *
   * const cursor = coll.aggregate().project<{ a: string }>({
   *   _id: 0,
   *   a: { $convert: { input: '$a', to: 'string' }
   * }});
   * ```
   */
  project<T extends Document = Document>($project: Document): AggregationCursor<T> {
    return this.addStage<T>({ $project });
  }
  /** Add a lookup stage to the aggregation pipeline */
  lookup($lookup: Document): this {
    return this.addStage({ $lookup });
  }
  /** Add a redact stage to the aggregation pipeline */
  redact($redact: Document): this {
    return this.addStage({ $redact });
  }
  /** Add a skip stage to the aggregation pipeline */
  skip($skip: number): this {
    return this.addStage({ $skip });
  }
  /** Add a sort stage to the aggregation pipeline */
  sort($sort: Sort): this {
    return this.addStage({ $sort });
  }
  /** Add an unwind stage to the aggregation pipeline */
  unwind($unwind: Document | string): this {
    return this.addStage({ $unwind });
  }
  /** Add a geoNear stage to the aggregation pipeline */
  geoNear($geoNear: Document): this {
    return this.addStage({ $geoNear });
  }
}

View File

@@ -0,0 +1,172 @@
import type { Document } from '../bson';
import {
ChangeStream,
type ChangeStreamDocument,
type ChangeStreamEvents,
type OperationTime,
type ResumeToken
} from '../change_stream';
import { type CursorResponse } from '../cmap/wire_protocol/responses';
import { INIT, RESPONSE } from '../constants';
import type { MongoClient } from '../mongo_client';
import { AggregateOperation } from '../operations/aggregate';
import type { CollationOptions } from '../operations/command';
import { executeOperation } from '../operations/execute_operation';
import type { ClientSession } from '../sessions';
import { maxWireVersion, type MongoDBNamespace } from '../utils';
import {
AbstractCursor,
type AbstractCursorOptions,
type InitialCursorResponse
} from './abstract_cursor';
/** @internal */
export interface ChangeStreamCursorOptions extends AbstractCursorOptions {
  // Cluster time to start watching from; only applied on servers with wire version >= 7
  // (see ChangeStreamCursor.resumeOptions).
  startAtOperationTime?: OperationTime;
  // Resume token identifying a change that has already been observed.
  resumeAfter?: ResumeToken;
  // Resume token to start after; takes precedence over resumeAfter in the constructor.
  startAfter?: ResumeToken;
  maxAwaitTimeMS?: number;
  collation?: CollationOptions;
  // NOTE(review): string form of the fullDocument option — allowed values not visible here.
  fullDocument?: string;
}
/** @internal */
export class ChangeStreamCursor<
  TSchema extends Document = Document,
  TChange extends Document = ChangeStreamDocument<TSchema>
> extends AbstractCursor<TChange, ChangeStreamEvents> {
  // Most recently cached resume token; always updated through the `resumeToken`
  // setter so RESUME_TOKEN_CHANGED listeners are notified.
  private _resumeToken: ResumeToken;
  // Cluster time fallback used to resume when no resume token is available.
  private startAtOperationTime: OperationTime | null;
  // Set once cacheResumeToken() has run; influences resumeOptions below.
  private hasReceived?: boolean;
  private readonly changeStreamCursorOptions: ChangeStreamCursorOptions;
  // postBatchResumeToken from the latest server response, if any.
  private postBatchResumeToken?: ResumeToken;
  private readonly pipeline: Document[];
  /**
   * @internal
   *
   * used to determine change stream resumability
   */
  maxWireVersion: number | undefined;
  constructor(
    client: MongoClient,
    namespace: MongoDBNamespace,
    pipeline: Document[] = [],
    options: ChangeStreamCursorOptions = {}
  ) {
    // Change stream cursors are always opened as tailable + awaitData.
    super(client, namespace, { ...options, tailable: true, awaitData: true });
    this.pipeline = pipeline;
    this.changeStreamCursorOptions = options;
    this._resumeToken = null;
    this.startAtOperationTime = options.startAtOperationTime ?? null;
    // startAfter takes precedence over resumeAfter as the initial resume token.
    if (options.startAfter) {
      this.resumeToken = options.startAfter;
    } else if (options.resumeAfter) {
      this.resumeToken = options.resumeAfter;
    }
  }
  /** Stores the token and emits RESUME_TOKEN_CHANGED for listeners. */
  set resumeToken(token: ResumeToken) {
    this._resumeToken = token;
    this.emit(ChangeStream.RESUME_TOKEN_CHANGED, token);
  }
  get resumeToken(): ResumeToken {
    return this._resumeToken;
  }
  /**
   * Options for re-creating this cursor after a resumable error: the original
   * options minus all resume-related keys, plus whichever resume mechanism
   * currently applies (startAfter, resumeAfter, or startAtOperationTime).
   */
  get resumeOptions(): ChangeStreamCursorOptions {
    const options: ChangeStreamCursorOptions = {
      ...this.changeStreamCursorOptions
    };
    for (const key of ['resumeAfter', 'startAfter', 'startAtOperationTime'] as const) {
      delete options[key];
    }
    if (this.resumeToken != null) {
      // Keep using startAfter until a change has actually been received;
      // after that, resumeAfter applies.
      if (this.changeStreamCursorOptions.startAfter && !this.hasReceived) {
        options.startAfter = this.resumeToken;
      } else {
        options.resumeAfter = this.resumeToken;
      }
    } else if (this.startAtOperationTime != null && maxWireVersion(this.server) >= 7) {
      options.startAtOperationTime = this.startAtOperationTime;
    }
    return options;
  }
  /**
   * Caches a resume token, preferring the postBatchResumeToken when the current
   * batch is fully consumed, and marks that a change has been received.
   */
  cacheResumeToken(resumeToken: ResumeToken): void {
    if (this.bufferedCount() === 0 && this.postBatchResumeToken) {
      this.resumeToken = this.postBatchResumeToken;
    } else {
      this.resumeToken = resumeToken;
    }
    this.hasReceived = true;
  }
  /**
   * Records the response's postBatchResumeToken; for an empty batch it
   * immediately becomes the current resume token.
   */
  _processBatch(response: CursorResponse): void {
    const { postBatchResumeToken } = response;
    if (postBatchResumeToken) {
      this.postBatchResumeToken = postBatchResumeToken;
      if (response.batchSize === 0) {
        this.resumeToken = postBatchResumeToken;
      }
    }
  }
  clone(): AbstractCursor<TChange> {
    return new ChangeStreamCursor(this.client, this.namespace, this.pipeline, {
      ...this.cursorOptions
    });
  }
  /** @internal Opens the change stream via an aggregate and captures initial resume state. */
  async _initialize(session: ClientSession): Promise<InitialCursorResponse> {
    const aggregateOperation = new AggregateOperation(this.namespace, this.pipeline, {
      ...this.cursorOptions,
      ...this.changeStreamCursorOptions,
      session
    });
    const response = await executeOperation(
      session.client,
      aggregateOperation,
      this.timeoutContext
    );
    const server = aggregateOperation.server;
    this.maxWireVersion = maxWireVersion(server);
    // When no explicit resume point was provided and the server is new enough
    // (wire version >= 7), remember the operation time of the initial response
    // as the fallback resume point.
    if (
      this.startAtOperationTime == null &&
      this.changeStreamCursorOptions.resumeAfter == null &&
      this.changeStreamCursorOptions.startAfter == null &&
      this.maxWireVersion >= 7
    ) {
      this.startAtOperationTime = response.operationTime;
    }
    this._processBatch(response);
    this.emit(INIT, response);
    this.emit(RESPONSE);
    return { server, session, response };
  }
  /** Refreshes maxWireVersion and resume-token state on every getMore round trip. */
  override async getMore(batchSize: number): Promise<CursorResponse> {
    const response = await super.getMore(batchSize);
    this.maxWireVersion = maxWireVersion(this.server);
    this._processBatch(response);
    this.emit(ChangeStream.MORE, response);
    this.emit(ChangeStream.RESPONSE);
    return response;
  }
}

View File

@@ -0,0 +1,83 @@
import { type Document } from '../bson';
import { type ClientBulkWriteCursorResponse } from '../cmap/wire_protocol/responses';
import type { MongoClient } from '../mongo_client';
import { ClientBulkWriteOperation } from '../operations/client_bulk_write/client_bulk_write';
import { type ClientBulkWriteCommandBuilder } from '../operations/client_bulk_write/command_builder';
import { type ClientBulkWriteOptions } from '../operations/client_bulk_write/common';
import { executeOperation } from '../operations/execute_operation';
import type { ClientSession } from '../sessions';
import { mergeOptions, MongoDBNamespace } from '../utils';
import {
AbstractCursor,
type AbstractCursorOptions,
type InitialCursorResponse
} from './abstract_cursor';
/**
 * Options for the client bulk write cursor: the standard cursor options minus
 * the tailable/awaitData/maxAwaitTimeMS options (the cursor is immediately
 * exhausted — see ClientBulkWriteCursor), combined with the bulk write options.
 * @public
 */
export interface ClientBulkWriteCursorOptions
  extends Omit<AbstractCursorOptions, 'maxAwaitTimeMS' | 'tailable' | 'awaitData'>,
    ClientBulkWriteOptions {}
/**
 * This is the cursor that handles client bulk write operations. Note this is never
 * exposed directly to the user and is always immediately exhausted.
 * @internal
 */
export class ClientBulkWriteCursor extends AbstractCursor {
  commandBuilder: ClientBulkWriteCommandBuilder;
  /** @internal */
  private cursorResponse?: ClientBulkWriteCursorResponse;
  /** @internal */
  private clientBulkWriteOptions: ClientBulkWriteOptions;
  /** @internal */
  constructor(
    client: MongoClient,
    commandBuilder: ClientBulkWriteCommandBuilder,
    options: ClientBulkWriteCursorOptions = {}
  ) {
    // Client bulk writes always target the admin database's $cmd namespace.
    super(client, new MongoDBNamespace('admin', '$cmd'), options);
    this.commandBuilder = commandBuilder;
    this.clientBulkWriteOptions = options;
  }
  /**
   * We need a way to get the top level cursor response fields for
   * generating the bulk write result, so we expose this here.
   * Returns null until _initialize has stored a response.
   */
  get response(): ClientBulkWriteCursorResponse | null {
    return this.cursorResponse ?? null;
  }
  /** The operations most recently built and sent by the command builder. */
  get operations(): Document[] {
    return this.commandBuilder.lastOperations;
  }
  /** Creates an uninitialized copy of this cursor; the session is not carried over. */
  clone(): ClientBulkWriteCursor {
    const copiedOptions = mergeOptions({}, this.clientBulkWriteOptions);
    delete copiedOptions.session;
    return new ClientBulkWriteCursor(this.client, this.commandBuilder, copiedOptions);
  }
  /** @internal Executes the bulk write command and records its response for result construction. */
  async _initialize(session: ClientSession): Promise<InitialCursorResponse> {
    const operation = new ClientBulkWriteOperation(this.commandBuilder, {
      ...this.clientBulkWriteOptions,
      ...this.cursorOptions,
      session
    });
    const initialResponse = await executeOperation(this.client, operation, this.timeoutContext);
    this.cursorResponse = initialResponse;
    return { server: operation.server, session, response: initialResponse };
  }
}

482
backend/node_modules/mongodb/src/cursor/find_cursor.ts generated vendored Normal file
View File

@@ -0,0 +1,482 @@
import { type Document } from '../bson';
import { CursorResponse } from '../cmap/wire_protocol/responses';
import { MongoAPIError, MongoInvalidArgumentError, MongoTailableCursorError } from '../error';
import {
Explain,
ExplainableCursor,
type ExplainCommandOptions,
type ExplainVerbosityLike,
validateExplainTimeoutOptions
} from '../explain';
import type { MongoClient } from '../mongo_client';
import type { CollationOptions } from '../operations/command';
import { CountOperation, type CountOptions } from '../operations/count';
import { executeOperation } from '../operations/execute_operation';
import { FindOperation, type FindOptions } from '../operations/find';
import type { Hint } from '../operations/operation';
import type { ClientSession } from '../sessions';
import { formatSort, type Sort, type SortDirection } from '../sort';
import { emitWarningOnce, mergeOptions, type MongoDBNamespace, squashError } from '../utils';
import { type InitialCursorResponse } from './abstract_cursor';
/**
 * Flags allowed for cursor. Declared `as const` so the entries keep their
 * literal string types.
 * @public
 */
export const FLAGS = [
  'tailable',
  'oplogReplay',
  'noCursorTimeout',
  'awaitData',
  'exhaust',
  'partial'
] as const;
/** @public */
export class FindCursor<TSchema = any> extends ExplainableCursor<TSchema> {
  /** @internal The query filter sent with the find command. */
  private cursorFilter: Document;
  /** @internal Count of documents received so far; used to enforce `limit` client-side in getMore. */
  private numReturned = 0;
  /** @internal */
  private readonly findOptions: FindOptions;
  /** @internal */
  constructor(
    client: MongoClient,
    namespace: MongoDBNamespace,
    filter: Document = {},
    options: FindOptions = {}
  ) {
    super(client, namespace, options);
    this.cursorFilter = filter;
    this.findOptions = options;
    if (options.sort != null) {
      // Normalize the user-provided sort specification up front.
      this.findOptions.sort = formatSort(options.sort);
    }
  }
  /** Returns a new uninitialized cursor with the same filter and options; the session is not carried over. */
  clone(): FindCursor<TSchema> {
    const clonedOptions = mergeOptions({}, this.findOptions);
    delete clonedOptions.session;
    return new FindCursor(this.client, this.namespace, this.cursorFilter, {
      ...clonedOptions
    });
  }
  /** Re-types the documents yielded by this cursor; does not create a new cursor instance. */
  override map<T>(transform: (doc: TSchema) => T): FindCursor<T> {
    return super.map(transform) as FindCursor<T>;
  }
  /** @internal Runs the find command and returns the server/session/first-response triple. */
  async _initialize(session: ClientSession): Promise<InitialCursorResponse> {
    const options = {
      ...this.findOptions, // NOTE: order matters here, we may need to refine this
      ...this.cursorOptions,
      session
    };
    if (options.explain) {
      // validateExplainTimeoutOptions throws when timeoutMS is combined with explain;
      // re-throw with a message specific to find options.
      try {
        validateExplainTimeoutOptions(options, Explain.fromOptions(options));
      } catch {
        throw new MongoAPIError(
          'timeoutMS cannot be used with explain when explain is specified in findOptions'
        );
      }
    }
    const findOperation = new FindOperation(this.namespace, this.cursorFilter, options);
    const response = await executeOperation(this.client, findOperation, this.timeoutContext);
    // the response is not a cursor when `explain` is enabled
    this.numReturned = response.batchSize;
    return { server: findOperation.server, session, response };
  }
  /** @internal */
  override async getMore(batchSize: number): Promise<CursorResponse> {
    const numReturned = this.numReturned;
    if (numReturned) {
      // TODO(DRIVERS-1448): Remove logic to enforce `limit` in the driver
      const limit = this.findOptions.limit;
      batchSize =
        limit && limit > 0 && numReturned + batchSize > limit ? limit - numReturned : batchSize;
      // this is an optimization for the special case of a limit for a find command to avoid an
      // extra getMore when the limit has been reached and the limit is a multiple of the batchSize.
      // This is a consequence of the new query engine in 5.0 having no knowledge of the limit as it
      // produces results for the find command. Once a batch is filled up, it is returned and only
      // on the subsequent getMore will the query framework consider the limit, determine the cursor
      // is exhausted and return a cursorId of zero.
      // instead, if we determine there are no more documents to request from the server, we preemptively
      // close the cursor
      if (batchSize <= 0) {
        try {
          await this.close();
        } catch (error) {
          squashError(error);
        }
        return CursorResponse.emptyGetMore;
      }
    }
    const response = await super.getMore(batchSize);
    // TODO: wrap this in some logic to prevent it from happening if we don't need this support
    this.numReturned = this.numReturned + response.batchSize;
    return response;
  }
  /**
   * Get the count of documents for this cursor
   * @deprecated Use `collection.estimatedDocumentCount` or `collection.countDocuments` instead
   */
  async count(options?: CountOptions): Promise<number> {
    emitWarningOnce(
      'cursor.count is deprecated and will be removed in the next major version, please use `collection.estimatedDocumentCount` or `collection.countDocuments` instead '
    );
    // Guard against the legacy callback-era calling convention count(boolean).
    if (typeof options === 'boolean') {
      throw new MongoInvalidArgumentError('Invalid first parameter to count');
    }
    return await executeOperation(
      this.client,
      new CountOperation(this.namespace, this.cursorFilter, {
        ...this.findOptions, // NOTE: order matters here, we may need to refine this
        ...this.cursorOptions,
        ...options
      })
    );
  }
  /** Execute the explain for the cursor */
  async explain(): Promise<Document>;
  async explain(verbosity: ExplainVerbosityLike | ExplainCommandOptions): Promise<Document>;
  async explain(options: { timeoutMS?: number }): Promise<Document>;
  async explain(
    verbosity: ExplainVerbosityLike | ExplainCommandOptions,
    options: { timeoutMS?: number }
  ): Promise<Document>;
  async explain(
    verbosity?: ExplainVerbosityLike | ExplainCommandOptions | { timeoutMS?: number },
    options?: { timeoutMS?: number }
  ): Promise<Document> {
    const { explain, timeout } = this.resolveExplainTimeoutOptions(verbosity, options);
    // Runs a fresh FindOperation with explain enabled rather than iterating this cursor.
    return (
      await executeOperation(
        this.client,
        new FindOperation(this.namespace, this.cursorFilter, {
          ...this.findOptions, // NOTE: order matters here, we may need to refine this
          ...this.cursorOptions,
          ...timeout,
          explain: explain ?? true
        })
      )
    ).shift(this.deserializationOptions);
  }
  /** Set the cursor query */
  filter(filter: Document): this {
    this.throwIfInitialized();
    this.cursorFilter = filter;
    return this;
  }
  /**
   * Set the cursor hint
   *
   * @param hint - If specified, then the query system will only consider plans using the hinted index.
   */
  hint(hint: Hint): this {
    this.throwIfInitialized();
    this.findOptions.hint = hint;
    return this;
  }
  /**
   * Set the cursor min
   *
   * @param min - Specify a $min value to specify the inclusive lower bound for a specific index in order to constrain the results of find(). The $min specifies the lower bound for all keys of a specific index in order.
   */
  min(min: Document): this {
    this.throwIfInitialized();
    this.findOptions.min = min;
    return this;
  }
  /**
   * Set the cursor max
   *
   * @param max - Specify a $max value to specify the exclusive upper bound for a specific index in order to constrain the results of find(). The $max specifies the upper bound for all keys of a specific index in order.
   */
  max(max: Document): this {
    this.throwIfInitialized();
    this.findOptions.max = max;
    return this;
  }
  /**
   * Set the cursor returnKey.
   * If set to true, modifies the cursor to only return the index field or fields for the results of the query, rather than documents.
   * If set to true and the query does not use an index to perform the read operation, the returned documents will not contain any fields.
   *
   * @param value - the returnKey value.
   */
  returnKey(value: boolean): this {
    this.throwIfInitialized();
    this.findOptions.returnKey = value;
    return this;
  }
  /**
   * Modifies the output of a query by adding a field $recordId to matching documents. $recordId is the internal key which uniquely identifies a document in a collection.
   *
   * @param value - The $showDiskLoc option has now been deprecated and replaced with the showRecordId field. $showDiskLoc will still be accepted for OP_QUERY style find.
   */
  showRecordId(value: boolean): this {
    this.throwIfInitialized();
    this.findOptions.showRecordId = value;
    return this;
  }
  /**
   * Add a query modifier to the cursor query
   *
   * @param name - The query modifier (must start with $, such as $orderby etc)
   * @param value - The modifier value.
   * @throws MongoInvalidArgumentError if `name` does not start with `$` or is not a recognized modifier.
   */
  addQueryModifier(name: string, value: string | boolean | number | Document): this {
    this.throwIfInitialized();
    if (name[0] !== '$') {
      throw new MongoInvalidArgumentError(`${name} is not a valid query modifier`);
    }
    // Strip off the $ (slice replaces the deprecated String.prototype.substr)
    const field = name.slice(1);
    // NOTE: consider some TS magic for this
    switch (field) {
      case 'comment':
        this.findOptions.comment = value as string | Document;
        break;
      case 'explain':
        this.findOptions.explain = value as boolean;
        break;
      case 'hint':
        this.findOptions.hint = value as string | Document;
        break;
      case 'max':
        this.findOptions.max = value as Document;
        break;
      case 'maxTimeMS':
        this.findOptions.maxTimeMS = value as number;
        break;
      case 'min':
        this.findOptions.min = value as Document;
        break;
      case 'orderby':
        this.findOptions.sort = formatSort(value as string | Document);
        break;
      case 'query':
        this.cursorFilter = value as Document;
        break;
      case 'returnKey':
        this.findOptions.returnKey = value as boolean;
        break;
      case 'showDiskLoc':
        this.findOptions.showRecordId = value as boolean;
        break;
      default:
        throw new MongoInvalidArgumentError(`Invalid query modifier: ${name}`);
    }
    return this;
  }
  /**
   * Add a comment to the cursor query allowing for tracking the comment in the log.
   *
   * @param value - The comment attached to this query.
   */
  comment(value: string): this {
    this.throwIfInitialized();
    this.findOptions.comment = value;
    return this;
  }
  /**
   * Set a maxAwaitTimeMS on a tailing cursor query to allow to customize the timeout value for the option awaitData (Only supported on MongoDB 3.2 or higher, ignored otherwise)
   *
   * @param value - Number of milliseconds to wait before aborting the tailed query.
   */
  maxAwaitTimeMS(value: number): this {
    this.throwIfInitialized();
    if (typeof value !== 'number') {
      throw new MongoInvalidArgumentError('Argument for maxAwaitTimeMS must be a number');
    }
    this.findOptions.maxAwaitTimeMS = value;
    return this;
  }
  /**
   * Set a maxTimeMS on the cursor query, allowing for hard timeout limits on queries (Only supported on MongoDB 2.6 or higher)
   *
   * @param value - Number of milliseconds to wait before aborting the query.
   */
  override maxTimeMS(value: number): this {
    this.throwIfInitialized();
    if (typeof value !== 'number') {
      throw new MongoInvalidArgumentError('Argument for maxTimeMS must be a number');
    }
    this.findOptions.maxTimeMS = value;
    return this;
  }
  /**
   * Add a project stage to the aggregation pipeline
   *
   * @remarks
   * In order to strictly type this function you must provide an interface
   * that represents the effect of your projection on the result documents.
   *
   * By default chaining a projection to your cursor changes the returned type to the generic
   * {@link Document} type.
   * You should specify a parameterized type to have assertions on your final results.
   *
   * @example
   * ```typescript
   * // Best way
   * const docs: FindCursor<{ a: number }> = cursor.project<{ a: number }>({ _id: 0, a: true });
   * // Flexible way
   * const docs: FindCursor<Document> = cursor.project({ _id: 0, a: true });
   * ```
   *
   * @remarks
   *
   * **Note for Typescript Users:** adding a transform changes the return type of the iteration of this cursor,
   * it **does not** return a new instance of a cursor. This means when calling project,
   * you should always assign the result to a new variable in order to get a correctly typed cursor variable.
   * Take note of the following example:
   *
   * @example
   * ```typescript
   * const cursor: FindCursor<{ a: number; b: string }> = coll.find();
   * const projectCursor = cursor.project<{ a: number }>({ _id: 0, a: true });
   * const aPropOnlyArray: {a: number}[] = await projectCursor.toArray();
   *
   * // or always use chaining and save the final cursor
   *
   * const cursor = coll.find().project<{ a: string }>({
   *   _id: 0,
   *   a: { $convert: { input: '$a', to: 'string' }
   * }});
   * ```
   */
  project<T extends Document = Document>(value: Document): FindCursor<T> {
    this.throwIfInitialized();
    this.findOptions.projection = value;
    return this as unknown as FindCursor<T>;
  }
  /**
   * Sets the sort order of the cursor query.
   *
   * @param sort - The key or keys set for the sort.
   * @param direction - The direction of the sorting (1 or -1).
   */
  sort(sort: Sort | string, direction?: SortDirection): this {
    this.throwIfInitialized();
    if (this.findOptions.tailable) {
      throw new MongoTailableCursorError('Tailable cursor does not support sorting');
    }
    this.findOptions.sort = formatSort(sort, direction);
    return this;
  }
  /**
   * Allows disk use for blocking sort operations exceeding 100MB memory. (MongoDB 3.2 or higher)
   *
   * @remarks
   * {@link https://www.mongodb.com/docs/manual/reference/command/find/#find-cmd-allowdiskuse | find command allowDiskUse documentation}
   */
  allowDiskUse(allow = true): this {
    this.throwIfInitialized();
    if (!this.findOptions.sort) {
      throw new MongoInvalidArgumentError('Option "allowDiskUse" requires a sort specification');
    }
    // As of 6.0 the default is true. This allows users to get back to the old behavior.
    if (!allow) {
      this.findOptions.allowDiskUse = false;
      return this;
    }
    this.findOptions.allowDiskUse = true;
    return this;
  }
  /**
   * Set the collation options for the cursor.
   *
   * @param value - The cursor collation options (MongoDB 3.4 or higher) settings for update operation (see 3.4 documentation for available fields).
   */
  collation(value: CollationOptions): this {
    this.throwIfInitialized();
    this.findOptions.collation = value;
    return this;
  }
  /**
   * Set the limit for the cursor.
   *
   * @param value - The limit for the cursor query.
   */
  limit(value: number): this {
    this.throwIfInitialized();
    if (this.findOptions.tailable) {
      throw new MongoTailableCursorError('Tailable cursor does not support limit');
    }
    if (typeof value !== 'number') {
      throw new MongoInvalidArgumentError('Operation "limit" requires an integer');
    }
    this.findOptions.limit = value;
    return this;
  }
  /**
   * Set the skip for the cursor.
   *
   * @param value - The skip for the cursor query.
   */
  skip(value: number): this {
    this.throwIfInitialized();
    if (this.findOptions.tailable) {
      throw new MongoTailableCursorError('Tailable cursor does not support skip');
    }
    if (typeof value !== 'number') {
      throw new MongoInvalidArgumentError('Operation "skip" requires an integer');
    }
    this.findOptions.skip = value;
    return this;
  }
}

View File

@@ -0,0 +1,48 @@
import type { Document } from '../bson';
import type { Db } from '../db';
import { executeOperation } from '../operations/execute_operation';
import {
type CollectionInfo,
ListCollectionsOperation,
type ListCollectionsOptions
} from '../operations/list_collections';
import type { ClientSession } from '../sessions';
import { AbstractCursor, type InitialCursorResponse } from './abstract_cursor';
/** @public */
export class ListCollectionsCursor<
  T extends Pick<CollectionInfo, 'name' | 'type'> | CollectionInfo =
    | Pick<CollectionInfo, 'name' | 'type'>
    | CollectionInfo
> extends AbstractCursor<T> {
  parent: Db;
  filter: Document;
  options?: ListCollectionsOptions;
  constructor(db: Db, filter: Document, options?: ListCollectionsOptions) {
    super(db.client, db.s.namespace, options);
    this.parent = db;
    this.filter = filter;
    this.options = options;
  }
  /** Returns a fresh, uninitialized cursor over the same database and filter. */
  clone(): ListCollectionsCursor<T> {
    const combinedOptions = { ...this.options, ...this.cursorOptions };
    return new ListCollectionsCursor(this.parent, this.filter, combinedOptions);
  }
  /** @internal Issues the listCollections command and returns the initial server response. */
  async _initialize(session: ClientSession | undefined): Promise<InitialCursorResponse> {
    const listCollections = new ListCollectionsOperation(this.parent, this.filter, {
      ...this.cursorOptions,
      ...this.options,
      session
    });
    const initialResponse = await executeOperation(
      this.parent.client,
      listCollections,
      this.timeoutContext
    );
    return { server: listCollections.server, session, response: initialResponse };
  }
}

View File

@@ -0,0 +1,37 @@
import type { Collection } from '../collection';
import { executeOperation } from '../operations/execute_operation';
import { ListIndexesOperation, type ListIndexesOptions } from '../operations/indexes';
import type { ClientSession } from '../sessions';
import { AbstractCursor, type InitialCursorResponse } from './abstract_cursor';
/** @public */
export class ListIndexesCursor extends AbstractCursor {
  parent: Collection;
  options?: ListIndexesOptions;
  constructor(collection: Collection, options?: ListIndexesOptions) {
    super(collection.client, collection.s.namespace, options);
    this.parent = collection;
    this.options = options;
  }
  /** Returns a fresh, uninitialized cursor over the same collection's indexes. */
  clone(): ListIndexesCursor {
    const combinedOptions = { ...this.options, ...this.cursorOptions };
    return new ListIndexesCursor(this.parent, combinedOptions);
  }
  /** @internal Issues the listIndexes command and returns the initial server response. */
  async _initialize(session: ClientSession | undefined): Promise<InitialCursorResponse> {
    const listIndexes = new ListIndexesOperation(this.parent, {
      ...this.cursorOptions,
      ...this.options,
      session
    });
    const initialResponse = await executeOperation(
      this.parent.client,
      listIndexes,
      this.timeoutContext
    );
    return { server: listIndexes.server, session, response: initialResponse };
  }
}

View File

@@ -0,0 +1,20 @@
import type { Collection } from '../collection';
import type { AggregateOptions } from '../operations/aggregate';
import { AggregationCursor } from './aggregation_cursor';
/** @public */
export type ListSearchIndexesOptions = Omit<AggregateOptions, 'readConcern' | 'writeConcern'>;
/** @public */
export class ListSearchIndexesCursor extends AggregationCursor<{ name: string }> {
  /** @internal */
  constructor(
    { fullNamespace: ns, client }: Collection,
    name: string | null,
    options: ListSearchIndexesOptions = {}
  ) {
    // A null name lists every search index; otherwise the stage filters to
    // the single named index.
    const stage = name == null ? { $listSearchIndexes: {} } : { $listSearchIndexes: { name } };
    super(client, ns, [stage], options);
  }
}

View File

@@ -0,0 +1,173 @@
import type { BSONSerializeOptions, Document } from '../bson';
import { CursorResponse } from '../cmap/wire_protocol/responses';
import type { Db } from '../db';
import { MongoAPIError } from '../error';
import { executeOperation } from '../operations/execute_operation';
import { GetMoreOperation } from '../operations/get_more';
import { RunCommandOperation } from '../operations/run_command';
import type { ReadConcernLike } from '../read_concern';
import type { ReadPreferenceLike } from '../read_preference';
import type { ClientSession } from '../sessions';
import { ns } from '../utils';
import {
AbstractCursor,
type CursorTimeoutMode,
type InitialCursorResponse
} from './abstract_cursor';
/**
 * Options accepted by `db.runCursorCommand`.
 * @public
 */
export type RunCursorCommandOptions = {
  /** Read preference used when selecting a server to run the command on. */
  readPreference?: ReadPreferenceLike;
  /** Session the command (and subsequent getMores) will be run under. */
  session?: ClientSession;
  /**
   * @experimental
   * Specifies the time an operation will run until it throws a timeout error. Note that if
   * `maxTimeMS` is provided in the command in addition to setting `timeoutMS` in the options, then
   * the original value of `maxTimeMS` will be overwritten.
   */
  timeoutMS?: number;
  /**
   * @public
   * @experimental
   * Specifies how `timeoutMS` is applied to the cursor. Can be either `'cursorLifeTime'` or `'iteration'`
   * When set to `'iteration'`, the deadline specified by `timeoutMS` applies to each call of
   * `cursor.next()`.
   * When set to `'cursorLifetime'`, the deadline applies to the life of the entire cursor.
   *
   * Depending on the type of cursor being used, this option has different default values.
   * For non-tailable cursors, this value defaults to `'cursorLifetime'`
   * For tailable cursors, this value defaults to `'iteration'` since tailable cursors, by
   * definition can have an arbitrarily long lifetime.
   *
   * @example
   * ```ts
   * const cursor = collection.find({}, {timeoutMS: 100, timeoutMode: 'iteration'});
   * for await (const doc of cursor) {
   *   // process doc
   *   // This will throw a timeout error if any of the iterator's `next()` calls takes more than 100ms, but
   *   // will continue to iterate successfully otherwise, regardless of the number of batches.
   * }
   * ```
   *
   * @example
   * ```ts
   * const cursor = collection.find({}, { timeoutMS: 1000, timeoutMode: 'cursorLifetime' });
   * const docs = await cursor.toArray(); // This entire line will throw a timeout error if all batches are not fetched and returned within 1000ms.
   * ```
   */
  timeoutMode?: CursorTimeoutMode;
  // NOTE(review): presumably set when the command opens a tailable cursor so the
  // driver picks the right timeoutMode default — confirm against timeout handling.
  tailable?: boolean;
  // NOTE(review): presumably set alongside `tailable` for awaitData cursors — confirm.
  awaitData?: boolean;
} & BSONSerializeOptions;
/** @public */
export class RunCommandCursor extends AbstractCursor {
  /** The frozen copy of the command document this cursor was created from. */
  public readonly command: Readonly<Record<string, any>>;

  /** Fields merged into every getMore issued by this cursor (set via the setters below). */
  public readonly getMoreOptions: {
    comment?: any;
    maxAwaitTimeMS?: number;
    batchSize?: number;
  } = {};

  /** @internal */
  private db: Db;

  /** @internal */
  constructor(db: Db, command: Document, options: RunCursorCommandOptions = {}) {
    super(db.client, ns(db.namespace), options);
    this.db = db;
    // Freeze a shallow copy so neither later mutation of the caller's object
    // nor of this.command can change what gets sent to the server.
    this.command = Object.freeze({ ...command });
  }

  /**
   * Controls the `getMore.comment` field
   * @param comment - any BSON value
   */
  public setComment(comment: any): this {
    this.getMoreOptions.comment = comment;
    return this;
  }

  /**
   * Controls the `getMore.maxTimeMS` field. Only valid when cursor is tailable await
   * @param maxTimeMS - the number of milliseconds to wait for new data
   */
  public setMaxTimeMS(maxTimeMS: number): this {
    this.getMoreOptions.maxAwaitTimeMS = maxTimeMS;
    return this;
  }

  /**
   * Controls the `getMore.batchSize` field
   * @param batchSize - the number documents to return in the `nextBatch`
   */
  public setBatchSize(batchSize: number): this {
    this.getMoreOptions.batchSize = batchSize;
    return this;
  }

  /** Unsupported for RunCommandCursor */
  public override clone(): never {
    throw new MongoAPIError('Clone not supported, create a new cursor with db.runCursorCommand');
  }

  /** Unsupported for RunCommandCursor: readConcern must be configured directly on command document */
  public override withReadConcern(_: ReadConcernLike): never {
    throw new MongoAPIError(
      'RunCommandCursor does not support readConcern it must be attached to the command being run'
    );
  }

  /** Unsupported for RunCommandCursor: various cursor flags must be configured directly on command document */
  public override addCursorFlag(_: string, __: boolean): never {
    throw new MongoAPIError(
      'RunCommandCursor does not support cursor flags, they must be attached to the command being run'
    );
  }

  /**
   * Unsupported for RunCommandCursor: maxTimeMS must be configured directly on command document
   */
  public override maxTimeMS(_: number): never {
    throw new MongoAPIError(
      'maxTimeMS must be configured on the command document directly, to configure getMore.maxTimeMS use cursor.setMaxTimeMS()'
    );
  }

  /** Unsupported for RunCommandCursor: batchSize must be configured directly on command document */
  public override batchSize(_: number): never {
    throw new MongoAPIError(
      'batchSize must be configured on the command document directly, to configure getMore.batchSize use cursor.setBatchSize()'
    );
  }

  /** @internal */
  protected async _initialize(session: ClientSession): Promise<InitialCursorResponse> {
    // Run the user's command verbatim, asking the wire layer to parse the
    // reply as a cursor response.
    const operation = new RunCommandOperation<CursorResponse>(this.db, this.command, {
      ...this.cursorOptions,
      responseType: CursorResponse,
      readPreference: this.cursorOptions.readPreference,
      session: session
    });
    const response = await executeOperation(this.client, operation, this.timeoutContext);
    // operation.server is only populated once executeOperation has resolved.
    return { server: operation.server, session, response };
  }

  /** @internal */
  override async getMore(_batchSize: number): Promise<CursorResponse> {
    // _batchSize is intentionally ignored: getMore fields are driven by
    // getMoreOptions (set via the setters), spread last so they win.
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    const getMoreOperation = new GetMoreOperation(this.namespace, this.id!, this.server!, {
      ...this.cursorOptions,
      session: this.session,
      ...this.getMoreOptions
    });
    return await executeOperation(this.client, getMoreOperation, this.timeoutContext);
  }
}