db.js

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Db = void 0;
const admin_1 = require("./admin");
const bson_1 = require("./bson");
const change_stream_1 = require("./change_stream");
const collection_1 = require("./collection");
const CONSTANTS = require("./constants");
const aggregation_cursor_1 = require("./cursor/aggregation_cursor");
const list_collections_cursor_1 = require("./cursor/list_collections_cursor");
const run_command_cursor_1 = require("./cursor/run_command_cursor");
const error_1 = require("./error");
const create_collection_1 = require("./operations/create_collection");
const drop_1 = require("./operations/drop");
const execute_operation_1 = require("./operations/execute_operation");
const indexes_1 = require("./operations/indexes");
const profiling_level_1 = require("./operations/profiling_level");
const remove_user_1 = require("./operations/remove_user");
const rename_1 = require("./operations/rename");
const run_command_1 = require("./operations/run_command");
const set_profiling_level_1 = require("./operations/set_profiling_level");
const stats_1 = require("./operations/stats");
const read_concern_1 = require("./read_concern");
const read_preference_1 = require("./read_preference");
const utils_1 = require("./utils");
const write_concern_1 = require("./write_concern");
// Allowed parameters
const DB_OPTIONS_ALLOW_LIST = [
    'writeConcern',
    'readPreference',
    'readPreferenceTags',
    'native_parser',
    'forceServerObjectId',
    'pkFactory',
    'serializeFunctions',
    'raw',
    'authSource',
    'ignoreUndefined',
    'readConcern',
    'retryMiliSeconds',
    'numberOfRetries',
    'useBigInt64',
    'promoteBuffers',
    'promoteLongs',
    'bsonRegExp',
    'enableUtf8Validation',
    'promoteValues',
    'compression',
    'retryWrites',
    'timeoutMS'
];
/**
 * The **Db** class represents a MongoDB database.
 * @public
 *
 * @example
 * ```ts
 * import { MongoClient } from 'mongodb';
 *
 * interface Pet {
 *   name: string;
 *   kind: 'dog' | 'cat' | 'fish';
 * }
 *
 * const client = new MongoClient('mongodb://localhost:27017');
 * const db = client.db();
 *
 * // Create a collection that validates our union
 * await db.createCollection<Pet>('pets', {
 *   validator: { $expr: { $in: ['$kind', ['dog', 'cat', 'fish']] } }
 * })
 * ```
 */
class Db {
    static { this.SYSTEM_NAMESPACE_COLLECTION = CONSTANTS.SYSTEM_NAMESPACE_COLLECTION; }
    static { this.SYSTEM_INDEX_COLLECTION = CONSTANTS.SYSTEM_INDEX_COLLECTION; }
    static { this.SYSTEM_PROFILE_COLLECTION = CONSTANTS.SYSTEM_PROFILE_COLLECTION; }
    static { this.SYSTEM_USER_COLLECTION = CONSTANTS.SYSTEM_USER_COLLECTION; }
    static { this.SYSTEM_COMMAND_COLLECTION = CONSTANTS.SYSTEM_COMMAND_COLLECTION; }
    static { this.SYSTEM_JS_COLLECTION = CONSTANTS.SYSTEM_JS_COLLECTION; }
    /**
     * Creates a new Db instance.
     *
     * The database name cannot contain a dot; the server may apply more restrictions when an operation is run.
     *
     * @param client - The MongoClient for the database.
     * @param databaseName - The name of the database this instance represents.
     * @param options - Optional settings for Db construction.
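     *
     * @example
     * A minimal usage sketch (illustrative only), assuming a connected MongoClient named `client`;
     * handles are normally obtained via `client.db()`, which forwards these options to this constructor:
     * ```ts
     * const db = client.db('inventory', { readConcern: { level: 'majority' } });
     * console.log(db.databaseName); // 'inventory'
     * ```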
     */
    constructor(client, databaseName, options) {
        options = options ?? {};
        // Filter the options
        options = (0, utils_1.filterOptions)(options, DB_OPTIONS_ALLOW_LIST);
        // Ensure there are no dots in database name
        if (typeof databaseName === 'string' && databaseName.includes('.')) {
            throw new error_1.MongoInvalidArgumentError(`Database names cannot contain the character '.'`);
        }
        // Internal state of the db object
        this.s = {
            // Options
            options,
            // Unpack read preference
            readPreference: read_preference_1.ReadPreference.fromOptions(options),
            // Merge bson options
            bsonOptions: (0, bson_1.resolveBSONOptions)(options, client),
            // Set up the primary key factory or fallback to ObjectId
            pkFactory: options?.pkFactory ?? utils_1.DEFAULT_PK_FACTORY,
            // ReadConcern
            readConcern: read_concern_1.ReadConcern.fromOptions(options),
            writeConcern: write_concern_1.WriteConcern.fromOptions(options),
            // Namespace
            namespace: new utils_1.MongoDBNamespace(databaseName)
        };
        this.client = client;
    }
    get databaseName() {
        return this.s.namespace.db;
    }
    // Options
    get options() {
        return this.s.options;
    }
    /**
     * Check if a secondary can be used (because the read preference is *not* set to primary)
     */
    get secondaryOk() {
        return this.s.readPreference?.preference !== 'primary' || false;
    }
    get readConcern() {
        return this.s.readConcern;
    }
    /**
     * The current readPreference of the Db. If not explicitly defined for
     * this Db, it will be inherited from the parent MongoClient.
     */
    get readPreference() {
        if (this.s.readPreference == null) {
            return this.client.readPreference;
        }
        return this.s.readPreference;
    }
    get bsonOptions() {
        return this.s.bsonOptions;
    }
    // Get the write concern
    get writeConcern() {
        return this.s.writeConcern;
    }
    get namespace() {
        return this.s.namespace.toString();
    }
    get timeoutMS() {
        return this.s.options?.timeoutMS;
    }
    /**
     * Create a new collection on a server with the specified options. Use this to create capped collections.
     * More information about command options is available at https://www.mongodb.com/docs/manual/reference/command/create/
     *
     * Collection namespace validation is performed server-side.
     *
     * @param name - The name of the collection to create
     * @param options - Optional settings for the command
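     *
     * @example
     * A minimal sketch (illustrative only) creating a capped collection, the use case called out above:
     * ```ts
     * // Cap the collection at roughly 1 MiB and at most 1000 documents
     * const log = await db.createCollection('log', { capped: true, size: 1024 * 1024, max: 1000 });
     * await log.insertOne({ level: 'info', message: 'collection created' });
     * ```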
     */
    async createCollection(name, options) {
        options = (0, utils_1.resolveOptions)(this, options);
        return await (0, create_collection_1.createCollections)(this, name, options);
    }
    /**
     * Execute a command
     *
     * @remarks
     * This command does not inherit options from the MongoClient.
     *
     * The driver will ensure the following fields are attached to the command sent to the server:
     * - `lsid` - sourced from an implicit session or options.session
     * - `$readPreference` - defaults to primary or can be configured by options.readPreference
     * - `$db` - sourced from the name of this database
     *
     * If the client has a serverApi setting:
     * - `apiVersion`
     * - `apiStrict`
     * - `apiDeprecationErrors`
     *
     * When in a transaction:
     * - `readConcern` - sourced from readConcern set on the TransactionOptions
     * - `writeConcern` - sourced from writeConcern set on the TransactionOptions
     *
     * Attaching any of the above fields to the command will have no effect as the driver will overwrite the value.
     *
     * @param command - The command to run
     * @param options - Optional settings for the command
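     *
     * @example
     * A minimal sketch (illustrative only), assuming `db` was obtained from a connected MongoClient:
     * ```ts
     * // Ping the server; the driver attaches lsid, $readPreference and $db automatically
     * const result = await db.command({ ping: 1 });
     * console.log(result.ok); // 1 when the server is reachable
     * ```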
     */
    async command(command, options) {
        // Intentionally, we do not inherit options from parent for this operation.
        return await (0, execute_operation_1.executeOperation)(this.client, new run_command_1.RunCommandOperation(this.s.namespace, command, (0, utils_1.resolveOptions)(undefined, {
            ...(0, bson_1.resolveBSONOptions)(options),
            timeoutMS: options?.timeoutMS ?? this.timeoutMS,
            session: options?.session,
            readPreference: options?.readPreference,
            signal: options?.signal
        })));
    }
    /**
     * Execute an aggregation framework pipeline against the database.
     *
     * @param pipeline - An array of aggregation stages to be executed
     * @param options - Optional settings for the command
     */
    aggregate(pipeline = [], options) {
        return new aggregation_cursor_1.AggregationCursor(this.client, this.s.namespace, pipeline, (0, utils_1.resolveOptions)(this, options));
    }
    /** Return the Admin db instance */
    admin() {
        return new admin_1.Admin(this);
    }
    /**
     * Returns a reference to a MongoDB Collection. If it does not exist, it will be created implicitly.
     *
     * Collection namespace validation is performed server-side.
     *
     * @param name - The name of the collection we wish to access.
     * @returns The new Collection instance
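     *
     * @example
     * A minimal sketch (illustrative only); the collection is created on the server on first write:
     * ```ts
     * const pets = db.collection('pets');
     * await pets.insertOne({ name: 'Fido', kind: 'dog' });
     * ```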
     */
    collection(name, options = {}) {
        if (typeof options === 'function') {
            throw new error_1.MongoInvalidArgumentError('The callback form of this helper has been removed.');
        }
        return new collection_1.Collection(this, name, (0, utils_1.resolveOptions)(this, options));
    }
    /**
     * Get all the db statistics.
     *
     * @param options - Optional settings for the command
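     *
     * @example
     * A minimal sketch (illustrative only) reading a few fields of the `dbStats` result:
     * ```ts
     * const stats = await db.stats();
     * console.log(stats.db, stats.collections, stats.dataSize);
     * ```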
     */
    async stats(options) {
        return await (0, execute_operation_1.executeOperation)(this.client, new stats_1.DbStatsOperation(this, (0, utils_1.resolveOptions)(this, options)));
    }
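    /**
     * List the collections in this database, returning a cursor of collection info documents that
     * match the optional filter.
     *
     * @param filter - Query used to filter the collections returned
     * @param options - Optional settings for the command
     *
     * @example
     * A minimal sketch (illustrative only) listing collection names:
     * ```ts
     * const infos = await db.listCollections({}, { nameOnly: true }).toArray();
     * console.log(infos.map(({ name }) => name));
     * ```
     */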
    listCollections(filter = {}, options = {}) {
        return new list_collections_cursor_1.ListCollectionsCursor(this, filter, (0, utils_1.resolveOptions)(this, options));
    }
    /**
     * Rename a collection.
     *
     * @remarks
     * This operation does not inherit options from the MongoClient.
     *
     * @param fromCollection - Name of the current collection to rename
     * @param toCollection - New name of the collection
     * @param options - Optional settings for the command
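     *
     * @example
     * A minimal sketch (illustrative only); `dropTarget` optionally overwrites an existing collection
     * that already has the target name:
     * ```ts
     * const renamed = await db.renameCollection('users_old', 'users', { dropTarget: true });
     * console.log(renamed.collectionName); // 'users'
     * ```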
     */
    async renameCollection(fromCollection, toCollection, options) {
        // Intentionally, we do not inherit options from parent for this operation.
        return await (0, execute_operation_1.executeOperation)(this.client, new rename_1.RenameOperation(this.collection(fromCollection), toCollection, (0, utils_1.resolveOptions)(undefined, {
            ...options,
            new_collection: true,
            readPreference: read_preference_1.ReadPreference.primary
        })));
    }
    /**
     * Drop a collection from the database, removing it permanently. New accesses will create a new collection.
     *
     * @param name - Name of collection to drop
     * @param options - Optional settings for the command
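     *
     * @example
     * A minimal sketch (illustrative only); the returned promise resolves to `true` on success:
     * ```ts
     * const dropped = await db.dropCollection('temp_imports');
     * console.log(dropped); // true
     * ```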
     */
    async dropCollection(name, options) {
        options = (0, utils_1.resolveOptions)(this, options);
        return await (0, drop_1.dropCollections)(this, name, options);
    }
    /**
     * Drop a database, removing it permanently from the server.
     *
     * @param options - Optional settings for the command
     */
    async dropDatabase(options) {
        return await (0, execute_operation_1.executeOperation)(this.client, new drop_1.DropDatabaseOperation(this, (0, utils_1.resolveOptions)(this, options)));
    }
    /**
     * Fetch all collections for the current db.
     *
     * @param options - Optional settings for the command
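     *
     * @example
     * A minimal sketch (illustrative only) printing the name of every collection in this database:
     * ```ts
     * const collections = await db.collections();
     * console.log(collections.map(collection => collection.collectionName));
     * ```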
     */
    async collections(options) {
        options = (0, utils_1.resolveOptions)(this, options);
        const collections = await this.listCollections({}, { ...options, nameOnly: true }).toArray();
        return collections
            // Filter collections removing any illegal ones
            .filter(({ name }) => !name.includes('$'))
            .map(({ name }) => new collection_1.Collection(this, name, this.s.options));
    }
    /**
     * Creates an index on the db and collection.
     *
     * @param name - Name of the collection to create the index on.
     * @param indexSpec - Specify the field to index, or an index specification
     * @param options - Optional settings for the command
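     *
     * @example
     * A minimal sketch (illustrative only); the promise resolves to the name of the created index:
     * ```ts
     * const indexName = await db.createIndex('users', { email: 1 }, { unique: true });
     * console.log(indexName); // e.g. 'email_1'
     * ```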
     */
    async createIndex(name, indexSpec, options) {
        const indexes = await (0, execute_operation_1.executeOperation)(this.client, indexes_1.CreateIndexesOperation.fromIndexSpecification(this, name, indexSpec, options));
        return indexes[0];
    }
    /**
     * Remove a user from a database
     *
     * @param username - The username to remove
     * @param options - Optional settings for the command
     */
    async removeUser(username, options) {
        return await (0, execute_operation_1.executeOperation)(this.client, new remove_user_1.RemoveUserOperation(this, username, (0, utils_1.resolveOptions)(this, options)));
    }
    /**
     * Set the current profiling level of MongoDB
     *
     * @param level - The new profiling level (off, slow_only, all).
     * @param options - Optional settings for the command
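     *
     * @example
     * A minimal sketch (illustrative only) using one of the levels listed above:
     * ```ts
     * await db.setProfilingLevel('slow_only');
     * console.log(await db.profilingLevel()); // 'slow_only'
     * ```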
     */
    async setProfilingLevel(level, options) {
        return await (0, execute_operation_1.executeOperation)(this.client, new set_profiling_level_1.SetProfilingLevelOperation(this, level, (0, utils_1.resolveOptions)(this, options)));
    }
    /**
     * Retrieve the current profiling level for MongoDB
     *
     * @param options - Optional settings for the command
     */
    async profilingLevel(options) {
        return await (0, execute_operation_1.executeOperation)(this.client, new profiling_level_1.ProfilingLevelOperation(this, (0, utils_1.resolveOptions)(this, options)));
    }
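    /**
     * Retrieves the index information for the given collection by delegating to
     * `Collection#indexInformation`.
     *
     * @param name - The name of the collection
     * @param options - Optional settings for the command
     */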
    async indexInformation(name, options) {
        return await this.collection(name).indexInformation((0, utils_1.resolveOptions)(this, options));
    }
    /**
     * Create a new Change Stream, watching for new changes (insertions, updates,
     * replacements, deletions, and invalidations) in this database. Will ignore all
     * changes to system collections.
     *
     * @remarks
     * watch() accepts two generic arguments for distinct use cases:
     * - The first is to provide the schema that may be defined for all the collections within this database
     * - The second is to override the shape of the change stream document entirely; if it is not provided, the type will default to ChangeStreamDocument of the first argument
     *
     * @remarks
     * When `timeoutMS` is configured for a change stream, it will have different behaviour depending
     * on whether the change stream is in iterator mode or emitter mode. In both cases, a change
     * stream will time out if it does not receive a change event within `timeoutMS` of the last change
     * event.
     *
     * Note that if a change stream is consistently timing out when watching a collection, database or
     * client that is being changed, then this may be due to the server timing out before it can finish
     * processing the existing oplog. To address this, restart the change stream with a higher
     * `timeoutMS`.
     *
     * If the change stream times out the initial aggregate operation to establish the change stream on
     * the server, then the client will close the change stream. If the getMore calls to the server
     * time out, then the change stream will be left open, but will throw a MongoOperationTimeoutError
     * when in iterator mode and emit an error event that returns a MongoOperationTimeoutError in
     * emitter mode.
     *
     * To determine whether or not the change stream is still open following a timeout, check the
     * {@link ChangeStream.closed} getter.
     *
     * @example
     * In iterator mode, if a next() call throws a timeout error, it will attempt to resume the change stream.
     * The next call can just be retried after this succeeds.
     * ```ts
     * const changeStream = collection.watch([], { timeoutMS: 100 });
     * try {
     *   await changeStream.next();
     * } catch (e) {
     *   if (e instanceof MongoOperationTimeoutError && !changeStream.closed) {
     *     await changeStream.next();
     *   }
     *   throw e;
     * }
     * ```
     *
     * @example
     * In emitter mode, if the change stream goes `timeoutMS` without emitting a change event, it will
     * emit an error event that returns a MongoOperationTimeoutError, but will not close the change
     * stream unless the resume attempt fails. There is no need to re-establish change listeners as
     * this will automatically continue emitting change events once the resume attempt completes.
     *
     * ```ts
     * const changeStream = collection.watch([], { timeoutMS: 100 });
     * changeStream.on('change', console.log);
     * changeStream.on('error', e => {
     *   if (e instanceof MongoOperationTimeoutError && !changeStream.closed) {
     *     // do nothing
     *   } else {
     *     changeStream.close();
     *   }
     * });
     * ```
     * @param pipeline - An array of {@link https://www.mongodb.com/docs/manual/reference/operator/aggregation-pipeline/|aggregation pipeline stages} through which to pass change stream documents. This allows for filtering (using $match) and manipulating the change stream documents.
     * @param options - Optional settings for the command
     * @typeParam TSchema - Type of the data being detected by the change stream
     * @typeParam TChange - Type of the whole change stream document emitted
     */
    watch(pipeline = [], options = {}) {
        // Allow optionally not specifying a pipeline
        if (!Array.isArray(pipeline)) {
            options = pipeline;
            pipeline = [];
        }
        return new change_stream_1.ChangeStream(this, pipeline, (0, utils_1.resolveOptions)(this, options));
    }
    /**
     * A low level cursor API providing basic driver functionality:
     * - ClientSession management
     * - ReadPreference for server selection
     * - Running getMores automatically when a local batch is exhausted
     *
     * @param command - The command that will start a cursor on the server.
     * @param options - Configurations for running the command; bson options will apply to getMores
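     *
     * @example
     * A minimal sketch (illustrative only) running a cursor-returning `find` command and letting the
     * driver issue getMores as each batch is exhausted:
     * ```ts
     * const cursor = db.runCursorCommand({ find: 'users', filter: { active: true }, batchSize: 10 });
     * for await (const doc of cursor) {
     *   console.log(doc);
     * }
     * ```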
     */
    runCursorCommand(command, options) {
        return new run_command_cursor_1.RunCommandCursor(this, command, options);
    }
}
exports.Db = Db;
//# sourceMappingURL=db.js.map