src/bulk/unordered.ts

Statements: 97.43% (38/39)
Branches: 88.46% (23/26)
Functions: 100% (3/3)
Lines: 97.43% (38/39)

import type { Document } from '../bson';
import * as BSON from '../bson';
import type { Collection } from '../collection';
import { MongoInvalidArgumentError } from '../error';
import type { DeleteStatement } from '../operations/delete';
import type { UpdateStatement } from '../operations/update';
import {
  Batch,
  BatchType,
  BulkOperationBase,
  type BulkWriteOptions,
  type BulkWriteResult
} from './common';
 
/** @public */
export class UnorderedBulkOperation extends BulkOperationBase {
  /** @internal */
  constructor(collection: Collection, options: BulkWriteOptions) {
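    // The third argument is the ordered flag: `false` marks this bulk operation
    // as unordered, so its batches are executed independently of one another.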
    super(collection, options, false);
  }
 
  override handleWriteError(writeResult: BulkWriteResult): void {
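    // While unexecuted batches remain, defer surfacing the write error; an unordered
    // bulk keeps dispatching the remaining batches, and only the final result is
    // handed to the base implementation.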
    if (this.s.batches.length) {
      return;
    }
 
    return super.handleWriteError(writeResult);
  }
 
  addToOperationsList(
    batchType: BatchType,
    document: Document | UpdateStatement | DeleteStatement
  ): this {
    // Get the bsonSize
    const bsonSize = BSON.calculateObjectSize(document, {
      checkKeys: false,
 
      // Since we don't know what the user selected for BSON options here,
      // err on the safe side, and check the size with ignoreUndefined: false.
      ignoreUndefined: false
    } as any);
 
    // Throw error if the doc is bigger than the max BSON size
    if (bsonSize >= this.s.maxBsonObjectSize) {
      // TODO(NODE-3483): Change this to MongoBSONError
      throw new MongoInvalidArgumentError(
        `Document is larger than the maximum size ${this.s.maxBsonObjectSize}`
      );
    }
 
    // Holds the current batch
    this.s.currentBatch = undefined;
    // Get the right type of batch
    if (batchType === BatchType.INSERT) {
      this.s.currentBatch = this.s.currentInsertBatch;
    } else if (batchType === BatchType.UPDATE) {
      this.s.currentBatch = this.s.currentUpdateBatch;
    } else if (batchType === BatchType.DELETE) {
      this.s.currentBatch = this.s.currentRemoveBatch;
    }
 
    const maxKeySize = this.s.maxKeySize;
 
    // Create a new batch object if we don't have a current one
    if (this.s.currentBatch == null) {
      this.s.currentBatch = new Batch(batchType, this.s.currentIndex);
    }
 
    // Check if we need to create a new batch
    if (
      // New batch if we exceed the max batch op size
      this.s.currentBatch.size + 1 >= this.s.maxWriteBatchSize ||
      // New batch if we exceed the maxBatchSizeBytes. Only matters if batch already has a doc,
      // since we can't send an empty batch
      (this.s.currentBatch.size > 0 &&
        this.s.currentBatch.sizeBytes + maxKeySize + bsonSize >= this.s.maxBatchSizeBytes) ||
      // New batch if the new op does not have the same op type as the current batch
      this.s.currentBatch.batchType !== batchType
    ) {
      // Save the batch to the execution stack
      this.s.batches.push(this.s.currentBatch);
 
      // Create a new batch
      this.s.currentBatch = new Batch(batchType, this.s.currentIndex);
    }
 
    // We have an array of documents
    if (Array.isArray(document)) {
      throw new MongoInvalidArgumentError('Operation passed in cannot be an Array');
    }
 
    this.s.currentBatch.operations.push(document);
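    // Record the operation's position in the overall bulk so results and errors
    // can be mapped back to the caller's original order.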
    this.s.currentBatch.originalIndexes.push(this.s.currentIndex);
    this.s.currentIndex = this.s.currentIndex + 1;
 
    // Save back the current Batch to the right type
    if (batchType === BatchType.INSERT) {
      this.s.currentInsertBatch = this.s.currentBatch;
      this.s.bulkResult.insertedIds.push({
        index: this.s.bulkResult.insertedIds.length,
        _id: (document as Document)._id
      });
    } else if (batchType === BatchType.UPDATE) {
      this.s.currentUpdateBatch = this.s.currentBatch;
    } else if (batchType === BatchType.DELETE) {
      this.s.currentRemoveBatch = this.s.currentBatch;
    }
 
    // Update current batch size
    this.s.currentBatch.size += 1;
    this.s.currentBatch.sizeBytes += maxKeySize + bsonSize;
 
    return this;
  }
}
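 
/*
 * Usage sketch (not part of this file): a driver consumer would normally obtain
 * this class through Collection.initializeUnorderedBulkOp() rather than
 * constructing it directly, for example:
 *
 *   const bulk = collection.initializeUnorderedBulkOp();
 *   bulk.insert({ name: 'a' });
 *   bulk.find({ name: 'a' }).update({ $set: { visited: true } });
 *   await bulk.execute();
 */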