@@ -22,7 +22,7 @@ import {ResourceMetrics} from '@opentelemetry/sdk-metrics';
 import * as assert from 'assert';
 import {GCPMetricsHandler} from '../src/client-side-metrics/gcp-metrics-handler';
 import * as proxyquire from 'proxyquire';
-import {Bigtable} from '../src';
+import {Bigtable, RawFilter} from '../src';
 import {Mutation} from '../src/mutation';
 import {Row} from '../src/row';
 import {
@@ -191,6 +191,18 @@ function readRowsAssertionCheck(
   });
 }
 
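+// Asserts the metrics recorded for a CheckAndMutateRow call, reusing the
+// shared readRows assertion helper; the trailing 'false' is presumably the
+// streaming attribute, since CheckAndMutateRow is a unary RPC.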
+function checkCheckAndMutateCall(
+  projectId: string,
+  requestsHandled: (OnOperationCompleteData | OnAttemptCompleteData)[] = [],
+) {
+  readRowsAssertionCheck(
+    projectId,
+    requestsHandled,
+    'Bigtable.CheckAndMutateRow',
+    'false',
+  );
+}
+
 function checkMultiRowCall(
   projectId: string,
   requestsHandled: (OnOperationCompleteData | OnAttemptCompleteData)[] = [],
@@ -275,6 +287,18 @@ const mutation = {
   method: Mutation.methods.INSERT,
 };
 
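+// Predicate filter handed to row.filter(), which issues a CheckAndMutateRow
+// RPC: it matches cells in column family 'cf1' whose value is 'alincoln'.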
+const filter: RawFilter = {
+  family: 'cf1',
+  value: 'alincoln',
+};
+
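+// Mutations to apply only when the predicate filter matches, passed to
+// row.filter() via the onMatch option.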
+const mutations = [
+  {
+    method: 'delete',
+    data: ['cf1:alincoln'],
+  },
+];
+
 const rules = [
   {
     column: 'cf1:column',
@@ -862,6 +886,91 @@ describe('Bigtable/ClientSideMetricsAllMethods', () => {
         });
       });
     });
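+    // Verifies that CheckAndMutateRow calls emit client-side metrics that
+    // reach Google Cloud Monitoring.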
+    describe('CheckAndMutateRow', () => {
+      it('should send the metrics to Google Cloud Monitoring for a CheckAndMutateRow call', done => {
+        (async () => {
+          try {
+            const bigtable = await mockBigtable(defaultProjectId, done);
+            for (const instanceId of [instanceId1, instanceId2]) {
+              await setupBigtableWithInsert(
+                bigtable,
+                columnFamilyId,
+                instanceId,
+                [tableId1, tableId2],
+              );
+              const instance = bigtable.instance(instanceId);
+              const table = instance.table(tableId1);
+              const row = table.row(columnFamilyId);
+              await row.filter(filter, {onMatch: mutations});
+              const table2 = instance.table(tableId2);
+              const row2 = table2.row(columnFamilyId);
+              await row2.filter(filter, {onMatch: mutations});
+            }
+          } catch (e) {
+            // done() may only be called once, so report just the underlying error.
+            done(e);
+          }
+        })().catch(err => {
+          throw err;
+        });
+      });
+      it('should send the metrics to Google Cloud Monitoring for a custom endpoint', done => {
+        (async () => {
+          try {
+            const bigtable = await mockBigtable(
+              defaultProjectId,
+              done,
+              'bogus-endpoint',
+            );
+            const instance = bigtable.instance(instanceId1);
+            const table = instance.table(tableId1);
+            try {
+              // This call will fail because we are trying to hit a bogus
+              // endpoint. The idea here is that we just want to record at
+              // least one metric so that the exporter gets executed.
+              const row = table.row(columnFamilyId);
+              await row.filter(filter, {onMatch: mutations});
+            } catch (e: unknown) {
+              // Swallow the expected failure; the metric has already been recorded.
+            }
+          } catch (e) {
+            done(e);
+          }
+        })().catch(err => {
+          throw err;
+        });
+      });
+      it('should send the metrics to Google Cloud Monitoring for a CheckAndMutateRow call with a second project', done => {
+        (async () => {
+          try {
+            // This is the second project the test is configured to work with:
+            const projectId = SECOND_PROJECT_ID;
+            const bigtable = await mockBigtable(projectId, done);
+            for (const instanceId of [instanceId1, instanceId2]) {
+              await setupBigtableWithInsert(
+                bigtable,
+                columnFamilyId,
+                instanceId,
+                [tableId1, tableId2],
+              );
+              const instance = bigtable.instance(instanceId);
+              const table = instance.table(tableId1);
+              const row = table.row(columnFamilyId);
+              await row.filter(filter, {onMatch: mutations});
+              const table2 = instance.table(tableId2);
+              const row2 = table2.row(columnFamilyId);
+              await row2.filter(filter, {onMatch: mutations});
+            }
+          } catch (e) {
+            done(e);
+          }
+        })().catch(err => {
+          throw err;
+        });
+      });
+    });
   });
   describe('Bigtable/ClientSideMetricsToGCMTimeout', () => {
     // This test suite simulates a situation where the user creates multiple
@@ -1430,6 +1539,111 @@ describe('Bigtable/ClientSideMetricsAllMethods', () => {
         });
       });
     });
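+    // Exercises CheckAndMutateRow metrics export when multiple clients share
+    // the same project.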
+    describe('CheckAndMutateRow', () => {
+      it('should send the metrics to Google Cloud Monitoring for a CheckAndMutateRow call', done => {
+        let testFinished = false;
+        /*
+        Without this timeout, mocha would tear the test down before the
+        GCPMetricsHandler has had a chance to export the data. When the
+        timeout fires, the test passes as long as no export errors were
+        reported.
+        */
+        setTimeout(() => {
+          testFinished = true;
+          done();
+        }, 120000);
+        (async () => {
+          try {
+            const bigtable1 = await mockBigtable(defaultProjectId, done);
+            const bigtable2 = await mockBigtable(defaultProjectId, done);
+            for (const bigtable of [bigtable1, bigtable2]) {
+              for (const instanceId of [instanceId1, instanceId2]) {
+                await setupBigtableWithInsert(
+                  bigtable,
+                  columnFamilyId,
+                  instanceId,
+                  [tableId1, tableId2],
+                );
+                const instance = bigtable.instance(instanceId);
+                const table = instance.table(tableId1);
+                const row = table.row(columnFamilyId);
+                await row.filter(filter, {onMatch: mutations});
+                const table2 = instance.table(tableId2);
+                const row2 = table2.row(columnFamilyId);
+                await row2.filter(filter, {onMatch: mutations});
+              }
+            }
+          } catch (e) {
+            done(e);
+          }
+        })().catch(err => {
+          throw err;
+        });
+      });
+      it('should send the metrics to Google Cloud Monitoring for a single CheckAndMutateRow call with thirty clients', done => {
+        /*
+        Fail the test if the clients have not all finished exporting within
+        the timeout. Success is signalled by onExportSuccess below once every
+        client has completed an export.
+        */
+        const testTimeout = setTimeout(() => {
+          done(new Error('The test timed out'));
+        }, 480000);
+        let testComplete = false;
+        const numClients = 30;
+        (async () => {
+          try {
+            const bigtableList = [];
+            const completedSet = new Set();
+            for (
+              let bigtableCount = 0;
+              bigtableCount < numClients;
+              bigtableCount++
+            ) {
+              const currentCount = bigtableCount;
+              const onExportSuccess = () => {
+                completedSet.add(currentCount);
+                if (completedSet.size === numClients) {
+                  // If every client has completed the export then pass the test.
+                  clearTimeout(testTimeout);
+                  if (!testComplete) {
+                    testComplete = true;
+                    done();
+                  }
+                }
+              };
+              bigtableList.push(
+                await mockBigtable(defaultProjectId, done, onExportSuccess),
+              );
+            }
+            for (const bigtable of bigtableList) {
+              for (const instanceId of [instanceId1, instanceId2]) {
+                await setupBigtableWithInsert(
+                  bigtable,
+                  columnFamilyId,
+                  instanceId,
+                  [tableId1, tableId2],
+                );
+                const instance = bigtable.instance(instanceId);
+                const table = instance.table(tableId1);
+                const row = table.row(columnFamilyId);
+                await row.filter(filter, {onMatch: mutations});
+                const table2 = instance.table(tableId2);
+                const row2 = table2.row(columnFamilyId);
+                await row2.filter(filter, {onMatch: mutations});
+              }
+            }
+          } catch (e) {
+            done(e);
+          }
+        })().catch(err => {
+          throw err;
+        });
+      });
+    });
   });
   describe('Bigtable/ClientSideMetricsToMetricsHandler', () => {
     async function getFakeBigtableWithHandler(
@@ -1689,5 +1903,25 @@ describe('Bigtable/ClientSideMetricsAllMethods', () => {
         });
       });
     });
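+    // Verifies that a CheckAndMutateRow call delivers a single data point to
+    // the metrics handler, asserted via checkCheckAndMutateCall.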
+    describe('CheckAndMutateRow', () => {
+      it('should send the metrics to the metrics handler for a CheckAndMutateRow call for a single point', done => {
+        (async () => {
+          const bigtable = await mockBigtableWithNoInserts(
+            defaultProjectId,
+            done,
+            checkCheckAndMutateCall,
+          );
+          const instance = bigtable.instance(instanceId1);
+          const table = instance.table(tableId1);
+          const row = table.row(columnFamilyId);
+          await row.filter(filter, {onMatch: mutations});
+          const table2 = instance.table(tableId2);
+          const row2 = table2.row(columnFamilyId);
+          await row2.filter(filter, {onMatch: mutations});
+        })().catch(err => {
+          throw err;
+        });
+      });
+    });
   });
 });