From 85ad56b6ca7a4f97bfc9fd09d6be600315986615 Mon Sep 17 00:00:00 2001
From: hiteshrepo
Date: Fri, 29 Dec 2023 00:00:13 +0530
Subject: [PATCH 1/4] parses metadata collections using previous paths

---
 src/internal/m365/collection/site/backup.go   |  9 ++
 src/internal/m365/collection/site/metadata.go | 99 +++++++++++++++++++
 2 files changed, 108 insertions(+)
 create mode 100644 src/internal/m365/collection/site/metadata.go

diff --git a/src/internal/m365/collection/site/backup.go b/src/internal/m365/collection/site/backup.go
index 9ee9430c07..3caf574dbb 100644
--- a/src/internal/m365/collection/site/backup.go
+++ b/src/internal/m365/collection/site/backup.go
@@ -160,6 +160,15 @@ func CollectLists(
 		currPaths = map[string]string{}
 	)
 
+	dps, canUsePreviousBackup, err := parseMetadataCollections(ctx, path.ListsCategory, bpc.MetadataCollections)
+	if err != nil {
+		return nil, err
+	}
+
+	// [TODO] utilise deltapaths to determine list's state
+	_ = dps
+	_ = canUsePreviousBackup
+
 	lists, err := bh.GetItems(ctx, acc)
 	if err != nil {
 		return nil, err
diff --git a/src/internal/m365/collection/site/metadata.go b/src/internal/m365/collection/site/metadata.go
new file mode 100644
index 0000000000..3bf887e4da
--- /dev/null
+++ b/src/internal/m365/collection/site/metadata.go
@@ -0,0 +1,99 @@
+package site
+
+import (
+	"context"
+	"encoding/json"
+
+	"github.com/alcionai/clues"
+
+	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/pkg/backup/metadata"
+	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/logger"
+	"github.com/alcionai/corso/src/pkg/path"
+)
+
+func parseMetadataCollections(
+	ctx context.Context,
+	cat path.CategoryType,
+	colls []data.RestoreCollection,
+) (metadata.DeltaPaths, bool, error) {
+	cdp := metadata.CatDeltaPaths{
+		cat: {},
+	}
+
+	found := map[path.CategoryType]map[string]struct{}{
+		cat: {},
+	}
+
+	errs := fault.New(true)
+
+	for _, coll := range colls {
+		var (
+			breakLoop bool
+			items     = coll.Items(ctx, errs)
+			category  = coll.FullPath().Category()
+		)
+
+		for {
+			select {
+			case <-ctx.Done():
+				return nil, false, clues.WrapWC(ctx, ctx.Err(), "parsing collection metadata")
+
+			case item, ok := <-items:
+				if !ok || errs.Failure() != nil {
+					breakLoop = true
+					break
+				}
+
+				var (
+					m                    = map[string]string{}
+					cdps, wantedCategory = cdp[category]
+				)
+
+				if !wantedCategory {
+					continue
+				}
+
+				err := json.NewDecoder(item.ToReader()).Decode(&m)
+				if err != nil {
+					return nil, false, clues.WrapWC(ctx, err, "decoding metadata json")
+				}
+
+				if item.ID() == metadata.PreviousPathFileName {
+					if _, ok := found[category][metadata.PathKey]; ok {
+						return nil, false, clues.Wrap(clues.NewWC(ctx, category.String()), "multiple versions of path metadata")
+					}
+
+					for k, p := range m {
+						cdps.AddPath(k, p)
+					}
+
+					found[category][metadata.PathKey] = struct{}{}
+
+					cdp[category] = cdps
+				}
+			}
+
+			if breakLoop {
+				break
+			}
+		}
+	}
+
+	if errs.Failure() != nil {
+		logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items")
+
+		return metadata.DeltaPaths{}, false, nil
+	}
+
+	for _, dps := range cdp {
+		for k, dp := range dps {
+			if len(dp.Path) == 0 {
+				delete(dps, k)
+			}
+		}
+	}
+
+	return cdp[cat], true, nil
+}

From e326d825250e900c4cb71d27b0fe0d0a7ef047b9 Mon Sep 17 00:00:00 2001
From: hiteshrepo
Date: Fri, 29 Dec 2023 00:46:39 +0530
Subject: [PATCH 2/4] bubbles up canUsePreviousBackup from CollectLists

---
 src/internal/m365/collection/site/backup.go      | 15 ++++++++-------
 src/internal/m365/collection/site/backup_test.go |  2 +-
src/internal/m365/service/sharepoint/backup.go | 6 +----- 3 files changed, 10 insertions(+), 13 deletions(-) diff --git a/src/internal/m365/collection/site/backup.go b/src/internal/m365/collection/site/backup.go index 3caf574dbb..76b93c4425 100644 --- a/src/internal/m365/collection/site/backup.go +++ b/src/internal/m365/collection/site/backup.go @@ -150,7 +150,7 @@ func CollectLists( su support.StatusUpdater, counter *count.Bus, errs *fault.Bus, -) ([]data.BackupCollection, error) { +) ([]data.BackupCollection, bool, error) { logger.Ctx(ctx).Debug("Creating SharePoint List Collections") var ( @@ -162,16 +162,17 @@ func CollectLists( dps, canUsePreviousBackup, err := parseMetadataCollections(ctx, path.ListsCategory, bpc.MetadataCollections) if err != nil { - return nil, err + return nil, false, err } // [TODO] utilise deltapaths to determine list's state _ = dps - _ = canUsePreviousBackup + + ctx = clues.Add(ctx, "can_use_previous_backup", canUsePreviousBackup) lists, err := bh.GetItems(ctx, acc) if err != nil { - return nil, err + return nil, false, err } for _, list := range lists { @@ -213,7 +214,7 @@ func CollectLists( path.ListsCategory, false) if err != nil { - return nil, clues.WrapWC(ctx, err, "making metadata path prefix"). + return nil, false, clues.WrapWC(ctx, err, "making metadata path prefix"). Label(count.BadPathPrefix) } @@ -225,12 +226,12 @@ func CollectLists( su, counter.Local()) if err != nil { - return nil, clues.WrapWC(ctx, err, "making metadata collection") + return nil, false, clues.WrapWC(ctx, err, "making metadata collection") } spcs = append(spcs, col) - return spcs, el.Failure() + return spcs, canUsePreviousBackup, el.Failure() } func idAnd(ss ...string) []string { diff --git a/src/internal/m365/collection/site/backup_test.go b/src/internal/m365/collection/site/backup_test.go index 776eff31e7..911dd863fd 100644 --- a/src/internal/m365/collection/site/backup_test.go +++ b/src/internal/m365/collection/site/backup_test.go @@ -114,7 +114,7 @@ func (suite *SharePointSuite) TestCollectLists() { bh := NewListsBackupHandler(bpc.ProtectedResource.ID(), ac.Lists()) - col, err := CollectLists( + col, _, err := CollectLists( ctx, bh, bpc, diff --git a/src/internal/m365/service/sharepoint/backup.go b/src/internal/m365/service/sharepoint/backup.go index 77eebc4c8b..b2ad94bb05 100644 --- a/src/internal/m365/service/sharepoint/backup.go +++ b/src/internal/m365/service/sharepoint/backup.go @@ -58,7 +58,7 @@ func ProduceBackupCollections( case path.ListsCategory: bh := site.NewListsBackupHandler(bpc.ProtectedResource.ID(), ac.Lists()) - spcs, err = site.CollectLists( + spcs, canUsePreviousBackup, err = site.CollectLists( ctx, bh, bpc, @@ -73,10 +73,6 @@ func ProduceBackupCollections( continue } - // Lists don't make use of previous metadata - // TODO: Revisit when we add support of lists - canUsePreviousBackup = true - case path.LibrariesCategory: spcs, canUsePreviousBackup, err = site.CollectLibraries( ctx, From 3a54aaf347734a210ce3b7c5b520c30bdf402453 Mon Sep 17 00:00:00 2001 From: hiteshrepo Date: Wed, 3 Jan 2024 13:42:35 +0530 Subject: [PATCH 3/4] adds test for lists metadata parsing --- src/internal/m365/collection/site/backup.go | 6 +- .../m365/collection/site/backup_test.go | 153 ++++++++++++++++++ .../site/{metadata.go => lists_metadata.go} | 2 +- 3 files changed, 156 insertions(+), 5 deletions(-) rename src/internal/m365/collection/site/{metadata.go => lists_metadata.go} (98%) diff --git a/src/internal/m365/collection/site/backup.go 
b/src/internal/m365/collection/site/backup.go
index 76b93c4425..78a80b36d5 100644
--- a/src/internal/m365/collection/site/backup.go
+++ b/src/internal/m365/collection/site/backup.go
@@ -160,14 +160,12 @@ func CollectLists(
 		currPaths = map[string]string{}
 	)
 
-	dps, canUsePreviousBackup, err := parseMetadataCollections(ctx, path.ListsCategory, bpc.MetadataCollections)
+	// [TODO](hitesh) utilise deltapaths to determine list's state
+	_, canUsePreviousBackup, err := parseListsMetadataCollections(ctx, path.ListsCategory, bpc.MetadataCollections)
 	if err != nil {
 		return nil, false, err
 	}
 
-	// [TODO] utilise deltapaths to determine list's state
-	_ = dps
-
 	ctx = clues.Add(ctx, "can_use_previous_backup", canUsePreviousBackup)
 
 	lists, err := bh.GetItems(ctx, acc)
diff --git a/src/internal/m365/collection/site/backup_test.go b/src/internal/m365/collection/site/backup_test.go
index 911dd863fd..e128e74c5f 100644
--- a/src/internal/m365/collection/site/backup_test.go
+++ b/src/internal/m365/collection/site/backup_test.go
@@ -1,6 +1,7 @@
 package site
 
 import (
+	"context"
 	"testing"
 
 	"github.com/alcionai/clues"
@@ -9,10 +10,14 @@ import (
 	"github.com/stretchr/testify/suite"
 
 	"github.com/alcionai/corso/src/internal/common/idname/mock"
+	"github.com/alcionai/corso/src/internal/data"
+	dataMock "github.com/alcionai/corso/src/internal/data/mock"
+	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/internal/version"
+	"github.com/alcionai/corso/src/pkg/backup/metadata"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/count"
 	"github.com/alcionai/corso/src/pkg/fault"
@@ -138,3 +143,151 @@ func (suite *SharePointSuite) TestCollectLists() {
 	assert.Less(t, 0, len(col))
 	assert.True(t, metadataFound)
 }
+
+func (suite *SharePointSuite) TestParseListsMetadataCollections() {
+	type fileValues struct {
+		fileName string
+		value    string
+	}
+
+	table := []struct {
+		name                 string
+		cat                  path.CategoryType
+		wantedCategory       path.CategoryType
+		data                 []fileValues
+		expect               map[string]metadata.DeltaPath
+		canUsePreviousBackup bool
+		expectError          assert.ErrorAssertionFunc
+	}{
+		{
+			name:           "previous path only",
+			cat:            path.ListsCategory,
+			wantedCategory: path.ListsCategory,
+			data: []fileValues{
+				{metadata.PreviousPathFileName, "prev-path"},
+			},
+			expect: map[string]metadata.DeltaPath{
+				"key": {
+					Path: "prev-path",
+				},
+			},
+			canUsePreviousBackup: true,
+			expectError:          assert.NoError,
+		},
+		{
+			name:           "multiple previous paths",
+			cat:            path.ListsCategory,
+			wantedCategory: path.ListsCategory,
+			data: []fileValues{
+				{metadata.PreviousPathFileName, "prev-path"},
+				{metadata.PreviousPathFileName, "prev-path-2"},
+			},
+			canUsePreviousBackup: false,
+			expectError:          assert.Error,
+		},
+		{
+			name:           "unwanted category",
+			cat:            path.LibrariesCategory,
+			wantedCategory: path.ListsCategory,
+			data: []fileValues{
+				{metadata.PreviousPathFileName, "prev-path"},
+			},
+			expectError: assert.NoError,
+		},
+	}
+
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			t := suite.T()
+
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
+			entries := []graph.MetadataCollectionEntry{}
+
+			for _, d := range test.data {
+				entries = append(
+					entries,
+					graph.NewMetadataEntry(d.fileName, map[string]string{"key": d.value}))
+			}
+
+			pathPrefix, err := path.BuildMetadata(
+				"t", "u",
+				path.SharePointService,
+				test.cat,
+				false)
+			require.NoError(t, err, "path prefix")
+
+			coll, err := graph.MakeMetadataCollection(
+				pathPrefix,
+				entries,
+				func(cos *support.ControllerOperationStatus) {},
+				count.New())
+			require.NoError(t, err, clues.ToCore(err))
+
+			dps, canUsePreviousBackup, err := parseListsMetadataCollections(
+				ctx,
+				test.wantedCategory,
+				[]data.RestoreCollection{
+					dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: coll}),
+				})
+			test.expectError(t, err, clues.ToCore(err))
+
+			if test.cat != test.wantedCategory {
+				assert.Len(t, dps, 0)
+			} else {
+				assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup, "can use previous backup")
+
+				assert.Len(t, dps, len(test.expect))
+
+				for k, v := range dps {
+					assert.Equal(t, v.Path, test.expect[k].Path, "path")
+				}
+			}
+		})
+	}
+}
+
+type failingColl struct {
+	t *testing.T
+}
+
+func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
+	ic := make(chan data.Item)
+	defer close(ic)
+
+	errs.AddRecoverable(ctx, assert.AnError)
+
+	return ic
+}
+
+func (f failingColl) FullPath() path.Path {
+	tmp, err := path.Build(
+		"tenant",
+		"siteid",
+		path.SharePointService,
+		path.ListsCategory,
+		false,
+		"list1")
+	require.NoError(f.t, err, clues.ToCore(err))
+
+	return tmp
+}
+
+func (f failingColl) FetchItemByName(context.Context, string) (data.Item, error) {
+	// no fetch calls will be made
+	return nil, nil
+}
+
+func (suite *SharePointSuite) TestParseListsMetadataCollections_ReadFailure() {
+	t := suite.T()
+
+	ctx, flush := tester.NewContext(t)
+	defer flush()
+
+	fc := failingColl{t}
+
+	_, canUsePreviousBackup, err := parseListsMetadataCollections(ctx, path.ListsCategory, []data.RestoreCollection{fc})
+	require.NoError(t, err)
+	require.False(t, canUsePreviousBackup)
+}
diff --git a/src/internal/m365/collection/site/metadata.go b/src/internal/m365/collection/site/lists_metadata.go
similarity index 98%
rename from src/internal/m365/collection/site/metadata.go
rename to src/internal/m365/collection/site/lists_metadata.go
index 3bf887e4da..d5c14a79bb 100644
--- a/src/internal/m365/collection/site/metadata.go
+++ b/src/internal/m365/collection/site/lists_metadata.go
@@ -13,7 +13,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/path"
 )
 
-func parseMetadataCollections(
+func parseListsMetadataCollections(
 	ctx context.Context,
 	cat path.CategoryType,
 	colls []data.RestoreCollection,

From bc09410f182407add7ac015a6e1ac80ff843bd0c Mon Sep 17 00:00:00 2001
From: Hitesh Pattanayak <48874082+HiteshRepo@users.noreply.github.com>
Date: Thu, 4 Jan 2024 13:12:30 +0530
Subject: [PATCH 4/4] determine collection state by current and previous paths
 (#4948)

determines SharePoint collection state using current and previous paths

#### Does this PR need a docs update or release note?
- [x] :no_entry: No #### Type of change - [x] :sunflower: Feature #### Issue(s) #4754 #### Test Plan - [x] :muscle: Manual - [x] :zap: Unit test - [x] :green_heart: E2E --- src/internal/m365/collection/site/backup.go | 151 +++++++-- .../m365/collection/site/backup_test.go | 301 ++++++++++++++++++ .../m365/collection/site/collection.go | 40 ++- .../m365/collection/site/collection_test.go | 183 +++++++++-- src/internal/m365/collection/site/handlers.go | 2 +- .../m365/collection/site/lists_handler.go | 2 +- .../m365/collection/site/lists_metadata.go | 19 ++ .../m365/collection/site/mock/list.go | 94 +++++- src/pkg/count/keys.go | 1 + 9 files changed, 712 insertions(+), 81 deletions(-) diff --git a/src/internal/m365/collection/site/backup.go b/src/internal/m365/collection/site/backup.go index 8114b45a4b..0a09f0be78 100644 --- a/src/internal/m365/collection/site/backup.go +++ b/src/internal/m365/collection/site/backup.go @@ -7,6 +7,7 @@ import ( "time" "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/ptr" @@ -128,10 +129,13 @@ func CollectPages( collection := NewPrefetchCollection( nil, dir, + nil, + nil, ac, scope, su, - bpc.Options) + bpc.Options, + nil) collection.SetBetaService(betaService) collection.AddItem(tuple.ID, time.Now()) @@ -155,16 +159,12 @@ func CollectLists( logger.Ctx(ctx).Debug("Creating SharePoint List Collections") var ( - collection data.BackupCollection - el = errs.Local() - cl = counter.Local() - spcs = make([]data.BackupCollection, 0) - cfg = api.CallConfig{Select: idAnd("list", "lastModifiedDateTime")} - currPaths = map[string]string{} + el = errs.Local() + spcs = make([]data.BackupCollection, 0) + cfg = api.CallConfig{Select: idAnd("list", "lastModifiedDateTime")} ) - // [TODO](hitesh) utilise deltapaths to determine list's state - _, canUsePreviousBackup, err := parseListsMetadataCollections(ctx, path.ListsCategory, bpc.MetadataCollections) + dps, canUsePreviousBackup, err := parseListsMetadataCollections(ctx, path.ListsCategory, bpc.MetadataCollections) if err != nil { return nil, false, err } @@ -176,6 +176,53 @@ func CollectLists( return nil, false, err } + collections, err := populateListsCollections( + ctx, + bh, + bpc, + ac, + tenantID, + scope, + su, + lists, + dps, + counter, + el) + if err != nil { + return nil, false, err + } + + for _, spc := range collections { + spcs = append(spcs, spc) + } + + return spcs, canUsePreviousBackup, el.Failure() +} + +func populateListsCollections( + ctx context.Context, + bh backupHandler, + bpc inject.BackupProducerConfig, + ac api.Client, + tenantID string, + scope selectors.SharePointScope, + su support.StatusUpdater, + lists []models.Listable, + dps metadata.DeltaPaths, + counter *count.Bus, + el *fault.Bus, +) (map[string]data.BackupCollection, error) { + var ( + err error + collection data.BackupCollection + // collections: list-id -> backup-collection + collections = make(map[string]data.BackupCollection) + currPaths = make(map[string]string) + tombstones = makeTombstones(dps) + ) + + counter.Add(count.Lists, int64(len(lists))) + for _, list := range lists { if el.Failure() != nil { break @@ -185,12 +232,29 @@ func CollectLists( continue } - listID := ptr.Val(list.GetId()) - storageDir := path.Elements{listID} + var ( + listID = ptr.Val(list.GetId()) + storageDir = path.Elements{listID} + dp = dps[storageDir.String()] + prevPathStr = dp.Path + prevPath path.Path + ) - currPath, 
err := bh.canonicalPath(storageDir, tenantID) + delete(tombstones, listID) + + if len(prevPathStr) > 0 { + if prevPath, err = pathFromPrevString(prevPathStr); err != nil { + err = clues.StackWC(ctx, err).Label(count.BadPrevPath) + logger.CtxErr(ctx, err).Error("parsing prev path") + + return nil, err + } + } + + currPath, err := bh.CanonicalPath(storageDir, tenantID) if err != nil { el.AddRecoverable(ctx, clues.WrapWC(ctx, err, "creating list collection path")) + return nil, err } modTime := ptr.Val(list.GetLastModifiedDateTime()) @@ -198,8 +262,10 @@ func CollectLists( lazyFetchCol := NewLazyFetchCollection( bh, currPath, + prevPath, + storageDir.Builder(), su, - cl) + counter.Local()) lazyFetchCol.AddItem( ptr.Val(list.GetId()), @@ -213,10 +279,13 @@ func CollectLists( prefetchCol := NewPrefetchCollection( bh, currPath, + prevPath, + storageDir.Builder(), ac, scope, su, - bpc.Options) + bpc.Options, + counter.Local()) prefetchCol.AddItem( ptr.Val(list.GetId()), @@ -225,11 +294,13 @@ func CollectLists( collection = prefetchCol } - spcs = append(spcs, collection) - + collections[storageDir.String()] = collection currPaths[storageDir.String()] = currPath.String() } + handleTombstones(ctx, bpc, tombstones, collections, counter, el) + + // Build metadata path pathPrefix, err := path.BuildMetadata( tenantID, bpc.ProtectedResource.ID(), @@ -237,11 +308,11 @@ func CollectLists( path.ListsCategory, false) if err != nil { - return nil, false, clues.WrapWC(ctx, err, "making metadata path prefix"). + return nil, clues.WrapWC(ctx, err, "making metadata path prefix"). Label(count.BadPathPrefix) } - col, err := graph.MakeMetadataCollection( + mdCol, err := graph.MakeMetadataCollection( pathPrefix, []graph.MetadataCollectionEntry{ graph.NewMetadataEntry(metadata.PreviousPathFileName, currPaths), @@ -249,12 +320,12 @@ func CollectLists( su, counter.Local()) if err != nil { - return nil, false, clues.WrapWC(ctx, err, "making metadata collection") + return nil, clues.WrapWC(ctx, err, "making metadata collection") } - spcs = append(spcs, col) + collections["metadata"] = mdCol - return spcs, canUsePreviousBackup, el.Failure() + return collections, nil } func idAnd(ss ...string) []string { @@ -266,3 +337,41 @@ func idAnd(ss ...string) []string { return append(id, ss...) 
} + +func handleTombstones( + ctx context.Context, + bpc inject.BackupProducerConfig, + tombstones map[string]string, + collections map[string]data.BackupCollection, + counter *count.Bus, + el *fault.Bus, +) { + for id, p := range tombstones { + if el.Failure() != nil { + return + } + + ictx := clues.Add(ctx, "tombstone_id", id) + + if collections[id] != nil { + err := clues.NewWC(ictx, "conflict: tombstone exists for a live collection").Label(count.CollectionTombstoneConflict) + el.AddRecoverable(ictx, err) + + continue + } + + if len(p) == 0 { + continue + } + + prevPath, err := pathFromPrevString(p) + if err != nil { + err := clues.StackWC(ictx, err).Label(count.BadPrevPath) + logger.CtxErr(ictx, err).Error("parsing tombstone prev path") + + continue + } + + collections[id] = data.NewTombstoneCollection(prevPath, bpc.Options, counter.Local()) + } +} diff --git a/src/internal/m365/collection/site/backup_test.go b/src/internal/m365/collection/site/backup_test.go index e128e74c5f..7342385973 100644 --- a/src/internal/m365/collection/site/backup_test.go +++ b/src/internal/m365/collection/site/backup_test.go @@ -2,9 +2,11 @@ package site import ( "context" + "errors" "testing" "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" @@ -12,11 +14,13 @@ import ( "github.com/alcionai/corso/src/internal/common/idname/mock" "github.com/alcionai/corso/src/internal/data" dataMock "github.com/alcionai/corso/src/internal/data/mock" + siteMock "github.com/alcionai/corso/src/internal/m365/collection/site/mock" "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/operations/inject" "github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/version" + "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/backup/metadata" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/count" @@ -27,6 +31,303 @@ import ( "github.com/alcionai/corso/src/pkg/services/m365/api/graph" ) +type SharePointBackupUnitSuite struct { + tester.Suite + creds account.M365Config +} + +func TestSharePointBackupUnitSuite(t *testing.T) { + suite.Run(t, &SharePointBackupUnitSuite{Suite: tester.NewUnitSuite(t)}) +} + +func (suite *SharePointBackupUnitSuite) SetupSuite() { + a := tconfig.NewFakeM365Account(suite.T()) + m365, err := a.M365Config() + require.NoError(suite.T(), err, clues.ToCore(err)) + suite.creds = m365 +} + +func (suite *SharePointBackupUnitSuite) TestCollectLists() { + t := suite.T() + + var ( + statusUpdater = func(*support.ControllerOperationStatus) {} + siteID = tconfig.M365SiteID(t) + sel = selectors.NewSharePointBackup([]string{siteID}) + ) + + table := []struct { + name string + mock siteMock.ListHandler + expectErr require.ErrorAssertionFunc + expectColls int + expectNewColls int + expectMetadataColls int + canUsePreviousBackup bool + }{ + { + name: "one list", + mock: siteMock.NewListHandler(siteMock.StubLists("one"), siteID, nil), + expectErr: require.NoError, + expectColls: 2, + expectNewColls: 1, + expectMetadataColls: 1, + canUsePreviousBackup: true, + }, + { + name: "many lists", + mock: siteMock.NewListHandler(siteMock.StubLists("one", "two"), siteID, nil), + expectErr: require.NoError, + expectColls: 3, + expectNewColls: 2, + expectMetadataColls: 1, + canUsePreviousBackup: true, + }, + { + name: 
"with error", + mock: siteMock.NewListHandler(siteMock.StubLists("one"), siteID, errors.New("some error")), + expectErr: require.Error, + expectColls: 0, + expectNewColls: 0, + expectMetadataColls: 0, + canUsePreviousBackup: false, + }, + } + for _, test := range table { + suite.Run(test.name, func() { + ctx, flush := tester.NewContext(t) + defer flush() + + ac, err := api.NewClient( + suite.creds, + control.DefaultOptions(), + count.New()) + require.NoError(t, err, clues.ToCore(err)) + + bpc := inject.BackupProducerConfig{ + LastBackupVersion: version.NoBackup, + Options: control.DefaultOptions(), + ProtectedResource: mock.NewProvider(siteID, siteID), + } + + cs, canUsePreviousBackup, err := CollectLists( + ctx, + test.mock, + bpc, + ac, + suite.creds.AzureTenantID, + sel.Lists(selectors.Any())[0], + statusUpdater, + count.New(), + fault.New(false)) + + test.expectErr(t, err, clues.ToCore(err)) + assert.Len(t, cs, test.expectColls, "number of collections") + assert.Equal(t, test.canUsePreviousBackup, canUsePreviousBackup) + + newStates, metadatas := 0, 0 + for _, c := range cs { + if c.FullPath() != nil && c.FullPath().Service() == path.SharePointMetadataService { + metadatas++ + continue + } + + if c.State() == data.NewState { + newStates++ + } + } + + assert.Equal(t, test.expectNewColls, newStates, "new collections") + assert.Equal(t, test.expectMetadataColls, metadatas, "metadata collections") + }) + } +} + +func (suite *SharePointBackupUnitSuite) TestPopulateListsCollections_incremental() { + t := suite.T() + + var ( + statusUpdater = func(*support.ControllerOperationStatus) {} + siteID = tconfig.M365SiteID(t) + sel = selectors.NewSharePointBackup([]string{siteID}) + ) + + ac, err := api.NewClient( + suite.creds, + control.DefaultOptions(), + count.New()) + require.NoError(t, err, clues.ToCore(err)) + + listPathOne, err := path.Build( + suite.creds.AzureTenantID, + siteID, + path.SharePointService, + path.ListsCategory, + false, + "one") + require.NoError(suite.T(), err, clues.ToCore(err)) + + listPathTwo, err := path.Build( + suite.creds.AzureTenantID, + siteID, + path.SharePointService, + path.ListsCategory, + false, + "two") + require.NoError(suite.T(), err, clues.ToCore(err)) + + listPathThree, err := path.Build( + suite.creds.AzureTenantID, + siteID, + path.SharePointService, + path.ListsCategory, + false, + "three") + require.NoError(suite.T(), err, clues.ToCore(err)) + + table := []struct { + name string + lists []models.Listable + deltaPaths metadata.DeltaPaths + expectErr require.ErrorAssertionFunc + expectColls int + expectNewColls int + expectNotMovedColls int + expectMetadataColls int + expectTombstoneCols int + }{ + { + name: "one list", + lists: siteMock.StubLists("one"), + deltaPaths: metadata.DeltaPaths{ + "one": { + Path: listPathOne.String(), + }, + }, + expectErr: require.NoError, + expectColls: 2, + expectNotMovedColls: 1, + expectNewColls: 0, + expectMetadataColls: 1, + expectTombstoneCols: 0, + }, + { + name: "one lists, one deleted", + lists: siteMock.StubLists("two"), + deltaPaths: metadata.DeltaPaths{ + "one": { + Path: listPathOne.String(), + }, + }, + expectErr: require.NoError, + expectColls: 3, + expectNewColls: 1, + expectMetadataColls: 1, + expectTombstoneCols: 1, + }, + { + name: "two lists, one deleted", + lists: siteMock.StubLists("one", "two"), + deltaPaths: metadata.DeltaPaths{ + "one": { + Path: listPathOne.String(), + }, + "three": { + Path: listPathThree.String(), + }, + }, + expectErr: require.NoError, + expectColls: 4, + expectNotMovedColls: 1, 
+			expectNewColls:      1,
+			expectMetadataColls: 1,
+			expectTombstoneCols: 1,
+		},
+		{
+			name:                "no previous paths",
+			lists:               siteMock.StubLists("one", "two"),
+			deltaPaths:          metadata.DeltaPaths{},
+			expectErr:           require.NoError,
+			expectColls:         3,
+			expectNotMovedColls: 0,
+			expectNewColls:      2,
+			expectMetadataColls: 1,
+			expectTombstoneCols: 0,
+		},
+		{
+			name:  "two lists, unchanged",
+			lists: siteMock.StubLists("one", "two"),
+			deltaPaths: metadata.DeltaPaths{
+				"one": {
+					Path: listPathOne.String(),
+				},
+				"two": {
+					Path: listPathTwo.String(),
+				},
+			},
+			expectErr:           require.NoError,
+			expectColls:         3,
+			expectNotMovedColls: 2,
+			expectNewColls:      0,
+			expectMetadataColls: 1,
+			expectTombstoneCols: 0,
+		},
+	}
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
+			bpc := inject.BackupProducerConfig{
+				LastBackupVersion: version.NoBackup,
+				Options:           control.DefaultOptions(),
+				ProtectedResource: mock.NewProvider(siteID, siteID),
+			}
+
+			cs, err := populateListsCollections(
+				ctx,
+				siteMock.NewListHandler(test.lists, siteID, nil),
+				bpc,
+				ac,
+				suite.creds.AzureTenantID,
+				sel.Lists(selectors.Any())[0],
+				statusUpdater,
+				test.lists,
+				test.deltaPaths,
+				count.New(),
+				fault.New(false))
+
+			test.expectErr(t, err, clues.ToCore(err))
+			assert.Len(t, cs, test.expectColls, "number of collections")
+
+			newStates, notMovedStates, metadatas, tombstoned := 0, 0, 0, 0
+			for _, c := range cs {
+				if c.FullPath() != nil && c.FullPath().Service() == path.SharePointMetadataService {
+					metadatas++
+					continue
+				}
+
+				if c.State() == data.DeletedState {
+					tombstoned++
+				}
+
+				if c.State() == data.NewState {
+					newStates++
+				}
+
+				if c.State() == data.NotMovedState {
+					notMovedStates++
+				}
+			}
+
+			assert.Equal(t, test.expectNewColls, newStates, "new collections")
+			assert.Equal(t, test.expectNotMovedColls, notMovedStates, "not moved collections")
+			assert.Equal(t, test.expectMetadataColls, metadatas, "metadata collections")
+			assert.Equal(t, test.expectTombstoneCols, tombstoned, "tombstone collections")
+		})
+	}
+}
+
 type SharePointSuite struct {
 	tester.Suite
 }
diff --git a/src/internal/m365/collection/site/collection.go b/src/internal/m365/collection/site/collection.go
index 61d9533b98..1bfaf89797 100644
--- a/src/internal/m365/collection/site/collection.go
+++ b/src/internal/m365/collection/site/collection.go
@@ -60,7 +60,9 @@ type prefetchCollection struct {
 	// where the category type serves as the key, and the associated channel holds the items.
stream map[path.CategoryType]chan data.Item // fullPath indicates the hierarchy within the collection - fullPath path.Path + fullPath path.Path + prevPath path.Path + locationPath *path.Builder // items contains the SharePoint.List.IDs or SharePoint.Page.IDs // and their corresponding last modified time items map[string]time.Time @@ -71,19 +73,25 @@ type prefetchCollection struct { betaService *betaAPI.BetaService statusUpdater support.StatusUpdater getter getItemByIDer + Counter *count.Bus + state data.CollectionState } // NewPrefetchCollection constructor function for creating a prefetchCollection func NewPrefetchCollection( getter getItemByIDer, - folderPath path.Path, + folderPath, prevPath path.Path, + locPb *path.Builder, ac api.Client, scope selectors.SharePointScope, statusUpdater support.StatusUpdater, ctrlOpts control.Options, + counter *count.Bus, ) *prefetchCollection { c := &prefetchCollection{ fullPath: folderPath, + prevPath: prevPath, + locationPath: locPb, items: make(map[string]time.Time), getter: getter, stream: make(map[path.CategoryType]chan data.Item), @@ -91,6 +99,8 @@ func NewPrefetchCollection( statusUpdater: statusUpdater, category: scope.Category().PathType(), ctrl: ctrlOpts, + Counter: counter.Local(), + state: data.StateOf(prevPath, folderPath, counter), } return c @@ -109,18 +119,16 @@ func (pc *prefetchCollection) FullPath() path.Path { return pc.fullPath } -// TODO(ashmrtn): Fill in with previous path once the Controller compares old -// and new folder hierarchies. func (pc prefetchCollection) PreviousPath() path.Path { - return nil + return pc.prevPath } func (pc prefetchCollection) LocationPath() *path.Builder { - return path.Builder{}.Append(pc.fullPath.Folders()...) + return pc.locationPath } func (pc prefetchCollection) State() data.CollectionState { - return data.NewState + return pc.state } func (pc prefetchCollection) DoNotMergeItems() bool { @@ -363,27 +371,33 @@ type lazyFetchCollection struct { // stream is the container for each individual SharePoint item of list stream chan data.Item // fullPath indicates the hierarchy within the collection - fullPath path.Path + fullPath, prevPath path.Path + locationPath *path.Builder // jobs contain the SharePoint.List.IDs and their last modified time items map[string]time.Time statusUpdater support.StatusUpdater getter getItemByIDer counter *count.Bus + state data.CollectionState } func NewLazyFetchCollection( getter getItemByIDer, - folderPath path.Path, + folderPath, prevPath path.Path, + locPb *path.Builder, statusUpdater support.StatusUpdater, counter *count.Bus, ) *lazyFetchCollection { c := &lazyFetchCollection{ fullPath: folderPath, + prevPath: prevPath, + locationPath: locPb, items: make(map[string]time.Time), getter: getter, stream: make(chan data.Item, collectionChannelBufferSize), statusUpdater: statusUpdater, counter: counter, + state: data.StateOf(prevPath, folderPath, counter), } return c @@ -399,17 +413,15 @@ func (lc *lazyFetchCollection) FullPath() path.Path { } func (lc lazyFetchCollection) LocationPath() *path.Builder { - return path.Builder{}.Append(lc.fullPath.Folders()...) + return lc.locationPath } -// TODO(hitesh): Implement PreviousPath, State, DoNotMergeItems -// once the Controller compares old and new folder hierarchies. 
func (lc lazyFetchCollection) PreviousPath() path.Path { - return nil + return lc.prevPath } func (lc lazyFetchCollection) State() data.CollectionState { - return data.NewState + return lc.state } func (lc lazyFetchCollection) DoNotMergeItems() bool { diff --git a/src/internal/m365/collection/site/collection_test.go b/src/internal/m365/collection/site/collection_test.go index da8c6407c5..a268d80d3f 100644 --- a/src/internal/m365/collection/site/collection_test.go +++ b/src/internal/m365/collection/site/collection_test.go @@ -32,6 +32,87 @@ import ( "github.com/alcionai/corso/src/pkg/services/m365/api/graph" ) +type SharePointCollectionUnitSuite struct { + tester.Suite + creds account.M365Config +} + +func TestSharePointCollectionUnitSuite(t *testing.T) { + suite.Run(t, &SharePointCollectionUnitSuite{Suite: tester.NewUnitSuite(t)}) +} + +func (suite *SharePointCollectionUnitSuite) SetupSuite() { + a := tconfig.NewFakeM365Account(suite.T()) + m365, err := a.M365Config() + require.NoError(suite.T(), err, clues.ToCore(err)) + suite.creds = m365 +} + +func (suite *SharePointCollectionUnitSuite) TestPrefetchCollection_state() { + t := suite.T() + + one, err := path.Build("tid", "siteid", path.SharePointService, path.ListsCategory, false, "one") + require.NoError(suite.T(), err, clues.ToCore(err)) + two, err := path.Build("tid", "siteid", path.SharePointService, path.ListsCategory, false, "two") + require.NoError(suite.T(), err, clues.ToCore(err)) + + sel := selectors.NewSharePointBackup([]string{"site"}) + ac, err := api.NewClient(suite.creds, control.DefaultOptions(), count.New()) + require.NoError(t, err, clues.ToCore(err)) + + table := []struct { + name string + prev path.Path + curr path.Path + loc *path.Builder + expect data.CollectionState + }{ + { + name: "new", + curr: one, + loc: path.Elements{"one"}.Builder(), + expect: data.NewState, + }, + { + name: "not moved", + prev: one, + curr: one, + loc: path.Elements{"one"}.Builder(), + expect: data.NotMovedState, + }, + { + name: "moved", + prev: one, + curr: two, + loc: path.Elements{"two"}.Builder(), + expect: data.MovedState, + }, + { + name: "deleted", + prev: one, + expect: data.DeletedState, + }, + } + for _, test := range table { + suite.Run(test.name, func() { + c := NewPrefetchCollection( + nil, + test.curr, + test.prev, + test.loc, + ac, + sel.Lists(selectors.Any())[0], + nil, + control.DefaultOptions(), + count.New()) + assert.Equal(t, test.expect, c.State(), "collection state") + assert.Equal(t, test.curr, c.FullPath(), "full path") + assert.Equal(t, test.prev, c.PreviousPath(), "prev path") + assert.Equal(t, test.loc, c.LocationPath(), "location path") + }) + } +} + type SharePointCollectionSuite struct { tester.Suite siteID string @@ -70,35 +151,44 @@ func TestSharePointCollectionSuite(t *testing.T) { // SharePoint collection and to use the data stream channel. 
func (suite *SharePointCollectionSuite) TestPrefetchCollection_Items() { var ( - tenant = "some" - user = "user" - dirRoot = "directory" + tenant = "some" + user = "user" + prevRoot = "prev" + dirRoot = "directory" ) sel := selectors.NewSharePointBackup([]string{"site"}) tables := []struct { name, itemName string + itemCount int64 scope selectors.SharePointScope cat path.CategoryType getter getItemByIDer - getDir func(t *testing.T) path.Path + prev string + curr string + locPb *path.Builder + getDir func(t *testing.T, root string) path.Path getItem func(t *testing.T, itemName string) data.Item }{ { - name: "List", - itemName: "MockListing", - cat: path.ListsCategory, - scope: sel.Lists(selectors.Any())[0], - getter: &mock.ListHandler{}, - getDir: func(t *testing.T) path.Path { + name: "List", + itemName: "MockListing", + itemCount: 1, + cat: path.ListsCategory, + scope: sel.Lists(selectors.Any())[0], + prev: prevRoot, + curr: dirRoot, + locPb: path.Elements{"MockListing"}.Builder(), + getter: &mock.ListHandler{}, + getDir: func(t *testing.T, root string) path.Path { dir, err := path.Build( tenant, user, path.SharePointService, path.ListsCategory, false, - dirRoot) + root) require.NoError(t, err, clues.ToCore(err)) return dir @@ -115,8 +205,10 @@ func (suite *SharePointCollectionSuite) TestPrefetchCollection_Items() { require.NoError(t, err, clues.ToCore(err)) info := &details.SharePointInfo{ + ItemType: details.SharePointList, List: &details.ListInfo{ - Name: name, + Name: name, + ItemCount: 1, }, } @@ -134,15 +226,18 @@ func (suite *SharePointCollectionSuite) TestPrefetchCollection_Items() { itemName: "MockPages", cat: path.PagesCategory, scope: sel.Pages(selectors.Any())[0], + prev: prevRoot, + curr: dirRoot, + locPb: path.Elements{"Pages"}.Builder(), getter: nil, - getDir: func(t *testing.T) path.Path { + getDir: func(t *testing.T, root string) path.Path { dir, err := path.Build( tenant, user, path.SharePointService, path.PagesCategory, false, - dirRoot) + root) require.NoError(t, err, clues.ToCore(err)) return dir @@ -172,11 +267,14 @@ func (suite *SharePointCollectionSuite) TestPrefetchCollection_Items() { col := NewPrefetchCollection( test.getter, - test.getDir(t), + test.getDir(t, test.curr), + test.getDir(t, test.prev), + test.locPb, suite.ac, test.scope, nil, - control.DefaultOptions()) + control.DefaultOptions(), + count.New()) col.stream[test.cat] = make(chan data.Item, collectionChannelBufferSize) col.stream[test.cat] <- test.getItem(t, test.itemName) @@ -195,10 +293,14 @@ func (suite *SharePointCollectionSuite) TestPrefetchCollection_Items() { require.NoError(t, err, clues.ToCore(err)) assert.NotNil(t, info) - assert.NotNil(t, info.SharePoint) + require.NotNil(t, info.SharePoint) - if test.cat == path.ListsCategory { + if info.SharePoint.ItemType == details.SharePointList { + require.NotNil(t, info.SharePoint.List) assert.Equal(t, test.itemName, info.SharePoint.List.Name) + assert.Equal(t, test.itemCount, info.SharePoint.List.ItemCount) + } else { + assert.Equal(t, test.itemName, info.SharePoint.ItemName) } }) } @@ -213,7 +315,23 @@ func (suite *SharePointCollectionSuite) TestLazyCollection_Items() { ) fullPath, err := path.Build( - "t", "pr", path.SharePointService, path.ListsCategory, false, "listid") + "t", + "pr", + path.SharePointService, + path.ListsCategory, + false, + "full") + require.NoError(t, err, clues.ToCore(err)) + + locPath := path.Elements{"full"}.Builder() + + prevPath, err := path.Build( + "t", + "pr", + path.SharePointService, + path.ListsCategory, + false, 
+ "prev") require.NoError(t, err, clues.ToCore(err)) tables := []struct { @@ -223,7 +341,8 @@ func (suite *SharePointCollectionSuite) TestLazyCollection_Items() { expectReads []string }{ { - name: "no lists", + name: "no lists", + expectReads: []string{}, }, { name: "added lists", @@ -248,15 +367,19 @@ func (suite *SharePointCollectionSuite) TestLazyCollection_Items() { ctx, flush := tester.NewContext(t) defer flush() - getter := &mock.ListHandler{} + getter := mock.NewListHandler(nil, "", nil) defer getter.Check(t, test.expectReads) - col := &lazyFetchCollection{ - stream: make(chan data.Item), - fullPath: fullPath, - items: test.items, - getter: getter, - statusUpdater: statusUpdater, + col := NewLazyFetchCollection( + getter, + fullPath, + prevPath, + locPath, + statusUpdater, + count.New()) + + for listID, modTime := range test.items { + col.AddItem(listID, modTime) } for item := range col.Items(ctx, errs) { @@ -302,7 +425,7 @@ func (suite *SharePointCollectionSuite) TestLazyItem() { ctx, flush := tester.NewContext(t) defer flush() - lh := mock.ListHandler{} + lh := mock.NewListHandler(nil, "", nil) li := data.NewLazyItemWithInfo( ctx, @@ -346,9 +469,7 @@ func (suite *SharePointCollectionSuite) TestLazyItem_ReturnsEmptyReaderOnDeleted ctx, flush := tester.NewContext(t) defer flush() - lh := mock.ListHandler{ - Err: graph.ErrDeletedInFlight, - } + lh := mock.NewListHandler(nil, "", graph.ErrDeletedInFlight) li := data.NewLazyItemWithInfo( ctx, diff --git a/src/internal/m365/collection/site/handlers.go b/src/internal/m365/collection/site/handlers.go index 86948768aa..253883a0e0 100644 --- a/src/internal/m365/collection/site/handlers.go +++ b/src/internal/m365/collection/site/handlers.go @@ -20,7 +20,7 @@ type backupHandler interface { // canonicalPath constructs the service and category specific path for // the given builder. 
type canonicalPather interface { - canonicalPath( + CanonicalPath( storageDir path.Elements, tenantID string, ) (path.Path, error) diff --git a/src/internal/m365/collection/site/lists_handler.go b/src/internal/m365/collection/site/lists_handler.go index 01805f7142..5ba1b9f780 100644 --- a/src/internal/m365/collection/site/lists_handler.go +++ b/src/internal/m365/collection/site/lists_handler.go @@ -25,7 +25,7 @@ func NewListsBackupHandler(protectedResource string, ac api.Lists) listsBackupHa } } -func (bh listsBackupHandler) canonicalPath( +func (bh listsBackupHandler) CanonicalPath( storageDirFolders path.Elements, tenantID string, ) (path.Path, error) { diff --git a/src/internal/m365/collection/site/lists_metadata.go b/src/internal/m365/collection/site/lists_metadata.go index d5c14a79bb..7f1f8c1b66 100644 --- a/src/internal/m365/collection/site/lists_metadata.go +++ b/src/internal/m365/collection/site/lists_metadata.go @@ -97,3 +97,22 @@ func parseListsMetadataCollections( return cdp[cat], true, nil } + +func pathFromPrevString(ps string) (path.Path, error) { + p, err := path.FromDataLayerPath(ps, false) + if err != nil { + return nil, clues.Wrap(err, "parsing previous path string") + } + + return p, nil +} + +func makeTombstones(dps metadata.DeltaPaths) map[string]string { + r := make(map[string]string, len(dps)) + + for id, v := range dps { + r[id] = v.Path + } + + return r +} diff --git a/src/internal/m365/collection/site/mock/list.go b/src/internal/m365/collection/site/mock/list.go index f75a4a4064..ede41d3721 100644 --- a/src/internal/m365/collection/site/mock/list.go +++ b/src/internal/m365/collection/site/mock/list.go @@ -7,40 +7,90 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/stretchr/testify/assert" + "golang.org/x/exp/maps" "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/pkg/backup/details" + "github.com/alcionai/corso/src/pkg/path" + "github.com/alcionai/corso/src/pkg/services/m365/api" ) type ListHandler struct { - List models.Listable - ListIDs []string - Err error + protectedResource string + lists []models.Listable + listsMap map[string]models.Listable + err error } -func (lh *ListHandler) GetItemByID( +func NewListHandler(lists []models.Listable, protectedResource string, err error) ListHandler { + lstMap := make(map[string]models.Listable) + for _, lst := range lists { + lstMap[ptr.Val(lst.GetId())] = lst + } + + return ListHandler{ + protectedResource: protectedResource, + lists: lists, + listsMap: lstMap, + err: err, + } +} + +func (lh ListHandler) GetItemByID( ctx context.Context, itemID string, ) (models.Listable, *details.SharePointInfo, error) { - lh.ListIDs = append(lh.ListIDs, itemID) + lstInfo := &details.SharePointInfo{ + List: &details.ListInfo{ + Name: itemID, + }, + } + + lst, ok := lh.listsMap[itemID] + if ok { + return lst, lstInfo, lh.err + } + + listInfo := models.NewListInfo() + listInfo.SetTemplate(ptr.To("genericList")) ls := models.NewList() + ls.SetId(ptr.To(itemID)) + ls.SetList(listInfo) - lh.List = ls - lh.List.SetId(ptr.To(itemID)) + lh.listsMap[itemID] = ls - info := &details.SharePointInfo{ - ItemName: itemID, - } + return ls, lstInfo, lh.err +} + +func (lh ListHandler) GetItems( + context.Context, + api.CallConfig, +) ([]models.Listable, error) { + return lh.lists, lh.err +} - return ls, info, lh.Err +func (lh ListHandler) CanonicalPath( + storageDirFolders path.Elements, + tenantID string, +) (path.Path, error) { + return storageDirFolders. + Builder(). 
+ ToDataLayerPath( + tenantID, + lh.protectedResource, + path.SharePointService, + path.ListsCategory, + false) } func (lh *ListHandler) Check(t *testing.T, expected []string) { - slices.Sort(lh.ListIDs) + listIDs := maps.Keys(lh.listsMap) + + slices.Sort(listIDs) slices.Sort(expected) - assert.Equal(t, expected, lh.ListIDs, "expected calls") + assert.Equal(t, expected, listIDs, "expected calls") } type ListRestoreHandler struct { @@ -60,3 +110,21 @@ func (lh *ListRestoreHandler) PostList( return lh.List, lh.Err } + +func StubLists(ids ...string) []models.Listable { + lists := make([]models.Listable, 0, len(ids)) + + for _, id := range ids { + listInfo := models.NewListInfo() + listInfo.SetTemplate(ptr.To("genericList")) + + lst := models.NewList() + lst.SetDisplayName(ptr.To(id)) + lst.SetId(ptr.To(id)) + lst.SetList(listInfo) + + lists = append(lists, lst) + } + + return lists +} diff --git a/src/pkg/count/keys.go b/src/pkg/count/keys.go index 9aad8acf7f..9ee1fb9fc3 100644 --- a/src/pkg/count/keys.go +++ b/src/pkg/count/keys.go @@ -64,6 +64,7 @@ const ( PrevPaths Key = "previous-paths" PreviousPathMetadataCollision Key = "previous-path-metadata-collision" Sites Key = "sites" + Lists Key = "lists" SkippedContainers Key = "skipped-containers" StreamBytesAdded Key = "stream-bytes-added" StreamDirsAdded Key = "stream-dirs-added"
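A note for readers following the series: the incremental logic added in PATCH 4 reduces to a small state machine. A list's collection state falls out of comparing its previous backup path (read from the PreviousPathFileName metadata) with its current path, and any previous path left unmatched after the current enumeration becomes a tombstone. The sketch below is a minimal, self-contained illustration of that comparison only; the names `collectionState` and `deriveState` are invented for the example, and it uses plain strings where the corso code above uses `path.Path` values, `data.StateOf`, and `makeTombstones`.

```go
package main

import "fmt"

type collectionState string

const (
	newState      collectionState = "new"
	notMovedState collectionState = "not-moved"
	movedState    collectionState = "moved"
	deletedState  collectionState = "deleted"
)

// deriveState mirrors the prev/curr comparison: no previous path means a
// brand-new collection, no current path means the list was deleted (a
// tombstone), equal paths mean not-moved, differing paths mean moved.
func deriveState(prev, curr string) collectionState {
	switch {
	case prev == "":
		return newState
	case curr == "":
		return deletedState
	case prev == curr:
		return notMovedState
	default:
		return movedState
	}
}

func main() {
	// Previous paths, as persisted by the prior backup's metadata file.
	prevPaths := map[string]string{
		"list-one": "tenant/site/lists/list-one",
		"list-two": "tenant/site/lists/list-two",
	}
	// Current paths, as produced by enumerating lists in this run.
	currPaths := map[string]string{
		"list-one":   "tenant/site/lists/list-one",
		"list-three": "tenant/site/lists/list-three",
	}

	// Every ID seen this run is removed from the tombstone candidates,
	// just as populateListsCollections deletes live IDs from the map
	// returned by makeTombstones.
	for id, curr := range currPaths {
		fmt.Printf("%s: %s\n", id, deriveState(prevPaths[id], curr))
		delete(prevPaths, id)
	}

	// Whatever previous paths remain belong to deleted lists.
	for id, prev := range prevPaths {
		fmt.Printf("%s: %s\n", id, deriveState(prev, ""))
	}
}
```

With the inputs above, this prints not-moved for list-one, new for list-three, and deleted for list-two (map iteration order may vary), which matches the expectations exercised by TestPopulateListsCollections_incremental.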