Skip to content
This repository has been archived by the owner on Oct 11, 2024. It is now read-only.

Commit

Permalink
add clues/fault to sharepoint api (#2507)
Browse files Browse the repository at this point in the history
## Does this PR need a docs update or release note?

- [x] ⛔ No 

## Type of change

- [x] 🧹 Tech Debt/Cleanup

## Issue(s)

* #1970

## Test Plan

- [x] ⚡ Unit test
- [x] 💚 E2E
  • Loading branch information
ryanfkeepers authored Feb 18, 2023
1 parent b1ff20d commit 5707036
Show file tree
Hide file tree
Showing 7 changed files with 57 additions and 146 deletions.
2 changes: 1 addition & 1 deletion src/internal/connector/sharepoint/api/api.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package api

type Tuple struct {
type NameID struct {
Name string
ID string
}
Expand Down
116 changes: 51 additions & 65 deletions src/internal/connector/sharepoint/api/pages.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,20 @@ import (
"fmt"
"io"
"sync"
"time"

"github.com/pkg/errors"

"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr"
discover "github.com/alcionai/corso/src/internal/connector/discovery/api"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/models"
"github.com/alcionai/corso/src/internal/connector/graph/betasdk/sites"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
D "github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
)

// GetSitePages retrieves a collection of Pages related to the given Site.
Expand All @@ -25,30 +28,31 @@ func GetSitePages(
serv *discover.BetaService,
siteID string,
pages []string,
errs *fault.Errors,
) ([]models.SitePageable, error) {
var (
col = make([]models.SitePageable, 0)
semaphoreCh = make(chan struct{}, fetchChannelSize)
opts = retrieveSitePageOptions()
err, errs error
err error
wg sync.WaitGroup
m sync.Mutex
)

defer close(semaphoreCh)

errUpdater := func(id string, err error) {
m.Lock()
errs = support.WrapAndAppend(id, err, errs)
m.Unlock()
}
updatePages := func(page models.SitePageable) {
m.Lock()
defer m.Unlock()

col = append(col, page)
m.Unlock()
}

for _, entry := range pages {
if errs.Err() != nil {
break
}

semaphoreCh <- struct{}{}

wg.Add(1)
Expand All @@ -61,47 +65,47 @@ func GetSitePages(

page, err = serv.Client().SitesById(siteID).PagesById(pageID).Get(ctx, opts)
if err != nil {
errUpdater(pageID, errors.Wrap(err, support.ConnectorStackErrorTrace(err)+" fetching page"))
} else {
updatePages(page)
errs.Add(clues.Wrap(err, "fetching page").WithClues(ctx).With(graph.ErrData(err)...))
return
}

updatePages(page)
}(entry)
}

wg.Wait()

if errs != nil {
return nil, errs
}

return col, nil
return col, errs.Err()
}

// FetchPages is a utility function that returns the name/ID pairs of the site's pages.
func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([]Tuple, error) {
func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([]NameID, error) {
var (
builder = bs.Client().SitesById(siteID).Pages()
opts = fetchPageOptions()
pageTuples = make([]Tuple, 0)
resp models.SitePageCollectionResponseable
err error
builder = bs.Client().SitesById(siteID).Pages()
opts = fetchPageOptions()
pages = make([]NameID, 0)
resp models.SitePageCollectionResponseable
err error
)

for {
resp, err = builder.Get(ctx, opts)
if err != nil {
return nil, support.ConnectorStackErrorTraceWrap(err, "failed fetching site page")
return nil, clues.Wrap(err, "fetching site page").WithClues(ctx).With(graph.ErrData(err)...)
}

for _, entry := range resp.GetValue() {
pid := *entry.GetId()
temp := Tuple{pid, pid}

if entry.GetName() != nil {
temp.Name = *entry.GetName()
var (
pid = *entry.GetId()
temp = NameID{pid, pid}
)

name, ok := ptr.ValOK(entry.GetName())
if ok {
temp.Name = name
}

pageTuples = append(pageTuples, temp)
pages = append(pages, temp)
}

if resp.GetOdataNextLink() == nil {
Expand All @@ -111,7 +115,7 @@ func FetchPages(ctx context.Context, bs *discover.BetaService, siteID string) ([
builder = sites.NewItemPagesRequestBuilder(*resp.GetOdataNextLink(), bs.Client().Adapter())
}

return pageTuples, nil
return pages, nil
}

// fetchPageOptions is used to return minimal information relating to Site Pages
Expand All @@ -136,7 +140,7 @@ func DeleteSitePage(
) error {
err := serv.Client().SitesById(siteID).PagesById(pageID).Delete(ctx, nil)
if err != nil {
return support.ConnectorStackErrorTraceWrap(err, "deleting page: "+pageID)
return clues.Wrap(err, "deleting page").WithClues(ctx).With(graph.ErrData(err)...)
}

return nil
Expand Down Expand Up @@ -169,9 +173,11 @@ func RestoreSitePage(
pageName = pageID
)

ctx = clues.Add(ctx, "page_id", pageID)

byteArray, err := io.ReadAll(itemData.ToReader())
if err != nil {
return dii, errors.Wrap(err, "reading sharepoint page bytes from stream")
return dii, clues.Wrap(err, "reading sharepoint data").WithClues(ctx)
}

// Hydrate Page
Expand All @@ -180,9 +186,9 @@ func RestoreSitePage(
return dii, errors.Wrapf(err, "creating Page object %s", pageID)
}

pageNamePtr := page.GetName()
if pageNamePtr != nil {
pageName = *pageNamePtr
name, ok := ptr.ValOK(page.GetName())
if ok {
pageName = name
}

newName := fmt.Sprintf("%s_%s", destName, pageName)
Expand All @@ -194,19 +200,16 @@ func RestoreSitePage(
// See: https://learn.microsoft.com/en-us/graph/api/sitepage-create?view=graph-rest-beta
restoredPage, err := service.Client().SitesById(siteID).Pages().Post(ctx, page, nil)
if err != nil {
sendErr := support.ConnectorStackErrorTraceWrap(
err,
"creating page from ID: %s"+pageName+" API Error Details",
)

return dii, sendErr
return dii, clues.Wrap(err, "creating page").WithClues(ctx).With(graph.ErrData(err)...)
}

pageID = *restoredPage.GetId()
pageID = ptr.Val(restoredPage.GetId())
ctx = clues.Add(ctx, "restored_page_id", pageID)

// Publish page to make visible
// See https://learn.microsoft.com/en-us/graph/api/sitepage-publish?view=graph-rest-beta
if restoredPage.GetWebUrl() == nil {
return dii, fmt.Errorf("creating page %s incomplete. Field `webURL` not populated", pageID)
return dii, clues.New("webURL not populated during page creation").WithClues(ctx)
}

err = service.Client().
Expand All @@ -215,10 +218,7 @@ func RestoreSitePage(
Publish().
Post(ctx, nil)
if err != nil {
return dii, support.ConnectorStackErrorTraceWrap(
err,
"publishing page ID: "+*restoredPage.GetId()+" API Error Details",
)
return dii, clues.Wrap(err, "publishing page").WithClues(ctx).With(graph.ErrData(err)...)
}

dii.SharePoint = PageInfo(restoredPage, int64(len(byteArray)))
Expand All @@ -234,26 +234,12 @@ func RestoreSitePage(
// PageInfo extracts useful metadata into a struct for bookkeeping
func PageInfo(page models.SitePageable, size int64) *details.SharePointInfo {
var (
name, webURL string
created, modified time.Time
name = ptr.Val(page.GetTitle())
webURL = ptr.Val(page.GetWebUrl())
created = ptr.Val(page.GetCreatedDateTime())
modified = ptr.Val(page.GetLastModifiedDateTime())
)

if page.GetTitle() != nil {
name = *page.GetTitle()
}

if page.GetWebUrl() != nil {
webURL = *page.GetWebUrl()
}

if page.GetCreatedDateTime() != nil {
created = *page.GetCreatedDateTime()
}

if page.GetLastModifiedDateTime() != nil {
modified = *page.GetLastModifiedDateTime()
}

return &details.SharePointInfo{
ItemType: details.SharePointItem,
ItemName: name,
Expand Down
3 changes: 2 additions & 1 deletion src/internal/connector/sharepoint/api/pages_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/sharepoint/api"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
)

type SharePointPageSuite struct {
Expand Down Expand Up @@ -71,7 +72,7 @@ func (suite *SharePointPageSuite) TestGetSitePages() {
require.NotNil(t, tuples)

jobs := []string{tuples[0].ID}
pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs)
pages, err := api.GetSitePages(ctx, suite.service, suite.siteID, jobs, fault.New(true))
assert.NoError(t, err)
assert.NotEmpty(t, pages)
}
Expand Down
4 changes: 2 additions & 2 deletions src/internal/connector/sharepoint/collection.go
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,7 @@ func (sc *Collection) retrievePages(
return metrics, clues.New("beta service required").WithClues(ctx)
}

pages, err := sapi.GetSitePages(ctx, betaService, sc.fullPath.ResourceOwner(), sc.jobs)
pages, err := sapi.GetSitePages(ctx, betaService, sc.fullPath.ResourceOwner(), sc.jobs, errs)
if err != nil {
return metrics, err
}
Expand Down Expand Up @@ -310,7 +310,7 @@ func (sc *Collection) retrievePages(
sc.data <- &Item{
id: *pg.GetId(),
data: io.NopCloser(bytes.NewReader(byteArray)),
info: sharePointPageInfo(pg, size),
info: sapi.PageInfo(pg, size),
modTime: ptr.OrNow(pg.GetLastModifiedDateTime()),
}

Expand Down
28 changes: 0 additions & 28 deletions src/internal/connector/sharepoint/pageInfo.go

This file was deleted.

48 changes: 0 additions & 48 deletions src/internal/connector/sharepoint/pageInfo_test.go

This file was deleted.

2 changes: 1 addition & 1 deletion src/internal/connector/sharepoint/restore.go
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,7 @@ func RestorePageCollection(
service := discover.NewBetaService(adpt)

// Restore items from collection
items := dc.Items(ctx, nil) // TODO: fault.Errors instead of nil
items := dc.Items(ctx, errs)

for {
if errs.Err() != nil {
Expand Down

0 comments on commit 5707036

Please sign in to comment.