From ba3324260eda0621a0e11f342e3c92027b4f3ef8 Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Thu, 16 Nov 2023 14:48:24 +0900 Subject: [PATCH 01/11] Change ref key of Users from _id into username --- api/types/project.go | 2 +- server/backend/database/database.go | 8 +-- server/backend/database/memory/database.go | 22 ++++---- server/backend/database/mongo/client.go | 47 +++++------------ server/backend/database/project_info.go | 4 +- server/backend/database/project_info_test.go | 6 +-- .../backend/database/testcases/testcases.go | 52 +++++++++---------- server/projects/projects.go | 8 +-- server/rpc/admin_server.go | 20 +++---- 9 files changed, 73 insertions(+), 96 deletions(-) diff --git a/api/types/project.go b/api/types/project.go index 420b0cf67..f290731ad 100644 --- a/api/types/project.go +++ b/api/types/project.go @@ -28,7 +28,7 @@ type Project struct { Name string `json:"name"` // Owner is the owner of this project. - Owner ID `json:"owner"` + Owner string `json:"owner"` // AuthWebhookURL is the url of the authorization webhook. AuthWebhookURL string `json:"auth_webhook_url"` diff --git a/server/backend/database/database.go b/server/backend/database/database.go index be0118e2f..78b498cfc 100644 --- a/server/backend/database/database.go +++ b/server/backend/database/database.go @@ -71,7 +71,7 @@ type Database interface { // FindProjectInfoByName returns a project by the given name. FindProjectInfoByName( ctx context.Context, - owner types.ID, + owner string, name string, ) (*ProjectInfo, error) @@ -93,17 +93,17 @@ type Database interface { CreateProjectInfo( ctx context.Context, name string, - owner types.ID, + owner string, clientDeactivateThreshold string, ) (*ProjectInfo, error) // ListProjectInfos returns all project infos owned by owner. - ListProjectInfos(ctx context.Context, owner types.ID) ([]*ProjectInfo, error) + ListProjectInfos(ctx context.Context, owner string) ([]*ProjectInfo, error) // UpdateProjectInfo updates the project. 
UpdateProjectInfo( ctx context.Context, - owner types.ID, + owner string, id types.ID, fields *types.UpdatableProjectFields, ) (*ProjectInfo, error) diff --git a/server/backend/database/memory/database.go b/server/backend/database/memory/database.go index 7a45335cb..bbbf6630c 100644 --- a/server/backend/database/memory/database.go +++ b/server/backend/database/memory/database.go @@ -78,13 +78,13 @@ func (d *DB) FindProjectInfoByPublicKey( // FindProjectInfoByName returns a project by the given name. func (d *DB) FindProjectInfoByName( _ context.Context, - owner types.ID, + owner string, name string, ) (*database.ProjectInfo, error) { txn := d.db.Txn(false) defer txn.Abort() - raw, err := txn.First(tblProjects, "owner_name", owner.String(), name) + raw, err := txn.First(tblProjects, "owner_name", owner, name) if err != nil { return nil, fmt.Errorf("find project by owner and name: %w", err) } @@ -124,7 +124,7 @@ func (d *DB) EnsureDefaultUserAndProject( return nil, nil, err } - project, err := d.ensureDefaultProjectInfo(ctx, user.ID, clientDeactivateThreshold) + project, err := d.ensureDefaultProjectInfo(ctx, username, clientDeactivateThreshold) if err != nil { return nil, nil, err } @@ -168,7 +168,7 @@ func (d *DB) ensureDefaultUserInfo( // ensureDefaultProjectInfo creates the default project if it does not exist. 
func (d *DB) ensureDefaultProjectInfo( _ context.Context, - defaultUserID types.ID, + defaultUserName string, defaultClientDeactivateThreshold string, ) (*database.ProjectInfo, error) { txn := d.db.Txn(true) @@ -181,7 +181,7 @@ func (d *DB) ensureDefaultProjectInfo( var info *database.ProjectInfo if raw == nil { - info = database.NewProjectInfo(database.DefaultProjectName, defaultUserID, defaultClientDeactivateThreshold) + info = database.NewProjectInfo(database.DefaultProjectName, defaultUserName, defaultClientDeactivateThreshold) info.ID = database.DefaultProjectID if err := txn.Insert(tblProjects, info); err != nil { return nil, fmt.Errorf("insert project: %w", err) @@ -198,7 +198,7 @@ func (d *DB) ensureDefaultProjectInfo( func (d *DB) CreateProjectInfo( _ context.Context, name string, - owner types.ID, + owner string, clientDeactivateThreshold string, ) (*database.ProjectInfo, error) { txn := d.db.Txn(true) @@ -206,7 +206,7 @@ func (d *DB) CreateProjectInfo( // NOTE(hackerwins): Check if the project already exists. // https://github.com/hashicorp/go-memdb/issues/7#issuecomment-270427642 - existing, err := txn.First(tblProjects, "owner_name", owner.String(), name) + existing, err := txn.First(tblProjects, "owner_name", owner, name) if err != nil { return nil, fmt.Errorf("find project by owner and name: %w", err) } @@ -265,7 +265,7 @@ func (d *DB) listProjectInfos( // ListProjectInfos returns all project infos owned by owner. func (d *DB) ListProjectInfos( _ context.Context, - owner types.ID, + owner string, ) ([]*database.ProjectInfo, error) { txn := d.db.Txn(false) defer txn.Abort() @@ -273,7 +273,7 @@ func (d *DB) ListProjectInfos( iter, err := txn.LowerBound( tblProjects, "owner_name", - owner.String(), + owner, "", ) if err != nil { @@ -296,7 +296,7 @@ func (d *DB) ListProjectInfos( // UpdateProjectInfo updates the given project. 
func (d *DB) UpdateProjectInfo( _ context.Context, - owner types.ID, + owner string, id types.ID, fields *types.UpdatableProjectFields, ) (*database.ProjectInfo, error) { @@ -317,7 +317,7 @@ func (d *DB) UpdateProjectInfo( } if fields.Name != nil { - existing, err := txn.First(tblProjects, "owner_name", owner.String(), *fields.Name) + existing, err := txn.First(tblProjects, "owner_name", owner, *fields.Name) if err != nil { return nil, fmt.Errorf("find project by owner and name: %w", err) } diff --git a/server/backend/database/mongo/client.go b/server/backend/database/mongo/client.go index 5429f58f7..221ff38eb 100644 --- a/server/backend/database/mongo/client.go +++ b/server/backend/database/mongo/client.go @@ -107,7 +107,7 @@ func (c *Client) EnsureDefaultUserAndProject( return nil, nil, err } - projectInfo, err := c.ensureDefaultProjectInfo(ctx, userInfo.ID, clientDeactivateThreshold) + projectInfo, err := c.ensureDefaultProjectInfo(ctx, userInfo.Username, clientDeactivateThreshold) if err != nil { return nil, nil, err } @@ -162,26 +162,22 @@ func (c *Client) ensureDefaultUserInfo( // ensureDefaultProjectInfo creates the default project info if it does not exist. 
func (c *Client) ensureDefaultProjectInfo( ctx context.Context, - defaultUserID types.ID, + defaultUserName string, defaultClientDeactivateThreshold string, ) (*database.ProjectInfo, error) { - candidate := database.NewProjectInfo(database.DefaultProjectName, defaultUserID, defaultClientDeactivateThreshold) + candidate := database.NewProjectInfo(database.DefaultProjectName, defaultUserName, defaultClientDeactivateThreshold) candidate.ID = database.DefaultProjectID encodedID, err := encodeID(candidate.ID) if err != nil { return nil, err } - encodedDefaultUserID, err := encodeID(defaultUserID) - if err != nil { - return nil, err - } _, err = c.collection(colProjects).UpdateOne(ctx, bson.M{ "_id": encodedID, }, bson.M{ "$setOnInsert": bson.M{ "name": candidate.Name, - "owner": encodedDefaultUserID, + "owner": candidate.Owner, "client_deactivate_threshold": candidate.ClientDeactivateThreshold, "public_key": candidate.PublicKey, "secret_key": candidate.SecretKey, @@ -211,18 +207,13 @@ func (c *Client) ensureDefaultProjectInfo( func (c *Client) CreateProjectInfo( ctx context.Context, name string, - owner types.ID, + owner string, clientDeactivateThreshold string, ) (*database.ProjectInfo, error) { - encodedOwner, err := encodeID(owner) - if err != nil { - return nil, err - } - info := database.NewProjectInfo(name, owner, clientDeactivateThreshold) result, err := c.collection(colProjects).InsertOne(ctx, bson.M{ "name": info.Name, - "owner": encodedOwner, + "owner": owner, "client_deactivate_threshold": info.ClientDeactivateThreshold, "public_key": info.PublicKey, "secret_key": info.SecretKey, @@ -274,15 +265,10 @@ func (c *Client) listProjectInfos( // ListProjectInfos returns all project infos owned by owner. 
func (c *Client) ListProjectInfos( ctx context.Context, - owner types.ID, + owner string, ) ([]*database.ProjectInfo, error) { - encodedOwnerID, err := encodeID(owner) - if err != nil { - return nil, err - } - cursor, err := c.collection(colProjects).Find(ctx, bson.M{ - "owner": encodedOwnerID, + "owner": owner, }) if err != nil { return nil, fmt.Errorf("fetch project infos: %w", err) @@ -316,17 +302,12 @@ func (c *Client) FindProjectInfoByPublicKey(ctx context.Context, publicKey strin // FindProjectInfoByName returns a project by name. func (c *Client) FindProjectInfoByName( ctx context.Context, - owner types.ID, + owner string, name string, ) (*database.ProjectInfo, error) { - encodedOwner, err := encodeID(owner) - if err != nil { - return nil, err - } - result := c.collection(colProjects).FindOne(ctx, bson.M{ "name": name, - "owner": encodedOwner, + "owner": owner, }) projectInfo := database.ProjectInfo{} @@ -365,14 +346,10 @@ func (c *Client) FindProjectInfoByID(ctx context.Context, id types.ID) (*databas // UpdateProjectInfo updates the project info. 
func (c *Client) UpdateProjectInfo( ctx context.Context, - owner types.ID, + owner string, id types.ID, fields *types.UpdatableProjectFields, ) (*database.ProjectInfo, error) { - encodedOwner, err := encodeID(owner) - if err != nil { - return nil, err - } encodedID, err := encodeID(id) if err != nil { return nil, err @@ -391,7 +368,7 @@ func (c *Client) UpdateProjectInfo( res := c.collection(colProjects).FindOneAndUpdate(ctx, bson.M{ "_id": encodedID, - "owner": encodedOwner, + "owner": owner, }, bson.M{ "$set": updatableFields, }, options.FindOneAndUpdate().SetReturnDocument(options.After)) diff --git a/server/backend/database/project_info.go b/server/backend/database/project_info.go index 8c5fe7eca..cc2d76958 100644 --- a/server/backend/database/project_info.go +++ b/server/backend/database/project_info.go @@ -43,7 +43,7 @@ type ProjectInfo struct { Name string `bson:"name"` // Owner is the owner of this project. - Owner types.ID `bson:"owner"` + Owner string `bson:"owner"` // PublicKey is the API key of this project. PublicKey string `bson:"public_key"` @@ -69,7 +69,7 @@ type ProjectInfo struct { } // NewProjectInfo creates a new ProjectInfo of the given name. 
-func NewProjectInfo(name string, owner types.ID, clientDeactivateThreshold string) *ProjectInfo { +func NewProjectInfo(name string, owner string, clientDeactivateThreshold string) *ProjectInfo { return &ProjectInfo{ Name: name, Owner: owner, diff --git a/server/backend/database/project_info_test.go b/server/backend/database/project_info_test.go index 658e2bf33..2fbece09c 100644 --- a/server/backend/database/project_info_test.go +++ b/server/backend/database/project_info_test.go @@ -27,9 +27,9 @@ import ( func TestProjectInfo(t *testing.T) { t.Run("update fields test", func(t *testing.T) { - dummyOwnerID := types.ID("000000000000000000000000") + dummyOwnerName := "dummy" clientDeactivateThreshold := "1h" - project := database.NewProjectInfo(t.Name(), dummyOwnerID, clientDeactivateThreshold) + project := database.NewProjectInfo(t.Name(), dummyOwnerName, clientDeactivateThreshold) testName := "testName" testURL := "testUrl" @@ -44,7 +44,7 @@ func TestProjectInfo(t *testing.T) { project.UpdateFields(&types.UpdatableProjectFields{AuthWebhookMethods: &testMethods}) assert.Equal(t, testMethods, project.AuthWebhookMethods) - assert.Equal(t, dummyOwnerID, project.Owner) + assert.Equal(t, dummyOwnerName, project.Owner) project.UpdateFields(&types.UpdatableProjectFields{ ClientDeactivateThreshold: &testClientDeactivateThreshold, diff --git a/server/backend/database/testcases/testcases.go b/server/backend/database/testcases/testcases.go index 3cc19121c..629dcc83d 100644 --- a/server/backend/database/testcases/testcases.go +++ b/server/backend/database/testcases/testcases.go @@ -42,8 +42,8 @@ import ( ) const ( - dummyOwnerID = types.ID("000000000000000000000000") - otherOwnerID = types.ID("000000000000000000000001") + dummyOwnerName = "dummy" + otherOwnerName = "other" dummyClientID = types.ID("000000000000000000000000") clientDeactivateThreshold = "1h" ) @@ -81,42 +81,42 @@ func RunFindProjectInfoByNameTest( ctx := context.Background() suffixes := []int{0, 1, 2} for _, 
suffix := range suffixes { - _, err := db.CreateProjectInfo(ctx, fmt.Sprintf("%s-%d", t.Name(), suffix), dummyOwnerID, clientDeactivateThreshold) + _, err := db.CreateProjectInfo(ctx, fmt.Sprintf("%s-%d", t.Name(), suffix), dummyOwnerName, clientDeactivateThreshold) assert.NoError(t, err) } - _, err := db.CreateProjectInfo(ctx, t.Name(), otherOwnerID, clientDeactivateThreshold) + _, err := db.CreateProjectInfo(ctx, t.Name(), otherOwnerName, clientDeactivateThreshold) assert.NoError(t, err) // Lists all projects that the dummyOwnerID is the owner. - projects, err := db.ListProjectInfos(ctx, dummyOwnerID) + projects, err := db.ListProjectInfos(ctx, dummyOwnerName) assert.NoError(t, err) assert.Len(t, projects, len(suffixes)) - _, err = db.CreateProjectInfo(ctx, t.Name(), dummyOwnerID, clientDeactivateThreshold) + _, err = db.CreateProjectInfo(ctx, t.Name(), dummyOwnerName, clientDeactivateThreshold) assert.NoError(t, err) - project, err := db.FindProjectInfoByName(ctx, dummyOwnerID, t.Name()) + project, err := db.FindProjectInfoByName(ctx, dummyOwnerName, t.Name()) assert.NoError(t, err) assert.Equal(t, project.Name, t.Name()) newName := fmt.Sprintf("%s-%d", t.Name(), 3) fields := &types.UpdatableProjectFields{Name: &newName} - _, err = db.UpdateProjectInfo(ctx, dummyOwnerID, project.ID, fields) + _, err = db.UpdateProjectInfo(ctx, dummyOwnerName, project.ID, fields) assert.NoError(t, err) - _, err = db.FindProjectInfoByName(ctx, dummyOwnerID, newName) + _, err = db.FindProjectInfoByName(ctx, dummyOwnerName, newName) assert.NoError(t, err) }) t.Run("FindProjectInfoByName test", func(t *testing.T) { ctx := context.Background() - info1, err := db.CreateProjectInfo(ctx, t.Name(), dummyOwnerID, clientDeactivateThreshold) + info1, err := db.CreateProjectInfo(ctx, t.Name(), dummyOwnerName, clientDeactivateThreshold) assert.NoError(t, err) - _, err = db.CreateProjectInfo(ctx, t.Name(), otherOwnerID, clientDeactivateThreshold) + _, err = db.CreateProjectInfo(ctx, t.Name(), 
otherOwnerName, clientDeactivateThreshold) assert.NoError(t, err) - info2, err := db.FindProjectInfoByName(ctx, dummyOwnerID, t.Name()) + info2, err := db.FindProjectInfoByName(ctx, dummyOwnerName, t.Name()) assert.NoError(t, err) assert.Equal(t, info1.ID, info2.ID) }) @@ -278,7 +278,7 @@ func RunListUserInfosTest(t *testing.T, db database.Database) { func RunActivateClientDeactivateClientTest(t *testing.T, db database.Database, projectID types.ID) { t.Run("activate and find client test", func(t *testing.T) { ctx := context.Background() - _, err := db.FindClientInfoByID(ctx, projectID, dummyOwnerID) + _, err := db.FindClientInfoByID(ctx, projectID, dummyClientID) assert.ErrorIs(t, err, database.ErrClientNotFound) clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) @@ -293,7 +293,7 @@ func RunActivateClientDeactivateClientTest(t *testing.T, db database.Database, p ctx := context.Background() // try to deactivate the client with not exists ID. - _, err := db.DeactivateClient(ctx, projectID, dummyOwnerID) + _, err := db.DeactivateClient(ctx, projectID, dummyClientID) assert.ErrorIs(t, err, database.ErrClientNotFound) clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) @@ -337,9 +337,9 @@ func RunUpdateProjectInfoTest(t *testing.T, db database.Database) { } newClientDeactivateThreshold := "1h" - info, err := db.CreateProjectInfo(ctx, t.Name(), dummyOwnerID, clientDeactivateThreshold) + info, err := db.CreateProjectInfo(ctx, t.Name(), dummyOwnerName, clientDeactivateThreshold) assert.NoError(t, err) - _, err = db.CreateProjectInfo(ctx, existName, dummyOwnerID, clientDeactivateThreshold) + _, err = db.CreateProjectInfo(ctx, existName, dummyOwnerName, clientDeactivateThreshold) assert.NoError(t, err) id := info.ID @@ -352,7 +352,7 @@ func RunUpdateProjectInfoTest(t *testing.T, db database.Database) { ClientDeactivateThreshold: &newClientDeactivateThreshold, } assert.NoError(t, fields.Validate()) - res, err := db.UpdateProjectInfo(ctx, dummyOwnerID, 
id, fields) + res, err := db.UpdateProjectInfo(ctx, dummyOwnerName, id, fields) assert.NoError(t, err) updateInfo, err := db.FindProjectInfoByID(ctx, id) assert.NoError(t, err) @@ -367,7 +367,7 @@ func RunUpdateProjectInfoTest(t *testing.T, db database.Database) { Name: &newName2, } assert.NoError(t, fields.Validate()) - res, err = db.UpdateProjectInfo(ctx, dummyOwnerID, id, fields) + res, err = db.UpdateProjectInfo(ctx, dummyOwnerName, id, fields) assert.NoError(t, err) updateInfo, err = db.FindProjectInfoByID(ctx, id) assert.NoError(t, err) @@ -383,7 +383,7 @@ func RunUpdateProjectInfoTest(t *testing.T, db database.Database) { AuthWebhookURL: &newAuthWebhookURL2, } assert.NoError(t, fields.Validate()) - res, err = db.UpdateProjectInfo(ctx, dummyOwnerID, id, fields) + res, err = db.UpdateProjectInfo(ctx, dummyOwnerName, id, fields) assert.NoError(t, err) updateInfo, err = db.FindProjectInfoByID(ctx, id) assert.NoError(t, err) @@ -399,7 +399,7 @@ func RunUpdateProjectInfoTest(t *testing.T, db database.Database) { ClientDeactivateThreshold: &clientDeactivateThreshold2, } assert.NoError(t, fields.Validate()) - res, err = db.UpdateProjectInfo(ctx, dummyOwnerID, id, fields) + res, err = db.UpdateProjectInfo(ctx, dummyOwnerName, id, fields) assert.NoError(t, err) updateInfo, err = db.FindProjectInfoByID(ctx, id) assert.NoError(t, err) @@ -411,12 +411,12 @@ func RunUpdateProjectInfoTest(t *testing.T, db database.Database) { // 05. Duplicated name test fields = &types.UpdatableProjectFields{Name: &existName} - _, err = db.UpdateProjectInfo(ctx, dummyOwnerID, id, fields) + _, err = db.UpdateProjectInfo(ctx, dummyOwnerName, id, fields) assert.ErrorIs(t, err, database.ErrProjectNameAlreadyExists) // 06. 
OwnerID not match test fields = &types.UpdatableProjectFields{Name: &existName} - _, err = db.UpdateProjectInfo(ctx, otherOwnerID, id, fields) + _, err = db.UpdateProjectInfo(ctx, otherOwnerName, id, fields) assert.ErrorIs(t, err, database.ErrProjectNotFound) }) } @@ -487,7 +487,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t ctx := context.Background() // dummy project setup - testProjectInfo, err := db.CreateProjectInfo(ctx, t.Name(), dummyOwnerID, clientDeactivateThreshold) + testProjectInfo, err := db.CreateProjectInfo(ctx, t.Name(), dummyOwnerName, clientDeactivateThreshold) assert.NoError(t, err) // dummy document setup @@ -591,7 +591,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t ctx := context.Background() // 01. Initialize a project and create documents. - projectInfo, err := db.CreateProjectInfo(ctx, t.Name(), dummyOwnerID, clientDeactivateThreshold) + projectInfo, err := db.CreateProjectInfo(ctx, t.Name(), dummyOwnerName, clientDeactivateThreshold) assert.NoError(t, err) var docInfos []*database.DocInfo @@ -630,9 +630,9 @@ func RunFindDeactivateCandidates(t *testing.T, db database.Database) { ctx := context.Background() // Lists all projects of the dummyOwnerID and otherOwnerID. - projects, err := db.ListProjectInfos(ctx, dummyOwnerID) + projects, err := db.ListProjectInfos(ctx, dummyOwnerName) assert.NoError(t, err) - otherProjects, err := db.ListProjectInfos(ctx, otherOwnerID) + otherProjects, err := db.ListProjectInfos(ctx, otherOwnerName) assert.NoError(t, err) projects = append(projects, otherProjects...) 
diff --git a/server/projects/projects.go b/server/projects/projects.go index ab9ee2370..638ea0719 100644 --- a/server/projects/projects.go +++ b/server/projects/projects.go @@ -29,7 +29,7 @@ import ( func CreateProject( ctx context.Context, be *backend.Backend, - owner types.ID, + owner string, name string, ) (*types.Project, error) { info, err := be.DB.CreateProjectInfo(ctx, name, owner, be.Config.ClientDeactivateThreshold) @@ -44,7 +44,7 @@ func CreateProject( func ListProjects( ctx context.Context, be *backend.Backend, - owner types.ID, + owner string, ) ([]*types.Project, error) { infos, err := be.DB.ListProjectInfos(ctx, owner) if err != nil { @@ -63,7 +63,7 @@ func ListProjects( func GetProject( ctx context.Context, be *backend.Backend, - owner types.ID, + owner string, name string, ) (*types.Project, error) { info, err := be.DB.FindProjectInfoByName(ctx, owner, name) @@ -78,7 +78,7 @@ func GetProject( func UpdateProject( ctx context.Context, be *backend.Backend, - owner types.ID, + owner string, id types.ID, fields *types.UpdatableProjectFields, ) (*types.Project, error) { diff --git a/server/rpc/admin_server.go b/server/rpc/admin_server.go index 2e754ff25..7572798d4 100644 --- a/server/rpc/admin_server.go +++ b/server/rpc/admin_server.go @@ -109,7 +109,7 @@ func (s *adminServer) CreateProject( } user := users.From(ctx) - project, err := projects.CreateProject(ctx, s.backend, user.ID, req.Name) + project, err := projects.CreateProject(ctx, s.backend, user.Username, req.Name) if err != nil { return nil, err } @@ -130,7 +130,7 @@ func (s *adminServer) ListProjects( _ *api.ListProjectsRequest, ) (*api.ListProjectsResponse, error) { user := users.From(ctx) - projectList, err := projects.ListProjects(ctx, s.backend, user.ID) + projectList, err := projects.ListProjects(ctx, s.backend, user.Username) if err != nil { return nil, err } @@ -151,7 +151,7 @@ func (s *adminServer) GetProject( req *api.GetProjectRequest, ) (*api.GetProjectResponse, error) { user := 
users.From(ctx) - project, err := projects.GetProject(ctx, s.backend, user.ID, req.Name) + project, err := projects.GetProject(ctx, s.backend, user.Username, req.Name) if err != nil { return nil, err } @@ -183,7 +183,7 @@ func (s *adminServer) UpdateProject( project, err := projects.UpdateProject( ctx, s.backend, - user.ID, + user.Username, types.ID(req.Id), fields, ) @@ -207,7 +207,7 @@ func (s *adminServer) GetDocument( req *api.GetDocumentRequest, ) (*api.GetDocumentResponse, error) { user := users.From(ctx) - project, err := projects.GetProject(ctx, s.backend, user.ID, req.ProjectName) + project, err := projects.GetProject(ctx, s.backend, user.Username, req.ProjectName) if err != nil { return nil, err } @@ -238,7 +238,7 @@ func (s *adminServer) GetSnapshotMeta( req *api.GetSnapshotMetaRequest, ) (*api.GetSnapshotMetaResponse, error) { user := users.From(ctx) - project, err := projects.GetProject(ctx, s.backend, user.ID, req.ProjectName) + project, err := projects.GetProject(ctx, s.backend, user.Username, req.ProjectName) if err != nil { return nil, err } @@ -271,7 +271,7 @@ func (s *adminServer) ListDocuments( req *api.ListDocumentsRequest, ) (*api.ListDocumentsResponse, error) { user := users.From(ctx) - project, err := projects.GetProject(ctx, s.backend, user.ID, req.ProjectName) + project, err := projects.GetProject(ctx, s.backend, user.Username, req.ProjectName) if err != nil { return nil, err } @@ -307,7 +307,7 @@ func (s *adminServer) SearchDocuments( req *api.SearchDocumentsRequest, ) (*api.SearchDocumentsResponse, error) { user := users.From(ctx) - project, err := projects.GetProject(ctx, s.backend, user.ID, req.ProjectName) + project, err := projects.GetProject(ctx, s.backend, user.Username, req.ProjectName) if err != nil { return nil, err } @@ -340,7 +340,7 @@ func (s *adminServer) RemoveDocumentByAdmin( req *api.RemoveDocumentByAdminRequest, ) (*api.RemoveDocumentByAdminResponse, error) { user := users.From(ctx) - project, err := 
projects.GetProject(ctx, s.backend, user.ID, req.ProjectName) + project, err := projects.GetProject(ctx, s.backend, user.Username, req.ProjectName) if err != nil { return nil, err } @@ -394,7 +394,7 @@ func (s *adminServer) ListChanges( req *api.ListChangesRequest, ) (*api.ListChangesResponse, error) { user := users.From(ctx) - project, err := projects.GetProject(ctx, s.backend, user.ID, req.ProjectName) + project, err := projects.GetProject(ctx, s.backend, user.Username, req.ProjectName) if err != nil { return nil, err } From 22650f68927ce23a4dc87b4230c77308e50a3a45 Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Wed, 22 Nov 2023 12:48:13 +0900 Subject: [PATCH 02/11] Change ref key of Documents from _id into (key, _id) --- admin/client.go | 4 +- api/yorkie/v1/admin.pb.go | 147 +++++----- api/yorkie/v1/admin.proto | 2 +- api/yorkie/v1/yorkie.pb.go | 195 +++++++++---- api/yorkie/v1/yorkie.proto | 2 + client/client.go | 25 +- cmd/yorkie/document/list.go | 14 +- server/backend/database/change_info.go | 2 + server/backend/database/client_info.go | 95 ++++--- server/backend/database/client_info_test.go | 42 +-- server/backend/database/database.go | 40 ++- server/backend/database/memory/database.go | 178 +++++++----- server/backend/database/memory/indexes.go | 26 +- server/backend/database/mongo/client.go | 123 +++++---- server/backend/database/mongo/indexes.go | 6 +- server/backend/database/snapshot_info.go | 5 + server/backend/database/synced_seq_info.go | 6 +- .../backend/database/testcases/testcases.go | 257 +++++++++--------- server/backend/sync/coordinator.go | 3 + server/backend/sync/memory/coordinator.go | 9 +- .../backend/sync/memory/coordinator_test.go | 4 +- server/backend/sync/memory/pubsub.go | 118 ++++---- server/backend/sync/memory/pubsub_test.go | 13 +- server/backend/sync/pubsub.go | 10 +- server/clients/clients.go | 47 ++-- server/documents/documents.go | 18 +- server/packs/history.go | 11 +- server/packs/packs.go | 21 +- server/packs/pushpull.go | 3 +- 
server/packs/snapshots.go | 23 +- server/rpc/admin_server.go | 17 +- server/rpc/server_test.go | 9 +- server/rpc/yorkie_server.go | 64 +++-- 33 files changed, 921 insertions(+), 618 deletions(-) diff --git a/admin/client.go b/admin/client.go index 493e0600e..f873ac1b8 100644 --- a/admin/client.go +++ b/admin/client.go @@ -248,7 +248,7 @@ func (c *Client) UpdateProject( func (c *Client) ListDocuments( ctx context.Context, projectName string, - previousID string, + previousKey string, pageSize int32, isForward bool, includeSnapshot bool, @@ -257,7 +257,7 @@ func (c *Client) ListDocuments( ctx, &api.ListDocumentsRequest{ ProjectName: projectName, - PreviousId: previousID, + PreviousKey: previousKey, PageSize: pageSize, IsForward: isForward, IncludeSnapshot: includeSnapshot, diff --git a/api/yorkie/v1/admin.pb.go b/api/yorkie/v1/admin.pb.go index d0f482dd4..0626dea1e 100644 --- a/api/yorkie/v1/admin.pb.go +++ b/api/yorkie/v1/admin.pb.go @@ -608,7 +608,7 @@ func (m *UpdateProjectResponse) GetProject() *Project { type ListDocumentsRequest struct { ProjectName string `protobuf:"bytes,1,opt,name=project_name,json=projectName,proto3" json:"project_name,omitempty"` - PreviousId string `protobuf:"bytes,2,opt,name=previous_id,json=previousId,proto3" json:"previous_id,omitempty"` + PreviousKey string `protobuf:"bytes,2,opt,name=previous_key,json=previousKey,proto3" json:"previous_key,omitempty"` PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` IsForward bool `protobuf:"varint,4,opt,name=is_forward,json=isForward,proto3" json:"is_forward,omitempty"` IncludeSnapshot bool `protobuf:"varint,5,opt,name=include_snapshot,json=includeSnapshot,proto3" json:"include_snapshot,omitempty"` @@ -657,9 +657,9 @@ func (m *ListDocumentsRequest) GetProjectName() string { return "" } -func (m *ListDocumentsRequest) GetPreviousId() string { +func (m *ListDocumentsRequest) GetPreviousKey() string { if m != nil { - return m.PreviousId + return 
m.PreviousKey } return "" } @@ -1328,70 +1328,69 @@ func init() { func init() { proto.RegisterFile("yorkie/v1/admin.proto", fileDescriptor_7ef4cd0843a14163) } var fileDescriptor_7ef4cd0843a14163 = []byte{ - // 999 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0xdd, 0x6e, 0xe3, 0x44, - 0x14, 0xae, 0xd3, 0xa4, 0x4d, 0x4e, 0xd2, 0x2d, 0x9d, 0x4d, 0xb6, 0x59, 0xd3, 0xe6, 0x67, 0xd0, - 0x6a, 0x0b, 0x8b, 0xb2, 0xb4, 0x08, 0x04, 0x02, 0x09, 0x91, 0x42, 0xab, 0xd5, 0xfe, 0x68, 0xd7, - 0xa1, 0x37, 0xbd, 0x89, 0xbc, 0xf1, 0x69, 0x6a, 0x9a, 0xd8, 0xce, 0x8c, 0x93, 0x55, 0x7a, 0xc7, - 0x5b, 0xf0, 0x32, 0x5c, 0x70, 0xc7, 0x25, 0x8f, 0x80, 0xca, 0x33, 0x70, 0x8f, 0x6c, 0xcf, 0xb8, - 0x63, 0xe7, 0x07, 0xda, 0xed, 0x5d, 0xe6, 0x9c, 0xef, 0x7c, 0xe7, 0x6f, 0xe6, 0x9c, 0x18, 0x2a, - 0x53, 0x97, 0x5d, 0xd8, 0xf8, 0x74, 0xb2, 0xff, 0xd4, 0xb4, 0x86, 0xb6, 0xd3, 0xf2, 0x98, 0xeb, - 0xbb, 0xa4, 0x10, 0x89, 0x5b, 0x93, 0x7d, 0xfd, 0xe1, 0x35, 0x82, 0x21, 0x77, 0xc7, 0xac, 0x87, - 0x3c, 0x42, 0xd1, 0x63, 0xd8, 0xe8, 0xd8, 0x7d, 0xe7, 0xc4, 0x33, 0x70, 0x34, 0x46, 0xee, 0x13, - 0x1d, 0xf2, 0x63, 0x8e, 0xcc, 0x31, 0x87, 0x58, 0xd5, 0x1a, 0xda, 0x5e, 0xc1, 0x88, 0xcf, 0x81, - 0xce, 0x33, 0x39, 0x7f, 0xe7, 0x32, 0xab, 0x9a, 0x89, 0x74, 0xf2, 0x4c, 0xbf, 0x80, 0x7b, 0x92, - 0x88, 0x7b, 0xae, 0xc3, 0x91, 0x7c, 0x04, 0xd9, 0xc0, 0x32, 0x64, 0x29, 0x1e, 0x6c, 0xb6, 0xe2, - 0x78, 0x5a, 0x27, 0x1c, 0x99, 0x11, 0x2a, 0xe9, 0x11, 0x94, 0x5e, 0xb8, 0xfd, 0x67, 0xce, 0xfb, - 0xba, 0x7f, 0x04, 0x1b, 0x82, 0x47, 0x78, 0x2f, 0x43, 0xce, 0x77, 0x2f, 0xd0, 0x11, 0x2c, 0xd1, - 0x81, 0x7e, 0x02, 0xe5, 0x43, 0x86, 0xa6, 0x8f, 0xaf, 0x99, 0xfb, 0x33, 0xf6, 0x7c, 0xe9, 0x96, - 0x40, 0x56, 0x71, 0x19, 0xfe, 0xa6, 0x3f, 0x42, 0x25, 0x85, 0x15, 0xd4, 0x9f, 0xc2, 0xba, 0x17, - 0x89, 0x44, 0x6e, 0x44, 0xc9, 0x4d, 0x82, 0x25, 0x84, 0x3e, 0x86, 0xad, 0x63, 0xf4, 0xff, 0x87, - 0xbf, 0x36, 0x10, 0x15, 0x78, 0x2b, 0x67, 0x15, 0xb8, 0xff, 0xc2, 0xe6, 0x92, 
0x84, 0x0b, 0x77, - 0xf4, 0x08, 0xca, 0x49, 0xb1, 0x20, 0x6f, 0x41, 0x5e, 0x58, 0xf2, 0xaa, 0xd6, 0x58, 0x5d, 0xc0, - 0x1e, 0x63, 0xa8, 0x09, 0xe5, 0x13, 0xcf, 0x9a, 0x2d, 0xdf, 0x3d, 0xc8, 0xd8, 0x96, 0x48, 0x26, - 0x63, 0x5b, 0xe4, 0x6b, 0x58, 0x3b, 0xb3, 0x71, 0x60, 0xf1, 0xb0, 0x4f, 0xc5, 0x83, 0xa6, 0xda, - 0xfc, 0x80, 0xc0, 0x7c, 0x3b, 0x90, 0x1c, 0x47, 0x21, 0xd0, 0x10, 0x06, 0x41, 0xd5, 0x53, 0x2e, - 0x6e, 0x55, 0x88, 0xdf, 0xb5, 0x28, 0xe5, 0x1f, 0xdc, 0xde, 0x78, 0x88, 0x4e, 0x5c, 0x0a, 0xd2, - 0x84, 0x92, 0xc0, 0x74, 0x95, 0x0e, 0x14, 0x85, 0xec, 0x55, 0x70, 0xcf, 0xea, 0x50, 0xf4, 0x18, - 0x4e, 0x6c, 0x77, 0xcc, 0xbb, 0xb6, 0xbc, 0x6a, 0x20, 0x45, 0xcf, 0x2c, 0xf2, 0x21, 0x14, 0x3c, - 0xb3, 0x8f, 0x5d, 0x6e, 0x5f, 0x62, 0x75, 0xb5, 0xa1, 0xed, 0xe5, 0x82, 0x9b, 0xd8, 0xc7, 0x8e, - 0x7d, 0x89, 0x64, 0x17, 0xc0, 0xe6, 0xdd, 0x33, 0x97, 0xbd, 0x33, 0x99, 0x55, 0xcd, 0x36, 0xb4, - 0xbd, 0xbc, 0x51, 0xb0, 0xf9, 0x51, 0x24, 0x20, 0x1f, 0xc3, 0x07, 0xb6, 0xd3, 0x1b, 0x8c, 0x2d, - 0xec, 0x72, 0xc7, 0xf4, 0xf8, 0xb9, 0xeb, 0x57, 0x73, 0x21, 0x68, 0x53, 0xc8, 0x3b, 0x42, 0x4c, - 0xdf, 0x40, 0x25, 0x95, 0x82, 0x28, 0xc5, 0x57, 0x50, 0xb0, 0xa4, 0x50, 0xf4, 0x4d, 0x57, 0x8a, - 0x21, 0x0d, 0x3a, 0xe3, 0xe1, 0xd0, 0x64, 0x53, 0xe3, 0x1a, 0x4c, 0x4f, 0xc3, 0x3b, 0x26, 0x01, - 0x37, 0xa8, 0x49, 0x13, 0x4a, 0x92, 0xa5, 0x7b, 0x81, 0x53, 0x51, 0x94, 0xa2, 0x94, 0x3d, 0xc7, - 0x29, 0x7d, 0x09, 0xf7, 0x13, 0xdc, 0x22, 0xd8, 0x2f, 0x21, 0x2f, 0x51, 0xa2, 0x71, 0xcb, 0x62, - 0x8d, 0xb1, 0xf4, 0x12, 0x76, 0x0c, 0x1c, 0xba, 0x13, 0x94, 0x90, 0xf6, 0xf4, 0xfb, 0x60, 0xbc, - 0xdd, 0x69, 0xd0, 0xc1, 0x98, 0x38, 0x73, 0x59, 0x2f, 0x6a, 0x63, 0xde, 0x88, 0x0e, 0xb4, 0x0e, - 0xbb, 0x0b, 0x7c, 0x47, 0x49, 0xd1, 0x5f, 0x34, 0x78, 0x70, 0x8c, 0xbe, 0x6c, 0xd5, 0x4b, 0xf4, - 0xcd, 0xbb, 0x8d, 0xab, 0x09, 0xc0, 0x91, 0x4d, 0x90, 0x75, 0x39, 0x8e, 0xc2, 0xe0, 0x56, 0xdb, - 0x99, 0xcf, 0x34, 0xa3, 0x10, 0x49, 0x3b, 0x38, 0xa2, 0x1d, 0xd8, 0x9e, 0x09, 0x41, 0xd4, 0x5c, - 0x87, 0x7c, 0x7c, 
0xb9, 0x02, 0xff, 0x25, 0x23, 0x3e, 0x93, 0x1d, 0x58, 0x1f, 0x98, 0x43, 0xcf, - 0x65, 0x7e, 0xe8, 0x37, 0xa2, 0x95, 0x22, 0xea, 0xc0, 0x83, 0x0e, 0x9a, 0xac, 0x77, 0x7e, 0x9b, - 0x87, 0x53, 0x86, 0xdc, 0x68, 0x8c, 0x4c, 0x26, 0x14, 0x1d, 0x96, 0xbe, 0x16, 0xea, 0xc3, 0xf6, - 0x8c, 0x3f, 0x91, 0x44, 0x1d, 0x8a, 0xbe, 0xeb, 0x9b, 0x83, 0x6e, 0xcf, 0x1d, 0x8b, 0xbb, 0x93, - 0x33, 0x20, 0x14, 0x1d, 0x06, 0x92, 0xe4, 0x33, 0xc8, 0xdc, 0xe4, 0x19, 0xfc, 0xa6, 0x01, 0x09, - 0x9e, 0xd6, 0xe1, 0xb9, 0xe9, 0xf4, 0x91, 0xdf, 0x6d, 0xeb, 0x1e, 0x05, 0x2c, 0x62, 0x7c, 0x24, - 0x9b, 0x17, 0x8f, 0x95, 0x0e, 0x8e, 0x92, 0x65, 0xc9, 0x2e, 0x1d, 0x22, 0xb9, 0xd4, 0x10, 0xa1, - 0xed, 0x68, 0xcc, 0xc7, 0xe1, 0x8b, 0x8a, 0x3d, 0x81, 0xf5, 0x5e, 0x24, 0x12, 0x53, 0x61, 0x4b, - 0x29, 0x47, 0x04, 0x36, 0x24, 0xe2, 0xe0, 0x9f, 0x75, 0x28, 0x85, 0x97, 0xba, 0x83, 0x6c, 0x62, - 0xf7, 0x90, 0x7c, 0x07, 0x6b, 0xd1, 0x06, 0x27, 0x55, 0xc5, 0x2c, 0xf1, 0xef, 0x40, 0x7f, 0x38, - 0x47, 0x23, 0x9e, 0xc4, 0x0a, 0xf9, 0x16, 0x72, 0xe1, 0x0e, 0x26, 0xdb, 0x0a, 0x4a, 0xdd, 0xee, - 0x7a, 0x75, 0x56, 0x11, 0x5b, 0xff, 0x04, 0x1b, 0x89, 0x75, 0x4b, 0xea, 0x6a, 0xf0, 0x73, 0x96, - 0xb6, 0xde, 0x58, 0x0c, 0x88, 0x59, 0xdf, 0x40, 0x49, 0xdd, 0x7c, 0xa4, 0xa6, 0x46, 0x30, 0xbb, - 0x29, 0xf5, 0xfa, 0x42, 0x7d, 0x4c, 0xf9, 0x1c, 0xe0, 0x7a, 0x4f, 0x93, 0x1d, 0xc5, 0x60, 0x66, - 0xcf, 0xeb, 0xbb, 0x0b, 0xb4, 0x6a, 0xd6, 0x89, 0x75, 0x97, 0xc8, 0x7a, 0xde, 0xae, 0x4d, 0x64, - 0x3d, 0x77, 0x53, 0x46, 0xac, 0x89, 0xcd, 0x41, 0xd2, 0x69, 0xa5, 0x5f, 0x77, 0x82, 0x75, 0xee, - 0xd2, 0xa1, 0x2b, 0xe4, 0x15, 0x14, 0x95, 0x01, 0x4f, 0x52, 0xb9, 0xa5, 0x96, 0x8a, 0x5e, 0x5b, - 0xa4, 0x8e, 0xf9, 0x06, 0x50, 0x99, 0x3b, 0x65, 0xc9, 0x63, 0xc5, 0x74, 0xd9, 0x0e, 0xd0, 0xf7, - 0xfe, 0x1b, 0x18, 0x7b, 0x3b, 0x85, 0xcd, 0xd4, 0xb8, 0x24, 0xcd, 0x64, 0x88, 0x73, 0xa6, 0xb9, - 0x4e, 0x97, 0x41, 0x54, 0xee, 0xd4, 0x14, 0x4b, 0x70, 0xcf, 0x9f, 0xa8, 0x09, 0xee, 0x05, 0x43, - 0x30, 0xaa, 0xba, 0xf2, 0xd6, 0x13, 0x55, 0x9f, 0x1d, 
0x61, 0x7a, 0x6d, 0x91, 0x5a, 0xf2, 0xb5, - 0x9f, 0xfc, 0x71, 0x55, 0xd3, 0xfe, 0xbc, 0xaa, 0x69, 0x7f, 0x5d, 0xd5, 0xb4, 0x5f, 0xff, 0xae, - 0xad, 0xc0, 0x96, 0x85, 0x13, 0x69, 0x66, 0x7a, 0x76, 0x6b, 0xb2, 0xff, 0x5a, 0x3b, 0xcd, 0xb6, - 0xbe, 0x99, 0xec, 0xbf, 0x5d, 0x0b, 0xbf, 0x12, 0x3e, 0xff, 0x37, 0x00, 0x00, 0xff, 0xff, 0xe5, - 0x95, 0x51, 0xe5, 0x64, 0x0c, 0x00, 0x00, + // 992 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0x4f, 0x73, 0xdb, 0x44, + 0x14, 0xaf, 0x12, 0x3b, 0xb1, 0x9f, 0x9d, 0x86, 0x6c, 0xed, 0xc6, 0x15, 0x89, 0xe3, 0x2c, 0xd3, + 0x69, 0xa0, 0x8c, 0x4b, 0xc2, 0xc0, 0xc0, 0xc0, 0x0c, 0x83, 0x03, 0xc9, 0x30, 0x69, 0x3b, 0xad, + 0x4c, 0x2e, 0xb9, 0x78, 0x54, 0xeb, 0xc5, 0x11, 0xb1, 0x25, 0x79, 0x57, 0x76, 0xc7, 0xb9, 0xf1, + 0x2d, 0xf8, 0x32, 0x9c, 0xb8, 0x70, 0xe4, 0x23, 0x30, 0xe1, 0x33, 0x70, 0x67, 0xa4, 0xdd, 0x55, + 0x56, 0xf2, 0x1f, 0x68, 0xc8, 0xcd, 0xfb, 0xde, 0xef, 0xfd, 0xde, 0xbf, 0xdd, 0xf7, 0x2c, 0xa8, + 0x4e, 0x7c, 0x76, 0xe9, 0xe2, 0xb3, 0xf1, 0xfe, 0x33, 0xdb, 0x19, 0xb8, 0x5e, 0x33, 0x60, 0x7e, + 0xe8, 0x93, 0xa2, 0x10, 0x37, 0xc7, 0xfb, 0xe6, 0xa3, 0x1b, 0x04, 0x43, 0xee, 0x8f, 0x58, 0x17, + 0xb9, 0x40, 0xd1, 0x63, 0x58, 0x6b, 0xbb, 0x3d, 0xef, 0x34, 0xb0, 0x70, 0x38, 0x42, 0x1e, 0x12, + 0x13, 0x0a, 0x23, 0x8e, 0xcc, 0xb3, 0x07, 0x58, 0x33, 0x1a, 0xc6, 0x5e, 0xd1, 0x4a, 0xce, 0x91, + 0x2e, 0xb0, 0x39, 0x7f, 0xeb, 0x33, 0xa7, 0xb6, 0x24, 0x74, 0xea, 0x4c, 0x3f, 0x83, 0xfb, 0x8a, + 0x88, 0x07, 0xbe, 0xc7, 0x91, 0x7c, 0x00, 0xb9, 0xc8, 0x32, 0x66, 0x29, 0x1d, 0xac, 0x37, 0x93, + 0x78, 0x9a, 0xa7, 0x1c, 0x99, 0x15, 0x2b, 0xe9, 0x11, 0x94, 0x9f, 0xfb, 0xbd, 0x1f, 0xbc, 0xff, + 0xeb, 0xfe, 0x31, 0xac, 0x49, 0x1e, 0xe9, 0xbd, 0x02, 0xf9, 0xd0, 0xbf, 0x44, 0x4f, 0xb2, 0x88, + 0x03, 0xfd, 0x08, 0x2a, 0x87, 0x0c, 0xed, 0x10, 0x5f, 0x31, 0xff, 0x27, 0xec, 0x86, 0xca, 0x2d, + 0x81, 0x9c, 0xe6, 0x32, 0xfe, 0x4d, 0xbf, 0x87, 0x6a, 0x06, 0x2b, 0xa9, 0x3f, 0x86, 0xd5, 0x40, + 
0x88, 0x64, 0x6e, 0x44, 0xcb, 0x4d, 0x81, 0x15, 0x84, 0x3e, 0x81, 0x8d, 0x63, 0x0c, 0xff, 0x83, + 0xbf, 0x16, 0x10, 0x1d, 0x78, 0x2b, 0x67, 0x55, 0x78, 0xf0, 0xdc, 0xe5, 0x8a, 0x84, 0x4b, 0x77, + 0xf4, 0x08, 0x2a, 0x69, 0xb1, 0x24, 0x6f, 0x42, 0x41, 0x5a, 0xf2, 0x9a, 0xd1, 0x58, 0x9e, 0xc3, + 0x9e, 0x60, 0xa8, 0x0d, 0x95, 0xd3, 0xc0, 0x99, 0x2e, 0xdf, 0x7d, 0x58, 0x72, 0x1d, 0x99, 0xcc, + 0x92, 0xeb, 0x90, 0x2f, 0x61, 0xe5, 0xdc, 0xc5, 0xbe, 0xc3, 0xe3, 0x3e, 0x95, 0x0e, 0x76, 0xf5, + 0xe6, 0x47, 0x04, 0xf6, 0x9b, 0xbe, 0xe2, 0x38, 0x8a, 0x81, 0x96, 0x34, 0x88, 0xaa, 0x9e, 0x71, + 0x71, 0xab, 0x42, 0xfc, 0x66, 0x88, 0x94, 0xbf, 0xf3, 0xbb, 0xa3, 0x01, 0x7a, 0x49, 0x29, 0xc8, + 0x2e, 0x94, 0x25, 0xa6, 0xa3, 0x75, 0xa0, 0x24, 0x65, 0x2f, 0xa3, 0x7b, 0x16, 0x43, 0x70, 0xec, + 0xfa, 0x23, 0xde, 0xb9, 0xc4, 0x89, 0xbc, 0x6b, 0x25, 0x25, 0x3b, 0xc1, 0x09, 0x79, 0x1f, 0x8a, + 0x81, 0xdd, 0xc3, 0x0e, 0x77, 0xaf, 0xb0, 0xb6, 0xdc, 0x30, 0xf6, 0xf2, 0xd1, 0x5d, 0xec, 0x61, + 0xdb, 0xbd, 0x42, 0xb2, 0x0d, 0xe0, 0xf2, 0xce, 0xb9, 0xcf, 0xde, 0xda, 0xcc, 0xa9, 0xe5, 0x1a, + 0xc6, 0x5e, 0xc1, 0x2a, 0xba, 0xfc, 0x48, 0x08, 0xc8, 0x87, 0xf0, 0x9e, 0xeb, 0x75, 0xfb, 0x23, + 0x07, 0x3b, 0xdc, 0xb3, 0x03, 0x7e, 0xe1, 0x87, 0xb5, 0x7c, 0x0c, 0x5a, 0x97, 0xf2, 0xb6, 0x14, + 0xd3, 0xd7, 0x50, 0xcd, 0x24, 0x21, 0x8b, 0xf1, 0x05, 0x14, 0x1d, 0x25, 0x94, 0x9d, 0x33, 0xb5, + 0x72, 0x28, 0x83, 0xf6, 0x68, 0x30, 0xb0, 0xd9, 0xc4, 0xba, 0x01, 0xd3, 0xb3, 0xf8, 0x96, 0x29, + 0xc0, 0xbb, 0x55, 0x45, 0xb1, 0xe8, 0x55, 0x51, 0xb2, 0x13, 0x9c, 0xd0, 0x17, 0xf0, 0x20, 0xc5, + 0x2d, 0x83, 0xfd, 0x1c, 0x0a, 0x0a, 0x25, 0x5b, 0xb7, 0x28, 0xd6, 0x04, 0x4b, 0xaf, 0x60, 0xcb, + 0xc2, 0x81, 0x3f, 0x46, 0x05, 0x69, 0x4d, 0xbe, 0x8d, 0x06, 0xdc, 0x9d, 0x06, 0x1d, 0x0d, 0x8a, + 0x73, 0x9f, 0x75, 0x45, 0x1b, 0x0b, 0x96, 0x38, 0xd0, 0x1d, 0xd8, 0x9e, 0xe3, 0x5b, 0x24, 0x45, + 0x7f, 0x36, 0xe0, 0xe1, 0x31, 0x86, 0xaa, 0x55, 0x2f, 0x30, 0xb4, 0xef, 0x36, 0xae, 0x5d, 0x00, + 0x8e, 0x6c, 0x8c, 0xac, 0xc3, 0x71, 
0x18, 0x07, 0xb7, 0xdc, 0x5a, 0xfa, 0xc4, 0xb0, 0x8a, 0x42, + 0xda, 0xc6, 0x21, 0x6d, 0xc3, 0xe6, 0x54, 0x08, 0xb2, 0xe6, 0x26, 0x14, 0x92, 0xcb, 0x15, 0xf9, + 0x2f, 0x5b, 0xc9, 0x99, 0x6c, 0xc1, 0x6a, 0xdf, 0x1e, 0x04, 0x3e, 0x0b, 0x63, 0xbf, 0x82, 0x56, + 0x89, 0xa8, 0x07, 0x0f, 0xdb, 0x68, 0xb3, 0xee, 0xc5, 0x6d, 0x9e, 0x4e, 0x05, 0xf2, 0xc3, 0x11, + 0x32, 0x95, 0x90, 0x38, 0x2c, 0x7c, 0x2d, 0x34, 0x84, 0xcd, 0x29, 0x7f, 0x32, 0x89, 0x1d, 0x28, + 0x85, 0x7e, 0x68, 0xf7, 0x3b, 0x5d, 0x7f, 0x24, 0xef, 0x4e, 0xde, 0x82, 0x58, 0x74, 0x18, 0x49, + 0xd2, 0xcf, 0x60, 0xe9, 0x5d, 0x9e, 0xc1, 0xaf, 0x06, 0x90, 0xe8, 0x69, 0x1d, 0x5e, 0xd8, 0x5e, + 0x0f, 0xf9, 0xdd, 0xb6, 0xee, 0xb1, 0x36, 0x40, 0xd2, 0xcd, 0x4b, 0x86, 0x48, 0x1b, 0x87, 0xe9, + 0xb2, 0xe4, 0x16, 0x0e, 0x91, 0x7c, 0x66, 0x88, 0xd0, 0x96, 0x18, 0xf4, 0x49, 0xf8, 0xb2, 0x62, + 0x4f, 0x61, 0xb5, 0x2b, 0x44, 0x72, 0x2a, 0x6c, 0x68, 0xe5, 0x10, 0x60, 0x4b, 0x21, 0x0e, 0xfe, + 0x5e, 0x85, 0x72, 0x7c, 0xa9, 0xdb, 0xc8, 0xc6, 0x6e, 0x17, 0xc9, 0x37, 0xb0, 0x22, 0x76, 0x38, + 0xa9, 0x69, 0x66, 0xa9, 0xff, 0x07, 0xe6, 0xa3, 0x19, 0x1a, 0xf9, 0x24, 0xee, 0x91, 0xaf, 0x21, + 0x1f, 0x6f, 0x61, 0xb2, 0xa9, 0xa1, 0xf4, 0xfd, 0x6e, 0xd6, 0xa6, 0x15, 0x89, 0xf5, 0x8f, 0xb0, + 0x96, 0x5a, 0xb8, 0x64, 0x47, 0x0f, 0x7e, 0xc6, 0xda, 0x36, 0x1b, 0xf3, 0x01, 0x09, 0xeb, 0x6b, + 0x28, 0xeb, 0xbb, 0x8f, 0xd4, 0xf5, 0x08, 0xa6, 0x77, 0xa5, 0xb9, 0x33, 0x57, 0x9f, 0x50, 0x9e, + 0x00, 0xdc, 0x6c, 0x6a, 0xb2, 0xa5, 0x19, 0x4c, 0x6d, 0x7a, 0x73, 0x7b, 0x8e, 0x56, 0xcf, 0x3a, + 0xb5, 0xf0, 0x52, 0x59, 0xcf, 0xda, 0xb6, 0xa9, 0xac, 0x67, 0xee, 0x4a, 0xc1, 0x9a, 0xda, 0x1c, + 0x24, 0x9b, 0x56, 0xf6, 0x75, 0xa7, 0x58, 0x67, 0x2e, 0x1d, 0x7a, 0x8f, 0xbc, 0x84, 0x92, 0x36, + 0xe0, 0x49, 0x26, 0xb7, 0xcc, 0x52, 0x31, 0xeb, 0xf3, 0xd4, 0x09, 0x5f, 0x1f, 0xaa, 0x33, 0xa7, + 0x2c, 0x79, 0xa2, 0x99, 0x2e, 0xda, 0x01, 0xe6, 0xde, 0xbf, 0x03, 0x13, 0x6f, 0x67, 0xb0, 0x9e, + 0x19, 0x97, 0x64, 0x37, 0x1d, 0xe2, 0x8c, 0x69, 0x6e, 0xd2, 0x45, 0x10, 
0x9d, 0x3b, 0x33, 0xc5, + 0x52, 0xdc, 0xb3, 0x27, 0x6a, 0x8a, 0x7b, 0xce, 0x10, 0x14, 0x55, 0xd7, 0xde, 0x7a, 0xaa, 0xea, + 0xd3, 0x23, 0xcc, 0xac, 0xcf, 0x53, 0x2b, 0xbe, 0xd6, 0xd3, 0xdf, 0xaf, 0xeb, 0xc6, 0x1f, 0xd7, + 0x75, 0xe3, 0xcf, 0xeb, 0xba, 0xf1, 0xcb, 0x5f, 0xf5, 0x7b, 0xb0, 0xe1, 0xe0, 0x58, 0x99, 0xd9, + 0x81, 0xdb, 0x1c, 0xef, 0xbf, 0x32, 0xce, 0x72, 0xcd, 0xaf, 0xc6, 0xfb, 0x6f, 0x56, 0xe2, 0xef, + 0x84, 0x4f, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0x2b, 0x2d, 0x14, 0x68, 0x66, 0x0c, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -2373,10 +2372,10 @@ func (m *ListDocumentsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x18 } - if len(m.PreviousId) > 0 { - i -= len(m.PreviousId) - copy(dAtA[i:], m.PreviousId) - i = encodeVarintAdmin(dAtA, i, uint64(len(m.PreviousId))) + if len(m.PreviousKey) > 0 { + i -= len(m.PreviousKey) + copy(dAtA[i:], m.PreviousKey) + i = encodeVarintAdmin(dAtA, i, uint64(len(m.PreviousKey))) i-- dAtA[i] = 0x12 } @@ -3091,7 +3090,7 @@ func (m *ListDocumentsRequest) Size() (n int) { if l > 0 { n += 1 + l + sovAdmin(uint64(l)) } - l = len(m.PreviousId) + l = len(m.PreviousKey) if l > 0 { n += 1 + l + sovAdmin(uint64(l)) } @@ -4483,7 +4482,7 @@ func (m *ListDocumentsRequest) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PreviousId", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field PreviousKey", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -4511,7 +4510,7 @@ func (m *ListDocumentsRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.PreviousId = string(dAtA[iNdEx:postIndex]) + m.PreviousKey = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 0 { diff --git a/api/yorkie/v1/admin.proto b/api/yorkie/v1/admin.proto index ee6eadd52..f5c64a75c 100644 --- a/api/yorkie/v1/admin.proto +++ 
b/api/yorkie/v1/admin.proto @@ -94,7 +94,7 @@ message UpdateProjectResponse { message ListDocumentsRequest { string project_name = 1; - string previous_id = 2; + string previous_key = 2; int32 page_size = 3; bool is_forward = 4; bool include_snapshot = 5; diff --git a/api/yorkie/v1/yorkie.pb.go b/api/yorkie/v1/yorkie.pb.go index 5e71c30ca..67a52c275 100644 --- a/api/yorkie/v1/yorkie.pb.go +++ b/api/yorkie/v1/yorkie.pb.go @@ -437,6 +437,7 @@ func (m *DetachDocumentResponse) GetChangePack() *ChangePack { type WatchDocumentRequest struct { ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` DocumentId string `protobuf:"bytes,2,opt,name=document_id,json=documentId,proto3" json:"document_id,omitempty"` + DocumentKey string `protobuf:"bytes,3,opt,name=document_key,json=documentKey,proto3" json:"document_key,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -489,6 +490,13 @@ func (m *WatchDocumentRequest) GetDocumentId() string { return "" } +func (m *WatchDocumentRequest) GetDocumentKey() string { + if m != nil { + return m.DocumentKey + } + return "" +} + type WatchDocumentResponse struct { // Types that are valid to be assigned to Body: // *WatchDocumentResponse_Initialization_ @@ -857,6 +865,7 @@ type BroadcastRequest struct { DocumentId string `protobuf:"bytes,2,opt,name=document_id,json=documentId,proto3" json:"document_id,omitempty"` Topic string `protobuf:"bytes,3,opt,name=topic,proto3" json:"topic,omitempty"` Payload []byte `protobuf:"bytes,4,opt,name=payload,proto3" json:"payload,omitempty"` + DocumentKey string `protobuf:"bytes,5,opt,name=document_key,json=documentKey,proto3" json:"document_key,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -923,6 +932,13 @@ func (m *BroadcastRequest) GetPayload() []byte { return nil } +func (m *BroadcastRequest) GetDocumentKey() 
string { + if m != nil { + return m.DocumentKey + } + return "" +} + type BroadcastResponse struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -985,52 +1001,53 @@ func init() { func init() { proto.RegisterFile("yorkie/v1/yorkie.proto", fileDescriptor_40070c858814ab24) } var fileDescriptor_40070c858814ab24 = []byte{ - // 715 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0x4f, 0x4f, 0xdb, 0x4a, - 0x10, 0xcf, 0xf2, 0xef, 0x91, 0xc9, 0x83, 0x07, 0x0b, 0x09, 0x79, 0xe1, 0xbd, 0x90, 0x6e, 0x2f, - 0x48, 0x48, 0x49, 0x03, 0x2a, 0x97, 0x9e, 0x80, 0x54, 0x22, 0xaa, 0xd4, 0xa6, 0x6e, 0x55, 0x04, - 0x52, 0x65, 0x2d, 0xf6, 0xd2, 0xac, 0x62, 0xbc, 0xc6, 0xde, 0x58, 0x72, 0x4f, 0xfd, 0x02, 0xbd, - 0xf7, 0x3b, 0xf4, 0x5b, 0xf4, 0xd4, 0x23, 0xc7, 0x1e, 0x2b, 0xfa, 0x45, 0xaa, 0x78, 0x9d, 0x60, - 0x1b, 0x37, 0xd0, 0x82, 0xd4, 0xde, 0xb2, 0xb3, 0x33, 0xbf, 0xfd, 0xcd, 0x78, 0x7e, 0x33, 0x81, - 0x52, 0x20, 0xdc, 0x1e, 0x67, 0x0d, 0xbf, 0xd9, 0x50, 0xbf, 0xea, 0x8e, 0x2b, 0xa4, 0xc0, 0xf9, - 0xe8, 0xe4, 0x37, 0x2b, 0xff, 0x5e, 0xba, 0xb8, 0xcc, 0x13, 0x7d, 0xd7, 0x60, 0x9e, 0xf2, 0x22, - 0xdb, 0x50, 0xdc, 0x31, 0x24, 0xf7, 0xa9, 0x64, 0x7b, 0x16, 0x67, 0xb6, 0xd4, 0xd8, 0x59, 0x9f, - 0x79, 0x12, 0xff, 0x0f, 0x60, 0x84, 0x06, 0xbd, 0xc7, 0x82, 0x32, 0xaa, 0xa1, 0xf5, 0xbc, 0x96, - 0x57, 0x96, 0x27, 0x2c, 0x20, 0x0f, 0xa1, 0x94, 0x8e, 0xf3, 0x1c, 0x61, 0x7b, 0x0c, 0xaf, 0x42, - 0xe4, 0xa6, 0x73, 0x33, 0x8a, 0x9b, 0x55, 0x86, 0xb6, 0x49, 0xb6, 0x61, 0xa5, 0xc5, 0x68, 0xe6, - 0x83, 0x63, 0xe3, 0x2a, 0x50, 0xbe, 0x1a, 0xa7, 0x1e, 0x24, 0x16, 0x14, 0x77, 0xa4, 0xa4, 0x46, - 0xb7, 0x25, 0x8c, 0xfe, 0xe9, 0x0d, 0x11, 0xf1, 0x36, 0x14, 0x8c, 0x2e, 0xb5, 0xdf, 0x30, 0xdd, - 0xa1, 0x46, 0xaf, 0x3c, 0x51, 0x43, 0xeb, 0x85, 0xcd, 0x62, 0x7d, 0x54, 0xb4, 0xfa, 0x5e, 0x78, - 0xdb, 0xa1, 0x46, 0x4f, 0x03, 0x63, 0xf4, 0x9b, 0x9c, 0x41, 0x29, 0xfd, 0x5a, 0x94, 0xf8, 0x1a, - 0x14, 0xcc, 0xc8, 0x76, 
0xf9, 0x20, 0x0c, 0x4d, 0xb7, 0x78, 0xf2, 0x13, 0x82, 0x62, 0x8b, 0xfd, - 0x74, 0x86, 0x29, 0x3e, 0x13, 0xd7, 0xf1, 0x99, 0xbc, 0x21, 0x1f, 0xbc, 0x05, 0x25, 0x97, 0x9d, - 0x0a, 0x9f, 0xe9, 0xfc, 0x44, 0xb7, 0x85, 0xd4, 0x69, 0x58, 0x10, 0x66, 0x96, 0xa7, 0x6a, 0x68, - 0x7d, 0x56, 0x5b, 0x52, 0xb7, 0xed, 0x93, 0xa7, 0x42, 0xee, 0x44, 0x57, 0xa4, 0x03, 0xa5, 0x74, - 0x0e, 0x51, 0xdd, 0x7e, 0xb5, 0x2c, 0x2f, 0x61, 0xf9, 0x80, 0xca, 0x3b, 0x2e, 0x0a, 0xf9, 0x82, - 0xa0, 0x98, 0x82, 0x8d, 0x78, 0x1e, 0xc2, 0x3c, 0xb7, 0xb9, 0xe4, 0xd4, 0xe2, 0x6f, 0xa9, 0xe4, - 0xc2, 0x0e, 0xc1, 0x0b, 0x9b, 0x8d, 0x18, 0xd5, 0xcc, 0xc8, 0x7a, 0x3b, 0x11, 0xb6, 0x9f, 0xd3, - 0x52, 0x40, 0x78, 0x03, 0xa6, 0x99, 0xcf, 0x6c, 0x19, 0x25, 0xbf, 0x14, 0x43, 0x6c, 0x09, 0xe3, - 0xf1, 0xe0, 0x6a, 0x3f, 0xa7, 0x29, 0x9f, 0x4a, 0x03, 0xe6, 0x93, 0x80, 0x31, 0xad, 0x72, 0xd3, - 0x2b, 0xa3, 0xda, 0xe4, 0xa5, 0x56, 0xdb, 0xa6, 0xb7, 0x3b, 0x03, 0x53, 0xc7, 0xc2, 0x0c, 0xc8, - 0x7b, 0x04, 0x45, 0x2d, 0xfc, 0x34, 0x7f, 0x44, 0x1f, 0x0d, 0x5a, 0x22, 0x4d, 0x27, 0xbb, 0x25, - 0xd0, 0x4d, 0x11, 0x3f, 0x22, 0x28, 0x75, 0xfa, 0x5e, 0xb7, 0xd3, 0xb7, 0x2c, 0xe5, 0xe2, 0xfd, - 0x5e, 0xa9, 0xac, 0x42, 0xde, 0xe9, 0x7b, 0x5d, 0x5d, 0xd8, 0x56, 0x10, 0xa9, 0x63, 0x76, 0x60, - 0x78, 0x66, 0x5b, 0x01, 0x79, 0x0e, 0x2b, 0x57, 0xc8, 0xde, 0xb2, 0x00, 0xef, 0x10, 0x2c, 0xec, - 0xba, 0x82, 0x9a, 0x06, 0xf5, 0xee, 0xe8, 0xeb, 0x2e, 0xc3, 0xb4, 0x14, 0x0e, 0x37, 0xc2, 0xa4, - 0xf3, 0x9a, 0x3a, 0xe0, 0x32, 0xfc, 0xe5, 0xd0, 0xc0, 0x12, 0x54, 0x89, 0xfe, 0x6f, 0x6d, 0x78, - 0x24, 0x4b, 0xb0, 0x18, 0x63, 0xa0, 0xf2, 0xd9, 0x3c, 0x9f, 0x86, 0xb9, 0xc3, 0x90, 0xfc, 0x0b, - 0xe6, 0xfa, 0xdc, 0x60, 0xf8, 0x00, 0xe6, 0x93, 0x0b, 0x04, 0xd7, 0x62, 0xe9, 0x65, 0xee, 0xa4, - 0xca, 0xbd, 0x31, 0x1e, 0xd1, 0x32, 0xc8, 0xe1, 0xd7, 0xb0, 0x90, 0x5e, 0x15, 0x98, 0xc4, 0x05, - 0x95, 0xbd, 0x7f, 0x2a, 0xf7, 0xc7, 0xfa, 0x8c, 0xe0, 0x07, 0xbc, 0x13, 0xf3, 0x3f, 0xc9, 0x3b, - 0x6b, 0x11, 0x25, 0x79, 0x67, 0x2e, 0x0f, 0x05, 0x9c, 0x1c, 
0x90, 0x09, 0xe0, 0xcc, 0xf9, 0x9f, - 0x00, 0xce, 0x9e, 0xae, 0x0a, 0x38, 0x29, 0xb3, 0x04, 0x70, 0xe6, 0x40, 0x48, 0x00, 0x67, 0x6b, - 0x94, 0xe4, 0xf0, 0x11, 0xfc, 0x93, 0xea, 0x5f, 0x1c, 0x8f, 0xcb, 0x16, 0x62, 0x85, 0x8c, 0x73, - 0x19, 0x61, 0xbf, 0x82, 0xb9, 0xc4, 0x2c, 0xc5, 0x6b, 0x3f, 0x9e, 0xb2, 0x0a, 0xb7, 0x76, 0xdd, - 0x18, 0x26, 0xb9, 0x07, 0x08, 0xef, 0x43, 0x7e, 0xd4, 0x9d, 0x78, 0x35, 0x16, 0x92, 0x56, 0x4d, - 0xe5, 0xbf, 0xec, 0xcb, 0x21, 0xd6, 0xee, 0xc6, 0xe7, 0x8b, 0x2a, 0x3a, 0xbf, 0xa8, 0xa2, 0xaf, - 0x17, 0x55, 0xf4, 0xe1, 0x5b, 0x35, 0x07, 0x8b, 0x26, 0xf3, 0x87, 0x41, 0xd4, 0xe1, 0x75, 0xbf, - 0xd9, 0x41, 0x47, 0x53, 0xf5, 0x47, 0x7e, 0xf3, 0x78, 0x26, 0xfc, 0xb7, 0xb5, 0xf5, 0x3d, 0x00, - 0x00, 0xff, 0xff, 0x99, 0x77, 0x1a, 0xa9, 0xad, 0x09, 0x00, 0x00, + // 730 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0x4d, 0x4f, 0xdb, 0x4c, + 0x10, 0xce, 0x02, 0xe1, 0x25, 0x13, 0xe0, 0x85, 0x85, 0x84, 0x34, 0xb4, 0x21, 0xb8, 0x17, 0x24, + 0xa4, 0xa4, 0x01, 0x95, 0x4b, 0x4f, 0x40, 0x2a, 0x11, 0x55, 0x6a, 0x53, 0x57, 0x2a, 0x02, 0xa9, + 0x8a, 0x16, 0x7b, 0x69, 0x56, 0x31, 0x5e, 0x63, 0x6f, 0x5c, 0xb9, 0xff, 0xa1, 0xf7, 0xde, 0x7b, + 0xec, 0xbf, 0xe8, 0xa9, 0x47, 0x8e, 0x3d, 0x56, 0xf4, 0x8f, 0x54, 0xf1, 0x3a, 0xc6, 0x76, 0xdc, + 0x40, 0x0b, 0x52, 0x7b, 0xcb, 0xce, 0xc7, 0x33, 0x1f, 0x9e, 0x67, 0x26, 0x50, 0xf4, 0xb8, 0xdd, + 0x63, 0xb4, 0xee, 0x36, 0xea, 0xf2, 0x57, 0xcd, 0xb2, 0xb9, 0xe0, 0x38, 0x17, 0xbc, 0xdc, 0x46, + 0xf9, 0xde, 0x95, 0x89, 0x4d, 0x1d, 0xde, 0xb7, 0x35, 0xea, 0x48, 0x2b, 0x65, 0x07, 0x0a, 0xbb, + 0x9a, 0x60, 0x2e, 0x11, 0x74, 0xdf, 0x60, 0xd4, 0x14, 0x2a, 0x3d, 0xef, 0x53, 0x47, 0xe0, 0x07, + 0x00, 0x9a, 0x2f, 0xe8, 0xf4, 0xa8, 0x57, 0x42, 0x55, 0xb4, 0x91, 0x53, 0x73, 0x52, 0xf2, 0x8c, + 0x7a, 0xca, 0x63, 0x28, 0x26, 0xfd, 0x1c, 0x8b, 0x9b, 0x0e, 0xc5, 0xab, 0x10, 0x98, 0x75, 0x98, + 0x1e, 0xf8, 0xcd, 0x48, 0x41, 0x4b, 0x57, 0x76, 0x60, 0xa5, 0x49, 0x49, 0x6a, 
0xc0, 0xb1, 0x7e, + 0x65, 0x28, 0x8d, 0xfa, 0xc9, 0x80, 0x8a, 0x01, 0x85, 0x5d, 0x21, 0x88, 0xd6, 0x6d, 0x72, 0xad, + 0x7f, 0x76, 0x43, 0x44, 0xbc, 0x03, 0x79, 0xad, 0x4b, 0xcc, 0xb7, 0xb4, 0x63, 0x11, 0xad, 0x57, + 0x9a, 0xa8, 0xa2, 0x8d, 0xfc, 0x56, 0xa1, 0x16, 0x36, 0xad, 0xb6, 0xef, 0x6b, 0xdb, 0x44, 0xeb, + 0xa9, 0xa0, 0x85, 0xbf, 0x95, 0x73, 0x28, 0x26, 0xa3, 0x05, 0x85, 0xaf, 0x41, 0x5e, 0x0f, 0x64, + 0x57, 0x01, 0x61, 0x28, 0xba, 0x45, 0xc8, 0x2f, 0x08, 0x0a, 0x4d, 0xfa, 0xdb, 0x15, 0x26, 0xf2, + 0x99, 0xb8, 0x2e, 0x9f, 0xc9, 0x1b, 0xe6, 0x83, 0xb7, 0xa1, 0x68, 0xd3, 0x33, 0xee, 0xd2, 0x0e, + 0x3b, 0xed, 0x98, 0x5c, 0x74, 0x88, 0xdf, 0x10, 0xaa, 0x97, 0xa6, 0xaa, 0x68, 0x63, 0x46, 0x5d, + 0x92, 0xda, 0xd6, 0xe9, 0x73, 0x2e, 0x76, 0x03, 0x95, 0xd2, 0x86, 0x62, 0xb2, 0x86, 0xa0, 0x6f, + 0x7f, 0xda, 0x96, 0x77, 0xb0, 0x7c, 0x48, 0xc4, 0x5d, 0x37, 0x65, 0x1d, 0x66, 0x43, 0x83, 0xc1, + 0xe4, 0x4f, 0xfa, 0x16, 0xa1, 0xd3, 0x60, 0xf6, 0xbf, 0x21, 0x28, 0x24, 0x22, 0x07, 0xa5, 0x1c, + 0xc1, 0x3c, 0x33, 0x99, 0x60, 0xc4, 0x60, 0xef, 0x89, 0x60, 0xdc, 0xf4, 0xe3, 0xe7, 0xb7, 0xea, + 0x91, 0x6a, 0x52, 0x3d, 0x6b, 0xad, 0x98, 0xdb, 0x41, 0x46, 0x4d, 0x00, 0xe1, 0x4d, 0xc8, 0x52, + 0x97, 0x9a, 0x22, 0xe8, 0xcf, 0x52, 0x04, 0xb1, 0xc9, 0xb5, 0xa7, 0x03, 0xd5, 0x41, 0x46, 0x95, + 0x36, 0xe5, 0x3a, 0xcc, 0xc7, 0x01, 0x23, 0x74, 0x66, 0xba, 0x53, 0x42, 0xd5, 0xc9, 0x2b, 0x3a, + 0xb7, 0x74, 0x67, 0x6f, 0x1a, 0xa6, 0x4e, 0xb8, 0xee, 0x29, 0x1f, 0x10, 0x14, 0x54, 0xff, 0xeb, + 0xfd, 0x13, 0xa3, 0x36, 0x98, 0x9a, 0x64, 0x3a, 0xe9, 0x53, 0x83, 0x6e, 0x8a, 0xf8, 0x19, 0x41, + 0xb1, 0xdd, 0x77, 0xba, 0xed, 0xbe, 0x61, 0x48, 0x13, 0xe7, 0xef, 0xb2, 0x69, 0x15, 0x72, 0x56, + 0xdf, 0xe9, 0x76, 0xb8, 0x69, 0x78, 0x01, 0x81, 0x66, 0x06, 0x82, 0x17, 0xa6, 0xe1, 0x29, 0x2f, + 0x61, 0x65, 0x24, 0xd9, 0x5b, 0x36, 0xe0, 0x13, 0x82, 0x85, 0x3d, 0x9b, 0x13, 0x5d, 0x23, 0xce, + 0x1d, 0x7d, 0xdd, 0x65, 0xc8, 0x0a, 0x6e, 0x31, 0x2d, 0x20, 0x8b, 0x7c, 0xe0, 0x12, 0xfc, 0x67, + 0x11, 0xcf, 0xe0, 
0x44, 0xee, 0x85, 0x59, 0x75, 0xf8, 0x1c, 0xe1, 0x58, 0x76, 0x94, 0x63, 0x4b, + 0xb0, 0x18, 0x49, 0x52, 0x96, 0xbc, 0x75, 0x91, 0x85, 0xb9, 0x23, 0xbf, 0xbe, 0x57, 0xd4, 0x76, + 0x99, 0x46, 0xf1, 0x21, 0xcc, 0xc7, 0xcf, 0x10, 0xae, 0x46, 0x3a, 0x90, 0x7a, 0xd9, 0xca, 0xeb, + 0x63, 0x2c, 0x82, 0x93, 0x92, 0xc1, 0x6f, 0x60, 0x21, 0x79, 0x70, 0xb0, 0x12, 0xe5, 0x5c, 0xfa, + 0x15, 0x2b, 0x3f, 0x1c, 0x6b, 0x13, 0xc2, 0x0f, 0xf2, 0x8e, 0x5d, 0x91, 0x78, 0xde, 0x69, 0xe7, + 0x2c, 0x9e, 0x77, 0xea, 0x09, 0x92, 0xc0, 0xf1, 0x35, 0x1b, 0x03, 0x4e, 0xbd, 0x22, 0x31, 0xe0, + 0xf4, 0x1d, 0x2d, 0x81, 0xe3, 0x4c, 0x8c, 0x01, 0xa7, 0xee, 0x8c, 0x18, 0x70, 0x3a, 0x8d, 0x95, + 0x0c, 0x3e, 0x86, 0xff, 0x13, 0x23, 0x8e, 0xa3, 0x7e, 0xe9, 0x5c, 0x2d, 0x2b, 0xe3, 0x4c, 0x42, + 0xec, 0xd7, 0x30, 0x17, 0x5b, 0xb7, 0x78, 0xed, 0xd7, 0x8b, 0x58, 0xe2, 0x56, 0xaf, 0xdb, 0xd4, + 0x4a, 0xe6, 0x11, 0xc2, 0x07, 0x90, 0x0b, 0xa7, 0x13, 0xaf, 0x46, 0x5c, 0x92, 0xc4, 0x2a, 0xdf, + 0x4f, 0x57, 0x0e, 0xb1, 0xf6, 0x36, 0xbf, 0x5e, 0x56, 0xd0, 0xc5, 0x65, 0x05, 0x7d, 0xbf, 0xac, + 0xa0, 0x8f, 0x3f, 0x2a, 0x19, 0x58, 0xd4, 0xa9, 0x3b, 0x74, 0x22, 0x16, 0xab, 0xb9, 0x8d, 0x36, + 0x3a, 0x9e, 0xaa, 0x3d, 0x71, 0x1b, 0x27, 0xd3, 0xfe, 0x7f, 0xb6, 0xed, 0x9f, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x39, 0x50, 0xcd, 0x53, 0xf3, 0x09, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -1740,6 +1757,13 @@ func (m *WatchDocumentRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.DocumentKey) > 0 { + i -= len(m.DocumentKey) + copy(dAtA[i:], m.DocumentKey) + i = encodeVarintYorkie(dAtA, i, uint64(len(m.DocumentKey))) + i-- + dAtA[i] = 0x1a + } if len(m.DocumentId) > 0 { i -= len(m.DocumentId) copy(dAtA[i:], m.DocumentId) @@ -2089,6 +2113,13 @@ func (m *BroadcastRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.DocumentKey) > 0 { + i -= len(m.DocumentKey) + copy(dAtA[i:], m.DocumentKey) + i = encodeVarintYorkie(dAtA, i, uint64(len(m.DocumentKey))) + i-- + dAtA[i] = 0x2a + } if len(m.Payload) > 0 { i -= len(m.Payload) copy(dAtA[i:], m.Payload) @@ -2315,6 +2346,10 @@ func (m *WatchDocumentRequest) Size() (n int) { if l > 0 { n += 1 + l + sovYorkie(uint64(l)) } + l = len(m.DocumentKey) + if l > 0 { + n += 1 + l + sovYorkie(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2483,6 +2518,10 @@ func (m *BroadcastRequest) Size() (n int) { if l > 0 { n += 1 + l + sovYorkie(uint64(l)) } + l = len(m.DocumentKey) + if l > 0 { + n += 1 + l + sovYorkie(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -3396,6 +3435,38 @@ func (m *WatchDocumentRequest) Unmarshal(dAtA []byte) error { } m.DocumentId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DocumentKey", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowYorkie + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthYorkie + } + postIndex := iNdEx + 
intStringLen + if postIndex < 0 { + return ErrInvalidLengthYorkie + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.DocumentKey = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipYorkie(dAtA[iNdEx:]) @@ -4277,6 +4348,38 @@ func (m *BroadcastRequest) Unmarshal(dAtA []byte) error { m.Payload = []byte{} } iNdEx = postIndex + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DocumentKey", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowYorkie + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthYorkie + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthYorkie + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.DocumentKey = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipYorkie(dAtA[iNdEx:]) diff --git a/api/yorkie/v1/yorkie.proto b/api/yorkie/v1/yorkie.proto index da71d6a11..ced9b559e 100644 --- a/api/yorkie/v1/yorkie.proto +++ b/api/yorkie/v1/yorkie.proto @@ -78,6 +78,7 @@ message DetachDocumentResponse { message WatchDocumentRequest { string client_id = 1; string document_id = 2; + string document_key = 3; } message WatchDocumentResponse { @@ -117,6 +118,7 @@ message BroadcastRequest { string document_id = 2; string topic = 3; bytes payload = 4; + string document_key = 5; } message BroadcastResponse { diff --git a/client/client.go b/client/client.go index 63e65054c..cc7ea686c 100644 --- a/client/client.go +++ b/client/client.go @@ -413,8 +413,9 @@ func (c *Client) Watch( stream, err := c.client.WatchDocument( withShardKey(ctx, c.options.APIKey, doc.Key().String()), &api.WatchDocumentRequest{ - ClientId: c.id.String(), - DocumentId: 
attachment.docID.String(), + ClientId: c.id.String(), + DocumentKey: doc.Key().String(), + DocumentId: attachment.docID.String(), }, ) if err != nil { @@ -565,17 +566,6 @@ func handleResponse( return nil, ErrUnsupportedWatchResponseType } -// FindDocKey returns the document key of the given document id. -func (c *Client) FindDocKey(docID string) (key.Key, error) { - for _, attachment := range c.attachments { - if attachment.docID.String() == docID { - return attachment.doc.Key(), nil - } - } - - return "", ErrDocumentNotAttached -} - // ID returns the ID of this client. func (c *Client) ID() *time.ActorID { return c.id @@ -692,10 +682,11 @@ func (c *Client) broadcast(ctx context.Context, doc *document.Document, topic st _, err := c.client.Broadcast( withShardKey(ctx, c.options.APIKey, doc.Key().String()), &api.BroadcastRequest{ - ClientId: c.id.String(), - DocumentId: attachment.docID.String(), - Topic: topic, - Payload: payload, + ClientId: c.id.String(), + DocumentKey: doc.Key().String(), + DocumentId: attachment.docID.String(), + Topic: topic, + Payload: payload, }, ) if err != nil { diff --git a/cmd/yorkie/document/list.go b/cmd/yorkie/document/list.go index 4989c9d86..9cb73c479 100644 --- a/cmd/yorkie/document/list.go +++ b/cmd/yorkie/document/list.go @@ -31,9 +31,9 @@ import ( ) var ( - previousID string - pageSize int32 - isForward bool + previousKey string + pageSize int32 + isForward bool ) func newListCommand() *cobra.Command { @@ -63,7 +63,7 @@ func newListCommand() *cobra.Command { }() ctx := context.Background() - documents, err := cli.ListDocuments(ctx, projectName, previousID, pageSize, isForward, true) + documents, err := cli.ListDocuments(ctx, projectName, previousKey, pageSize, isForward, true) if err != nil { return err } @@ -101,10 +101,10 @@ func newListCommand() *cobra.Command { func init() { cmd := newListCommand() cmd.Flags().StringVar( - &previousID, - "previous-id", + &previousKey, + "previous-key", "", - "The previous document ID to start 
from", + "The previous document key to start from", ) cmd.Flags().Int32Var( &pageSize, diff --git a/server/backend/database/change_info.go b/server/backend/database/change_info.go index bfbf76680..75671b496 100644 --- a/server/backend/database/change_info.go +++ b/server/backend/database/change_info.go @@ -26,6 +26,7 @@ import ( api "github.com/yorkie-team/yorkie/api/yorkie/v1" "github.com/yorkie-team/yorkie/pkg/document/change" "github.com/yorkie-team/yorkie/pkg/document/innerpresence" + "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/operations" "github.com/yorkie-team/yorkie/pkg/document/time" ) @@ -36,6 +37,7 @@ var ErrEncodeOperationFailed = errors.New("encode operations failed") // ChangeInfo is a structure representing information of a change. type ChangeInfo struct { ID types.ID `bson:"_id"` + DocKey key.Key `bson:"doc_key"` DocID types.ID `bson:"doc_id"` ServerSeq int64 `bson:"server_seq"` ClientSeq uint32 `bson:"client_seq"` diff --git a/server/backend/database/client_info.go b/server/backend/database/client_info.go index a33b9dac8..0cf3cec7b 100644 --- a/server/backend/database/client_info.go +++ b/server/backend/database/client_info.go @@ -23,6 +23,7 @@ import ( "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/pkg/document/change" + "github.com/yorkie-team/yorkie/pkg/document/key" ) // Below are the errors may occur depending on the document and client status. @@ -69,7 +70,7 @@ type ClientInfo struct { Status string `bson:"status"` // Documents is a map of document which is attached to the client. - Documents map[types.ID]*ClientDocInfo `bson:"documents"` + Documents map[key.Key]map[types.ID]*ClientDocInfo `bson:"documents"` // CreatedAt is the time when the client was created. CreatedAt time.Time `bson:"created_at"` @@ -101,20 +102,26 @@ func (i *ClientInfo) Deactivate() { } // AttachDocument attaches the given document to this client. 
-func (i *ClientInfo) AttachDocument(docID types.ID) error { +func (i *ClientInfo) AttachDocument(docKey key.Key, docID types.ID) error { if i.Status != ClientActivated { - return fmt.Errorf("client(%s) attaches document(%s): %w", i.ID.String(), docID.String(), ErrClientNotActivated) + return fmt.Errorf("client(%s) attaches document(%s.%s): %w", + i.ID.String(), docKey.String(), docID.String(), ErrClientNotActivated) } if i.Documents == nil { - i.Documents = make(map[types.ID]*ClientDocInfo) + i.Documents = make(map[key.Key]map[types.ID]*ClientDocInfo) } - if i.hasDocument(docID) && i.Documents[docID].Status == DocumentAttached { - return fmt.Errorf("client(%s) attaches document(%s): %w", i.ID.String(), docID.String(), ErrDocumentAlreadyAttached) + if i.Documents[docKey] == nil { + i.Documents[docKey] = make(map[types.ID]*ClientDocInfo) } - i.Documents[docID] = &ClientDocInfo{ + if i.hasDocument(docKey, docID) && i.Documents[docKey][docID].Status == DocumentAttached { + return fmt.Errorf("client(%s) attaches document(%s.%s): %w", + i.ID.String(), docKey.String(), docID.String(), ErrDocumentAlreadyAttached) + } + + i.Documents[docKey][docID] = &ClientDocInfo{ Status: DocumentAttached, ServerSeq: 0, ClientSeq: 0, @@ -125,45 +132,46 @@ func (i *ClientInfo) AttachDocument(docID types.ID) error { } // DetachDocument detaches the given document from this client. 
-func (i *ClientInfo) DetachDocument(docID types.ID) error { - if err := i.EnsureDocumentAttached(docID); err != nil { +func (i *ClientInfo) DetachDocument(docKey key.Key, docID types.ID) error { + if err := i.EnsureDocumentAttached(docKey, docID); err != nil { return err } - i.Documents[docID].Status = DocumentDetached - i.Documents[docID].ClientSeq = 0 - i.Documents[docID].ServerSeq = 0 + i.Documents[docKey][docID].Status = DocumentDetached + i.Documents[docKey][docID].ClientSeq = 0 + i.Documents[docKey][docID].ServerSeq = 0 i.UpdatedAt = time.Now() return nil } // RemoveDocument removes the given document from this client. -func (i *ClientInfo) RemoveDocument(docID types.ID) error { - if err := i.EnsureDocumentAttached(docID); err != nil { +func (i *ClientInfo) RemoveDocument(docKey key.Key, docID types.ID) error { + if err := i.EnsureDocumentAttached(docKey, docID); err != nil { return err } - i.Documents[docID].Status = DocumentRemoved - i.Documents[docID].ClientSeq = 0 - i.Documents[docID].ServerSeq = 0 + i.Documents[docKey][docID].Status = DocumentRemoved + i.Documents[docKey][docID].ClientSeq = 0 + i.Documents[docKey][docID].ServerSeq = 0 i.UpdatedAt = time.Now() return nil } // IsAttached returns whether the given document is attached to this client. -func (i *ClientInfo) IsAttached(docID types.ID) (bool, error) { - if !i.hasDocument(docID) { - return false, fmt.Errorf("check document(%s) is attached: %w", docID.String(), ErrDocumentNeverAttached) +func (i *ClientInfo) IsAttached(docKey key.Key, docID types.ID) (bool, error) { + if !i.hasDocument(docKey, docID) { + return false, fmt.Errorf("check document(%s.%s) is attached: %w", + docKey.String(), docID.String(), ErrDocumentNeverAttached) } - return i.Documents[docID].Status == DocumentAttached, nil + return i.Documents[docKey][docID].Status == DocumentAttached, nil } // Checkpoint returns the checkpoint of the given document. 
-func (i *ClientInfo) Checkpoint(docID types.ID) change.Checkpoint { - clientDocInfo := i.Documents[docID] +func (i *ClientInfo) Checkpoint(docKey key.Key, docID types.ID) change.Checkpoint { + clientDocInfo := i.Documents[docKey][docID] if clientDocInfo == nil { return change.InitialCheckpoint } @@ -173,33 +181,35 @@ func (i *ClientInfo) Checkpoint(docID types.ID) change.Checkpoint { // UpdateCheckpoint updates the checkpoint of the given document. func (i *ClientInfo) UpdateCheckpoint( + docKey key.Key, docID types.ID, cp change.Checkpoint, ) error { - if !i.hasDocument(docID) { - return fmt.Errorf("update checkpoint in document(%s): %w", docID.String(), ErrDocumentNeverAttached) + if !i.hasDocument(docKey, docID) { + return fmt.Errorf("update checkpoint in document(%s.%s): %w", + docKey.String(), docID.String(), ErrDocumentNeverAttached) } - i.Documents[docID].ServerSeq = cp.ServerSeq - i.Documents[docID].ClientSeq = cp.ClientSeq + i.Documents[docKey][docID].ServerSeq = cp.ServerSeq + i.Documents[docKey][docID].ClientSeq = cp.ClientSeq i.UpdatedAt = time.Now() return nil } // EnsureDocumentAttached ensures the given document is attached. 
-func (i *ClientInfo) EnsureDocumentAttached(docID types.ID) error { +func (i *ClientInfo) EnsureDocumentAttached(docKey key.Key, docID types.ID) error { if i.Status != ClientActivated { - return fmt.Errorf("ensure attached document(%s) in client(%s): %w", - docID.String(), + return fmt.Errorf("ensure attached document(%s.%s) in client(%s): %w", + docKey.String(), docID.String(), i.ID.String(), ErrClientNotActivated, ) } - if !i.hasDocument(docID) || i.Documents[docID].Status != DocumentAttached { - return fmt.Errorf("ensure attached document(%s) in client(%s): %w", - docID.String(), + if !i.hasDocument(docKey, docID) || i.Documents[docKey][docID].Status != DocumentAttached { + return fmt.Errorf("ensure attached document(%s.%s) in client(%s): %w", + docKey.String(), docID.String(), i.ID.String(), ErrDocumentNotAttached, ) @@ -214,12 +224,15 @@ func (i *ClientInfo) DeepCopy() *ClientInfo { return nil } - documents := make(map[types.ID]*ClientDocInfo, len(i.Documents)) - for k, v := range i.Documents { - documents[k] = &ClientDocInfo{ - Status: v.Status, - ServerSeq: v.ServerSeq, - ClientSeq: v.ClientSeq, + documents := make(map[key.Key]map[types.ID]*ClientDocInfo, len(i.Documents)) + for docKey, v := range i.Documents { + documents[docKey] = make(map[types.ID]*ClientDocInfo, len(i.Documents[docKey])) + for docID, docInfo := range v { + documents[docKey][docID] = &ClientDocInfo{ + Status: docInfo.Status, + ServerSeq: docInfo.ServerSeq, + ClientSeq: docInfo.ClientSeq, + } } } @@ -234,6 +247,6 @@ func (i *ClientInfo) DeepCopy() *ClientInfo { } } -func (i *ClientInfo) hasDocument(docID types.ID) bool { - return i.Documents != nil && i.Documents[docID] != nil +func (i *ClientInfo) hasDocument(docKey key.Key, docID types.ID) bool { + return i.Documents != nil && i.Documents[docKey][docID] != nil } diff --git a/server/backend/database/client_info_test.go b/server/backend/database/client_info_test.go index ac126e622..e8a6b4548 100644 --- 
a/server/backend/database/client_info_test.go +++ b/server/backend/database/client_info_test.go @@ -23,10 +23,12 @@ import ( "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/pkg/document/change" + "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/server/backend/database" ) func TestClientInfo(t *testing.T) { + dummyDocKey := key.Key("dummy") dummyDocID := types.ID("000000000000000000000000") t.Run("attach/detach document test", func(t *testing.T) { @@ -34,27 +36,27 @@ func TestClientInfo(t *testing.T) { Status: database.ClientActivated, } - err := clientInfo.AttachDocument(dummyDocID) + err := clientInfo.AttachDocument(dummyDocKey, dummyDocID) assert.NoError(t, err) - isAttached, err := clientInfo.IsAttached(dummyDocID) + isAttached, err := clientInfo.IsAttached(dummyDocKey, dummyDocID) assert.NoError(t, err) assert.True(t, isAttached) - err = clientInfo.UpdateCheckpoint(dummyDocID, change.MaxCheckpoint) + err = clientInfo.UpdateCheckpoint(dummyDocKey, dummyDocID, change.MaxCheckpoint) assert.NoError(t, err) - err = clientInfo.EnsureDocumentAttached(dummyDocID) + err = clientInfo.EnsureDocumentAttached(dummyDocKey, dummyDocID) assert.NoError(t, err) - err = clientInfo.DetachDocument(dummyDocID) + err = clientInfo.DetachDocument(dummyDocKey, dummyDocID) assert.NoError(t, err) - isAttached, err = clientInfo.IsAttached(dummyDocID) + isAttached, err = clientInfo.IsAttached(dummyDocKey, dummyDocID) assert.NoError(t, err) assert.False(t, isAttached) - err = clientInfo.AttachDocument(dummyDocID) + err = clientInfo.AttachDocument(dummyDocKey, dummyDocID) assert.NoError(t, err) - isAttached, err = clientInfo.IsAttached(dummyDocID) + isAttached, err = clientInfo.IsAttached(dummyDocKey, dummyDocID) assert.NoError(t, err) assert.True(t, isAttached) @@ -86,15 +88,15 @@ func TestClientInfo(t *testing.T) { Status: database.ClientActivated, } - err := clientInfo.AttachDocument(dummyDocID) + err := 
clientInfo.AttachDocument(dummyDocKey, dummyDocID) assert.NoError(t, err) - isAttached, err := clientInfo.IsAttached(dummyDocID) + isAttached, err := clientInfo.IsAttached(dummyDocKey, dummyDocID) assert.NoError(t, err) assert.True(t, isAttached) clientInfo.Deactivate() - err = clientInfo.EnsureDocumentAttached(dummyDocID) + err = clientInfo.EnsureDocumentAttached(dummyDocKey, dummyDocID) assert.ErrorIs(t, err, database.ErrClientNotActivated) }) @@ -103,13 +105,13 @@ func TestClientInfo(t *testing.T) { Status: database.ClientDeactivated, } - err := clientInfo.AttachDocument(dummyDocID) + err := clientInfo.AttachDocument(dummyDocKey, dummyDocID) assert.ErrorIs(t, err, database.ErrClientNotActivated) - err = clientInfo.EnsureDocumentAttached(dummyDocID) + err = clientInfo.EnsureDocumentAttached(dummyDocKey, dummyDocID) assert.ErrorIs(t, err, database.ErrClientNotActivated) - err = clientInfo.DetachDocument(dummyDocID) + err = clientInfo.DetachDocument(dummyDocKey, dummyDocID) assert.ErrorIs(t, err, database.ErrClientNotActivated) }) @@ -118,7 +120,7 @@ func TestClientInfo(t *testing.T) { Status: database.ClientActivated, } - err := clientInfo.DetachDocument(dummyDocID) + err := clientInfo.DetachDocument(dummyDocKey, dummyDocID) assert.ErrorIs(t, err, database.ErrDocumentNotAttached) }) @@ -127,10 +129,10 @@ func TestClientInfo(t *testing.T) { Status: database.ClientActivated, } - _, err := clientInfo.IsAttached(dummyDocID) + _, err := clientInfo.IsAttached(dummyDocKey, dummyDocID) assert.ErrorIs(t, err, database.ErrDocumentNeverAttached) - err = clientInfo.UpdateCheckpoint(dummyDocID, change.MaxCheckpoint) + err = clientInfo.UpdateCheckpoint(dummyDocKey, dummyDocID, change.MaxCheckpoint) assert.ErrorIs(t, err, database.ErrDocumentNeverAttached) }) @@ -139,13 +141,13 @@ func TestClientInfo(t *testing.T) { Status: database.ClientActivated, } - err := clientInfo.AttachDocument(dummyDocID) + err := clientInfo.AttachDocument(dummyDocKey, dummyDocID) assert.NoError(t, err) 
- isAttached, err := clientInfo.IsAttached(dummyDocID) + isAttached, err := clientInfo.IsAttached(dummyDocKey, dummyDocID) assert.NoError(t, err) assert.True(t, isAttached) - err = clientInfo.AttachDocument(dummyDocID) + err = clientInfo.AttachDocument(dummyDocKey, dummyDocID) assert.ErrorIs(t, err, database.ErrDocumentAlreadyAttached) }) } diff --git a/server/backend/database/database.go b/server/backend/database/database.go index 78b498cfc..99c9a1f20 100644 --- a/server/backend/database/database.go +++ b/server/backend/database/database.go @@ -160,24 +160,23 @@ type Database interface { createDocIfNotExist bool, ) (*DocInfo, error) - // FindDocInfoByID finds the document of the given ID. - FindDocInfoByID( + // FindDocInfoByKeyAndID finds the document of the given key and ID. + FindDocInfoByKeyAndID( ctx context.Context, - projectID types.ID, - id types.ID, + docKey key.Key, + docID types.ID, ) (*DocInfo, error) // UpdateDocInfoStatusToRemoved updates the document status to removed. UpdateDocInfoStatusToRemoved( ctx context.Context, - projectID types.ID, + docKey key.Key, docID types.ID, ) error // CreateChangeInfos stores the given changes then updates the given docInfo. CreateChangeInfos( ctx context.Context, - projectID types.ID, docInfo *DocInfo, initialServerSeq int64, changes []*change.Change, @@ -188,12 +187,14 @@ type Database interface { // save storage. PurgeStaleChanges( ctx context.Context, + docKey key.Key, docID types.ID, ) error // FindChangesBetweenServerSeqs returns the changes between two server sequences. FindChangesBetweenServerSeqs( ctx context.Context, + docKey key.Key, docID types.ID, from int64, to int64, @@ -202,33 +203,50 @@ type Database interface { // FindChangeInfosBetweenServerSeqs returns the changeInfos between two server sequences. FindChangeInfosBetweenServerSeqs( ctx context.Context, + docKey key.Key, docID types.ID, from int64, to int64, ) ([]*ChangeInfo, error) // CreateSnapshotInfo stores the snapshot of the given document. 
- CreateSnapshotInfo(ctx context.Context, docID types.ID, doc *document.InternalDocument) error + CreateSnapshotInfo( + ctx context.Context, + docKey key.Key, + docID types.ID, + doc *document.InternalDocument, + ) error // FindSnapshotInfoByID returns the snapshot by the given id. - FindSnapshotInfoByID(ctx context.Context, id types.ID) (*SnapshotInfo, error) + FindSnapshotInfoByID( + ctx context.Context, + docKey key.Key, + docID types.ID, + serverSeq int64, + ) (*SnapshotInfo, error) // FindClosestSnapshotInfo finds the closest snapshot info in a given serverSeq. FindClosestSnapshotInfo( ctx context.Context, + docKey key.Key, docID types.ID, serverSeq int64, includeSnapshot bool, ) (*SnapshotInfo, error) // FindMinSyncedSeqInfo finds the minimum synced sequence info. - FindMinSyncedSeqInfo(ctx context.Context, docID types.ID) (*SyncedSeqInfo, error) + FindMinSyncedSeqInfo( + ctx context.Context, + docKey key.Key, + docID types.ID, + ) (*SyncedSeqInfo, error) // UpdateAndFindMinSyncedTicket updates the given serverSeq of the given client // and returns the min synced ticket. UpdateAndFindMinSyncedTicket( ctx context.Context, clientInfo *ClientInfo, + docKey key.Key, docID types.ID, serverSeq int64, ) (*time.Ticket, error) @@ -237,6 +255,7 @@ type Database interface { UpdateSyncedSeq( ctx context.Context, clientInfo *ClientInfo, + docKey key.Key, docID types.ID, serverSeq int64, ) error @@ -245,7 +264,7 @@ type Database interface { FindDocInfosByPaging( ctx context.Context, projectID types.ID, - paging types.Paging[types.ID], + paging types.Paging[key.Key], ) ([]*DocInfo, error) // FindDocInfosByQuery returns the documentInfos which match the given query. 
@@ -260,6 +279,7 @@ type Database interface { IsDocumentAttached( ctx context.Context, projectID types.ID, + docKey key.Key, docID types.ID, excludeClientID types.ID, ) (bool, error) diff --git a/server/backend/database/memory/database.go b/server/backend/database/memory/database.go index bbbf6630c..8c47f5709 100644 --- a/server/backend/database/memory/database.go +++ b/server/backend/database/memory/database.go @@ -20,6 +20,7 @@ package memory import ( "context" "fmt" + "strings" gotime "time" "github.com/hashicorp/go-memdb" @@ -384,7 +385,7 @@ func (d *DB) ListUserInfos(_ context.Context) ([]*database.UserInfo, error) { txn := d.db.Txn(false) defer txn.Abort() - iter, err := txn.Get(tblUsers, "id") + iter, err := txn.Get(tblUsers, "username") if err != nil { return nil, fmt.Errorf("fetch users: %w", err) } @@ -509,8 +510,8 @@ func (d *DB) UpdateClientInfoAfterPushPull( clientInfo *database.ClientInfo, docInfo *database.DocInfo, ) error { - clientDocInfo := clientInfo.Documents[docInfo.ID] - attached, err := clientInfo.IsAttached(docInfo.ID) + clientDocInfo := clientInfo.Documents[docInfo.Key][docInfo.ID] + attached, err := clientInfo.IsAttached(docInfo.Key, docInfo.ID) if err != nil { return err } @@ -529,16 +530,20 @@ func (d *DB) UpdateClientInfoAfterPushPull( loaded := raw.(*database.ClientInfo).DeepCopy() if !attached { - loaded.Documents[docInfo.ID] = &database.ClientDocInfo{ + loaded.Documents[docInfo.Key][docInfo.ID] = &database.ClientDocInfo{ Status: clientDocInfo.Status, } loaded.UpdatedAt = gotime.Now() } else { - if _, ok := loaded.Documents[docInfo.ID]; !ok { - loaded.Documents[docInfo.ID] = &database.ClientDocInfo{} + if _, ok := loaded.Documents[docInfo.Key]; !ok { + loaded.Documents[docInfo.Key] = make(map[types.ID]*database.ClientDocInfo) } - loadedClientDocInfo := loaded.Documents[docInfo.ID] + if _, ok := loaded.Documents[docInfo.Key][docInfo.ID]; !ok { + loaded.Documents[docInfo.Key][docInfo.ID] = &database.ClientDocInfo{} + } + + 
loadedClientDocInfo := loaded.Documents[docInfo.Key][docInfo.ID] serverSeq := loadedClientDocInfo.ServerSeq if clientDocInfo.ServerSeq > loadedClientDocInfo.ServerSeq { serverSeq = clientDocInfo.ServerSeq @@ -547,7 +552,7 @@ func (d *DB) UpdateClientInfoAfterPushPull( if clientDocInfo.ClientSeq > loadedClientDocInfo.ClientSeq { clientSeq = clientDocInfo.ClientSeq } - loaded.Documents[docInfo.ID] = &database.ClientDocInfo{ + loaded.Documents[docInfo.Key][docInfo.ID] = &database.ClientDocInfo{ ServerSeq: serverSeq, ClientSeq: clientSeq, Status: clientDocInfo.Status, @@ -719,27 +724,29 @@ func (d *DB) FindDocInfoByKey( return raw.(*database.DocInfo).DeepCopy(), nil } -// FindDocInfoByID finds a docInfo of the given ID. -func (d *DB) FindDocInfoByID( +// FindDocInfoByKeyAndID finds a docInfo of the given key and ID. +func (d *DB) FindDocInfoByKeyAndID( _ context.Context, - projectID types.ID, - id types.ID, + docKey key.Key, + docID types.ID, ) (*database.DocInfo, error) { txn := d.db.Txn(true) defer txn.Abort() - raw, err := txn.First(tblDocuments, "id", id.String()) + raw, err := txn.First(tblDocuments, "key_id", docKey.String(), docID.String()) if err != nil { - return nil, fmt.Errorf("find document by id: %w", err) + return nil, fmt.Errorf("find document by key and ID: %w", err) } if raw == nil { - return nil, fmt.Errorf("finding doc info by ID(%s): %w", id, database.ErrDocumentNotFound) + return nil, fmt.Errorf("finding doc info by key and ID(%s.%s): %w", + docKey, docID, database.ErrDocumentNotFound) } docInfo := raw.(*database.DocInfo) - if docInfo.ProjectID != projectID { - return nil, fmt.Errorf("finding doc info by ID(%s): %w", id, database.ErrDocumentNotFound) + if docInfo.Key != docKey || docInfo.ID != docID { + return nil, fmt.Errorf("finding doc info by key and ID(%s.%s): %w", + docKey, docID, database.ErrDocumentNotFound) } return docInfo.DeepCopy(), nil @@ -748,24 +755,26 @@ func (d *DB) FindDocInfoByID( // UpdateDocInfoStatusToRemoved updates the status of the 
document to removed. func (d *DB) UpdateDocInfoStatusToRemoved( _ context.Context, - projectID types.ID, - id types.ID, + docKey key.Key, + docID types.ID, ) error { txn := d.db.Txn(true) defer txn.Abort() - raw, err := txn.First(tblDocuments, "id", id.String()) + raw, err := txn.First(tblDocuments, "key_id", docKey.String(), docID.String()) if err != nil { - return fmt.Errorf("find document by id: %w", err) + return fmt.Errorf("find document by key and ID: %w", err) } if raw == nil { - return fmt.Errorf("finding doc info by ID(%s): %w", id, database.ErrDocumentNotFound) + return fmt.Errorf("finding doc info by key and ID(%s.%s): %w", + docKey, docID, database.ErrDocumentNotFound) } docInfo := raw.(*database.DocInfo) - if docInfo.ProjectID != projectID { - return fmt.Errorf("finding doc info by ID(%s): %w", id, database.ErrDocumentNotFound) + if docInfo.Key != docKey || docInfo.ID != docID { + return fmt.Errorf("finding doc info by key and ID(%s.%s): %w", + docKey, docID, database.ErrDocumentNotFound) } docInfo.RemovedAt = gotime.Now() @@ -786,7 +795,6 @@ func (d *DB) UpdateDocInfoStatusToRemoved( // removeDoc condition is true, mark IsRemoved to true in doc info. 
func (d *DB) CreateChangeInfos( _ context.Context, - projectID types.ID, docInfo *database.DocInfo, initialServerSeq int64, changes []*change.Change, @@ -807,6 +815,7 @@ func (d *DB) CreateChangeInfos( if err := txn.Insert(tblChanges, &database.ChangeInfo{ ID: newID(), + DocKey: docInfo.Key, DocID: docInfo.ID, ServerSeq: cn.ServerSeq(), ActorID: types.ID(cn.ID().ActorID().String()), @@ -822,19 +831,19 @@ func (d *DB) CreateChangeInfos( raw, err := txn.First( tblDocuments, - "project_id_id", - projectID.String(), + "key_id", + docInfo.Key.String(), docInfo.ID.String(), ) if err != nil { return fmt.Errorf("find document: %w", err) } if raw == nil { - return fmt.Errorf("%s: %w", docInfo.ID, database.ErrDocumentNotFound) + return fmt.Errorf("%s.%s: %w", docInfo.Key, docInfo.ID, database.ErrDocumentNotFound) } loadedDocInfo := raw.(*database.DocInfo).DeepCopy() if loadedDocInfo.ServerSeq != initialServerSeq { - return fmt.Errorf("%s: %w", docInfo.ID, database.ErrConflictOnUpdate) + return fmt.Errorf("%s.%s: %w", docInfo.Key, docInfo.ID, database.ErrConflictOnUpdate) } now := gotime.Now() @@ -859,6 +868,7 @@ func (d *DB) CreateChangeInfos( // save storage. func (d *DB) PurgeStaleChanges( _ context.Context, + docKey key.Key, docID types.ID, ) error { txn := d.db.Txn(true) @@ -866,7 +876,7 @@ func (d *DB) PurgeStaleChanges( // Find the smallest server seq in `syncedseqs`. // Because offline client can pull changes when it becomes online. 
- it, err := txn.Get(tblSyncedSeqs, "id") + it, err := txn.Get(tblSyncedSeqs, "doc_key_doc_id_client_id") if err != nil { return fmt.Errorf("fetch syncedseqs: %w", err) } @@ -874,7 +884,8 @@ func (d *DB) PurgeStaleChanges( minSyncedServerSeq := change.MaxServerSeq for raw := it.Next(); raw != nil; raw = it.Next() { info := raw.(*database.SyncedSeqInfo) - if info.DocID == docID && info.ServerSeq < minSyncedServerSeq { + if info.DocKey == docKey && info.DocID == docID && + info.ServerSeq < minSyncedServerSeq { minSyncedServerSeq = info.ServerSeq } } @@ -885,7 +896,8 @@ // Delete all changes before the smallest server seq. iterator, err := txn.ReverseLowerBound( tblChanges, - "doc_id_server_seq", + "doc_key_doc_id_server_seq", + docKey.String(), docID.String(), minSyncedServerSeq, ) @@ -896,7 +908,8 @@ for raw := iterator.Next(); raw != nil; raw = iterator.Next() { info := raw.(*database.ChangeInfo) if err = txn.Delete(tblChanges, info); err != nil { - return fmt.Errorf("delete change %s: %w", info.ID, err) + return fmt.Errorf("delete change (%s.%s.%d): %w", + info.DocKey, info.DocID, info.ServerSeq, err) } } return nil @@ -905,11 +918,12 @@ // FindChangesBetweenServerSeqs returns the changes between two server sequences. func (d *DB) FindChangesBetweenServerSeqs( ctx context.Context, + docKey key.Key, docID types.ID, from int64, to int64, ) ([]*change.Change, error) { - infos, err := d.FindChangeInfosBetweenServerSeqs(ctx, docID, from, to) + infos, err := d.FindChangeInfosBetweenServerSeqs(ctx, docKey, docID, from, to) if err != nil { return nil, err } @@ -930,6 +944,7 @@ // FindChangeInfosBetweenServerSeqs returns the changeInfos between two server sequences. 
func (d *DB) FindChangeInfosBetweenServerSeqs( _ context.Context, + docKey key.Key, docID types.ID, from int64, to int64, @@ -941,7 +956,8 @@ func (d *DB) FindChangeInfosBetweenServerSeqs( iterator, err := txn.LowerBound( tblChanges, - "doc_id_server_seq", + "doc_key_doc_id_server_seq", + docKey.String(), docID.String(), from, ) @@ -951,7 +967,7 @@ func (d *DB) FindChangeInfosBetweenServerSeqs( for raw := iterator.Next(); raw != nil; raw = iterator.Next() { info := raw.(*database.ChangeInfo) - if info.DocID != docID || info.ServerSeq > to { + if info.DocKey != docKey || info.DocID != docID || info.ServerSeq > to { break } infos = append(infos, info.DeepCopy()) @@ -962,6 +978,7 @@ func (d *DB) FindChangeInfosBetweenServerSeqs( // CreateSnapshotInfo stores the snapshot of the given document. func (d *DB) CreateSnapshotInfo( _ context.Context, + docKey key.Key, docID types.ID, doc *document.InternalDocument, ) error { @@ -975,6 +992,7 @@ func (d *DB) CreateSnapshotInfo( if err := txn.Insert(tblSnapshots, &database.SnapshotInfo{ ID: newID(), + DocKey: docKey, DocID: docID, ServerSeq: doc.Checkpoint().ServerSeq, Lamport: doc.Lamport(), @@ -988,15 +1006,22 @@ func (d *DB) CreateSnapshotInfo( } // FindSnapshotInfoByID returns the snapshot by the given id. 
-func (d *DB) FindSnapshotInfoByID(_ context.Context, id types.ID) (*database.SnapshotInfo, error) { +func (d *DB) FindSnapshotInfoByID( + _ context.Context, + docKey key.Key, + docID types.ID, + serverSeq int64, +) (*database.SnapshotInfo, error) { txn := d.db.Txn(false) defer txn.Abort() - raw, err := txn.First(tblSnapshots, "id", id.String()) + raw, err := txn.First( + tblSnapshots, "doc_key_doc_id_server_seq", docKey.String(), docID.String(), serverSeq) if err != nil { - return nil, fmt.Errorf("find snapshot by id: %w", err) + return nil, fmt.Errorf("find snapshot by (docKey, docID, serverSeq): %w", err) } if raw == nil { - return nil, fmt.Errorf("%s: %w", id, database.ErrSnapshotNotFound) + return nil, fmt.Errorf("(%s.%s.%d): %w", + docKey, docID, serverSeq, database.ErrSnapshotNotFound) } return raw.(*database.SnapshotInfo).DeepCopy(), nil @@ -1005,6 +1030,7 @@ func (d *DB) FindSnapshotInfoByID(_ context.Context, id types.ID) (*database.Sna // FindClosestSnapshotInfo finds the last snapshot of the given document. 
func (d *DB) FindClosestSnapshotInfo( _ context.Context, + docKey key.Key, docID types.ID, serverSeq int64, includeSnapshot bool, @@ -1012,12 +1038,8 @@ func (d *DB) FindClosestSnapshotInfo( txn := d.db.Txn(false) defer txn.Abort() - iterator, err := txn.ReverseLowerBound( - tblSnapshots, - "doc_id_server_seq", - docID.String(), - serverSeq, - ) + iterator, err := txn.ReverseLowerBound(tblSnapshots, "doc_key_doc_id_server_seq", + docKey.String(), docID.String(), serverSeq) if err != nil { return nil, fmt.Errorf("fetch snapshots before %d: %w", serverSeq, err) } @@ -1025,9 +1047,10 @@ func (d *DB) FindClosestSnapshotInfo( var snapshotInfo *database.SnapshotInfo for raw := iterator.Next(); raw != nil; raw = iterator.Next() { info := raw.(*database.SnapshotInfo) - if info.DocID == docID { + if info.DocKey == docKey && info.DocID == docID { snapshotInfo = &database.SnapshotInfo{ ID: info.ID, + DocKey: info.DocKey, DocID: info.DocID, ServerSeq: info.ServerSeq, Lamport: info.Lamport, @@ -1050,12 +1073,13 @@ func (d *DB) FindClosestSnapshotInfo( // FindMinSyncedSeqInfo finds the minimum synced sequence info. 
func (d *DB) FindMinSyncedSeqInfo( _ context.Context, + docKey key.Key, docID types.ID, ) (*database.SyncedSeqInfo, error) { txn := d.db.Txn(false) defer txn.Abort() - it, err := txn.Get(tblSyncedSeqs, "id") + it, err := txn.Get(tblSyncedSeqs, "doc_key_doc_id_client_id") if err != nil { return nil, fmt.Errorf("fetch syncedseqs: %w", err) } @@ -1064,7 +1088,7 @@ func (d *DB) FindMinSyncedSeqInfo( minSyncedServerSeq := change.MaxServerSeq for raw := it.Next(); raw != nil; raw = it.Next() { info := raw.(*database.SyncedSeqInfo) - if info.DocID == docID && info.ServerSeq < minSyncedServerSeq { + if info.DocKey == docKey && info.DocID == docID && info.ServerSeq < minSyncedServerSeq { minSyncedServerSeq = info.ServerSeq syncedSeqInfo = info } @@ -1081,10 +1105,11 @@ func (d *DB) UpdateAndFindMinSyncedTicket( ctx context.Context, clientInfo *database.ClientInfo, + docKey key.Key, docID types.ID, serverSeq int64, ) (*time.Ticket, error) { - if err := d.UpdateSyncedSeq(ctx, clientInfo, docID, serverSeq); err != nil { + if err := d.UpdateSyncedSeq(ctx, clientInfo, docKey, docID, serverSeq); err != nil { return nil, err } @@ -1093,19 +1118,21 @@ func (d *DB) UpdateAndFindMinSyncedTicket( iterator, err := txn.LowerBound( tblSyncedSeqs, - "doc_id_lamport_actor_id", + "doc_key_doc_id_lamport_actor_id", + docKey.String(), docID.String(), int64(0), time.InitialActorID.String(), ) if err != nil { - return nil, fmt.Errorf("fetch smallest syncedseq of %s: %w", docID.String(), err) + return nil, fmt.Errorf("fetch smallest syncedseq of the document (%s.%s): %w", + docKey.String(), docID.String(), err) } var syncedSeqInfo *database.SyncedSeqInfo if raw := iterator.Next(); raw != nil { info := raw.(*database.SyncedSeqInfo) - if info.DocID == docID { + if info.DocKey == docKey && info.DocID == docID { syncedSeqInfo = info } } @@ -1130,13 +1157,14 @@ func (d *DB) UpdateSyncedSeq( _ context.Context, clientInfo 
*database.ClientInfo, + docKey key.Key, docID types.ID, serverSeq int64, ) error { txn := d.db.Txn(true) defer txn.Abort() - isAttached, err := clientInfo.IsAttached(docID) + isAttached, err := clientInfo.IsAttached(docKey, docID) if err != nil { return err } @@ -1144,32 +1172,37 @@ func (d *DB) UpdateSyncedSeq( if !isAttached { if _, err = txn.DeleteAll( tblSyncedSeqs, - "doc_id_client_id", + "doc_key_doc_id_client_id", + docKey.String(), docID.String(), clientInfo.ID.String(), ); err != nil { - return fmt.Errorf("delete syncedseqs of %s: %w", docID.String(), err) + return fmt.Errorf("delete syncedseqs of the document (%s.%s): %w", + docKey.String(), docID.String(), err) } txn.Commit() return nil } - ticket, err := d.findTicketByServerSeq(txn, docID, serverSeq) + ticket, err := d.findTicketByServerSeq(txn, docKey, docID, serverSeq) if err != nil { return err } raw, err := txn.First( tblSyncedSeqs, - "doc_id_client_id", + "doc_key_doc_id_client_id", + docKey.String(), docID.String(), clientInfo.ID.String(), ) if err != nil { - return fmt.Errorf("fetch syncedseqs of %s: %w", docID.String(), err) + return fmt.Errorf("fetch syncedseqs of the document (%s.%s): %w", + docKey.String(), docID.String(), err) } syncedSeqInfo := &database.SyncedSeqInfo{ + DocKey: docKey, DocID: docID, ClientID: clientInfo.ID, Lamport: ticket.Lamport(), @@ -1183,7 +1216,8 @@ func (d *DB) UpdateSyncedSeq( } if err := txn.Insert(tblSyncedSeqs, syncedSeqInfo); err != nil { - return fmt.Errorf("insert syncedseqs of %s: %w", docID.String(), err) + return fmt.Errorf("insert syncedseqs of the document (%s.%s): %w", + docKey.String(), docID.String(), err) } txn.Commit() @@ -1194,7 +1228,7 @@ func (d *DB) UpdateSyncedSeq( func (d *DB) FindDocInfosByPaging( _ context.Context, projectID types.ID, - paging types.Paging[types.ID], + paging types.Paging[key.Key], ) ([]*database.DocInfo, error) { txn := d.db.Txn(false) defer txn.Abort() @@ -1204,19 +1238,19 @@ func (d *DB) FindDocInfosByPaging( if 
paging.IsForward { iterator, err = txn.LowerBound( tblDocuments, - "project_id_id", + "project_id_key", projectID.String(), paging.Offset.String(), ) } else { offset := paging.Offset if paging.Offset == "" { - offset = types.IDFromActorID(time.MaxActorID) + offset = key.Key(strings.Repeat(string(rune(127)), 120)) } iterator, err = txn.ReverseLowerBound( tblDocuments, - "project_id_id", + "project_id_key", projectID.String(), offset.String(), ) @@ -1232,7 +1266,7 @@ func (d *DB) FindDocInfosByPaging( break } - if info.ID != paging.Offset && info.RemovedAt.IsZero() { + if info.Key != paging.Offset && info.RemovedAt.IsZero() { docInfos = append(docInfos, info) } } @@ -1275,6 +1309,7 @@ func (d *DB) FindDocInfosByQuery( func (d *DB) IsDocumentAttached( _ context.Context, projectID types.ID, + docKey key.Key, docID types.ID, excludeClientID types.ID, ) (bool, error) { @@ -1294,7 +1329,7 @@ func (d *DB) IsDocumentAttached( if clientInfo.ID == excludeClientID { continue } - clientDocInfo := clientInfo.Documents[docID] + clientDocInfo := clientInfo.Documents[docKey][docID] if clientDocInfo == nil { continue } @@ -1308,6 +1343,7 @@ func (d *DB) IsDocumentAttached( func (d *DB) findTicketByServerSeq( txn *memdb.Txn, + docKey key.Key, docID types.ID, serverSeq int64, ) (*time.Ticket, error) { @@ -1317,17 +1353,19 @@ func (d *DB) findTicketByServerSeq( raw, err := txn.First( tblChanges, - "doc_id_server_seq", + "doc_key_doc_id_server_seq", + docKey.String(), docID.String(), serverSeq, ) if err != nil { - return nil, fmt.Errorf("fetch change of %s: %w", docID.String(), err) + return nil, fmt.Errorf("fetch change of the document (%s.%s): %w", + docKey.String(), docID.String(), err) } if raw == nil { return nil, fmt.Errorf( - "docID %s, serverSeq %d: %w", - docID.String(), + "docKey %s, docID %s, serverSeq %d: %w", + docKey.String(), docID.String(), serverSeq, database.ErrDocumentNotFound, ) diff --git a/server/backend/database/memory/indexes.go 
b/server/backend/database/memory/indexes.go index c30c352b4..681c27069 100644 --- a/server/backend/database/memory/indexes.go +++ b/server/backend/database/memory/indexes.go @@ -117,12 +117,12 @@ var schema = &memdb.DBSchema{ Unique: true, Indexer: &memdb.StringFieldIndex{Field: "ID"}, }, - "project_id_id": { - Name: "project_id_id", + "key_id": { + Name: "key_id", Unique: true, Indexer: &memdb.CompoundIndex{ Indexes: []memdb.Indexer{ - &memdb.StringFieldIndex{Field: "ProjectID"}, + &memdb.StringFieldIndex{Field: "Key"}, &memdb.StringFieldIndex{Field: "ID"}, }, }, @@ -156,11 +156,12 @@ var schema = &memdb.DBSchema{ Unique: true, Indexer: &memdb.StringFieldIndex{Field: "ID"}, }, - "doc_id_server_seq": { - Name: "doc_id_server_seq", + "doc_key_doc_id_server_seq": { + Name: "doc_key_doc_id_server_seq", Unique: true, Indexer: &memdb.CompoundIndex{ Indexes: []memdb.Indexer{ + &memdb.StringFieldIndex{Field: "DocKey"}, &memdb.StringFieldIndex{Field: "DocID"}, &memdb.IntFieldIndex{Field: "ServerSeq"}, }, @@ -176,11 +177,12 @@ var schema = &memdb.DBSchema{ Unique: true, Indexer: &memdb.StringFieldIndex{Field: "ID"}, }, - "doc_id_server_seq": { - Name: "doc_id_server_seq", + "doc_key_doc_id_server_seq": { + Name: "doc_key_doc_id_server_seq", Unique: true, Indexer: &memdb.CompoundIndex{ Indexes: []memdb.Indexer{ + &memdb.StringFieldIndex{Field: "DocKey"}, &memdb.StringFieldIndex{Field: "DocID"}, &memdb.IntFieldIndex{Field: "ServerSeq"}, }, @@ -196,20 +198,22 @@ var schema = &memdb.DBSchema{ Unique: true, Indexer: &memdb.StringFieldIndex{Field: "ID"}, }, - "doc_id_client_id": { - Name: "doc_id_client_id", + "doc_key_doc_id_client_id": { + Name: "doc_key_doc_id_client_id", Unique: true, Indexer: &memdb.CompoundIndex{ Indexes: []memdb.Indexer{ + &memdb.StringFieldIndex{Field: "DocKey"}, &memdb.StringFieldIndex{Field: "DocID"}, &memdb.StringFieldIndex{Field: "ClientID"}, }, }, }, - "doc_id_lamport_actor_id": { - Name: "doc_id_lamport_actor_id", + 
"doc_key_doc_id_lamport_actor_id": { + Name: "doc_key_doc_id_lamport_actor_id", Indexer: &memdb.CompoundIndex{ Indexes: []memdb.Indexer{ + &memdb.StringFieldIndex{Field: "DocKey"}, &memdb.StringFieldIndex{Field: "DocID"}, &memdb.IntFieldIndex{Field: "Lamport"}, &memdb.StringFieldIndex{Field: "ActorID"}, diff --git a/server/backend/database/mongo/client.go b/server/backend/database/mongo/client.go index 221ff38eb..1a3cfc9ef 100644 --- a/server/backend/database/mongo/client.go +++ b/server/backend/database/mongo/client.go @@ -563,8 +563,8 @@ func (c *Client) UpdateClientInfoAfterPushPull( return err } - clientDocInfoKey := "documents." + docInfo.ID.String() + "." - clientDocInfo, ok := clientInfo.Documents[docInfo.ID] + clientDocInfoKey := getClientDocInfoKey(docInfo.Key, docInfo.ID) + clientDocInfo, ok := clientInfo.Documents[docInfo.Key][docInfo.ID] if !ok { return fmt.Errorf("client doc info: %w", database.ErrDocumentNeverAttached) } @@ -580,7 +580,7 @@ func (c *Client) UpdateClientInfoAfterPushPull( }, } - attached, err := clientInfo.IsAttached(docInfo.ID) + attached, err := clientInfo.IsAttached(docInfo.Key, docInfo.ID) if err != nil { return err } @@ -716,6 +716,7 @@ func (c *Client) FindDocInfoByKeyAndOwner( var result *mongo.SingleResult if res.UpsertedCount > 0 { result = c.collection(colDocuments).FindOneAndUpdate(ctx, bson.M{ + "key": docKey, "_id": res.UpsertedID, }, bson.M{ "$set": bson.M{ @@ -775,25 +776,20 @@ func (c *Client) FindDocInfoByKey( return &docInfo, nil } -// FindDocInfoByID finds a docInfo of the given ID. -func (c *Client) FindDocInfoByID( +// FindDocInfoByKeyAndID finds a docInfo of the given ID. 
+func (c *Client) FindDocInfoByKeyAndID( ctx context.Context, - projectID types.ID, + key key.Key, id types.ID, ) (*database.DocInfo, error) { - encodedProjectID, err := encodeID(projectID) - if err != nil { - return nil, err - } - encodedDocID, err := encodeID(id) if err != nil { return nil, err } result := c.collection(colDocuments).FindOne(ctx, bson.M{ - "_id": encodedDocID, - "project_id": encodedProjectID, + "key": key, + "_id": encodedDocID, }) if result.Err() == mongo.ErrNoDocuments { return nil, fmt.Errorf("%s: %w", id, database.ErrDocumentNotFound) @@ -813,22 +809,17 @@ func (c *Client) FindDocInfoByID( // UpdateDocInfoStatusToRemoved updates the document status to removed. func (c *Client) UpdateDocInfoStatusToRemoved( ctx context.Context, - projectID types.ID, + key key.Key, id types.ID, ) error { - encodedProjectID, err := encodeID(projectID) - if err != nil { - return err - } - encodedDocID, err := encodeID(id) if err != nil { return err } result := c.collection(colDocuments).FindOneAndUpdate(ctx, bson.M{ - "_id": encodedDocID, - "project_id": encodedProjectID, + "key": key, + "_id": encodedDocID, }, bson.M{ "$set": bson.M{ "removed_at": gotime.Now(), @@ -836,7 +827,7 @@ func (c *Client) UpdateDocInfoStatusToRemoved( }, options.FindOneAndUpdate().SetReturnDocument(options.After)) if result.Err() == mongo.ErrNoDocuments { - return fmt.Errorf("%s: %w", id, database.ErrDocumentNotFound) + return fmt.Errorf("%s.%s: %w", key, id, database.ErrDocumentNotFound) } if result.Err() != nil { return fmt.Errorf("update document info status to removed: %w", result.Err()) @@ -848,7 +839,6 @@ func (c *Client) UpdateDocInfoStatusToRemoved( // CreateChangeInfos stores the given changes and doc info. 
func (c *Client) CreateChangeInfos( ctx context.Context, - _ types.ID, docInfo *database.DocInfo, initialServerSeq int64, changes []*change.Change, @@ -871,6 +861,7 @@ func (c *Client) CreateChangeInfos( } models = append(models, mongo.NewUpdateOneModel().SetFilter(bson.M{ + "doc_key": docInfo.Key, "doc_id": encodedDocID, "server_seq": cn.ServerSeq(), }).SetUpdate(bson.M{"$set": bson.M{ @@ -905,6 +896,7 @@ func (c *Client) CreateChangeInfos( } res, err := c.collection(colDocuments).UpdateOne(ctx, bson.M{ + "key": docInfo.Key, "_id": encodedDocID, "server_seq": initialServerSeq, }, bson.M{ @@ -927,6 +919,7 @@ func (c *Client) CreateChangeInfos( // save storage. func (c *Client) PurgeStaleChanges( ctx context.Context, + docKey key.Key, docID types.ID, ) error { encodedDocID, err := encodeID(docID) @@ -938,7 +931,10 @@ func (c *Client) PurgeStaleChanges( // Because offline client can pull changes when it becomes online. result := c.collection(colSyncedSeqs).FindOne( ctx, - bson.M{"doc_id": encodedDocID}, + bson.M{ + "doc_key": docKey, + "doc_id": encodedDocID, + }, options.FindOne().SetSort(bson.M{"server_seq": 1}), ) if result.Err() == mongo.ErrNoDocuments { @@ -956,6 +952,7 @@ func (c *Client) PurgeStaleChanges( if _, err := c.collection(colChanges).DeleteMany( ctx, bson.M{ + "doc_key": docKey, "doc_id": encodedDocID, "server_seq": bson.M{"$lt": minSyncedSeqInfo.ServerSeq}, }, @@ -970,11 +967,12 @@ func (c *Client) PurgeStaleChanges( // FindChangesBetweenServerSeqs returns the changes between two server sequences. 
func (c *Client) FindChangesBetweenServerSeqs( ctx context.Context, + docKey key.Key, docID types.ID, from int64, to int64, ) ([]*change.Change, error) { - infos, err := c.FindChangeInfosBetweenServerSeqs(ctx, docID, from, to) + infos, err := c.FindChangeInfosBetweenServerSeqs(ctx, docKey, docID, from, to) if err != nil { return nil, err } @@ -994,6 +992,7 @@ func (c *Client) FindChangesBetweenServerSeqs( // FindChangeInfosBetweenServerSeqs returns the changeInfos between two server sequences. func (c *Client) FindChangeInfosBetweenServerSeqs( ctx context.Context, + docKey key.Key, docID types.ID, from int64, to int64, @@ -1004,7 +1003,8 @@ func (c *Client) FindChangeInfosBetweenServerSeqs( } cursor, err := c.collection(colChanges).Find(ctx, bson.M{ - "doc_id": encodedDocID, + "doc_key": docKey, + "doc_id": encodedDocID, "server_seq": bson.M{ "$gte": from, "$lte": to, @@ -1025,6 +1025,7 @@ func (c *Client) FindChangeInfosBetweenServerSeqs( // CreateSnapshotInfo stores the snapshot of the given document. func (c *Client) CreateSnapshotInfo( ctx context.Context, + docKey key.Key, docID types.ID, doc *document.InternalDocument, ) error { @@ -1038,6 +1039,7 @@ func (c *Client) CreateSnapshotInfo( } if _, err := c.collection(colSnapshots).InsertOne(ctx, bson.M{ + "doc_key": docKey, "doc_id": encodedDocID, "server_seq": doc.Checkpoint().ServerSeq, "lamport": doc.Lamport(), @@ -1053,15 +1055,19 @@ func (c *Client) CreateSnapshotInfo( // FindSnapshotInfoByID returns the snapshot by the given id. 
func (c *Client) FindSnapshotInfoByID( ctx context.Context, - id types.ID, + docKey key.Key, + docID types.ID, + serverSeq int64, ) (*database.SnapshotInfo, error) { - encodedID, err := encodeID(id) + encodedDocID, err := encodeID(docID) if err != nil { return nil, err } result := c.collection(colSnapshots).FindOne(ctx, bson.M{ - "_id": encodedID, + "doc_key": docKey, + "doc_id": encodedDocID, + "server_seq": serverSeq, }) snapshotInfo := &database.SnapshotInfo{} @@ -1082,6 +1088,7 @@ func (c *Client) FindSnapshotInfoByID( // FindClosestSnapshotInfo finds the last snapshot of the given document. func (c *Client) FindClosestSnapshotInfo( ctx context.Context, + docKey key.Key, docID types.ID, serverSeq int64, includeSnapshot bool, @@ -1100,7 +1107,8 @@ func (c *Client) FindClosestSnapshotInfo( } result := c.collection(colSnapshots).FindOne(ctx, bson.M{ - "doc_id": encodedDocID, + "doc_key": docKey, + "doc_id": encodedDocID, "server_seq": bson.M{ "$lte": serverSeq, }, @@ -1124,6 +1132,7 @@ func (c *Client) FindClosestSnapshotInfo( // FindMinSyncedSeqInfo finds the minimum synced sequence info. 
func (c *Client) FindMinSyncedSeqInfo( ctx context.Context, + docKey key.Key, docID types.ID, ) (*database.SyncedSeqInfo, error) { encodedDocID, err := encodeID(docID) @@ -1132,7 +1141,8 @@ func (c *Client) FindMinSyncedSeqInfo( } syncedSeqResult := c.collection(colSyncedSeqs).FindOne(ctx, bson.M{ - "doc_id": encodedDocID, + "doc_key": docKey, + "doc_id": encodedDocID, }, options.FindOne().SetSort(bson.D{ {Key: "server_seq", Value: 1}, })) @@ -1157,10 +1167,11 @@ func (c *Client) FindMinSyncedSeqInfo( func (c *Client) UpdateAndFindMinSyncedTicket( ctx context.Context, clientInfo *database.ClientInfo, + docKey key.Key, docID types.ID, serverSeq int64, ) (*time.Ticket, error) { - if err := c.UpdateSyncedSeq(ctx, clientInfo, docID, serverSeq); err != nil { + if err := c.UpdateSyncedSeq(ctx, clientInfo, docKey, docID, serverSeq); err != nil { return nil, err } @@ -1171,7 +1182,8 @@ func (c *Client) UpdateAndFindMinSyncedTicket( // 02. find min synced seq of the given document. result := c.collection(colSyncedSeqs).FindOne(ctx, bson.M{ - "doc_id": encodedDocID, + "doc_key": docKey, + "doc_id": encodedDocID, }, options.FindOne().SetSort(bson.D{ {Key: "lamport", Value: 1}, {Key: "actor_id", Value: 1}, @@ -1207,7 +1219,7 @@ func (c *Client) UpdateAndFindMinSyncedTicket( func (c *Client) FindDocInfosByPaging( ctx context.Context, projectID types.ID, - paging types.Paging[types.ID], + paging types.Paging[key.Key], ) ([]*database.DocInfo, error) { encodedProjectID, err := encodeID(projectID) if err != nil { @@ -1223,25 +1235,20 @@ func (c *Client) FindDocInfosByPaging( }, } if paging.Offset != "" { - encodedOffset, err := encodeID(paging.Offset) - if err != nil { - return nil, err - } - k := "$lt" if paging.IsForward { k = "$gt" } - filter["_id"] = bson.M{ - k: encodedOffset, + filter["key"] = bson.M{ + k: paging.Offset, } } opts := options.Find().SetLimit(int64(paging.PageSize)) if paging.IsForward { - opts = opts.SetSort(map[string]int{"_id": 1}) + opts = 
opts.SetSort(map[string]int{"key": 1}) } else { - opts = opts.SetSort(map[string]int{"_id": -1}) + opts = opts.SetSort(map[string]int{"key": -1}) } cursor, err := c.collection(colDocuments).Find(ctx, filter, opts) @@ -1298,6 +1305,7 @@ func (c *Client) FindDocInfosByQuery( func (c *Client) UpdateSyncedSeq( ctx context.Context, clientInfo *database.ClientInfo, + docKey key.Key, docID types.ID, serverSeq int64, ) error { @@ -1311,13 +1319,14 @@ func (c *Client) UpdateSyncedSeq( } // 01. update synced seq of the given client. - isAttached, err := clientInfo.IsAttached(docID) + isAttached, err := clientInfo.IsAttached(docKey, docID) if err != nil { return err } if !isAttached { if _, err = c.collection(colSyncedSeqs).DeleteOne(ctx, bson.M{ + "doc_key": docKey, "doc_id": encodedDocID, "client_id": encodedClientID, }, options.Delete()); err != nil { @@ -1326,12 +1335,13 @@ func (c *Client) UpdateSyncedSeq( return nil } - ticket, err := c.findTicketByServerSeq(ctx, docID, serverSeq) + ticket, err := c.findTicketByServerSeq(ctx, docKey, docID, serverSeq) if err != nil { return err } if _, err = c.collection(colSyncedSeqs).UpdateOne(ctx, bson.M{ + "doc_key": docKey, "doc_id": encodedDocID, "client_id": encodedClientID, }, bson.M{ @@ -1351,6 +1361,7 @@ func (c *Client) UpdateSyncedSeq( func (c *Client) IsDocumentAttached( ctx context.Context, projectID types.ID, + docKey key.Key, docID types.ID, excludeClientID types.ID, ) (bool, error) { @@ -1359,7 +1370,7 @@ func (c *Client) IsDocumentAttached( return false, err } - clientDocInfoKey := "documents." + docID.String() + "." 
+ clientDocInfoKey := getClientDocInfoKey(docKey, docID) filter := bson.M{ "project_id": encodedProjectID, clientDocInfoKey + "status": database.DocumentAttached, @@ -1384,6 +1395,7 @@ func (c *Client) IsDocumentAttached( func (c *Client) findTicketByServerSeq( ctx context.Context, + docKey key.Key, docID types.ID, serverSeq int64, ) (*time.Ticket, error) { @@ -1397,6 +1409,7 @@ func (c *Client) findTicketByServerSeq( } result := c.collection(colChanges).FindOne(ctx, bson.M{ + "doc_key": docKey, "doc_id": encodedDocID, "server_seq": serverSeq, }) @@ -1429,6 +1442,18 @@ func (c *Client) findTicketByServerSeq( ), nil } +func (c *Client) CleanUpAllCollections(ctx context.Context) error { + collections := []string{colProjects, colUsers, colClients, + colDocuments, colChanges, colSnapshots, colSyncedSeqs} + for _, col := range collections { + _, err := c.collection(col).DeleteMany(ctx, bson.D{}) + if err != nil { + return err + } + } + return nil +} + func (c *Client) collection( name string, opts ...*options.CollectionOptions, @@ -1455,3 +1480,9 @@ func escapeRegex(str string) string { } return buf.String() } +func getClientDocInfoKey( + docKey key.Key, + docID types.ID, +) string { + return fmt.Sprintf("documents.%s.%s.", docKey, docID.String()) +} diff --git a/server/backend/database/mongo/indexes.go b/server/backend/database/mongo/indexes.go index 953659b3f..1de4f0dbe 100644 --- a/server/backend/database/mongo/indexes.go +++ b/server/backend/database/mongo/indexes.go @@ -89,8 +89,8 @@ var collectionInfos = []collectionInfo{ name: colDocuments, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ - {Key: "project_id", Value: bsonx.Int32(1)}, {Key: "key", Value: bsonx.Int32(1)}, + {Key: "project_id", Value: bsonx.Int32(1)}, }, Options: options.Index().SetPartialFilterExpression( bsonx.Doc{ @@ -102,6 +102,7 @@ var collectionInfos = []collectionInfo{ name: colChanges, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ + {Key: "doc_key", Value: bsonx.Int32(1)}, {Key: "doc_id", 
Value: bsonx.Int32(1)}, {Key: "server_seq", Value: bsonx.Int32(1)}, }, @@ -111,6 +112,7 @@ var collectionInfos = []collectionInfo{ name: colSnapshots, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ + {Key: "doc_key", Value: bsonx.Int32(1)}, {Key: "doc_id", Value: bsonx.Int32(1)}, {Key: "server_seq", Value: bsonx.Int32(1)}, }, @@ -120,12 +122,14 @@ var collectionInfos = []collectionInfo{ name: colSyncedSeqs, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ + {Key: "doc_key", Value: bsonx.Int32(1)}, {Key: "doc_id", Value: bsonx.Int32(1)}, {Key: "client_id", Value: bsonx.Int32(1)}, }, Options: options.Index().SetUnique(true), }, { Keys: bsonx.Doc{ + {Key: "doc_key", Value: bsonx.Int32(1)}, {Key: "doc_id", Value: bsonx.Int32(1)}, {Key: "lamport", Value: bsonx.Int32(1)}, {Key: "actor_id", Value: bsonx.Int32(1)}, diff --git a/server/backend/database/snapshot_info.go b/server/backend/database/snapshot_info.go index efa88ea29..ad741c74e 100644 --- a/server/backend/database/snapshot_info.go +++ b/server/backend/database/snapshot_info.go @@ -20,6 +20,7 @@ import ( "time" "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" ) // SnapshotInfo is a structure representing information of the snapshot. @@ -27,6 +28,9 @@ type SnapshotInfo struct { // ID is the unique ID of the snapshot. ID types.ID `bson:"_id"` + // DocKey is the key of the document which the snapshot belongs to. + DocKey key.Key `bson:"doc_key"` + // DocID is the ID of the document which the snapshot belongs to. 
DocID types.ID `bson:"doc_id"` @@ -51,6 +55,7 @@ func (i *SnapshotInfo) DeepCopy() *SnapshotInfo { return &SnapshotInfo{ ID: i.ID, + DocKey: i.DocKey, DocID: i.DocID, ServerSeq: i.ServerSeq, Lamport: i.Lamport, diff --git a/server/backend/database/synced_seq_info.go b/server/backend/database/synced_seq_info.go index 608521a76..9cfc87822 100644 --- a/server/backend/database/synced_seq_info.go +++ b/server/backend/database/synced_seq_info.go @@ -16,12 +16,16 @@ package database -import "github.com/yorkie-team/yorkie/api/types" +import ( + "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" +) // SyncedSeqInfo is a structure representing information about the synchronized // sequence for each client. type SyncedSeqInfo struct { ID types.ID `bson:"_id"` + DocKey key.Key `bson:"doc_key"` DocID types.ID `bson:"doc_id"` ClientID types.ID `bson:"client_id"` Lamport int64 `bson:"lamport"` diff --git a/server/backend/database/testcases/testcases.go b/server/backend/database/testcases/testcases.go index 629dcc83d..1a6ab770b 100644 --- a/server/backend/database/testcases/testcases.go +++ b/server/backend/database/testcases/testcases.go @@ -59,7 +59,7 @@ func RunFindDocInfoTest( clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) assert.NoError(t, err) - _, err = db.FindDocInfoByID(context.Background(), projectID, dummyClientID) + _, err = db.FindDocInfoByKeyAndID(context.Background(), "dummy", dummyClientID) assert.ErrorIs(t, err, database.ErrDocumentNotFound) docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) @@ -171,7 +171,7 @@ func RunFindChangesBetweenServerSeqsTest( clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) - assert.NoError(t, clientInfo.AttachDocument(docInfo.ID)) + assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) 
bytesID, _ := clientInfo.ID.Bytes() @@ -194,12 +194,13 @@ func RunFindChangesBetweenServerSeqsTest( } // Store changes - err := db.CreateChangeInfos(ctx, projectID, docInfo, 0, pack.Changes, false) + err := db.CreateChangeInfos(ctx, docInfo, 0, pack.Changes, false) assert.NoError(t, err) // Find changes loadedChanges, err := db.FindChangesBetweenServerSeqs( ctx, + docInfo.Key, docInfo.ID, 6, 10, @@ -228,26 +229,26 @@ func RunFindClosestSnapshotInfoTest(t *testing.T, db database.Database, projectI return nil })) - assert.NoError(t, db.CreateSnapshotInfo(ctx, docInfo.ID, doc.InternalDocument())) - snapshot, err := db.FindClosestSnapshotInfo(ctx, docInfo.ID, change.MaxCheckpoint.ServerSeq, true) + assert.NoError(t, db.CreateSnapshotInfo(ctx, docKey, docInfo.ID, doc.InternalDocument())) + snapshot, err := db.FindClosestSnapshotInfo(ctx, docKey, docInfo.ID, change.MaxCheckpoint.ServerSeq, true) assert.NoError(t, err) assert.Equal(t, int64(0), snapshot.ServerSeq) pack := change.NewPack(doc.Key(), doc.Checkpoint().NextServerSeq(1), nil, nil) assert.NoError(t, doc.ApplyChangePack(pack)) - assert.NoError(t, db.CreateSnapshotInfo(ctx, docInfo.ID, doc.InternalDocument())) - snapshot, err = db.FindClosestSnapshotInfo(ctx, docInfo.ID, change.MaxCheckpoint.ServerSeq, true) + assert.NoError(t, db.CreateSnapshotInfo(ctx, docKey, docInfo.ID, doc.InternalDocument())) + snapshot, err = db.FindClosestSnapshotInfo(ctx, docKey, docInfo.ID, change.MaxCheckpoint.ServerSeq, true) assert.NoError(t, err) assert.Equal(t, int64(1), snapshot.ServerSeq) pack = change.NewPack(doc.Key(), doc.Checkpoint().NextServerSeq(2), nil, nil) assert.NoError(t, doc.ApplyChangePack(pack)) - assert.NoError(t, db.CreateSnapshotInfo(ctx, docInfo.ID, doc.InternalDocument())) - snapshot, err = db.FindClosestSnapshotInfo(ctx, docInfo.ID, change.MaxCheckpoint.ServerSeq, true) + assert.NoError(t, db.CreateSnapshotInfo(ctx, docKey, docInfo.ID, doc.InternalDocument())) + snapshot, err = db.FindClosestSnapshotInfo(ctx, 
docKey, docInfo.ID, change.MaxCheckpoint.ServerSeq, true) assert.NoError(t, err) assert.Equal(t, int64(2), snapshot.ServerSeq) - snapshot, err = db.FindClosestSnapshotInfo(ctx, docInfo.ID, 1, true) + snapshot, err = db.FindClosestSnapshotInfo(ctx, docKey, docInfo.ID, 1, true) assert.NoError(t, err) assert.Equal(t, int64(1), snapshot.ServerSeq) }) @@ -443,29 +444,29 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t } // initial page, offset is empty - infos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[types.ID]{PageSize: pageSize}) + infos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[key.Key]{PageSize: pageSize}) assert.NoError(t, err) assertKeys([]key.Key{"8", "7", "6", "5", "4"}, infos) // backward - infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[types.ID]{ - Offset: infos[len(infos)-1].ID, + infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[key.Key]{ + Offset: infos[len(infos)-1].Key, PageSize: pageSize, }) assert.NoError(t, err) assertKeys([]key.Key{"3", "2", "1", "0"}, infos) // backward again - emptyInfos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[types.ID]{ - Offset: infos[len(infos)-1].ID, + emptyInfos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[key.Key]{ + Offset: infos[len(infos)-1].Key, PageSize: pageSize, }) assert.NoError(t, err) assertKeys(nil, emptyInfos) // forward - infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[types.ID]{ - Offset: infos[0].ID, + infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[key.Key]{ + Offset: infos[0].Key, PageSize: pageSize, IsForward: true, }) @@ -473,8 +474,8 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t assertKeys([]key.Key{"4", "5", "6", "7", "8"}, infos) // forward again - emptyInfos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[types.ID]{ - Offset: infos[len(infos)-1].ID, + emptyInfos, err = 
db.FindDocInfosByPaging(ctx, projectID, types.Paging[key.Key]{ + Offset: infos[len(infos)-1].Key, PageSize: pageSize, IsForward: true, }) @@ -493,7 +494,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t // dummy document setup var dummyDocInfos []*database.DocInfo for i := 0; i <= testDocCnt; i++ { - testDocKey := key.Key("testdockey" + strconv.Itoa(i)) + testDocKey := key.Key(fmt.Sprintf("%s%02d", "testdockey", i)) docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, testProjectInfo.ID, dummyClientID, testDocKey, true) assert.NoError(t, err) dummyDocInfos = append(dummyDocInfos, docInfo) @@ -501,7 +502,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t cases := []struct { name string - offset string + offset key.Key pageSize int isForward bool testResult []int @@ -536,28 +537,28 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t }, { name: "FindDocInfosByPaging --offset test", - offset: dummyDocInfos[13].ID.String(), + offset: dummyDocInfos[13].Key, pageSize: 0, isForward: false, testResult: helper.NewRangeSlice(12, 0), }, { name: "FindDocInfosByPaging --forward --offset test", - offset: dummyDocInfos[13].ID.String(), + offset: dummyDocInfos[13].Key, pageSize: 0, isForward: true, testResult: helper.NewRangeSlice(14, testDocCnt), }, { name: "FindDocInfosByPaging --size --offset test", - offset: dummyDocInfos[13].ID.String(), + offset: dummyDocInfos[13].Key, pageSize: 10, isForward: false, testResult: helper.NewRangeSlice(12, 3), }, { name: "FindDocInfosByPaging --size --forward --offset test", - offset: dummyDocInfos[13].ID.String(), + offset: dummyDocInfos[13].Key, pageSize: 10, isForward: true, testResult: helper.NewRangeSlice(14, 23), @@ -567,8 +568,8 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t for _, c := range cases { t.Run(c.name, func(t *testing.T) { ctx := context.Background() - testPaging := types.Paging[types.ID]{ - 
Offset: types.ID(c.offset), + testPaging := types.Paging[key.Key]{ + Offset: c.offset, PageSize: c.pageSize, IsForward: c.isForward, } @@ -578,9 +579,9 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t for idx, docInfo := range docInfos { resultIdx := c.testResult[idx] - assert.Equal(t, docInfo.Key, dummyDocInfos[resultIdx].Key) - assert.Equal(t, docInfo.ID, dummyDocInfos[resultIdx].ID) - assert.Equal(t, docInfo.ProjectID, dummyDocInfos[resultIdx].ProjectID) + assert.Equal(t, dummyDocInfos[resultIdx].Key, docInfo.Key) + assert.Equal(t, dummyDocInfos[resultIdx].ID, docInfo.ID) + assert.Equal(t, dummyDocInfos[resultIdx].ProjectID, docInfo.ProjectID) } }) } @@ -603,7 +604,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t } // 02. List the documents. - result, err := db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[types.ID]{ + result, err := db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[key.Key]{ PageSize: 10, IsForward: false, }) @@ -611,11 +612,11 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t assert.Len(t, result, len(docInfos)) // 03. Remove a document. - err = db.CreateChangeInfos(ctx, projectInfo.ID, docInfos[0], 0, []*change.Change{}, true) + err = db.CreateChangeInfos(ctx, docInfos[0], 0, []*change.Change{}, true) assert.NoError(t, err) // 04. List the documents again and check the filtered result. - result, err = db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[types.ID]{ + result, err = db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[key.Key]{ PageSize: 10, IsForward: false, }) @@ -679,13 +680,13 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type // 01. Create a client and a document then attach the document to the client. 
clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) - assert.NoError(t, clientInfo.AttachDocument(docInfo.ID)) + assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) // 02. Remove the document and check the document is removed. - err := db.CreateChangeInfos(ctx, projectID, docInfo, 0, []*change.Change{}, true) + err := db.CreateChangeInfos(ctx, docInfo, 0, []*change.Change{}, true) assert.NoError(t, err) - docInfo, err = db.FindDocInfoByID(ctx, projectID, docInfo.ID) + docInfo, err = db.FindDocInfoByKeyAndID(ctx, docInfo.Key, docInfo.ID) assert.NoError(t, err) assert.Equal(t, false, docInfo.RemovedAt.IsZero()) }) @@ -697,17 +698,17 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type // 01. Create a client and a document then attach the document to the client. clientInfo1, _ := db.ActivateClient(ctx, projectID, t.Name()) docInfo1, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.ID, docKey, true) - assert.NoError(t, clientInfo1.AttachDocument(docInfo1.ID)) + assert.NoError(t, clientInfo1.AttachDocument(docInfo1.Key, docInfo1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo1)) // 02. Remove the document. - assert.NoError(t, clientInfo1.RemoveDocument(docInfo1.ID)) - err := db.CreateChangeInfos(ctx, projectID, docInfo1, 0, []*change.Change{}, true) + assert.NoError(t, clientInfo1.RemoveDocument(docInfo1.Key, docInfo1.ID)) + err := db.CreateChangeInfos(ctx, docInfo1, 0, []*change.Change{}, true) assert.NoError(t, err) // 03. Create a document with same key and check they have same key but different id. 
docInfo2, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.ID, docKey, true) - assert.NoError(t, clientInfo1.AttachDocument(docInfo2.ID)) + assert.NoError(t, clientInfo1.AttachDocument(docInfo2.Key, docInfo2.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo2)) assert.Equal(t, docInfo1.Key, docInfo2.Key) assert.NotEqual(t, docInfo1.ID, docInfo2.ID) @@ -719,26 +720,26 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) - assert.NoError(t, clientInfo.AttachDocument(docInfo.ID)) + assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) doc := document.New(key.Key(t.Name())) pack := doc.CreateChangePack() // Set removed_at in docInfo and store changes - assert.NoError(t, clientInfo.RemoveDocument(docInfo.ID)) - err := db.CreateChangeInfos(ctx, projectID, docInfo, 0, pack.Changes, true) + assert.NoError(t, clientInfo.RemoveDocument(docInfo.Key, docInfo.ID)) + err := db.CreateChangeInfos(ctx, docInfo, 0, pack.Changes, true) assert.NoError(t, err) // Check whether removed_at is set in docInfo - docInfo, err = db.FindDocInfoByID(ctx, projectID, docInfo.ID) + docInfo, err = db.FindDocInfoByKeyAndID(ctx, docInfo.Key, docInfo.ID) assert.NoError(t, err) assert.NotEqual(t, gotime.Time{}, docInfo.RemovedAt) // Check whether DocumentRemoved status is set in clientInfo clientInfo, err = db.FindClientInfoByID(ctx, projectID, clientInfo.ID) assert.NoError(t, err) - assert.NotEqual(t, database.DocumentRemoved, clientInfo.Documents[docInfo.ID].Status) + assert.NotEqual(t, database.DocumentRemoved, clientInfo.Documents[docKey][docInfo.ID].Status) }) } @@ -757,7 +758,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr err = 
db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo) assert.ErrorIs(t, err, database.ErrDocumentNeverAttached) - assert.NoError(t, clientInfo.AttachDocument(docInfo.ID)) + assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) }) @@ -769,13 +770,13 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) assert.NoError(t, err) - assert.NoError(t, clientInfo.AttachDocument(docInfo.ID)) + assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) result, err := db.FindClientInfoByID(ctx, projectID, clientInfo.ID) - assert.Equal(t, result.Documents[docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docInfo.ID].ServerSeq, int64(0)) - assert.Equal(t, result.Documents[docInfo.ID].ClientSeq, uint32(0)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(0)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(0)) assert.NoError(t, err) }) @@ -787,37 +788,37 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) assert.NoError(t, err) - assert.NoError(t, clientInfo.AttachDocument(docInfo.ID)) - clientInfo.Documents[docInfo.ID].ServerSeq = 1 - clientInfo.Documents[docInfo.ID].ClientSeq = 1 + assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) + clientInfo.Documents[docKey][docInfo.ID].ServerSeq = 1 + clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 1 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) result, err := db.FindClientInfoByID(ctx, projectID, 
clientInfo.ID) - assert.Equal(t, result.Documents[docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docInfo.ID].ServerSeq, int64(1)) - assert.Equal(t, result.Documents[docInfo.ID].ClientSeq, uint32(1)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(1)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(1)) assert.NoError(t, err) // update with larger seq - clientInfo.Documents[docInfo.ID].ServerSeq = 3 - clientInfo.Documents[docInfo.ID].ClientSeq = 5 + clientInfo.Documents[docKey][docInfo.ID].ServerSeq = 3 + clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 5 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) result, err = db.FindClientInfoByID(ctx, projectID, clientInfo.ID) - assert.Equal(t, result.Documents[docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docInfo.ID].ServerSeq, int64(3)) - assert.Equal(t, result.Documents[docInfo.ID].ClientSeq, uint32(5)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(3)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(5)) assert.NoError(t, err) // update with smaller seq(should be ignored) - clientInfo.Documents[docInfo.ID].ServerSeq = 2 - clientInfo.Documents[docInfo.ID].ClientSeq = 3 + clientInfo.Documents[docKey][docInfo.ID].ServerSeq = 2 + clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 3 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) result, err = db.FindClientInfoByID(ctx, projectID, clientInfo.ID) - assert.Equal(t, result.Documents[docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docInfo.ID].ServerSeq, int64(3)) - assert.Equal(t, result.Documents[docInfo.ID].ClientSeq, uint32(5)) + 
assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(3)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(5)) assert.NoError(t, err) }) @@ -829,24 +830,24 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) assert.NoError(t, err) - assert.NoError(t, clientInfo.AttachDocument(docInfo.ID)) - clientInfo.Documents[docInfo.ID].ServerSeq = 1 - clientInfo.Documents[docInfo.ID].ClientSeq = 1 + assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) + clientInfo.Documents[docKey][docInfo.ID].ServerSeq = 1 + clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 1 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) result, err := db.FindClientInfoByID(ctx, projectID, clientInfo.ID) - assert.Equal(t, result.Documents[docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docInfo.ID].ServerSeq, int64(1)) - assert.Equal(t, result.Documents[docInfo.ID].ClientSeq, uint32(1)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(1)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(1)) assert.NoError(t, err) - assert.NoError(t, clientInfo.DetachDocument(docInfo.ID)) + assert.NoError(t, clientInfo.DetachDocument(docKey, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) result, err = db.FindClientInfoByID(ctx, projectID, clientInfo.ID) - assert.Equal(t, result.Documents[docInfo.ID].Status, database.DocumentDetached) - assert.Equal(t, result.Documents[docInfo.ID].ServerSeq, int64(0)) - assert.Equal(t, result.Documents[docInfo.ID].ClientSeq, uint32(0)) + assert.Equal(t, 
result.Documents[docKey][docInfo.ID].Status, database.DocumentDetached) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(0)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(0)) assert.NoError(t, err) }) @@ -858,24 +859,24 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) assert.NoError(t, err) - assert.NoError(t, clientInfo.AttachDocument(docInfo.ID)) - clientInfo.Documents[docInfo.ID].ServerSeq = 1 - clientInfo.Documents[docInfo.ID].ClientSeq = 1 + assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) + clientInfo.Documents[docKey][docInfo.ID].ServerSeq = 1 + clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 1 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) result, err := db.FindClientInfoByID(ctx, projectID, clientInfo.ID) - assert.Equal(t, result.Documents[docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docInfo.ID].ServerSeq, int64(1)) - assert.Equal(t, result.Documents[docInfo.ID].ClientSeq, uint32(1)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(1)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(1)) assert.NoError(t, err) - assert.NoError(t, clientInfo.RemoveDocument(docInfo.ID)) + assert.NoError(t, clientInfo.RemoveDocument(docKey, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) result, err = db.FindClientInfoByID(ctx, projectID, clientInfo.ID) - assert.Equal(t, result.Documents[docInfo.ID].Status, database.DocumentRemoved) - assert.Equal(t, result.Documents[docInfo.ID].ServerSeq, int64(0)) - assert.Equal(t, result.Documents[docInfo.ID].ClientSeq, uint32(0)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, 
database.DocumentRemoved) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(0)) + assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(0)) assert.NoError(t, err) }) @@ -887,7 +888,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) assert.NoError(t, err) - assert.NoError(t, clientInfo.AttachDocument(docInfo.ID)) + assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) clientInfo.ID = "invalid clientInfo id" @@ -912,44 +913,44 @@ func RunIsDocumentAttachedTest(t *testing.T, db database.Database, projectID typ assert.NoError(t, err) // 01. Check if document is attached without attaching - attached, err := db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err := db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.False(t, attached) // 02. Check if document is attached after attaching - assert.NoError(t, c1.AttachDocument(d1.ID)) + assert.NoError(t, c1.AttachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.True(t, attached) // 03. Check if document is attached after detaching - assert.NoError(t, c1.DetachDocument(d1.ID)) + assert.NoError(t, c1.DetachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.False(t, attached) // 04. 
Check if document is attached after two clients attaching - assert.NoError(t, c1.AttachDocument(d1.ID)) + assert.NoError(t, c1.AttachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - assert.NoError(t, c2.AttachDocument(d1.ID)) + assert.NoError(t, c2.AttachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c2, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.True(t, attached) // 05. Check if document is attached after a client detaching - assert.NoError(t, c1.DetachDocument(d1.ID)) + assert.NoError(t, c1.DetachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.True(t, attached) // 06. Check if document is attached after another client detaching - assert.NoError(t, c2.DetachDocument(d1.ID)) + assert.NoError(t, c2.DetachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c2, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.False(t, attached) }) @@ -966,32 +967,32 @@ func RunIsDocumentAttachedTest(t *testing.T, db database.Database, projectID typ assert.NoError(t, err) // 01. 
Check if documents are attached after attaching - assert.NoError(t, c1.AttachDocument(d1.ID)) + assert.NoError(t, c1.AttachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err := db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err := db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.True(t, attached) - assert.NoError(t, c1.AttachDocument(d2.ID)) + assert.NoError(t, c1.AttachDocument(d2.Key, d2.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d2)) - attached, err = db.IsDocumentAttached(ctx, projectID, d2.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d2.Key, d2.ID, "") assert.NoError(t, err) assert.True(t, attached) // 02. Check if a document is attached after detaching another document - assert.NoError(t, c1.DetachDocument(d2.ID)) + assert.NoError(t, c1.DetachDocument(d2.Key, d2.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d2)) - attached, err = db.IsDocumentAttached(ctx, projectID, d2.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d2.Key, d2.ID, "") assert.NoError(t, err) assert.False(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.True(t, attached) // 03. Check if a document is attached after detaching remaining document - assert.NoError(t, c1.DetachDocument(d1.ID)) + assert.NoError(t, c1.DetachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.False(t, attached) }) @@ -1008,68 +1009,68 @@ func RunIsDocumentAttachedTest(t *testing.T, db database.Database, projectID typ assert.NoError(t, err) // 01. 
Check if document is attached without attaching - attached, err := db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err := db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.False(t, attached) // 02. Check if document is attached after attaching - assert.NoError(t, c1.AttachDocument(d1.ID)) + assert.NoError(t, c1.AttachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.True(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, c1.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c1.ID) assert.NoError(t, err) assert.False(t, attached) // 03. Check if document is attached after detaching - assert.NoError(t, c1.DetachDocument(d1.ID)) + assert.NoError(t, c1.DetachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.False(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, c1.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c1.ID) assert.NoError(t, err) assert.False(t, attached) // 04. 
Check if document is attached after two clients attaching - assert.NoError(t, c1.AttachDocument(d1.ID)) + assert.NoError(t, c1.AttachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - assert.NoError(t, c2.AttachDocument(d1.ID)) + assert.NoError(t, c2.AttachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c2, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.True(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, c1.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c1.ID) assert.NoError(t, err) assert.True(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, c2.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c2.ID) assert.NoError(t, err) assert.True(t, attached) // 05. Check if document is attached after a client detaching - assert.NoError(t, c1.DetachDocument(d1.ID)) + assert.NoError(t, c1.DetachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.True(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, c1.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c1.ID) assert.NoError(t, err) assert.True(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, c2.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c2.ID) assert.NoError(t, err) assert.False(t, attached) // 06. 
Check if document is attached after another client detaching - assert.NoError(t, c2.DetachDocument(d1.ID)) + assert.NoError(t, c2.DetachDocument(d1.Key, d1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c2, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") assert.NoError(t, err) assert.False(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, c1.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c1.ID) assert.NoError(t, err) assert.False(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.ID, c2.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c2.ID) assert.NoError(t, err) assert.False(t, attached) }) diff --git a/server/backend/sync/coordinator.go b/server/backend/sync/coordinator.go index 764a942d6..110e81a7e 100644 --- a/server/backend/sync/coordinator.go +++ b/server/backend/sync/coordinator.go @@ -22,6 +22,7 @@ import ( gotime "time" "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" ) @@ -41,12 +42,14 @@ type Coordinator interface { Subscribe( ctx context.Context, subscriber *time.ActorID, + documentKey key.Key, documentID types.ID, ) (*Subscription, []*time.ActorID, error) // Unsubscribe unsubscribes from the given documents. 
Unsubscribe( ctx context.Context, + documentKey key.Key, documentID types.ID, sub *Subscription, ) error diff --git a/server/backend/sync/memory/coordinator.go b/server/backend/sync/memory/coordinator.go index dba6ed0e7..0bc52b55a 100644 --- a/server/backend/sync/memory/coordinator.go +++ b/server/backend/sync/memory/coordinator.go @@ -21,6 +21,7 @@ import ( "context" "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" "github.com/yorkie-team/yorkie/pkg/locker" "github.com/yorkie-team/yorkie/server/backend/sync" @@ -58,24 +59,26 @@ func (c *Coordinator) NewLocker( func (c *Coordinator) Subscribe( ctx context.Context, subscriber *time.ActorID, + documentKey key.Key, documentID types.ID, ) (*sync.Subscription, []*time.ActorID, error) { - sub, err := c.pubSub.Subscribe(ctx, subscriber, documentID) + sub, err := c.pubSub.Subscribe(ctx, subscriber, documentKey, documentID) if err != nil { return nil, nil, err } - ids := c.pubSub.ClientIDs(documentID) + ids := c.pubSub.ClientIDs(documentKey, documentID) return sub, ids, nil } // Unsubscribe unsubscribes the given documents. 
func (c *Coordinator) Unsubscribe( ctx context.Context, + documentKey key.Key, documentID types.ID, sub *sync.Subscription, ) error { - c.pubSub.Unsubscribe(ctx, documentID, sub) + c.pubSub.Unsubscribe(ctx, documentKey, documentID, sub) return nil } diff --git a/server/backend/sync/memory/coordinator_test.go b/server/backend/sync/memory/coordinator_test.go index 02e2e5ce4..e32bbfe25 100644 --- a/server/backend/sync/memory/coordinator_test.go +++ b/server/backend/sync/memory/coordinator_test.go @@ -23,6 +23,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" "github.com/yorkie-team/yorkie/server/backend/sync/memory" ) @@ -30,6 +31,7 @@ import ( func TestCoordinator(t *testing.T) { t.Run("subscriptions map test", func(t *testing.T) { coordinator := memory.NewCoordinator(nil) + docKey := key.Key(t.Name() + "key") docID := types.ID(t.Name() + "id") ctx := context.Background() @@ -37,7 +39,7 @@ func TestCoordinator(t *testing.T) { id, err := time.ActorIDFromBytes([]byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, byte(i)}) assert.NoError(t, err) - _, clientIDs, err := coordinator.Subscribe(ctx, id, docID) + _, clientIDs, err := coordinator.Subscribe(ctx, id, docKey, docID) assert.NoError(t, err) assert.Len(t, clientIDs, i+1) } diff --git a/server/backend/sync/memory/pubsub.go b/server/backend/sync/memory/pubsub.go index a01703784..06d1c1411 100644 --- a/server/backend/sync/memory/pubsub.go +++ b/server/backend/sync/memory/pubsub.go @@ -24,6 +24,7 @@ import ( "go.uber.org/zap" "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" "github.com/yorkie-team/yorkie/server/backend/sync" "github.com/yorkie-team/yorkie/server/logging" @@ -65,15 +66,15 @@ func (s *subscriptions) Len() int { // PubSub is the memory implementation of PubSub, used for single server. 
type PubSub struct { - subscriptionsMapMu *gosync.RWMutex - subscriptionsMapByDocID map[types.ID]*subscriptions + subscriptionsMapMu *gosync.RWMutex + subscriptionsMapByDoc map[key.Key]map[types.ID]*subscriptions } // NewPubSub creates an instance of PubSub. func NewPubSub() *PubSub { return &PubSub{ - subscriptionsMapMu: &gosync.RWMutex{}, - subscriptionsMapByDocID: make(map[types.ID]*subscriptions), + subscriptionsMapMu: &gosync.RWMutex{}, + subscriptionsMapByDoc: make(map[key.Key]map[types.ID]*subscriptions), } } @@ -81,12 +82,13 @@ func NewPubSub() *PubSub { func (m *PubSub) Subscribe( ctx context.Context, subscriber *time.ActorID, + documentKey key.Key, documentID types.ID, ) (*sync.Subscription, error) { if logging.Enabled(zap.DebugLevel) { logging.From(ctx).Debugf( - `Subscribe(%s,%s) Start`, - documentID.String(), + `Subscribe(%s.%s,%s) Start`, + documentKey, documentID, subscriber.String(), ) } @@ -95,15 +97,18 @@ func (m *PubSub) Subscribe( defer m.subscriptionsMapMu.Unlock() sub := sync.NewSubscription(subscriber) - if _, ok := m.subscriptionsMapByDocID[documentID]; !ok { - m.subscriptionsMapByDocID[documentID] = newSubscriptions() + if _, ok := m.subscriptionsMapByDoc[documentKey]; !ok { + m.subscriptionsMapByDoc[documentKey] = make(map[types.ID]*subscriptions) } - m.subscriptionsMapByDocID[documentID].Add(sub) + if _, ok := m.subscriptionsMapByDoc[documentKey][documentID]; !ok { + m.subscriptionsMapByDoc[documentKey][documentID] = newSubscriptions() + } + m.subscriptionsMapByDoc[documentKey][documentID].Add(sub) if logging.Enabled(zap.DebugLevel) { logging.From(ctx).Debugf( - `Subscribe(%s,%s) End`, - documentID.String(), + `Subscribe(%s.%s,%s) End`, + documentKey, documentID, subscriber.String(), ) } @@ -113,6 +118,7 @@ func (m *PubSub) Subscribe( // Unsubscribe unsubscribes the given docKeys. 
func (m *PubSub) Unsubscribe( ctx context.Context, + documentKey key.Key, documentID types.ID, sub *sync.Subscription, ) { @@ -121,26 +127,31 @@ func (m *PubSub) Unsubscribe( if logging.Enabled(zap.DebugLevel) { logging.From(ctx).Debugf( - `Unsubscribe(%s,%s) Start`, - documentID, + `Unsubscribe(%s.%s,%s) Start`, + documentKey, documentID, sub.Subscriber().String(), ) } sub.Close() - if subs, ok := m.subscriptionsMapByDocID[documentID]; ok { - subs.Delete(sub.ID()) + if subsByDocID, ok := m.subscriptionsMapByDoc[documentKey]; ok { + if subs, ok := subsByDocID[documentID]; ok { + subs.Delete(sub.ID()) - if subs.Len() == 0 { - delete(m.subscriptionsMapByDocID, documentID) + if subs.Len() == 0 { + delete(m.subscriptionsMapByDoc[documentKey], documentID) + } + } + if len(subsByDocID) == 0 { + delete(m.subscriptionsMapByDoc, documentKey) } } if logging.Enabled(zap.DebugLevel) { logging.From(ctx).Debugf( - `Unsubscribe(%s,%s) End`, - documentID, + `Unsubscribe(%s.%s,%s) End`, + documentKey, documentID, sub.Subscriber().String(), ) } @@ -155,53 +166,60 @@ func (m *PubSub) Publish( m.subscriptionsMapMu.RLock() defer m.subscriptionsMapMu.RUnlock() + documentKey := event.DocumentKey documentID := event.DocumentID if logging.Enabled(zap.DebugLevel) { - logging.From(ctx).Debugf(`Publish(%s,%s) Start`, documentID.String(), publisherID.String()) + logging.From(ctx).Debugf(`Publish(%s.%s,%s) Start`, + documentKey, documentID, + publisherID.String()) } - if subs, ok := m.subscriptionsMapByDocID[documentID]; ok { - for _, sub := range subs.Map() { - if sub.Subscriber().Compare(publisherID) == 0 { - continue - } - - if logging.Enabled(zap.DebugLevel) { - logging.From(ctx).Debugf( - `Publish %s(%s,%s) to %s`, - event.Type, - documentID.String(), - publisherID.String(), - sub.Subscriber().String(), - ) - } - - // NOTE: When a subscription is being closed by a subscriber, - // the subscriber may not receive messages. 
- select { - case sub.Events() <- event: - case <-gotime.After(100 * gotime.Millisecond): - logging.From(ctx).Warnf( - `Publish(%s,%s) to %s timeout`, - documentID.String(), - publisherID.String(), - sub.Subscriber().String(), - ) + if subsByDocID, ok := m.subscriptionsMapByDoc[documentKey]; ok { + if subs, ok := subsByDocID[documentID]; ok { + for _, sub := range subs.Map() { + if sub.Subscriber().Compare(publisherID) == 0 { + continue + } + + if logging.Enabled(zap.DebugLevel) { + logging.From(ctx).Debugf( + `Publish %s(%s.%s,%s) to %s`, + event.Type, + documentKey, documentID, + publisherID.String(), + sub.Subscriber().String(), + ) + } + + // NOTE: When a subscription is being closed by a subscriber, + // the subscriber may not receive messages. + select { + case sub.Events() <- event: + case <-gotime.After(100 * gotime.Millisecond): + logging.From(ctx).Warnf( + `Publish(%s.%s,%s) to %s timeout`, + documentKey, documentID, + publisherID.String(), + sub.Subscriber().String(), + ) + } } } } if logging.Enabled(zap.DebugLevel) { - logging.From(ctx).Debugf(`Publish(%s,%s) End`, documentID.String(), publisherID.String()) + logging.From(ctx).Debugf(`Publish(%s.%s,%s) End`, + documentKey, documentID, + publisherID.String()) } } // ClientIDs returns the clients of the given document. 
-func (m *PubSub) ClientIDs(documentID types.ID) []*time.ActorID { +func (m *PubSub) ClientIDs(documentKey key.Key, documentID types.ID) []*time.ActorID { m.subscriptionsMapMu.RLock() defer m.subscriptionsMapMu.RUnlock() var ids []*time.ActorID - for _, sub := range m.subscriptionsMapByDocID[documentID].Map() { + for _, sub := range m.subscriptionsMapByDoc[documentKey][documentID].Map() { ids = append(ids, sub.Subscriber()) } return ids diff --git a/server/backend/sync/memory/pubsub_test.go b/server/backend/sync/memory/pubsub_test.go index f1152bfcc..7d5c934d9 100644 --- a/server/backend/sync/memory/pubsub_test.go +++ b/server/backend/sync/memory/pubsub_test.go @@ -24,6 +24,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" "github.com/yorkie-team/yorkie/server/backend/sync" "github.com/yorkie-team/yorkie/server/backend/sync/memory" @@ -37,19 +38,21 @@ func TestPubSub(t *testing.T) { t.Run("publish subscribe test", func(t *testing.T) { pubSub := memory.NewPubSub() + key := key.Key(t.Name() + "key") id := types.ID(t.Name() + "id") docEvent := sync.DocEvent{ - Type: types.DocumentWatchedEvent, - Publisher: idB, - DocumentID: id, + Type: types.DocumentWatchedEvent, + Publisher: idB, + DocumentKey: key, + DocumentID: id, } ctx := context.Background() // subscribe the documents by actorA - subA, err := pubSub.Subscribe(ctx, idA, id) + subA, err := pubSub.Subscribe(ctx, idA, key, id) assert.NoError(t, err) defer func() { - pubSub.Unsubscribe(ctx, id, subA) + pubSub.Unsubscribe(ctx, key, id, subA) }() var wg gosync.WaitGroup diff --git a/server/backend/sync/pubsub.go b/server/backend/sync/pubsub.go index c4551b176..3b6868257 100644 --- a/server/backend/sync/pubsub.go +++ b/server/backend/sync/pubsub.go @@ -20,6 +20,7 @@ import ( "github.com/rs/xid" "github.com/yorkie-team/yorkie/api/types" + 
"github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" ) @@ -47,10 +48,11 @@ func (s *Subscription) ID() string { // DocEvent represents events that occur related to the document. type DocEvent struct { - Type types.DocEventType - Publisher *time.ActorID - DocumentID types.ID - Body types.DocEventBody + Type types.DocEventType + Publisher *time.ActorID + DocumentKey key.Key + DocumentID types.ID + Body types.DocEventBody } // Events returns the DocEvent channel of this subscription. diff --git a/server/clients/clients.go b/server/clients/clients.go index 1803c7f17..7f75be99b 100644 --- a/server/clients/clients.go +++ b/server/clients/clients.go @@ -60,31 +60,34 @@ func Deactivate( return nil, err } - for id, clientDocInfo := range clientInfo.Documents { - isAttached, err := clientInfo.IsAttached(id) - if err != nil { - return nil, err - } - if !isAttached { - continue - } + for docKey, v := range clientInfo.Documents { + for docID, clientDocInfo := range v { + isAttached, err := clientInfo.IsAttached(docKey, docID) + if err != nil { + return nil, err + } + if !isAttached { + continue + } - if err := clientInfo.DetachDocument(id); err != nil { - return nil, err - } + if err := clientInfo.DetachDocument(docKey, docID); err != nil { + return nil, err + } - // TODO(hackerwins): We need to remove the presence of the client from the document. - // Be careful that housekeeping is executed by the leader. And documents are sharded - // by the servers in the cluster. So, we need to consider the case where the leader is - // not the same as the server that handles the document. + // TODO(hackerwins): We need to remove the presence of the client from the document. + // Be careful that housekeeping is executed by the leader. And documents are sharded + // by the servers in the cluster. So, we need to consider the case where the leader is + // not the same as the server that handles the document. 
- if err := db.UpdateSyncedSeq( - ctx, - clientInfo, - id, - clientDocInfo.ServerSeq, - ); err != nil { - return nil, err + if err := db.UpdateSyncedSeq( + ctx, + clientInfo, + docKey, + docID, + clientDocInfo.ServerSeq, + ); err != nil { + return nil, err + } } } diff --git a/server/documents/documents.go b/server/documents/documents.go index 68e46a9bf..0711cd3e9 100644 --- a/server/documents/documents.go +++ b/server/documents/documents.go @@ -48,7 +48,7 @@ func ListDocumentSummaries( ctx context.Context, be *backend.Backend, project *types.Project, - paging types.Paging[types.ID], + paging types.Paging[key.Key], includeSnapshot bool, ) ([]*types.DocumentSummary, error) { if paging.PageSize > pageSizeLimit { @@ -196,13 +196,13 @@ func FindDocInfoByKey( } // FindDocInfo returns a document for the given document ID. -func FindDocInfo( +func FindDocInfoByKeyAndID( ctx context.Context, be *backend.Backend, - project *types.Project, + docKey key.Key, docID types.ID, ) (*database.DocInfo, error) { - return be.DB.FindDocInfoByID(ctx, project.ID, docID) + return be.DB.FindDocInfoByKeyAndID(ctx, docKey, docID) } // FindDocInfoByKeyAndOwner returns a document for the given document key. If @@ -230,14 +230,15 @@ func RemoveDocument( ctx context.Context, be *backend.Backend, project *types.Project, + docKey key.Key, docID types.ID, force bool, ) error { if force { - return be.DB.UpdateDocInfoStatusToRemoved(ctx, project.ID, docID) + return be.DB.UpdateDocInfoStatusToRemoved(ctx, docKey, docID) } - isAttached, err := be.DB.IsDocumentAttached(ctx, project.ID, docID, "") + isAttached, err := be.DB.IsDocumentAttached(ctx, project.ID, docKey, docID, "") if err != nil { return err } @@ -245,7 +246,7 @@ func RemoveDocument( return ErrDocumentAttached } - return be.DB.UpdateDocInfoStatusToRemoved(ctx, project.ID, docID) + return be.DB.UpdateDocInfoStatusToRemoved(ctx, docKey, docID) } // IsDocumentAttached returns true if the given document is attached to any client. 
@@ -253,8 +254,9 @@ func IsDocumentAttached( ctx context.Context, be *backend.Backend, project *types.Project, + docKey key.Key, docID types.ID, excludeClientID types.ID, ) (bool, error) { - return be.DB.IsDocumentAttached(ctx, project.ID, docID, excludeClientID) + return be.DB.IsDocumentAttached(ctx, project.ID, docKey, docID, excludeClientID) } diff --git a/server/packs/history.go b/server/packs/history.go index 84b0a402e..109ee90a3 100644 --- a/server/packs/history.go +++ b/server/packs/history.go @@ -33,13 +33,18 @@ func FindChanges( to int64, ) ([]*change.Change, error) { if be.Config.SnapshotWithPurgingChanges { - minSyncedSeqInfo, err := be.DB.FindMinSyncedSeqInfo(ctx, docInfo.ID) + minSyncedSeqInfo, err := be.DB.FindMinSyncedSeqInfo( + ctx, + docInfo.Key, + docInfo.ID, + ) if err != nil { return nil, err } snapshotInfo, err := be.DB.FindClosestSnapshotInfo( - ctx, docInfo.ID, + ctx, + docInfo.Key, docInfo.ID, minSyncedSeqInfo.ServerSeq+be.Config.SnapshotInterval, false, ) @@ -54,7 +59,7 @@ func FindChanges( changes, err := be.DB.FindChangesBetweenServerSeqs( ctx, - docInfo.ID, + docInfo.Key, docInfo.ID, from, to, ) diff --git a/server/packs/packs.go b/server/packs/packs.go index 84099c17d..f97d9c718 100644 --- a/server/packs/packs.go +++ b/server/packs/packs.go @@ -84,7 +84,7 @@ func PushPull( be.Metrics.AddPushPullSentOperations(respPack.OperationsLen()) be.Metrics.AddPushPullSnapshotBytes(respPack.SnapshotLen()) - if err := clientInfo.UpdateCheckpoint(docInfo.ID, respPack.Checkpoint); err != nil { + if err := clientInfo.UpdateCheckpoint(docInfo.Key, docInfo.ID, respPack.Checkpoint); err != nil { return nil, err } @@ -92,7 +92,6 @@ func PushPull( if len(pushedChanges) > 0 || reqPack.IsRemoved { if err := be.DB.CreateChangeInfos( ctx, - project.ID, docInfo, initialServerSeq, pushedChanges, @@ -112,7 +111,7 @@ func PushPull( minSyncedTicket, err := be.DB.UpdateAndFindMinSyncedTicket( ctx, clientInfo, - docInfo.ID, + docInfo.Key, docInfo.ID, 
reqPack.Checkpoint.ServerSeq, ) if err != nil { @@ -147,9 +146,10 @@ func PushPull( ctx, publisherID, sync.DocEvent{ - Type: types.DocumentChangedEvent, - Publisher: publisherID, - DocumentID: docInfo.ID, + Type: types.DocumentChangedEvent, + Publisher: publisherID, + DocumentKey: docInfo.Key, + DocumentID: docInfo.ID, }, ) @@ -196,7 +196,12 @@ func BuildDocumentForServerSeq( docInfo *database.DocInfo, serverSeq int64, ) (*document.InternalDocument, error) { - snapshotInfo, err := be.DB.FindClosestSnapshotInfo(ctx, docInfo.ID, serverSeq, true) + snapshotInfo, err := be.DB.FindClosestSnapshotInfo( + ctx, + docInfo.Key, docInfo.ID, + serverSeq, + true, + ) if err != nil { return nil, err } @@ -216,7 +221,7 @@ func BuildDocumentForServerSeq( // certain size (e.g. 100) and read and gradually reflect it into the document. changes, err := be.DB.FindChangesBetweenServerSeqs( ctx, - docInfo.ID, + docInfo.Key, docInfo.ID, snapshotInfo.ServerSeq+1, serverSeq, ) diff --git a/server/packs/pushpull.go b/server/packs/pushpull.go index a8c979482..7788d68be 100644 --- a/server/packs/pushpull.go +++ b/server/packs/pushpull.go @@ -43,7 +43,7 @@ func pushChanges( reqPack *change.Pack, initialServerSeq int64, ) (change.Checkpoint, []*change.Change) { - cp := clientInfo.Checkpoint(docInfo.ID) + cp := clientInfo.Checkpoint(docInfo.Key, docInfo.ID) var pushedChanges []*change.Change for _, cn := range reqPack.Changes { @@ -185,6 +185,7 @@ func pullChangeInfos( ) (change.Checkpoint, []*database.ChangeInfo, error) { pulledChanges, err := be.DB.FindChangeInfosBetweenServerSeqs( ctx, + docInfo.Key, docInfo.ID, reqPack.Checkpoint.ServerSeq+1, initialServerSeq, diff --git a/server/packs/snapshots.go b/server/packs/snapshots.go index ac56be161..77c304ad2 100644 --- a/server/packs/snapshots.go +++ b/server/packs/snapshots.go @@ -34,7 +34,12 @@ func storeSnapshot( minSyncedTicket *time.Ticket, ) error { // 01. 
get the closest snapshot's metadata of this docInfo - snapshotMetadata, err := be.DB.FindClosestSnapshotInfo(ctx, docInfo.ID, docInfo.ServerSeq, false) + snapshotMetadata, err := be.DB.FindClosestSnapshotInfo( + ctx, + docInfo.Key, + docInfo.ID, + docInfo.ServerSeq, + false) if err != nil { return err } @@ -48,6 +53,7 @@ func storeSnapshot( // 02. retrieve the changes between last snapshot and current docInfo changes, err := be.DB.FindChangesBetweenServerSeqs( ctx, + docInfo.Key, docInfo.ID, snapshotMetadata.ServerSeq+1, docInfo.ServerSeq, @@ -59,7 +65,12 @@ func storeSnapshot( // 03. create document instance of the docInfo snapshotInfo := snapshotMetadata if snapshotMetadata.ID != "" { - snapshotInfo, err = be.DB.FindSnapshotInfoByID(ctx, snapshotInfo.ID) + snapshotInfo, err = be.DB.FindSnapshotInfoByID( + ctx, + snapshotInfo.DocKey, + snapshotInfo.DocID, + snapshotInfo.ServerSeq, + ) if err != nil { return err } @@ -88,7 +99,12 @@ func storeSnapshot( } // 04. save the snapshot of the docInfo - if err := be.DB.CreateSnapshotInfo(ctx, docInfo.ID, doc); err != nil { + if err := be.DB.CreateSnapshotInfo( + ctx, + docInfo.Key, + docInfo.ID, + doc, + ); err != nil { return err } @@ -96,6 +112,7 @@ func storeSnapshot( if be.Config.SnapshotWithPurgingChanges { if err := be.DB.PurgeStaleChanges( ctx, + docInfo.Key, docInfo.ID, ); err != nil { logging.From(ctx).Error(err) diff --git a/server/rpc/admin_server.go b/server/rpc/admin_server.go index 7572798d4..0d63920e6 100644 --- a/server/rpc/admin_server.go +++ b/server/rpc/admin_server.go @@ -280,8 +280,8 @@ func (s *adminServer) ListDocuments( ctx, s.backend, project, - types.Paging[types.ID]{ - Offset: types.ID(req.PreviousId), + types.Paging[key.Key]{ + Offset: key.Key(req.PreviousKey), PageSize: int(req.PageSize), IsForward: req.IsForward, }, @@ -365,7 +365,11 @@ func (s *adminServer) RemoveDocumentByAdmin( } }() - if err := documents.RemoveDocument(ctx, s.backend, project, docInfo.ID, req.Force); err != nil { + if err 
:= documents.RemoveDocument( + ctx, s.backend, project, + docInfo.Key, docInfo.ID, + req.Force, + ); err != nil { return nil, err } @@ -375,9 +379,10 @@ func (s *adminServer) RemoveDocumentByAdmin( ctx, publisherID, sync.DocEvent{ - Type: types.DocumentChangedEvent, - Publisher: publisherID, - DocumentID: docInfo.ID, + Type: types.DocumentChangedEvent, + Publisher: publisherID, + DocumentKey: docInfo.Key, + DocumentID: docInfo.ID, }, ) diff --git a/server/rpc/server_test.go b/server/rpc/server_test.go index b52e60f5f..b96b8c742 100644 --- a/server/rpc/server_test.go +++ b/server/rpc/server_test.go @@ -658,8 +658,10 @@ func TestSDKRPCServerBackend(t *testing.T) { ) assert.NoError(t, err) + docKey := helper.TestDocKey(t).String() + packWithNoChanges := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), + DocumentKey: docKey, Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, } @@ -676,8 +678,9 @@ func TestSDKRPCServerBackend(t *testing.T) { watchResp, err := testClient.WatchDocument( context.Background(), &api.WatchDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, + ClientId: activateResp.ClientId, + DocumentKey: docKey, + DocumentId: resPack.DocumentId, }, ) assert.NoError(t, err) diff --git a/server/rpc/yorkie_server.go b/server/rpc/yorkie_server.go index f3fa6bc67..f41789dc1 100644 --- a/server/rpc/yorkie_server.go +++ b/server/rpc/yorkie_server.go @@ -148,7 +148,7 @@ func (s *yorkieServer) AttachDocument( return nil, err } - if err := clientInfo.AttachDocument(docInfo.ID); err != nil { + if err := clientInfo.AttachDocument(docInfo.Key, docInfo.ID); err != nil { return nil, err } @@ -213,23 +213,27 @@ func (s *yorkieServer) DetachDocument( if err != nil { return nil, err } - docInfo, err := documents.FindDocInfo(ctx, s.backend, project, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, s.backend, pack.DocumentKey, docID) if err != nil { return nil, err } - isAttached, err := 
documents.IsDocumentAttached(ctx, s.backend, project, docInfo.ID, clientInfo.ID) + isAttached, err := documents.IsDocumentAttached( + ctx, s.backend, project, + docInfo.Key, docInfo.ID, + clientInfo.ID, + ) if err != nil { return nil, err } if req.RemoveIfNotAttached && !isAttached { pack.IsRemoved = true - if err := clientInfo.RemoveDocument(docInfo.ID); err != nil { + if err := clientInfo.RemoveDocument(docInfo.Key, docInfo.ID); err != nil { return nil, err } } else { - if err := clientInfo.DetachDocument(docInfo.ID); err != nil { + if err := clientInfo.DetachDocument(docInfo.Key, docInfo.ID); err != nil { return nil, err } } @@ -305,12 +309,12 @@ func (s *yorkieServer) PushPullChanges( if err != nil { return nil, err } - docInfo, err := documents.FindDocInfo(ctx, s.backend, project, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, s.backend, pack.DocumentKey, docID) if err != nil { return nil, err } - if err := clientInfo.EnsureDocumentAttached(docInfo.ID); err != nil { + if err := clientInfo.EnsureDocumentAttached(docInfo.Key, docInfo.ID); err != nil { return nil, err } @@ -339,15 +343,17 @@ func (s *yorkieServer) WatchDocument( if err != nil { return err } + + docKey := key.Key(req.DocumentKey) docID, err := converter.FromDocumentID(req.DocumentId) if err != nil { return err } - docInfo, err := documents.FindDocInfo( + docInfo, err := documents.FindDocInfoByKeyAndID( stream.Context(), s.backend, - projects.From(stream.Context()), + docKey, docID, ) if err != nil { @@ -382,13 +388,13 @@ func (s *yorkieServer) WatchDocument( } }() - subscription, clientIDs, err := s.watchDoc(stream.Context(), clientID, docID) + subscription, clientIDs, err := s.watchDoc(stream.Context(), clientID, docKey, docID) if err != nil { logging.From(stream.Context()).Error(err) return err } defer func() { - s.unwatchDoc(subscription, docID) + s.unwatchDoc(subscription, docKey, docID) }() var pbClientIDs []string @@ -482,12 +488,12 @@ func (s *yorkieServer) RemoveDocument( if 
err != nil { return nil, err } - docInfo, err := documents.FindDocInfo(ctx, s.backend, project, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, s.backend, pack.DocumentKey, docID) if err != nil { return nil, err } - if err := clientInfo.RemoveDocument(docInfo.ID); err != nil { + if err := clientInfo.RemoveDocument(docInfo.Key, docInfo.ID); err != nil { return nil, err } @@ -509,9 +515,10 @@ func (s *yorkieServer) RemoveDocument( func (s *yorkieServer) watchDoc( ctx context.Context, clientID *time.ActorID, + documentKey key.Key, documentID types.ID, ) (*sync.Subscription, []*time.ActorID, error) { - subscription, clientIDs, err := s.backend.Coordinator.Subscribe(ctx, clientID, documentID) + subscription, clientIDs, err := s.backend.Coordinator.Subscribe(ctx, clientID, documentKey, documentID) if err != nil { logging.From(ctx).Error(err) return nil, nil, err @@ -521,9 +528,10 @@ func (s *yorkieServer) watchDoc( ctx, subscription.Subscriber(), sync.DocEvent{ - Type: types.DocumentWatchedEvent, - Publisher: subscription.Subscriber(), - DocumentID: documentID, + Type: types.DocumentWatchedEvent, + Publisher: subscription.Subscriber(), + DocumentKey: documentKey, + DocumentID: documentID, }, ) @@ -532,17 +540,19 @@ func (s *yorkieServer) watchDoc( func (s *yorkieServer) unwatchDoc( subscription *sync.Subscription, + documentKey key.Key, documentID types.ID, ) { ctx := context.Background() - _ = s.backend.Coordinator.Unsubscribe(ctx, documentID, subscription) + _ = s.backend.Coordinator.Unsubscribe(ctx, documentKey, documentID, subscription) s.backend.Coordinator.Publish( ctx, subscription.Subscriber(), sync.DocEvent{ - Type: types.DocumentUnwatchedEvent, - Publisher: subscription.Subscriber(), - DocumentID: documentID, + Type: types.DocumentUnwatchedEvent, + Publisher: subscription.Subscriber(), + DocumentKey: documentKey, + DocumentID: documentID, }, ) } @@ -556,15 +566,16 @@ func (s *yorkieServer) Broadcast( return nil, err } + docKey := 
key.Key(req.DocumentKey) docID, err := converter.FromDocumentID(req.DocumentId) if err != nil { return nil, err } - docInfo, err := documents.FindDocInfo( + docInfo, err := documents.FindDocInfoByKeyAndID( ctx, s.backend, - projects.From(ctx), + docKey, docID, ) if err != nil { @@ -588,9 +599,10 @@ func (s *yorkieServer) Broadcast( ctx, clientID, sync.DocEvent{ - Type: types.DocumentBroadcastEvent, - Publisher: clientID, - DocumentID: docID, + Type: types.DocumentBroadcastEvent, + Publisher: clientID, + DocumentKey: docKey, + DocumentID: docID, Body: types.DocEventBody{ Topic: req.Topic, Payload: req.Payload, From 2dcd8d0f8d4efb87e2e10ad7f7f934b7bd82b812 Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Wed, 22 Nov 2023 17:21:42 +0900 Subject: [PATCH 03/11] Support document paging using (_id, key) order --- admin/client.go | 6 +- api/yorkie/v1/admin.pb.go | 199 +++++++++++------- api/yorkie/v1/admin.proto | 3 +- cmd/yorkie/document/list.go | 18 +- server/backend/database/database.go | 30 ++- server/backend/database/memory/database.go | 29 ++- server/backend/database/memory/indexes.go | 9 + server/backend/database/mongo/client.go | 18 +- .../backend/database/testcases/testcases.go | 94 ++++++--- server/documents/documents.go | 2 +- server/rpc/admin_server.go | 8 +- test/integration/document_test.go | 6 +- 12 files changed, 285 insertions(+), 137 deletions(-) diff --git a/admin/client.go b/admin/client.go index f873ac1b8..a75d35372 100644 --- a/admin/client.go +++ b/admin/client.go @@ -34,6 +34,7 @@ import ( api "github.com/yorkie-team/yorkie/api/yorkie/v1" "github.com/yorkie-team/yorkie/pkg/document" "github.com/yorkie-team/yorkie/pkg/document/key" + "github.com/yorkie-team/yorkie/server/backend/database" ) // Option configures Options. 
@@ -248,7 +249,7 @@ func (c *Client) UpdateProject( func (c *Client) ListDocuments( ctx context.Context, projectName string, - previousKey string, + previousOffset database.DocOffset, pageSize int32, isForward bool, includeSnapshot bool, @@ -257,10 +258,11 @@ func (c *Client) ListDocuments( ctx, &api.ListDocumentsRequest{ ProjectName: projectName, - PreviousKey: previousKey, + PreviousId: previousOffset.ID.String(), PageSize: pageSize, IsForward: isForward, IncludeSnapshot: includeSnapshot, + PreviousKey: previousOffset.Key.String(), }, ) if err != nil { diff --git a/api/yorkie/v1/admin.pb.go b/api/yorkie/v1/admin.pb.go index 0626dea1e..0b4c9d67e 100644 --- a/api/yorkie/v1/admin.pb.go +++ b/api/yorkie/v1/admin.pb.go @@ -608,10 +608,11 @@ func (m *UpdateProjectResponse) GetProject() *Project { type ListDocumentsRequest struct { ProjectName string `protobuf:"bytes,1,opt,name=project_name,json=projectName,proto3" json:"project_name,omitempty"` - PreviousKey string `protobuf:"bytes,2,opt,name=previous_key,json=previousKey,proto3" json:"previous_key,omitempty"` + PreviousId string `protobuf:"bytes,2,opt,name=previous_id,json=previousId,proto3" json:"previous_id,omitempty"` PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` IsForward bool `protobuf:"varint,4,opt,name=is_forward,json=isForward,proto3" json:"is_forward,omitempty"` IncludeSnapshot bool `protobuf:"varint,5,opt,name=include_snapshot,json=includeSnapshot,proto3" json:"include_snapshot,omitempty"` + PreviousKey string `protobuf:"bytes,6,opt,name=previous_key,json=previousKey,proto3" json:"previous_key,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -657,9 +658,9 @@ func (m *ListDocumentsRequest) GetProjectName() string { return "" } -func (m *ListDocumentsRequest) GetPreviousKey() string { +func (m *ListDocumentsRequest) GetPreviousId() string { if m != nil { - return m.PreviousKey + 
return m.PreviousId } return "" } @@ -685,6 +686,13 @@ func (m *ListDocumentsRequest) GetIncludeSnapshot() bool { return false } +func (m *ListDocumentsRequest) GetPreviousKey() string { + if m != nil { + return m.PreviousKey + } + return "" +} + type ListDocumentsResponse struct { Documents []*DocumentSummary `protobuf:"bytes,1,rep,name=documents,proto3" json:"documents,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -1328,69 +1336,71 @@ func init() { func init() { proto.RegisterFile("yorkie/v1/admin.proto", fileDescriptor_7ef4cd0843a14163) } var fileDescriptor_7ef4cd0843a14163 = []byte{ - // 992 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0x4f, 0x73, 0xdb, 0x44, - 0x14, 0xaf, 0x12, 0x3b, 0xb1, 0x9f, 0x9d, 0x86, 0x6c, 0xed, 0xc6, 0x15, 0x89, 0xe3, 0x2c, 0xd3, - 0x69, 0xa0, 0x8c, 0x4b, 0xc2, 0xc0, 0xc0, 0xc0, 0x0c, 0x83, 0x03, 0xc9, 0x30, 0x69, 0x3b, 0xad, - 0x4c, 0x2e, 0xb9, 0x78, 0x54, 0xeb, 0xc5, 0x11, 0xb1, 0x25, 0x79, 0x57, 0x76, 0xc7, 0xb9, 0xf1, - 0x2d, 0xf8, 0x32, 0x9c, 0xb8, 0x70, 0xe4, 0x23, 0x30, 0xe1, 0x33, 0x70, 0x67, 0xa4, 0xdd, 0x55, - 0x56, 0xf2, 0x1f, 0x68, 0xc8, 0xcd, 0xfb, 0xde, 0xef, 0xfd, 0xde, 0xbf, 0xdd, 0xf7, 0x2c, 0xa8, - 0x4e, 0x7c, 0x76, 0xe9, 0xe2, 0xb3, 0xf1, 0xfe, 0x33, 0xdb, 0x19, 0xb8, 0x5e, 0x33, 0x60, 0x7e, - 0xe8, 0x93, 0xa2, 0x10, 0x37, 0xc7, 0xfb, 0xe6, 0xa3, 0x1b, 0x04, 0x43, 0xee, 0x8f, 0x58, 0x17, - 0xb9, 0x40, 0xd1, 0x63, 0x58, 0x6b, 0xbb, 0x3d, 0xef, 0x34, 0xb0, 0x70, 0x38, 0x42, 0x1e, 0x12, - 0x13, 0x0a, 0x23, 0x8e, 0xcc, 0xb3, 0x07, 0x58, 0x33, 0x1a, 0xc6, 0x5e, 0xd1, 0x4a, 0xce, 0x91, - 0x2e, 0xb0, 0x39, 0x7f, 0xeb, 0x33, 0xa7, 0xb6, 0x24, 0x74, 0xea, 0x4c, 0x3f, 0x83, 0xfb, 0x8a, - 0x88, 0x07, 0xbe, 0xc7, 0x91, 0x7c, 0x00, 0xb9, 0xc8, 0x32, 0x66, 0x29, 0x1d, 0xac, 0x37, 0x93, - 0x78, 0x9a, 0xa7, 0x1c, 0x99, 0x15, 0x2b, 0xe9, 0x11, 0x94, 0x9f, 0xfb, 0xbd, 0x1f, 0xbc, 0xff, - 0xeb, 0xfe, 0x31, 0xac, 0x49, 0x1e, 0xe9, 0xbd, 0x02, 0xf9, 0xd0, 0xbf, 
0x44, 0x4f, 0xb2, 0x88, - 0x03, 0xfd, 0x08, 0x2a, 0x87, 0x0c, 0xed, 0x10, 0x5f, 0x31, 0xff, 0x27, 0xec, 0x86, 0xca, 0x2d, - 0x81, 0x9c, 0xe6, 0x32, 0xfe, 0x4d, 0xbf, 0x87, 0x6a, 0x06, 0x2b, 0xa9, 0x3f, 0x86, 0xd5, 0x40, - 0x88, 0x64, 0x6e, 0x44, 0xcb, 0x4d, 0x81, 0x15, 0x84, 0x3e, 0x81, 0x8d, 0x63, 0x0c, 0xff, 0x83, - 0xbf, 0x16, 0x10, 0x1d, 0x78, 0x2b, 0x67, 0x55, 0x78, 0xf0, 0xdc, 0xe5, 0x8a, 0x84, 0x4b, 0x77, - 0xf4, 0x08, 0x2a, 0x69, 0xb1, 0x24, 0x6f, 0x42, 0x41, 0x5a, 0xf2, 0x9a, 0xd1, 0x58, 0x9e, 0xc3, - 0x9e, 0x60, 0xa8, 0x0d, 0x95, 0xd3, 0xc0, 0x99, 0x2e, 0xdf, 0x7d, 0x58, 0x72, 0x1d, 0x99, 0xcc, - 0x92, 0xeb, 0x90, 0x2f, 0x61, 0xe5, 0xdc, 0xc5, 0xbe, 0xc3, 0xe3, 0x3e, 0x95, 0x0e, 0x76, 0xf5, - 0xe6, 0x47, 0x04, 0xf6, 0x9b, 0xbe, 0xe2, 0x38, 0x8a, 0x81, 0x96, 0x34, 0x88, 0xaa, 0x9e, 0x71, - 0x71, 0xab, 0x42, 0xfc, 0x66, 0x88, 0x94, 0xbf, 0xf3, 0xbb, 0xa3, 0x01, 0x7a, 0x49, 0x29, 0xc8, - 0x2e, 0x94, 0x25, 0xa6, 0xa3, 0x75, 0xa0, 0x24, 0x65, 0x2f, 0xa3, 0x7b, 0x16, 0x43, 0x70, 0xec, - 0xfa, 0x23, 0xde, 0xb9, 0xc4, 0x89, 0xbc, 0x6b, 0x25, 0x25, 0x3b, 0xc1, 0x09, 0x79, 0x1f, 0x8a, - 0x81, 0xdd, 0xc3, 0x0e, 0x77, 0xaf, 0xb0, 0xb6, 0xdc, 0x30, 0xf6, 0xf2, 0xd1, 0x5d, 0xec, 0x61, - 0xdb, 0xbd, 0x42, 0xb2, 0x0d, 0xe0, 0xf2, 0xce, 0xb9, 0xcf, 0xde, 0xda, 0xcc, 0xa9, 0xe5, 0x1a, - 0xc6, 0x5e, 0xc1, 0x2a, 0xba, 0xfc, 0x48, 0x08, 0xc8, 0x87, 0xf0, 0x9e, 0xeb, 0x75, 0xfb, 0x23, - 0x07, 0x3b, 0xdc, 0xb3, 0x03, 0x7e, 0xe1, 0x87, 0xb5, 0x7c, 0x0c, 0x5a, 0x97, 0xf2, 0xb6, 0x14, - 0xd3, 0xd7, 0x50, 0xcd, 0x24, 0x21, 0x8b, 0xf1, 0x05, 0x14, 0x1d, 0x25, 0x94, 0x9d, 0x33, 0xb5, - 0x72, 0x28, 0x83, 0xf6, 0x68, 0x30, 0xb0, 0xd9, 0xc4, 0xba, 0x01, 0xd3, 0xb3, 0xf8, 0x96, 0x29, - 0xc0, 0xbb, 0x55, 0x45, 0xb1, 0xe8, 0x55, 0x51, 0xb2, 0x13, 0x9c, 0xd0, 0x17, 0xf0, 0x20, 0xc5, - 0x2d, 0x83, 0xfd, 0x1c, 0x0a, 0x0a, 0x25, 0x5b, 0xb7, 0x28, 0xd6, 0x04, 0x4b, 0xaf, 0x60, 0xcb, - 0xc2, 0x81, 0x3f, 0x46, 0x05, 0x69, 0x4d, 0xbe, 0x8d, 0x06, 0xdc, 0x9d, 0x06, 0x1d, 0x0d, 0x8a, - 0x73, 0x9f, 
0x75, 0x45, 0x1b, 0x0b, 0x96, 0x38, 0xd0, 0x1d, 0xd8, 0x9e, 0xe3, 0x5b, 0x24, 0x45, - 0x7f, 0x36, 0xe0, 0xe1, 0x31, 0x86, 0xaa, 0x55, 0x2f, 0x30, 0xb4, 0xef, 0x36, 0xae, 0x5d, 0x00, - 0x8e, 0x6c, 0x8c, 0xac, 0xc3, 0x71, 0x18, 0x07, 0xb7, 0xdc, 0x5a, 0xfa, 0xc4, 0xb0, 0x8a, 0x42, - 0xda, 0xc6, 0x21, 0x6d, 0xc3, 0xe6, 0x54, 0x08, 0xb2, 0xe6, 0x26, 0x14, 0x92, 0xcb, 0x15, 0xf9, - 0x2f, 0x5b, 0xc9, 0x99, 0x6c, 0xc1, 0x6a, 0xdf, 0x1e, 0x04, 0x3e, 0x0b, 0x63, 0xbf, 0x82, 0x56, - 0x89, 0xa8, 0x07, 0x0f, 0xdb, 0x68, 0xb3, 0xee, 0xc5, 0x6d, 0x9e, 0x4e, 0x05, 0xf2, 0xc3, 0x11, - 0x32, 0x95, 0x90, 0x38, 0x2c, 0x7c, 0x2d, 0x34, 0x84, 0xcd, 0x29, 0x7f, 0x32, 0x89, 0x1d, 0x28, - 0x85, 0x7e, 0x68, 0xf7, 0x3b, 0x5d, 0x7f, 0x24, 0xef, 0x4e, 0xde, 0x82, 0x58, 0x74, 0x18, 0x49, - 0xd2, 0xcf, 0x60, 0xe9, 0x5d, 0x9e, 0xc1, 0xaf, 0x06, 0x90, 0xe8, 0x69, 0x1d, 0x5e, 0xd8, 0x5e, - 0x0f, 0xf9, 0xdd, 0xb6, 0xee, 0xb1, 0x36, 0x40, 0xd2, 0xcd, 0x4b, 0x86, 0x48, 0x1b, 0x87, 0xe9, - 0xb2, 0xe4, 0x16, 0x0e, 0x91, 0x7c, 0x66, 0x88, 0xd0, 0x96, 0x18, 0xf4, 0x49, 0xf8, 0xb2, 0x62, - 0x4f, 0x61, 0xb5, 0x2b, 0x44, 0x72, 0x2a, 0x6c, 0x68, 0xe5, 0x10, 0x60, 0x4b, 0x21, 0x0e, 0xfe, - 0x5e, 0x85, 0x72, 0x7c, 0xa9, 0xdb, 0xc8, 0xc6, 0x6e, 0x17, 0xc9, 0x37, 0xb0, 0x22, 0x76, 0x38, - 0xa9, 0x69, 0x66, 0xa9, 0xff, 0x07, 0xe6, 0xa3, 0x19, 0x1a, 0xf9, 0x24, 0xee, 0x91, 0xaf, 0x21, - 0x1f, 0x6f, 0x61, 0xb2, 0xa9, 0xa1, 0xf4, 0xfd, 0x6e, 0xd6, 0xa6, 0x15, 0x89, 0xf5, 0x8f, 0xb0, - 0x96, 0x5a, 0xb8, 0x64, 0x47, 0x0f, 0x7e, 0xc6, 0xda, 0x36, 0x1b, 0xf3, 0x01, 0x09, 0xeb, 0x6b, - 0x28, 0xeb, 0xbb, 0x8f, 0xd4, 0xf5, 0x08, 0xa6, 0x77, 0xa5, 0xb9, 0x33, 0x57, 0x9f, 0x50, 0x9e, - 0x00, 0xdc, 0x6c, 0x6a, 0xb2, 0xa5, 0x19, 0x4c, 0x6d, 0x7a, 0x73, 0x7b, 0x8e, 0x56, 0xcf, 0x3a, - 0xb5, 0xf0, 0x52, 0x59, 0xcf, 0xda, 0xb6, 0xa9, 0xac, 0x67, 0xee, 0x4a, 0xc1, 0x9a, 0xda, 0x1c, - 0x24, 0x9b, 0x56, 0xf6, 0x75, 0xa7, 0x58, 0x67, 0x2e, 0x1d, 0x7a, 0x8f, 0xbc, 0x84, 0x92, 0x36, - 0xe0, 0x49, 0x26, 0xb7, 0xcc, 0x52, 0x31, 0xeb, 
0xf3, 0xd4, 0x09, 0x5f, 0x1f, 0xaa, 0x33, 0xa7, - 0x2c, 0x79, 0xa2, 0x99, 0x2e, 0xda, 0x01, 0xe6, 0xde, 0xbf, 0x03, 0x13, 0x6f, 0x67, 0xb0, 0x9e, - 0x19, 0x97, 0x64, 0x37, 0x1d, 0xe2, 0x8c, 0x69, 0x6e, 0xd2, 0x45, 0x10, 0x9d, 0x3b, 0x33, 0xc5, - 0x52, 0xdc, 0xb3, 0x27, 0x6a, 0x8a, 0x7b, 0xce, 0x10, 0x14, 0x55, 0xd7, 0xde, 0x7a, 0xaa, 0xea, - 0xd3, 0x23, 0xcc, 0xac, 0xcf, 0x53, 0x2b, 0xbe, 0xd6, 0xd3, 0xdf, 0xaf, 0xeb, 0xc6, 0x1f, 0xd7, - 0x75, 0xe3, 0xcf, 0xeb, 0xba, 0xf1, 0xcb, 0x5f, 0xf5, 0x7b, 0xb0, 0xe1, 0xe0, 0x58, 0x99, 0xd9, - 0x81, 0xdb, 0x1c, 0xef, 0xbf, 0x32, 0xce, 0x72, 0xcd, 0xaf, 0xc6, 0xfb, 0x6f, 0x56, 0xe2, 0xef, - 0x84, 0x4f, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0x2b, 0x2d, 0x14, 0x68, 0x66, 0x0c, 0x00, 0x00, + // 1009 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0xdd, 0x6e, 0xe3, 0x44, + 0x14, 0xae, 0xd3, 0xa4, 0x4d, 0x4e, 0xd2, 0x2d, 0x9d, 0x4d, 0xb6, 0x59, 0xd3, 0xe6, 0xc7, 0x68, + 0xb5, 0x85, 0x45, 0x59, 0x5a, 0x04, 0x02, 0x81, 0x84, 0x48, 0xa1, 0xd5, 0x6a, 0x7f, 0xb4, 0xeb, + 0xd0, 0x9b, 0xde, 0x44, 0xde, 0xf8, 0x34, 0x35, 0x4d, 0x6c, 0x67, 0xc6, 0xc9, 0x2a, 0xbd, 0xe3, + 0x2d, 0x78, 0x19, 0xee, 0xb9, 0xe4, 0x11, 0x50, 0xe1, 0x15, 0xb8, 0x47, 0xf6, 0xcc, 0xb8, 0x63, + 0xe7, 0x07, 0xda, 0xed, 0x5d, 0xe6, 0xcc, 0x77, 0xbe, 0xf3, 0x37, 0xe7, 0x1c, 0x07, 0x2a, 0x53, + 0x8f, 0x5e, 0x38, 0xf8, 0x74, 0xb2, 0xff, 0xd4, 0xb2, 0x87, 0x8e, 0xdb, 0xf2, 0xa9, 0x17, 0x78, + 0xa4, 0xc0, 0xc5, 0xad, 0xc9, 0xbe, 0xfe, 0xf0, 0x1a, 0x41, 0x91, 0x79, 0x63, 0xda, 0x43, 0xc6, + 0x51, 0xc6, 0x31, 0x6c, 0x74, 0x9c, 0xbe, 0x7b, 0xe2, 0x9b, 0x38, 0x1a, 0x23, 0x0b, 0x88, 0x0e, + 0xf9, 0x31, 0x43, 0xea, 0x5a, 0x43, 0xac, 0x6a, 0x0d, 0x6d, 0xaf, 0x60, 0xc6, 0xe7, 0xf0, 0xce, + 0xb7, 0x18, 0x7b, 0xe7, 0x51, 0xbb, 0x9a, 0xe1, 0x77, 0xf2, 0x6c, 0x7c, 0x01, 0xf7, 0x24, 0x11, + 0xf3, 0x3d, 0x97, 0x21, 0xf9, 0x08, 0xb2, 0xa1, 0x66, 0xc4, 0x52, 0x3c, 0xd8, 0x6c, 0xc5, 0xfe, + 0xb4, 0x4e, 0x18, 0x52, 0x33, 0xba, 
0x34, 0x8e, 0xa0, 0xf4, 0xc2, 0xeb, 0x3f, 0x73, 0xdf, 0xd7, + 0xfc, 0x23, 0xd8, 0x10, 0x3c, 0xc2, 0x7a, 0x19, 0x72, 0x81, 0x77, 0x81, 0xae, 0x60, 0xe1, 0x07, + 0xe3, 0x13, 0x28, 0x1f, 0x52, 0xb4, 0x02, 0x7c, 0x4d, 0xbd, 0x9f, 0xb1, 0x17, 0x48, 0xb3, 0x04, + 0xb2, 0x8a, 0xc9, 0xe8, 0xb7, 0xf1, 0x23, 0x54, 0x52, 0x58, 0x41, 0xfd, 0x29, 0xac, 0xfb, 0x5c, + 0x24, 0x62, 0x23, 0x4a, 0x6c, 0x12, 0x2c, 0x21, 0xc6, 0x63, 0xd8, 0x3a, 0xc6, 0xe0, 0x7f, 0xd8, + 0x6b, 0x03, 0x51, 0x81, 0xb7, 0x32, 0x56, 0x81, 0xfb, 0x2f, 0x1c, 0x26, 0x49, 0x98, 0x30, 0x67, + 0x1c, 0x41, 0x39, 0x29, 0x16, 0xe4, 0x2d, 0xc8, 0x0b, 0x4d, 0x56, 0xd5, 0x1a, 0xab, 0x0b, 0xd8, + 0x63, 0x8c, 0x61, 0x41, 0xf9, 0xc4, 0xb7, 0x67, 0xd3, 0x77, 0x0f, 0x32, 0x8e, 0x2d, 0x82, 0xc9, + 0x38, 0x36, 0xf9, 0x1a, 0xd6, 0xce, 0x1c, 0x1c, 0xd8, 0x2c, 0xaa, 0x53, 0xf1, 0xa0, 0xa9, 0x16, + 0x3f, 0x24, 0xb0, 0xde, 0x0e, 0x24, 0xc7, 0x51, 0x04, 0x34, 0x85, 0x42, 0x98, 0xf5, 0x94, 0x89, + 0x5b, 0x25, 0xe2, 0x6f, 0x8d, 0x87, 0xfc, 0x83, 0xd7, 0x1b, 0x0f, 0xd1, 0x8d, 0x53, 0x41, 0x9a, + 0x50, 0x12, 0x98, 0xae, 0x52, 0x81, 0xa2, 0x90, 0xbd, 0x0a, 0xdf, 0x59, 0x1d, 0x8a, 0x3e, 0xc5, + 0x89, 0xe3, 0x8d, 0x59, 0xd7, 0x91, 0x4f, 0x0d, 0xa4, 0xe8, 0x99, 0x4d, 0x3e, 0x84, 0x82, 0x6f, + 0xf5, 0xb1, 0xcb, 0x9c, 0x4b, 0xac, 0xae, 0x36, 0xb4, 0xbd, 0x5c, 0xf8, 0x12, 0xfb, 0xd8, 0x71, + 0x2e, 0x91, 0xec, 0x02, 0x38, 0xac, 0x7b, 0xe6, 0xd1, 0x77, 0x16, 0xb5, 0xab, 0xd9, 0x86, 0xb6, + 0x97, 0x37, 0x0b, 0x0e, 0x3b, 0xe2, 0x02, 0xf2, 0x31, 0x7c, 0xe0, 0xb8, 0xbd, 0xc1, 0xd8, 0xc6, + 0x2e, 0x73, 0x2d, 0x9f, 0x9d, 0x7b, 0x41, 0x35, 0x17, 0x81, 0x36, 0x85, 0xbc, 0x23, 0xc4, 0xdc, + 0x55, 0xe1, 0xc7, 0x05, 0x4e, 0xab, 0x6b, 0xd2, 0x55, 0x2e, 0x7b, 0x8e, 0x53, 0xe3, 0x0d, 0x54, + 0x52, 0x51, 0x8a, 0x6c, 0x7d, 0x05, 0x05, 0x5b, 0x0a, 0x45, 0x69, 0x75, 0x25, 0x5f, 0x52, 0xa1, + 0x33, 0x1e, 0x0e, 0x2d, 0x3a, 0x35, 0xaf, 0xc1, 0xc6, 0x69, 0xf4, 0x0c, 0x25, 0xe0, 0x06, 0x69, + 0x6b, 0x42, 0x49, 0xb2, 0x44, 0xee, 0xf2, 0xbc, 0x15, 0xa5, 0x2c, 0x74, 
0xf7, 0x25, 0xdc, 0x4f, + 0x70, 0x0b, 0x67, 0xbf, 0x84, 0xbc, 0x44, 0x89, 0xda, 0x2e, 0xf3, 0x35, 0xc6, 0x1a, 0x97, 0xb0, + 0x63, 0xe2, 0xd0, 0x9b, 0xa0, 0x84, 0xb4, 0xa7, 0xdf, 0x87, 0x13, 0xf0, 0x4e, 0x9d, 0x0e, 0x27, + 0xc9, 0x99, 0x47, 0x7b, 0xbc, 0xd2, 0x79, 0x93, 0x1f, 0x8c, 0x3a, 0xec, 0x2e, 0xb0, 0xcd, 0x83, + 0x32, 0x7e, 0xd1, 0xe0, 0xc1, 0x31, 0x06, 0xb2, 0x9a, 0x2f, 0x31, 0xb0, 0xee, 0xd6, 0xaf, 0x26, + 0x00, 0x43, 0x3a, 0x41, 0xda, 0x65, 0x38, 0x8a, 0x9c, 0x5b, 0x6d, 0x67, 0x3e, 0xd3, 0xcc, 0x02, + 0x97, 0x76, 0x70, 0x64, 0x74, 0x60, 0x7b, 0xc6, 0x05, 0x91, 0x73, 0x1d, 0xf2, 0xf1, 0xfb, 0x0b, + 0xed, 0x97, 0xcc, 0xf8, 0x4c, 0x76, 0x60, 0x7d, 0x60, 0x0d, 0x7d, 0x8f, 0x06, 0x91, 0x5d, 0x4e, + 0x2b, 0x45, 0x86, 0x0b, 0x0f, 0x3a, 0x68, 0xd1, 0xde, 0xf9, 0x6d, 0x7a, 0xab, 0x0c, 0xb9, 0xd1, + 0x18, 0xa9, 0x0c, 0x88, 0x1f, 0x96, 0x36, 0x94, 0x11, 0xc0, 0xf6, 0x8c, 0x3d, 0x11, 0x44, 0x1d, + 0x8a, 0x81, 0x17, 0x58, 0x83, 0x6e, 0xcf, 0x1b, 0x8b, 0xb7, 0x93, 0x33, 0x21, 0x12, 0x1d, 0x86, + 0x92, 0x64, 0x1b, 0x64, 0x6e, 0xd2, 0x06, 0xbf, 0x69, 0x40, 0xc2, 0xd6, 0x3a, 0x3c, 0xb7, 0xdc, + 0x3e, 0xb2, 0xbb, 0x2d, 0xdd, 0x23, 0xa5, 0xb3, 0x93, 0xc5, 0x8b, 0xbb, 0xbb, 0x83, 0xa3, 0x64, + 0x5a, 0xb2, 0x4b, 0xe7, 0x4c, 0x2e, 0x35, 0x67, 0x8c, 0x36, 0xdf, 0x04, 0xb1, 0xfb, 0x22, 0x63, + 0x4f, 0x60, 0xbd, 0xc7, 0x45, 0x62, 0x2a, 0x6c, 0x29, 0xe9, 0xe0, 0x60, 0x53, 0x22, 0x0e, 0xfe, + 0x59, 0x87, 0x52, 0xf4, 0xa8, 0x3b, 0x48, 0x27, 0x4e, 0x0f, 0xc9, 0x77, 0xb0, 0xc6, 0x97, 0x3c, + 0xa9, 0x2a, 0x6a, 0x89, 0x0f, 0x08, 0xfd, 0xe1, 0x9c, 0x1b, 0xd1, 0x12, 0x2b, 0xe4, 0x5b, 0xc8, + 0x45, 0x6b, 0x9a, 0x6c, 0x2b, 0x28, 0xf5, 0x03, 0x40, 0xaf, 0xce, 0x5e, 0xc4, 0xda, 0x3f, 0xc1, + 0x46, 0x62, 0x23, 0x93, 0xba, 0xea, 0xfc, 0x9c, 0xbd, 0xae, 0x37, 0x16, 0x03, 0x62, 0xd6, 0x37, + 0x50, 0x52, 0x97, 0x23, 0xa9, 0xa9, 0x1e, 0xcc, 0x2e, 0x53, 0xbd, 0xbe, 0xf0, 0x3e, 0xa6, 0x7c, + 0x0e, 0x70, 0xbd, 0xca, 0xc9, 0x8e, 0xa2, 0x30, 0xf3, 0x29, 0xa0, 0xef, 0x2e, 0xb8, 0x55, 0xa3, + 0x4e, 0x6c, 
0xc4, 0x44, 0xd4, 0xf3, 0xd6, 0x71, 0x22, 0xea, 0xb9, 0xcb, 0x94, 0xb3, 0x26, 0x36, + 0x07, 0x49, 0x87, 0x95, 0xee, 0xee, 0x04, 0xeb, 0xdc, 0xa5, 0x63, 0xac, 0x90, 0x57, 0x50, 0x54, + 0x06, 0x3c, 0x49, 0xc5, 0x96, 0x5a, 0x2a, 0x7a, 0x6d, 0xd1, 0x75, 0xcc, 0x37, 0x80, 0xca, 0xdc, + 0x29, 0x4b, 0x1e, 0x2b, 0xaa, 0xcb, 0x76, 0x80, 0xbe, 0xf7, 0xdf, 0xc0, 0xd8, 0xda, 0x29, 0x6c, + 0xa6, 0xc6, 0x25, 0x69, 0x26, 0x5d, 0x9c, 0x33, 0xcd, 0x75, 0x63, 0x19, 0x44, 0xe5, 0x4e, 0x4d, + 0xb1, 0x04, 0xf7, 0xfc, 0x89, 0x9a, 0xe0, 0x5e, 0x30, 0x04, 0x79, 0xd6, 0x95, 0x5e, 0x4f, 0x64, + 0x7d, 0x76, 0x84, 0xe9, 0xb5, 0x45, 0xd7, 0x92, 0xaf, 0xfd, 0xe4, 0xf7, 0xab, 0x9a, 0xf6, 0xc7, + 0x55, 0x4d, 0xfb, 0xf3, 0xaa, 0xa6, 0xfd, 0xfa, 0x57, 0x6d, 0x05, 0xb6, 0x6c, 0x9c, 0x48, 0x35, + 0xcb, 0x77, 0x5a, 0x93, 0xfd, 0xd7, 0xda, 0x69, 0xb6, 0xf5, 0xcd, 0x64, 0xff, 0xed, 0x5a, 0xf4, + 0x47, 0xe2, 0xf3, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x65, 0x95, 0xd7, 0x2b, 0x87, 0x0c, 0x00, + 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -2347,6 +2357,13 @@ func (m *ListDocumentsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.PreviousKey) > 0 { + i -= len(m.PreviousKey) + copy(dAtA[i:], m.PreviousKey) + i = encodeVarintAdmin(dAtA, i, uint64(len(m.PreviousKey))) + i-- + dAtA[i] = 0x32 + } if m.IncludeSnapshot { i-- if m.IncludeSnapshot { @@ -2372,10 +2389,10 @@ func (m *ListDocumentsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x18 } - if len(m.PreviousKey) > 0 { - i -= len(m.PreviousKey) - copy(dAtA[i:], m.PreviousKey) - i = encodeVarintAdmin(dAtA, i, uint64(len(m.PreviousKey))) + if len(m.PreviousId) > 0 { + i -= len(m.PreviousId) + copy(dAtA[i:], m.PreviousId) + i = encodeVarintAdmin(dAtA, i, uint64(len(m.PreviousId))) i-- dAtA[i] = 0x12 } @@ -3090,7 +3107,7 @@ func (m *ListDocumentsRequest) Size() (n int) { if l > 0 { n += 1 + l + sovAdmin(uint64(l)) } - l = len(m.PreviousKey) + l = len(m.PreviousId) if l > 0 { n += 1 + l + sovAdmin(uint64(l)) } @@ -3103,6 +3120,10 @@ func (m *ListDocumentsRequest) Size() (n int) { if m.IncludeSnapshot { n += 2 } + l = len(m.PreviousKey) + if l > 0 { + n += 1 + l + sovAdmin(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -4482,7 +4503,7 @@ func (m *ListDocumentsRequest) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PreviousKey", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field PreviousId", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -4510,7 +4531,7 @@ func (m *ListDocumentsRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.PreviousKey = string(dAtA[iNdEx:postIndex]) + m.PreviousId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 0 { @@ -4571,6 +4592,38 @@ func (m *ListDocumentsRequest) Unmarshal(dAtA []byte) error { } 
} m.IncludeSnapshot = bool(v != 0) + case 6: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field PreviousKey", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAdmin + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthAdmin + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthAdmin + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.PreviousKey = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipAdmin(dAtA[iNdEx:]) diff --git a/api/yorkie/v1/admin.proto b/api/yorkie/v1/admin.proto index f5c64a75c..da8c9cfbf 100644 --- a/api/yorkie/v1/admin.proto +++ b/api/yorkie/v1/admin.proto @@ -94,10 +94,11 @@ message UpdateProjectResponse { message ListDocumentsRequest { string project_name = 1; - string previous_key = 2; + string previous_id = 2; int32 page_size = 3; bool is_forward = 4; bool include_snapshot = 5; + string previous_key = 6; } message ListDocumentsResponse { diff --git a/cmd/yorkie/document/list.go b/cmd/yorkie/document/list.go index 9cb73c479..e1e10a8a8 100644 --- a/cmd/yorkie/document/list.go +++ b/cmd/yorkie/document/list.go @@ -28,12 +28,13 @@ import ( "github.com/yorkie-team/yorkie/admin" "github.com/yorkie-team/yorkie/cmd/yorkie/config" "github.com/yorkie-team/yorkie/pkg/units" + "github.com/yorkie-team/yorkie/server/backend/database" ) var ( - previousKey string - pageSize int32 - isForward bool + previousOffset database.DocOffset + pageSize int32 + isForward bool ) func newListCommand() *cobra.Command { @@ -63,7 +64,7 @@ func newListCommand() *cobra.Command { }() ctx := context.Background() - documents, err := cli.ListDocuments(ctx, projectName, previousKey, pageSize, isForward, true) + 
documents, err := cli.ListDocuments(ctx, projectName, previousOffset, pageSize, isForward, true) if err != nil { return err } @@ -100,11 +101,10 @@ func newListCommand() *cobra.Command { func init() { cmd := newListCommand() - cmd.Flags().StringVar( - &previousKey, - "previous-key", - "", - "The previous document key to start from", + cmd.Flags().Var( + &previousOffset, + "previous-offset", + "The previous document offset to start from", ) cmd.Flags().Int32Var( &pageSize, diff --git a/server/backend/database/database.go b/server/backend/database/database.go index 99c9a1f20..820650c41 100644 --- a/server/backend/database/database.go +++ b/server/backend/database/database.go @@ -20,6 +20,8 @@ package database import ( "context" "errors" + "fmt" + "strings" "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/pkg/document" @@ -264,7 +266,7 @@ type Database interface { FindDocInfosByPaging( ctx context.Context, projectID types.ID, - paging types.Paging[key.Key], + paging types.Paging[DocOffset], ) ([]*DocInfo, error) // FindDocInfosByQuery returns the documentInfos which match the given query. 
@@ -284,3 +286,29 @@ type Database interface { excludeClientID types.ID, ) (bool, error) } + +type DocOffset struct { + Key key.Key + ID types.ID +} + +// String is used both by fmt.Print and by Cobra in help text +func (o *DocOffset) String() string { + return fmt.Sprintf("%s.%s", o.Key, o.ID) +} + +// Set must have pointer receiver so it doesn't change the value of a copy +func (o *DocOffset) Set(v string) error { + parsed := strings.Split(v, ",") + if len(parsed) != 2 { + return errors.New("use the format 'docKey,docID' for the input") + } + o.Key = key.Key(parsed[0]) + o.ID = types.ID(parsed[1]) + return nil +} + +// Type is only used in help text +func (o *DocOffset) Type() string { + return "DocumentOffset" +} diff --git a/server/backend/database/memory/database.go b/server/backend/database/memory/database.go index 8c47f5709..225cac1e7 100644 --- a/server/backend/database/memory/database.go +++ b/server/backend/database/memory/database.go @@ -20,7 +20,6 @@ package memory import ( "context" "fmt" - "strings" gotime "time" "github.com/hashicorp/go-memdb" @@ -1228,7 +1227,7 @@ func (d *DB) UpdateSyncedSeq( func (d *DB) FindDocInfosByPaging( _ context.Context, projectID types.ID, - paging types.Paging[key.Key], + paging types.Paging[database.DocOffset], ) ([]*database.DocInfo, error) { txn := d.db.Txn(false) defer txn.Abort() @@ -1238,21 +1237,20 @@ func (d *DB) FindDocInfosByPaging( if paging.IsForward { iterator, err = txn.LowerBound( tblDocuments, - "project_id_key", + "project_id_id", projectID.String(), - paging.Offset.String(), + paging.Offset.ID.String(), ) } else { - offset := paging.Offset - if paging.Offset == "" { - offset = key.Key(strings.Repeat(string(rune(127)), 120)) + if paging.Offset.ID == "" { + paging.Offset.ID = types.IDFromActorID(time.MaxActorID) } iterator, err = txn.ReverseLowerBound( tblDocuments, - "project_id_key", + "project_id_id", projectID.String(), - offset.String(), + paging.Offset.ID.String(), ) } if err != nil { @@ -1266,8 
+1264,17 @@ func (d *DB) FindDocInfosByPaging( break } - if info.Key != paging.Offset && info.RemovedAt.IsZero() { - docInfos = append(docInfos, info) + if info.RemovedAt.IsZero() { + include := false + if info.ID != paging.Offset.ID { + include = true + } else if (paging.IsForward && info.Key > paging.Offset.Key) || (!paging.IsForward && info.Key < paging.Offset.Key) { + include = true + } + + if include { + docInfos = append(docInfos, info) + } } } diff --git a/server/backend/database/memory/indexes.go b/server/backend/database/memory/indexes.go index 681c27069..72ed0ae84 100644 --- a/server/backend/database/memory/indexes.go +++ b/server/backend/database/memory/indexes.go @@ -136,6 +136,15 @@ var schema = &memdb.DBSchema{ }, }, }, + "project_id_id": { + Name: "project_id_id", + Indexer: &memdb.CompoundIndex{ + Indexes: []memdb.Indexer{ + &memdb.StringFieldIndex{Field: "ProjectID"}, + &memdb.StringFieldIndex{Field: "ID"}, + }, + }, + }, "project_id_key_removed_at": { Name: "project_id_key_removed_at", Indexer: &memdb.CompoundIndex{ diff --git a/server/backend/database/mongo/client.go b/server/backend/database/mongo/client.go index 1a3cfc9ef..03666ec63 100644 --- a/server/backend/database/mongo/client.go +++ b/server/backend/database/mongo/client.go @@ -1219,7 +1219,7 @@ func (c *Client) UpdateAndFindMinSyncedTicket( func (c *Client) FindDocInfosByPaging( ctx context.Context, projectID types.ID, - paging types.Paging[key.Key], + paging types.Paging[database.DocOffset], ) ([]*database.DocInfo, error) { encodedProjectID, err := encodeID(projectID) if err != nil { @@ -1234,21 +1234,27 @@ func (c *Client) FindDocInfosByPaging( "$exists": false, }, } - if paging.Offset != "" { + if paging.Offset.Key != "" && paging.Offset.ID != "" { + encodedDocID, err := encodeID(paging.Offset.ID) + if err != nil { + return nil, err + } + k := "$lt" if paging.IsForward { k = "$gt" } - filter["key"] = bson.M{ - k: paging.Offset, + filter["$or"] = []bson.M{ + {"_id": bson.M{k: 
encodedDocID}}, + {"_id": encodedDocID, "key": bson.M{k: paging.Offset.Key}}, } } opts := options.Find().SetLimit(int64(paging.PageSize)) if paging.IsForward { - opts = opts.SetSort(map[string]int{"key": 1}) + opts = opts.SetSort(bson.D{{Key: "_id", Value: 1}, {Key: "key", Value: 1}}) } else { - opts = opts.SetSort(map[string]int{"key": -1}) + opts = opts.SetSort(bson.D{{Key: "_id", Value: -1}, {Key: "key", Value: -1}}) } cursor, err := c.collection(colDocuments).Find(ctx, filter, opts) diff --git a/server/backend/database/testcases/testcases.go b/server/backend/database/testcases/testcases.go index 1a6ab770b..69e13427e 100644 --- a/server/backend/database/testcases/testcases.go +++ b/server/backend/database/testcases/testcases.go @@ -444,29 +444,38 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t } // initial page, offset is empty - infos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[key.Key]{PageSize: pageSize}) + infos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{PageSize: pageSize}) assert.NoError(t, err) assertKeys([]key.Key{"8", "7", "6", "5", "4"}, infos) // backward - infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[key.Key]{ - Offset: infos[len(infos)-1].Key, + infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ + Offset: database.DocOffset{ + Key: infos[len(infos)-1].Key, + ID: infos[len(infos)-1].ID, + }, PageSize: pageSize, }) assert.NoError(t, err) assertKeys([]key.Key{"3", "2", "1", "0"}, infos) // backward again - emptyInfos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[key.Key]{ - Offset: infos[len(infos)-1].Key, + emptyInfos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ + Offset: database.DocOffset{ + Key: infos[len(infos)-1].Key, + ID: infos[len(infos)-1].ID, + }, PageSize: pageSize, }) assert.NoError(t, err) assertKeys(nil, emptyInfos) // forward - infos, err = 
db.FindDocInfosByPaging(ctx, projectID, types.Paging[key.Key]{ - Offset: infos[0].Key, + infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ + Offset: database.DocOffset{ + Key: infos[0].Key, + ID: infos[0].ID, + }, PageSize: pageSize, IsForward: true, }) @@ -474,8 +483,11 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t assertKeys([]key.Key{"4", "5", "6", "7", "8"}, infos) // forward again - emptyInfos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[key.Key]{ - Offset: infos[len(infos)-1].Key, + emptyInfos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ + Offset: database.DocOffset{ + Key: infos[len(infos)-1].Key, + ID: infos[len(infos)-1].ID, + }, PageSize: pageSize, IsForward: true, }) @@ -502,63 +514,87 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t cases := []struct { name string - offset key.Key + offset database.DocOffset pageSize int isForward bool testResult []int }{ { - name: "FindDocInfosByPaging no flag test", - offset: "", + name: "FindDocInfosByPaging no flag test", + offset: database.DocOffset{ + Key: "", + ID: "", + }, pageSize: 0, isForward: false, testResult: helper.NewRangeSlice(testDocCnt, 0), }, { - name: "FindDocInfosByPaging --forward test", - offset: "", + name: "FindDocInfosByPaging --forward test", + offset: database.DocOffset{ + Key: "", + ID: "", + }, pageSize: 0, isForward: true, testResult: helper.NewRangeSlice(0, testDocCnt), }, { - name: "FindDocInfosByPaging --size test", - offset: "", + name: "FindDocInfosByPaging --size test", + offset: database.DocOffset{ + Key: "", + ID: "", + }, pageSize: 4, isForward: false, testResult: helper.NewRangeSlice(testDocCnt, testDocCnt-4), }, { - name: "FindDocInfosByPaging --size --forward test", - offset: "", + name: "FindDocInfosByPaging --size --forward test", + offset: database.DocOffset{ + Key: "", + ID: "", + }, pageSize: 4, isForward: true, 
testResult: helper.NewRangeSlice(0, 3), }, { - name: "FindDocInfosByPaging --offset test", - offset: dummyDocInfos[13].Key, + name: "FindDocInfosByPaging --offset test", + offset: database.DocOffset{ + Key: dummyDocInfos[13].Key, + ID: dummyDocInfos[13].ID, + }, pageSize: 0, isForward: false, testResult: helper.NewRangeSlice(12, 0), }, { - name: "FindDocInfosByPaging --forward --offset test", - offset: dummyDocInfos[13].Key, + name: "FindDocInfosByPaging --forward --offset test", + offset: database.DocOffset{ + Key: dummyDocInfos[13].Key, + ID: dummyDocInfos[13].ID, + }, pageSize: 0, isForward: true, testResult: helper.NewRangeSlice(14, testDocCnt), }, { - name: "FindDocInfosByPaging --size --offset test", - offset: dummyDocInfos[13].Key, + name: "FindDocInfosByPaging --size --offset test", + offset: database.DocOffset{ + Key: dummyDocInfos[13].Key, + ID: dummyDocInfos[13].ID, + }, pageSize: 10, isForward: false, testResult: helper.NewRangeSlice(12, 3), }, { - name: "FindDocInfosByPaging --size --forward --offset test", - offset: dummyDocInfos[13].Key, + name: "FindDocInfosByPaging --size --forward --offset test", + offset: database.DocOffset{ + Key: dummyDocInfos[13].Key, + ID: dummyDocInfos[13].ID, + }, pageSize: 10, isForward: true, testResult: helper.NewRangeSlice(14, 23), @@ -568,7 +604,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t for _, c := range cases { t.Run(c.name, func(t *testing.T) { ctx := context.Background() - testPaging := types.Paging[key.Key]{ + testPaging := types.Paging[database.DocOffset]{ Offset: c.offset, PageSize: c.pageSize, IsForward: c.isForward, @@ -604,7 +640,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t } // 02. List the documents. 
- result, err := db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[key.Key]{ + result, err := db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[database.DocOffset]{ PageSize: 10, IsForward: false, }) @@ -616,7 +652,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t assert.NoError(t, err) // 04. List the documents again and check the filtered result. - result, err = db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[key.Key]{ + result, err = db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[database.DocOffset]{ PageSize: 10, IsForward: false, }) diff --git a/server/documents/documents.go b/server/documents/documents.go index 0711cd3e9..b3d4fbc97 100644 --- a/server/documents/documents.go +++ b/server/documents/documents.go @@ -48,7 +48,7 @@ func ListDocumentSummaries( ctx context.Context, be *backend.Backend, project *types.Project, - paging types.Paging[key.Key], + paging types.Paging[database.DocOffset], includeSnapshot bool, ) ([]*types.DocumentSummary, error) { if paging.PageSize > pageSizeLimit { diff --git a/server/rpc/admin_server.go b/server/rpc/admin_server.go index 0d63920e6..84126f0a6 100644 --- a/server/rpc/admin_server.go +++ b/server/rpc/admin_server.go @@ -26,6 +26,7 @@ import ( "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" "github.com/yorkie-team/yorkie/server/backend" + "github.com/yorkie-team/yorkie/server/backend/database" "github.com/yorkie-team/yorkie/server/backend/sync" "github.com/yorkie-team/yorkie/server/documents" "github.com/yorkie-team/yorkie/server/logging" @@ -280,8 +281,11 @@ func (s *adminServer) ListDocuments( ctx, s.backend, project, - types.Paging[key.Key]{ - Offset: key.Key(req.PreviousKey), + types.Paging[database.DocOffset]{ + Offset: database.DocOffset{ + Key: key.Key(req.PreviousKey), + ID: types.ID(req.PreviousId), + }, PageSize: int(req.PageSize), IsForward: req.IsForward, }, diff --git 
a/test/integration/document_test.go b/test/integration/document_test.go index 84c49d66d..8ea7cfed2 100644 --- a/test/integration/document_test.go +++ b/test/integration/document_test.go @@ -34,6 +34,7 @@ import ( "github.com/yorkie-team/yorkie/pkg/document/innerpresence" "github.com/yorkie-team/yorkie/pkg/document/json" "github.com/yorkie-team/yorkie/pkg/document/presence" + "github.com/yorkie-team/yorkie/server/backend/database" "github.com/yorkie-team/yorkie/test/helper" ) @@ -806,11 +807,12 @@ func TestDocumentWithProjects(t *testing.T) { assert.NoError(t, cli.Sync(ctx)) - docs, err := adminCli.ListDocuments(ctx, "default", "000000000000000000000000", 0, true, false) + offset := database.DocOffset{Key: "", ID: ""} + docs, err := adminCli.ListDocuments(ctx, "default", offset, 0, true, false) assert.NoError(t, err) assert.Equal(t, "", docs[0].Snapshot) - docs, err = adminCli.ListDocuments(ctx, "default", "000000000000000000000000", 0, true, true) + docs, err = adminCli.ListDocuments(ctx, "default", offset, 0, true, true) assert.NoError(t, err) assert.NotEqual(t, 0, len(docs[0].Snapshot)) }) From 5c42e17c94d5d64af51586ae2303195e36c386aa Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Sat, 25 Nov 2023 21:13:37 +0900 Subject: [PATCH 04/11] Add testcases for sharded DB cluster --- .github/workflows/ci.yml | 25 + build/docker/sharding/prod/docker-compose.yml | 201 ++++ .../sharding/prod/scripts/init-config1.js | 12 + .../sharding/prod/scripts/init-mongos1.js | 12 + .../sharding/prod/scripts/init-shard1-1.js | 10 + .../sharding/prod/scripts/init-shard2-1.js | 10 + .../sharding/prod/scripts/init-shard3-1.js | 10 + build/docker/sharding/test/docker-compose.yml | 78 ++ .../sharding/test/scripts/init-config1.js | 9 + .../sharding/test/scripts/init-mongos1.js | 32 + .../sharding/test/scripts/init-shard1-1.js | 8 + .../sharding/test/scripts/init-shard2-1.js | 8 + server/backend/database/database.go | 1 + server/backend/database/mongo/client.go | 164 ++- 
server/backend/database/mongo/encoder.go | 3 +- server/backend/database/mongo/indexes.go | 35 +- server/backend/database/mongo/registry.go | 3 +- .../backend/database/mongo/registry_test.go | 2 +- .../backend/database/testcases/testcases.go | 134 ++- server/documents/documents.go | 2 +- server/rpc/server_test.go | 860 +------------- server/rpc/testcases/testcases.go | 1002 +++++++++++++++++ test/helper/helper.go | 133 +++ test/shard/mongo_client_test.go | 222 ++++ test/shard/server_test.go | 213 ++++ 25 files changed, 2220 insertions(+), 969 deletions(-) create mode 100644 build/docker/sharding/prod/docker-compose.yml create mode 100644 build/docker/sharding/prod/scripts/init-config1.js create mode 100644 build/docker/sharding/prod/scripts/init-mongos1.js create mode 100644 build/docker/sharding/prod/scripts/init-shard1-1.js create mode 100644 build/docker/sharding/prod/scripts/init-shard2-1.js create mode 100644 build/docker/sharding/prod/scripts/init-shard3-1.js create mode 100644 build/docker/sharding/test/docker-compose.yml create mode 100644 build/docker/sharding/test/scripts/init-config1.js create mode 100644 build/docker/sharding/test/scripts/init-mongos1.js create mode 100644 build/docker/sharding/test/scripts/init-shard1-1.js create mode 100644 build/docker/sharding/test/scripts/init-shard2-1.js create mode 100644 server/rpc/testcases/testcases.go create mode 100644 test/shard/mongo_client_test.go create mode 100644 test/shard/server_test.go diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c00c8a632..f5ef29432 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -90,3 +90,28 @@ jobs: fail-on-alert: false github-token: ${{ secrets.GITHUB_TOKEN }} comment-always: true + + shard_test: + name: shard_test + runs-on: ubuntu-latest + steps: + + - name: Set up Go ${{ env.GO_VERSION }} + uses: actions/setup-go@v3 + with: + go-version: ${{ env.GO_VERSION }} + + - name: Check out code + uses: actions/checkout@v4 + + - name: 
Get tools dependencies + run: make tools + + - name: Stack + run: docker-compose -f build/docker/sharding/test/docker-compose.yml up --build -d + + - name: Wait for 30 seconds until the DB cluster is ready + run: sleep 30s + + - name: Run the tests with shard tag + run: go test -tags shard -race -v ./... diff --git a/build/docker/sharding/prod/docker-compose.yml b/build/docker/sharding/prod/docker-compose.yml new file mode 100644 index 000000000..163e06144 --- /dev/null +++ b/build/docker/sharding/prod/docker-compose.yml @@ -0,0 +1,201 @@ +version: '3' +services: + + # Config Server + config1: + image: mongo:7.0.1 + container_name: mongo-config1 + command: + - /bin/sh + - -c + - | # run the init script for the primary replica after mongod has been started. + sh -c "sleep 5 && mongosh < /scripts/init-config1.js" & + mongod --port 27017 --configsvr --replSet config-rs --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27100:27017 + restart: always + networks: + - sharding + config2: + image: mongo:7.0.1 + container_name: mongo-config2 + command: mongod --port 27017 --configsvr --replSet config-rs --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27101:27017 + restart: always + networks: + - sharding + config3: + image: mongo:7.0.1 + container_name: mongo-config3 + command: mongod --port 27017 --configsvr --replSet config-rs --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27102:27017 + restart: always + networks: + - sharding + + # Shards + # Shards 1 + shard1-1: + image: mongo:7.0.1 + container_name: mongo-shard1-1 + command: + - /bin/sh + - -c + - | # run the init script for the primary replica after mongod has been started. 
+ sh -c "sleep 5 && mongosh < /scripts/init-shard1-1.js" & + mongod --port 27017 --shardsvr --replSet shard-rs-1 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27110:27017 + restart: always + networks: + - sharding + + shard1-2: + image: mongo:7.0.1 + container_name: mongo-shard1-2 + command: mongod --port 27017 --shardsvr --replSet shard-rs-1 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27111:27017 + restart: always + networks: + - sharding + + shard1-3: + image: mongo:7.0.1 + container_name: mongo-shard1-3 + command: mongod --port 27017 --shardsvr --replSet shard-rs-1 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27112:27017 + restart: always + networks: + - sharding + + # Shards 2 + shard2-1: + image: mongo:7.0.1 + container_name: mongo-shard2-1 + command: + - /bin/sh + - -c + - | # run the init script for the primary replica after mongod has been started. + sh -c "sleep 5 && mongosh < /scripts/init-shard2-1.js" & + mongod --port 27017 --shardsvr --replSet shard-rs-2 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27113:27017 + restart: always + networks: + - sharding + + shard2-2: + image: mongo:7.0.1 + container_name: mongo-shard2-2 + command: mongod --port 27017 --shardsvr --replSet shard-rs-2 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27114:27017 + restart: always + networks: + - sharding + + shard2-3: + image: mongo:7.0.1 + container_name: mongo-shard2-3 + command: mongod --port 27017 --shardsvr --replSet shard-rs-2 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27115:27017 + restart: always + networks: + - sharding + + # Shards 3 + shard3-1: + image: mongo:7.0.1 + container_name: mongo-shard3-1 + command: + - /bin/sh + - -c + - | # run the init script for the primary replica after mongod has been started. 
+ sh -c "sleep 5 && mongosh < /scripts/init-shard3-1.js" & + mongod --port 27017 --shardsvr --replSet shard-rs-3 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27116:27017 + restart: always + networks: + - sharding + + shard3-2: + image: mongo:7.0.1 + container_name: mongo-shard3-2 + command: mongod --port 27017 --shardsvr --replSet shard-rs-3 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27117:27017 + restart: always + networks: + - sharding + + shard3-3: + image: mongo:7.0.1 + container_name: mongo-shard3-3 + command: mongod --port 27017 --shardsvr --replSet shard-rs-3 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27118:27017 + restart: always + networks: + - sharding + + # Mongos + mongos1: + image: mongo:7.0.1 + container_name: mongos1 + command: + - /bin/sh + - -c + - | # run the init script for the primary replica after the config servers and the shards has been configured + sh -c "sleep 40 && mongosh < /scripts/init-mongos1.js" & + mongos --port 27017 --configdb config-rs/config1:27017,config2:27017,config3:27017 --bind_ip_all + ports: + - 27017:27017 + restart: always + volumes: + - ./scripts:/scripts + networks: + - sharding + mongos2: + image: mongo:7.0.1 + container_name: mongos2 + command: mongos --port 27017 --configdb config-rs/config1:27017,config2:27017,config3:27017 --bind_ip_all + ports: + - 27018:27017 + restart: always + volumes: + - ./scripts:/scripts + networks: + - sharding + +networks: + sharding: diff --git a/build/docker/sharding/prod/scripts/init-config1.js b/build/docker/sharding/prod/scripts/init-config1.js new file mode 100644 index 000000000..b87cd4f53 --- /dev/null +++ b/build/docker/sharding/prod/scripts/init-config1.js @@ -0,0 +1,12 @@ +rs.initiate( + { + _id: "config-rs", + configsvr: true, + members: [ + { _id : 0, host : "config1:27017" }, + { _id : 1, host : "config2:27017" }, + { _id : 2, host : "config3:27017" }, + ] + } + ) + \ No newline at end of file diff --git 
a/build/docker/sharding/prod/scripts/init-mongos1.js b/build/docker/sharding/prod/scripts/init-mongos1.js new file mode 100644 index 000000000..c7fc7cb7d --- /dev/null +++ b/build/docker/sharding/prod/scripts/init-mongos1.js @@ -0,0 +1,12 @@ +sh.addShard("shard-rs-1/shard1-1:27017,shard1-2:27017,shard1-3:27017") +sh.addShard("shard-rs-2/shard2-1:27017,shard2-2:27017,shard2-3:27017") +sh.addShard("shard-rs-3/shard3-1:27017,shard3-2:27017,shard3-3:27017") + +sh.enableSharding("yorkie-meta") +sh.shardCollection("yorkie-meta.projects", { owner: 1, name: 1 }, true) +sh.shardCollection("yorkie-meta.users", { username: 1 }, true) +sh.shardCollection("yorkie-meta.clients", { project_id: 1, key: 1 }, true) +sh.shardCollection("yorkie-meta.documents", { project_id: 1, key: 1 }, true) +sh.shardCollection("yorkie-meta.changes", { doc_id: 1, server_seq: 1 }, true) +sh.shardCollection("yorkie-meta.snapshots", { doc_id: 1, server_seq: 1 }, true) +sh.shardCollection("yorkie-meta.syncedseqs", { doc_id: 1, client_id: 1 }, true) diff --git a/build/docker/sharding/prod/scripts/init-shard1-1.js b/build/docker/sharding/prod/scripts/init-shard1-1.js new file mode 100644 index 000000000..e63dc0670 --- /dev/null +++ b/build/docker/sharding/prod/scripts/init-shard1-1.js @@ -0,0 +1,10 @@ +rs.initiate( + { + _id : "shard-rs-1", + members: [ + { _id : 0, host : "shard1-1:27017" }, + { _id : 1, host : "shard1-2:27017" }, + { _id : 2, host : "shard1-3:27017" } + ] + } + ) diff --git a/build/docker/sharding/prod/scripts/init-shard2-1.js b/build/docker/sharding/prod/scripts/init-shard2-1.js new file mode 100644 index 000000000..ebc893ed6 --- /dev/null +++ b/build/docker/sharding/prod/scripts/init-shard2-1.js @@ -0,0 +1,10 @@ +rs.initiate( + { + _id : "shard-rs-2", + members: [ + { _id : 0, host : "shard2-1:27017" }, + { _id : 1, host : "shard2-2:27017" }, + { _id : 2, host : "shard2-3:27017" } + ] + } + ) diff --git a/build/docker/sharding/prod/scripts/init-shard3-1.js 
b/build/docker/sharding/prod/scripts/init-shard3-1.js new file mode 100644 index 000000000..7d75c5790 --- /dev/null +++ b/build/docker/sharding/prod/scripts/init-shard3-1.js @@ -0,0 +1,10 @@ +rs.initiate( + { + _id : "shard-rs-3", + members: [ + { _id : 0, host : "shard3-1:27017" }, + { _id : 1, host : "shard3-2:27017" }, + { _id : 2, host : "shard3-3:27017" } + ] + } + ) diff --git a/build/docker/sharding/test/docker-compose.yml b/build/docker/sharding/test/docker-compose.yml new file mode 100644 index 000000000..55edbca14 --- /dev/null +++ b/build/docker/sharding/test/docker-compose.yml @@ -0,0 +1,78 @@ +version: '3' +services: + + # Config Server + config1: + image: mongo:7.0.1 + container_name: mongo-config1 + command: + - /bin/sh + - -c + - | # run the init script for the primary replica after mongod has been started. + sh -c "sleep 5 && mongosh < /scripts/init-config1.js" & + mongod --port 27017 --configsvr --replSet config-rs --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27100:27017 + restart: always + networks: + - sharding + + # Shards + # Shards 1 + shard1-1: + image: mongo:7.0.1 + container_name: mongo-shard1-1 + command: + - /bin/sh + - -c + - | # run the init script for the primary replica after mongod has been started. + sh -c "sleep 5 && mongosh < /scripts/init-shard1-1.js" & + mongod --port 27017 --shardsvr --replSet shard-rs-1 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27110:27017 + restart: always + networks: + - sharding + + # Shards 2 + shard2-1: + image: mongo:7.0.1 + container_name: mongo-shard2-1 + command: + - /bin/sh + - -c + - | # run the init script for the primary replica after mongod has been started. 
+ sh -c "sleep 5 && mongosh < /scripts/init-shard2-1.js" & + mongod --port 27017 --shardsvr --replSet shard-rs-2 --bind_ip_all + volumes: + - ./scripts:/scripts + ports: + - 27113:27017 + restart: always + networks: + - sharding + + # Mongos + mongos1: + image: mongo:7.0.1 + container_name: mongos1 + command: + - /bin/sh + - -c + - | # run the init script for the primary replica after the config servers and the shards has been configured + sh -c "sleep 20 && mongosh < /scripts/init-mongos1.js" & + mongos --port 27017 --configdb config-rs/config1:27017 --bind_ip_all + ports: + - 27017:27017 + restart: always + volumes: + - ./scripts:/scripts + networks: + - sharding + +networks: + sharding: diff --git a/build/docker/sharding/test/scripts/init-config1.js b/build/docker/sharding/test/scripts/init-config1.js new file mode 100644 index 000000000..544cf2072 --- /dev/null +++ b/build/docker/sharding/test/scripts/init-config1.js @@ -0,0 +1,9 @@ +rs.initiate( + { + _id: "config-rs", + configsvr: true, + members: [ + { _id : 0, host : "config1:27017" }, + ] + } + ) diff --git a/build/docker/sharding/test/scripts/init-mongos1.js b/build/docker/sharding/test/scripts/init-mongos1.js new file mode 100644 index 000000000..01f4fe750 --- /dev/null +++ b/build/docker/sharding/test/scripts/init-mongos1.js @@ -0,0 +1,32 @@ +sh.addShard("shard-rs-1/shard1-1:27017") +sh.addShard("shard-rs-2/shard2-1:27017") + +// The DB 'yorkie-meta-1' is for the mongo client test. +sh.enableSharding("yorkie-meta-1") +sh.shardCollection("yorkie-meta-1.users", { username: 1 }, true) +// sh.shardCollection("yorkie-meta-1.clients", { _id: 1 }, true) +sh.shardCollection("yorkie-meta-1.documents", { key: 1 }) +sh.shardCollection("yorkie-meta-1.changes", { doc_key: 1 }) +sh.shardCollection("yorkie-meta-1.snapshots", { doc_key: 1 }) +sh.shardCollection("yorkie-meta-1.syncedseqs", { doc_key: 1 }) +// Split the inital range at "duplicateIDTestDocKey5" to allow doc_ids duplicate in different shards. 
+sh.splitAt("yorkie-meta-1.documents", { key: "duplicateIDTestDocKey5" }) +// Move the chunk to another shard. +const currentShard = db.getSiblingDB("config").chunks.findOne({ min: { key: 'duplicateIDTestDocKey5' } }).shard +var nextShard = "" +if (currentShard == "shard-rs-1") { + nextShard = "shard-rs-2" +} else { + nextShard = "shard-rs-1" +} +db.adminCommand({ moveChunk: "yorkie-meta-1.documents", find: { key: "duplicateIDTestDocKey5" }, to: nextShard }) + + +// The DB 'yorkie-meta-2' is for the server test. +sh.enableSharding("yorkie-meta-2") +sh.shardCollection("yorkie-meta-2.users", { username: 1 }, true) +// sh.shardCollection("yorkie-meta-2.clients", { _id: 1 }, true) +sh.shardCollection("yorkie-meta-2.documents", { key: 1 }) +sh.shardCollection("yorkie-meta-2.changes", { doc_key: 1 }) +sh.shardCollection("yorkie-meta-2.snapshots", { doc_key: 1 }) +sh.shardCollection("yorkie-meta-2.syncedseqs", { doc_key: 1 }) diff --git a/build/docker/sharding/test/scripts/init-shard1-1.js b/build/docker/sharding/test/scripts/init-shard1-1.js new file mode 100644 index 000000000..00f50407f --- /dev/null +++ b/build/docker/sharding/test/scripts/init-shard1-1.js @@ -0,0 +1,8 @@ +rs.initiate( + { + _id: "shard-rs-1", + members: [ + { _id: 0, host: "shard1-1:27017" }, + ] + } +) diff --git a/build/docker/sharding/test/scripts/init-shard2-1.js b/build/docker/sharding/test/scripts/init-shard2-1.js new file mode 100644 index 000000000..6cfc0fe56 --- /dev/null +++ b/build/docker/sharding/test/scripts/init-shard2-1.js @@ -0,0 +1,8 @@ +rs.initiate( + { + _id: "shard-rs-2", + members: [ + { _id: 0, host: "shard2-1:27017" }, + ] + } +) diff --git a/server/backend/database/database.go b/server/backend/database/database.go index 820650c41..ad5608f18 100644 --- a/server/backend/database/database.go +++ b/server/backend/database/database.go @@ -287,6 +287,7 @@ type Database interface { ) (bool, error) } +// DocOffset represents a paging offset when listing documents. 
type DocOffset struct { Key key.Key ID types.ID diff --git a/server/backend/database/mongo/client.go b/server/backend/database/mongo/client.go index 03666ec63..531a07dd2 100644 --- a/server/backend/database/mongo/client.go +++ b/server/backend/database/mongo/client.go @@ -60,7 +60,7 @@ func Dial(conf *Config) (*Client, error) { ctx, options.Client(). ApplyURI(conf.ConnectionURI). - SetRegistry(newRegistryBuilder().Build()), + SetRegistry(NewRegistryBuilder().Build()), ) if err != nil { return nil, fmt.Errorf("connect to mongo: %w", err) @@ -131,7 +131,7 @@ func (c *Client) ensureDefaultUserInfo( hashedPassword, ) - _, err = c.collection(colUsers).UpdateOne(ctx, bson.M{ + _, err = c.collection(ColUsers).UpdateOne(ctx, bson.M{ "username": candidate.Username, }, bson.M{ "$setOnInsert": bson.M{ @@ -144,7 +144,7 @@ func (c *Client) ensureDefaultUserInfo( return nil, fmt.Errorf("upsert default user info: %w", err) } - result := c.collection(colUsers).FindOne(ctx, bson.M{ + result := c.collection(ColUsers).FindOne(ctx, bson.M{ "username": candidate.Username, }) @@ -167,12 +167,12 @@ func (c *Client) ensureDefaultProjectInfo( ) (*database.ProjectInfo, error) { candidate := database.NewProjectInfo(database.DefaultProjectName, defaultUserName, defaultClientDeactivateThreshold) candidate.ID = database.DefaultProjectID - encodedID, err := encodeID(candidate.ID) + encodedID, err := EncodeID(candidate.ID) if err != nil { return nil, err } - _, err = c.collection(colProjects).UpdateOne(ctx, bson.M{ + _, err = c.collection(ColProjects).UpdateOne(ctx, bson.M{ "_id": encodedID, }, bson.M{ "$setOnInsert": bson.M{ @@ -188,7 +188,7 @@ func (c *Client) ensureDefaultProjectInfo( return nil, fmt.Errorf("create default project: %w", err) } - result := c.collection(colProjects).FindOne(ctx, bson.M{ + result := c.collection(ColProjects).FindOne(ctx, bson.M{ "_id": encodedID, }) @@ -211,7 +211,7 @@ func (c *Client) CreateProjectInfo( clientDeactivateThreshold string, ) (*database.ProjectInfo, 
error) { info := database.NewProjectInfo(name, owner, clientDeactivateThreshold) - result, err := c.collection(colProjects).InsertOne(ctx, bson.M{ + result, err := c.collection(ColProjects).InsertOne(ctx, bson.M{ "name": info.Name, "owner": owner, "client_deactivate_threshold": info.ClientDeactivateThreshold, @@ -237,7 +237,7 @@ func (c *Client) listProjectInfos( pageSize int, housekeepingLastProjectID types.ID, ) ([]*database.ProjectInfo, error) { - encodedID, err := encodeID(housekeepingLastProjectID) + encodedID, err := EncodeID(housekeepingLastProjectID) if err != nil { return nil, err } @@ -245,7 +245,7 @@ func (c *Client) listProjectInfos( opts := options.Find() opts.SetLimit(int64(pageSize)) - cursor, err := c.collection(colProjects).Find(ctx, bson.M{ + cursor, err := c.collection(ColProjects).Find(ctx, bson.M{ "_id": bson.M{ "$gt": encodedID, }, @@ -267,7 +267,7 @@ func (c *Client) ListProjectInfos( ctx context.Context, owner string, ) ([]*database.ProjectInfo, error) { - cursor, err := c.collection(colProjects).Find(ctx, bson.M{ + cursor, err := c.collection(ColProjects).Find(ctx, bson.M{ "owner": owner, }) if err != nil { @@ -284,7 +284,7 @@ func (c *Client) ListProjectInfos( // FindProjectInfoByPublicKey returns a project by public key. func (c *Client) FindProjectInfoByPublicKey(ctx context.Context, publicKey string) (*database.ProjectInfo, error) { - result := c.collection(colProjects).FindOne(ctx, bson.M{ + result := c.collection(ColProjects).FindOne(ctx, bson.M{ "public_key": publicKey, }) @@ -305,7 +305,7 @@ func (c *Client) FindProjectInfoByName( owner string, name string, ) (*database.ProjectInfo, error) { - result := c.collection(colProjects).FindOne(ctx, bson.M{ + result := c.collection(ColProjects).FindOne(ctx, bson.M{ "name": name, "owner": owner, }) @@ -323,12 +323,12 @@ func (c *Client) FindProjectInfoByName( // FindProjectInfoByID returns a project by the given id. 
func (c *Client) FindProjectInfoByID(ctx context.Context, id types.ID) (*database.ProjectInfo, error) { - encodedID, err := encodeID(id) + encodedID, err := EncodeID(id) if err != nil { return nil, err } - result := c.collection(colProjects).FindOne(ctx, bson.M{ + result := c.collection(ColProjects).FindOne(ctx, bson.M{ "_id": encodedID, }) @@ -350,7 +350,7 @@ func (c *Client) UpdateProjectInfo( id types.ID, fields *types.UpdatableProjectFields, ) (*database.ProjectInfo, error) { - encodedID, err := encodeID(id) + encodedID, err := EncodeID(id) if err != nil { return nil, err } @@ -366,7 +366,7 @@ func (c *Client) UpdateProjectInfo( } updatableFields["updated_at"] = gotime.Now() - res := c.collection(colProjects).FindOneAndUpdate(ctx, bson.M{ + res := c.collection(ColProjects).FindOneAndUpdate(ctx, bson.M{ "_id": encodedID, "owner": owner, }, bson.M{ @@ -394,7 +394,7 @@ func (c *Client) CreateUserInfo( hashedPassword string, ) (*database.UserInfo, error) { info := database.NewUserInfo(username, hashedPassword) - result, err := c.collection(colUsers).InsertOne(ctx, bson.M{ + result, err := c.collection(ColUsers).InsertOne(ctx, bson.M{ "username": info.Username, "hashed_password": info.HashedPassword, "created_at": info.CreatedAt, @@ -413,7 +413,7 @@ func (c *Client) CreateUserInfo( // FindUserInfo returns a user by username. 
func (c *Client) FindUserInfo(ctx context.Context, username string) (*database.UserInfo, error) { - result := c.collection(colUsers).FindOne(ctx, bson.M{ + result := c.collection(ColUsers).FindOne(ctx, bson.M{ "username": username, }) @@ -432,7 +432,7 @@ func (c *Client) FindUserInfo(ctx context.Context, username string) (*database.U func (c *Client) ListUserInfos( ctx context.Context, ) ([]*database.UserInfo, error) { - cursor, err := c.collection(colUsers).Find(ctx, bson.M{}) + cursor, err := c.collection(ColUsers).Find(ctx, bson.M{}) if err != nil { return nil, fmt.Errorf("list user infos: %w", err) } @@ -447,13 +447,13 @@ func (c *Client) ListUserInfos( // ActivateClient activates the client of the given key. func (c *Client) ActivateClient(ctx context.Context, projectID types.ID, key string) (*database.ClientInfo, error) { - encodedProjectID, err := encodeID(projectID) + encodedProjectID, err := EncodeID(projectID) if err != nil { return nil, err } now := gotime.Now() - res, err := c.collection(colClients).UpdateOne(ctx, bson.M{ + res, err := c.collection(ColClients).UpdateOne(ctx, bson.M{ "project_id": encodedProjectID, "key": key, }, bson.M{ @@ -468,7 +468,7 @@ func (c *Client) ActivateClient(ctx context.Context, projectID types.ID, key str var result *mongo.SingleResult if res.UpsertedCount > 0 { - result = c.collection(colClients).FindOneAndUpdate(ctx, bson.M{ + result = c.collection(ColClients).FindOneAndUpdate(ctx, bson.M{ "_id": res.UpsertedID, }, bson.M{ "$set": bson.M{ @@ -476,7 +476,7 @@ func (c *Client) ActivateClient(ctx context.Context, projectID types.ID, key str }, }) } else { - result = c.collection(colClients).FindOne(ctx, bson.M{ + result = c.collection(ColClients).FindOne(ctx, bson.M{ "key": key, }) } @@ -491,16 +491,16 @@ func (c *Client) ActivateClient(ctx context.Context, projectID types.ID, key str // DeactivateClient deactivates the client of the given ID. 
func (c *Client) DeactivateClient(ctx context.Context, projectID, clientID types.ID) (*database.ClientInfo, error) { - encodedProjectID, err := encodeID(projectID) + encodedProjectID, err := EncodeID(projectID) if err != nil { return nil, err } - encodedClientID, err := encodeID(clientID) + encodedClientID, err := EncodeID(clientID) if err != nil { return nil, err } - res := c.collection(colClients).FindOneAndUpdate(ctx, bson.M{ + res := c.collection(ColClients).FindOneAndUpdate(ctx, bson.M{ "_id": encodedClientID, "project_id": encodedProjectID, }, bson.M{ @@ -523,16 +523,16 @@ func (c *Client) DeactivateClient(ctx context.Context, projectID, clientID types // FindClientInfoByID finds the client of the given ID. func (c *Client) FindClientInfoByID(ctx context.Context, projectID, clientID types.ID) (*database.ClientInfo, error) { - encodedProjectID, err := encodeID(projectID) + encodedProjectID, err := EncodeID(projectID) if err != nil { return nil, err } - encodedClientID, err := encodeID(clientID) + encodedClientID, err := EncodeID(clientID) if err != nil { return nil, err } - result := c.collection(colClients).FindOneAndUpdate(ctx, bson.M{ + result := c.collection(ColClients).FindOneAndUpdate(ctx, bson.M{ "_id": encodedClientID, "project_id": encodedProjectID, }, bson.M{ @@ -558,7 +558,7 @@ func (c *Client) UpdateClientInfoAfterPushPull( clientInfo *database.ClientInfo, docInfo *database.DocInfo, ) error { - encodedClientID, err := encodeID(clientInfo.ID) + encodedClientID, err := EncodeID(clientInfo.ID) if err != nil { return err } @@ -596,7 +596,7 @@ func (c *Client) UpdateClientInfoAfterPushPull( } } - result := c.collection(colClients).FindOneAndUpdate(ctx, bson.M{ + result := c.collection(ColClients).FindOneAndUpdate(ctx, bson.M{ "_id": encodedClientID, }, updater) @@ -616,7 +616,7 @@ func (c *Client) findDeactivateCandidatesPerProject( project *database.ProjectInfo, candidatesLimit int, ) ([]*database.ClientInfo, error) { - encodedProjectID, err := 
encodeID(project.ID) + encodedProjectID, err := EncodeID(project.ID) if err != nil { return nil, err } @@ -626,7 +626,7 @@ func (c *Client) findDeactivateCandidatesPerProject( return nil, err } - cursor, err := c.collection(colClients).Find(ctx, bson.M{ + cursor, err := c.collection(ColClients).Find(ctx, bson.M{ "project_id": encodedProjectID, "status": database.ClientActivated, "updated_at": bson.M{ @@ -687,11 +687,11 @@ func (c *Client) FindDocInfoByKeyAndOwner( docKey key.Key, createDocIfNotExist bool, ) (*database.DocInfo, error) { - encodedProjectID, err := encodeID(projectID) + encodedProjectID, err := EncodeID(projectID) if err != nil { return nil, err } - encodedOwnerID, err := encodeID(clientID) + encodedOwnerID, err := EncodeID(clientID) if err != nil { return nil, err } @@ -704,7 +704,7 @@ func (c *Client) FindDocInfoByKeyAndOwner( }, } now := gotime.Now() - res, err := c.collection(colDocuments).UpdateOne(ctx, filter, bson.M{ + res, err := c.collection(ColDocuments).UpdateOne(ctx, filter, bson.M{ "$set": bson.M{ "accessed_at": now, }, @@ -715,7 +715,7 @@ func (c *Client) FindDocInfoByKeyAndOwner( var result *mongo.SingleResult if res.UpsertedCount > 0 { - result = c.collection(colDocuments).FindOneAndUpdate(ctx, bson.M{ + result = c.collection(ColDocuments).FindOneAndUpdate(ctx, bson.M{ "key": docKey, "_id": res.UpsertedID, }, bson.M{ @@ -726,7 +726,7 @@ func (c *Client) FindDocInfoByKeyAndOwner( }, }) } else { - result = c.collection(colDocuments).FindOne(ctx, filter) + result = c.collection(ColDocuments).FindOne(ctx, filter) if result.Err() == mongo.ErrNoDocuments { return nil, fmt.Errorf("%s %s: %w", projectID, docKey, database.ErrDocumentNotFound) } @@ -749,12 +749,12 @@ func (c *Client) FindDocInfoByKey( projectID types.ID, docKey key.Key, ) (*database.DocInfo, error) { - encodedProjectID, err := encodeID(projectID) + encodedProjectID, err := EncodeID(projectID) if err != nil { return nil, err } - result := c.collection(colDocuments).FindOne(ctx, 
bson.M{ + result := c.collection(ColDocuments).FindOne(ctx, bson.M{ "project_id": encodedProjectID, "key": docKey, "removed_at": bson.M{ @@ -782,12 +782,12 @@ func (c *Client) FindDocInfoByKeyAndID( key key.Key, id types.ID, ) (*database.DocInfo, error) { - encodedDocID, err := encodeID(id) + encodedDocID, err := EncodeID(id) if err != nil { return nil, err } - result := c.collection(colDocuments).FindOne(ctx, bson.M{ + result := c.collection(ColDocuments).FindOne(ctx, bson.M{ "key": key, "_id": encodedDocID, }) @@ -812,12 +812,12 @@ func (c *Client) UpdateDocInfoStatusToRemoved( key key.Key, id types.ID, ) error { - encodedDocID, err := encodeID(id) + encodedDocID, err := EncodeID(id) if err != nil { return err } - result := c.collection(colDocuments).FindOneAndUpdate(ctx, bson.M{ + result := c.collection(ColDocuments).FindOneAndUpdate(ctx, bson.M{ "key": key, "_id": encodedDocID, }, bson.M{ @@ -844,7 +844,7 @@ func (c *Client) CreateChangeInfos( changes []*change.Change, isRemoved bool, ) error { - encodedDocID, err := encodeID(docInfo.ID) + encodedDocID, err := EncodeID(docInfo.ID) if err != nil { return err } @@ -877,7 +877,7 @@ func (c *Client) CreateChangeInfos( // TODO(hackerwins): We need to handle the updates for the two collections // below atomically. 
if len(changes) > 0 { - if _, err = c.collection(colChanges).BulkWrite( + if _, err = c.collection(ColChanges).BulkWrite( ctx, models, options.BulkWrite().SetOrdered(true), @@ -895,7 +895,7 @@ func (c *Client) CreateChangeInfos( updateFields["removed_at"] = now } - res, err := c.collection(colDocuments).UpdateOne(ctx, bson.M{ + res, err := c.collection(ColDocuments).UpdateOne(ctx, bson.M{ "key": docInfo.Key, "_id": encodedDocID, "server_seq": initialServerSeq, @@ -922,14 +922,14 @@ func (c *Client) PurgeStaleChanges( docKey key.Key, docID types.ID, ) error { - encodedDocID, err := encodeID(docID) + encodedDocID, err := EncodeID(docID) if err != nil { return err } // Find the smallest server seq in `syncedseqs`. // Because offline client can pull changes when it becomes online. - result := c.collection(colSyncedSeqs).FindOne( + result := c.collection(ColSyncedSeqs).FindOne( ctx, bson.M{ "doc_key": docKey, @@ -949,7 +949,7 @@ func (c *Client) PurgeStaleChanges( } // Delete all changes before the smallest server seq. 
- if _, err := c.collection(colChanges).DeleteMany( + if _, err := c.collection(ColChanges).DeleteMany( ctx, bson.M{ "doc_key": docKey, @@ -997,12 +997,12 @@ func (c *Client) FindChangeInfosBetweenServerSeqs( from int64, to int64, ) ([]*database.ChangeInfo, error) { - encodedDocID, err := encodeID(docID) + encodedDocID, err := EncodeID(docID) if err != nil { return nil, err } - cursor, err := c.collection(colChanges).Find(ctx, bson.M{ + cursor, err := c.collection(ColChanges).Find(ctx, bson.M{ "doc_key": docKey, "doc_id": encodedDocID, "server_seq": bson.M{ @@ -1029,7 +1029,7 @@ func (c *Client) CreateSnapshotInfo( docID types.ID, doc *document.InternalDocument, ) error { - encodedDocID, err := encodeID(docID) + encodedDocID, err := EncodeID(docID) if err != nil { return err } @@ -1038,7 +1038,7 @@ func (c *Client) CreateSnapshotInfo( return err } - if _, err := c.collection(colSnapshots).InsertOne(ctx, bson.M{ + if _, err := c.collection(ColSnapshots).InsertOne(ctx, bson.M{ "doc_key": docKey, "doc_id": encodedDocID, "server_seq": doc.Checkpoint().ServerSeq, @@ -1059,12 +1059,12 @@ func (c *Client) FindSnapshotInfoByID( docID types.ID, serverSeq int64, ) (*database.SnapshotInfo, error) { - encodedDocID, err := encodeID(docID) + encodedDocID, err := EncodeID(docID) if err != nil { return nil, err } - result := c.collection(colSnapshots).FindOne(ctx, bson.M{ + result := c.collection(ColSnapshots).FindOne(ctx, bson.M{ "doc_key": docKey, "doc_id": encodedDocID, "server_seq": serverSeq, @@ -1093,7 +1093,7 @@ func (c *Client) FindClosestSnapshotInfo( serverSeq int64, includeSnapshot bool, ) (*database.SnapshotInfo, error) { - encodedDocID, err := encodeID(docID) + encodedDocID, err := EncodeID(docID) if err != nil { return nil, err } @@ -1106,7 +1106,7 @@ func (c *Client) FindClosestSnapshotInfo( option.SetProjection(bson.M{"Snapshot": 0}) } - result := c.collection(colSnapshots).FindOne(ctx, bson.M{ + result := c.collection(ColSnapshots).FindOne(ctx, bson.M{ "doc_key": 
docKey, "doc_id": encodedDocID, "server_seq": bson.M{ @@ -1135,12 +1135,12 @@ func (c *Client) FindMinSyncedSeqInfo( docKey key.Key, docID types.ID, ) (*database.SyncedSeqInfo, error) { - encodedDocID, err := encodeID(docID) + encodedDocID, err := EncodeID(docID) if err != nil { return nil, err } - syncedSeqResult := c.collection(colSyncedSeqs).FindOne(ctx, bson.M{ + syncedSeqResult := c.collection(ColSyncedSeqs).FindOne(ctx, bson.M{ "doc_key": docKey, "doc_id": encodedDocID, }, options.FindOne().SetSort(bson.D{ @@ -1175,13 +1175,13 @@ func (c *Client) UpdateAndFindMinSyncedTicket( return nil, err } - encodedDocID, err := encodeID(docID) + encodedDocID, err := EncodeID(docID) if err != nil { return nil, err } // 02. find min synced seq of the given document. - result := c.collection(colSyncedSeqs).FindOne(ctx, bson.M{ + result := c.collection(ColSyncedSeqs).FindOne(ctx, bson.M{ "doc_key": docKey, "doc_id": encodedDocID, }, options.FindOne().SetSort(bson.D{ @@ -1221,7 +1221,7 @@ func (c *Client) FindDocInfosByPaging( projectID types.ID, paging types.Paging[database.DocOffset], ) ([]*database.DocInfo, error) { - encodedProjectID, err := encodeID(projectID) + encodedProjectID, err := EncodeID(projectID) if err != nil { return nil, err } @@ -1235,7 +1235,7 @@ func (c *Client) FindDocInfosByPaging( }, } if paging.Offset.Key != "" && paging.Offset.ID != "" { - encodedDocID, err := encodeID(paging.Offset.ID) + encodedDocID, err := EncodeID(paging.Offset.ID) if err != nil { return nil, err } @@ -1257,7 +1257,7 @@ func (c *Client) FindDocInfosByPaging( opts = opts.SetSort(bson.D{{Key: "_id", Value: -1}, {Key: "key", Value: -1}}) } - cursor, err := c.collection(colDocuments).Find(ctx, filter, opts) + cursor, err := c.collection(ColDocuments).Find(ctx, filter, opts) if err != nil { return nil, fmt.Errorf("find documents: %w", err) } @@ -1277,12 +1277,12 @@ func (c *Client) FindDocInfosByQuery( query string, pageSize int, ) (*types.SearchResult[*database.DocInfo], error) { - 
encodedProjectID, err := encodeID(projectID) + encodedProjectID, err := EncodeID(projectID) if err != nil { return nil, err } - cursor, err := c.collection(colDocuments).Find(ctx, bson.M{ + cursor, err := c.collection(ColDocuments).Find(ctx, bson.M{ "project_id": encodedProjectID, "key": bson.M{"$regex": primitive.Regex{ Pattern: "^" + escapeRegex(query), @@ -1315,11 +1315,11 @@ func (c *Client) UpdateSyncedSeq( docID types.ID, serverSeq int64, ) error { - encodedDocID, err := encodeID(docID) + encodedDocID, err := EncodeID(docID) if err != nil { return err } - encodedClientID, err := encodeID(clientInfo.ID) + encodedClientID, err := EncodeID(clientInfo.ID) if err != nil { return err } @@ -1331,7 +1331,7 @@ func (c *Client) UpdateSyncedSeq( } if !isAttached { - if _, err = c.collection(colSyncedSeqs).DeleteOne(ctx, bson.M{ + if _, err = c.collection(ColSyncedSeqs).DeleteOne(ctx, bson.M{ "doc_key": docKey, "doc_id": encodedDocID, "client_id": encodedClientID, @@ -1346,7 +1346,7 @@ func (c *Client) UpdateSyncedSeq( return err } - if _, err = c.collection(colSyncedSeqs).UpdateOne(ctx, bson.M{ + if _, err = c.collection(ColSyncedSeqs).UpdateOne(ctx, bson.M{ "doc_key": docKey, "doc_id": encodedDocID, "client_id": encodedClientID, @@ -1371,7 +1371,7 @@ func (c *Client) IsDocumentAttached( docID types.ID, excludeClientID types.ID, ) (bool, error) { - encodedProjectID, err := encodeID(projectID) + encodedProjectID, err := EncodeID(projectID) if err != nil { return false, err } @@ -1383,7 +1383,7 @@ func (c *Client) IsDocumentAttached( } if excludeClientID != "" { - encodedExcludeClientID, err := encodeID(excludeClientID) + encodedExcludeClientID, err := EncodeID(excludeClientID) if err != nil { return false, err } @@ -1391,7 +1391,7 @@ func (c *Client) IsDocumentAttached( filter["_id"] = bson.M{"$ne": encodedExcludeClientID} } - result := c.collection(colClients).FindOne(ctx, filter) + result := c.collection(ColClients).FindOne(ctx, filter) if result.Err() == 
mongo.ErrNoDocuments { return false, nil } @@ -1409,12 +1409,12 @@ func (c *Client) findTicketByServerSeq( return time.InitialTicket, nil } - encodedDocID, err := encodeID(docID) + encodedDocID, err := EncodeID(docID) if err != nil { return nil, err } - result := c.collection(colChanges).FindOne(ctx, bson.M{ + result := c.collection(ColChanges).FindOne(ctx, bson.M{ "doc_key": docKey, "doc_id": encodedDocID, "server_seq": serverSeq, @@ -1448,18 +1448,6 @@ func (c *Client) findTicketByServerSeq( ), nil } -func (c *Client) CleanUpAllCollections(ctx context.Context) error { - collections := []string{colProjects, colUsers, colClients, - colDocuments, colChanges, colSnapshots, colSyncedSeqs} - for _, col := range collections { - _, err := c.collection(col).DeleteMany(ctx, bson.D{}) - if err != nil { - return err - } - } - return nil -} - func (c *Client) collection( name string, opts ...*options.CollectionOptions, diff --git a/server/backend/database/mongo/encoder.go b/server/backend/database/mongo/encoder.go index 2c553c0d7..6b3e117e6 100644 --- a/server/backend/database/mongo/encoder.go +++ b/server/backend/database/mongo/encoder.go @@ -31,7 +31,8 @@ func encodeActorID(id *time.ActorID) primitive.ObjectID { return objectID } -func encodeID(id types.ID) (primitive.ObjectID, error) { +// EncodeID transforms the types.ID value into the primitive.ObjectID value. 
+func EncodeID(id types.ID) (primitive.ObjectID, error) { objectID, err := primitive.ObjectIDFromHex(id.String()) if err != nil { return objectID, fmt.Errorf("%s: %w", id, types.ErrInvalidID) diff --git a/server/backend/database/mongo/indexes.go b/server/backend/database/mongo/indexes.go index 1de4f0dbe..5512ca80a 100644 --- a/server/backend/database/mongo/indexes.go +++ b/server/backend/database/mongo/indexes.go @@ -26,13 +26,20 @@ import ( ) const ( - colProjects = "projects" - colUsers = "users" - colClients = "clients" - colDocuments = "documents" - colChanges = "changes" - colSnapshots = "snapshots" - colSyncedSeqs = "syncedseqs" + // ColProjects represents the projects collection in the database. + ColProjects = "projects" + // ColUsers represents the users collection in the database. + ColUsers = "users" + // ColClients represents the clients collection in the database. + ColClients = "clients" + // ColDocuments represents the documents collection in the database. + ColDocuments = "documents" + // ColChanges represents the changes collection in the database. + ColChanges = "changes" + // ColSnapshots represents the snapshots collection in the database. + ColSnapshots = "snapshots" + // ColSyncedSeqs represents the syncedseqs collection in the database. + ColSyncedSeqs = "syncedseqs" ) type collectionInfo struct { @@ -43,7 +50,7 @@ type collectionInfo struct { // Below are names and indexes information of collections that stores Yorkie data. 
var collectionInfos = []collectionInfo{ { - name: colProjects, + name: ColProjects, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ {Key: "owner", Value: bsonx.Int32(1)}, @@ -59,14 +66,14 @@ var collectionInfos = []collectionInfo{ }}, }, { - name: colUsers, + name: ColUsers, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{{Key: "username", Value: bsonx.Int32(1)}}, Options: options.Index().SetUnique(true), }}, }, { - name: colClients, + name: ColClients, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ {Key: "project_id", Value: bsonx.Int32(1)}, @@ -86,7 +93,7 @@ var collectionInfos = []collectionInfo{ }}, }, { - name: colDocuments, + name: ColDocuments, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ {Key: "key", Value: bsonx.Int32(1)}, @@ -99,7 +106,7 @@ var collectionInfos = []collectionInfo{ ).SetUnique(true), }}, }, { - name: colChanges, + name: ColChanges, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ {Key: "doc_key", Value: bsonx.Int32(1)}, @@ -109,7 +116,7 @@ var collectionInfos = []collectionInfo{ Options: options.Index().SetUnique(true), }}, }, { - name: colSnapshots, + name: ColSnapshots, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ {Key: "doc_key", Value: bsonx.Int32(1)}, @@ -119,7 +126,7 @@ var collectionInfos = []collectionInfo{ Options: options.Index().SetUnique(true), }}, }, { - name: colSyncedSeqs, + name: ColSyncedSeqs, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ {Key: "doc_key", Value: bsonx.Int32(1)}, diff --git a/server/backend/database/mongo/registry.go b/server/backend/database/mongo/registry.go index 729059ccb..6c27983ec 100644 --- a/server/backend/database/mongo/registry.go +++ b/server/backend/database/mongo/registry.go @@ -26,7 +26,8 @@ import ( "github.com/yorkie-team/yorkie/api/types" ) -func newRegistryBuilder() *bsoncodec.RegistryBuilder { +// NewRegistryBuilder returns a new registry builder with the default encoder and decoder. 
+func NewRegistryBuilder() *bsoncodec.RegistryBuilder { rb := bsoncodec.NewRegistryBuilder() bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb) diff --git a/server/backend/database/mongo/registry_test.go b/server/backend/database/mongo/registry_test.go index de9bd482b..83b7cf35c 100644 --- a/server/backend/database/mongo/registry_test.go +++ b/server/backend/database/mongo/registry_test.go @@ -28,7 +28,7 @@ import ( ) func TestRegistry(t *testing.T) { - registry := newRegistryBuilder().Build() + registry := NewRegistryBuilder().Build() id := types.ID(primitive.NewObjectID().Hex()) data, err := bson.MarshalWithRegistry(registry, bson.M{ diff --git a/server/backend/database/testcases/testcases.go b/server/backend/database/testcases/testcases.go index 69e13427e..e14749fa3 100644 --- a/server/backend/database/testcases/testcases.go +++ b/server/backend/database/testcases/testcases.go @@ -81,7 +81,12 @@ func RunFindProjectInfoByNameTest( ctx := context.Background() suffixes := []int{0, 1, 2} for _, suffix := range suffixes { - _, err := db.CreateProjectInfo(ctx, fmt.Sprintf("%s-%d", t.Name(), suffix), dummyOwnerName, clientDeactivateThreshold) + _, err := db.CreateProjectInfo( + ctx, + fmt.Sprintf("%s-%d", t.Name(), suffix), + dummyOwnerName, + clientDeactivateThreshold, + ) assert.NoError(t, err) } @@ -426,27 +431,30 @@ func RunUpdateProjectInfoTest(t *testing.T, db database.Database) { func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID types.ID) { t.Run("simple FindDocInfosByPaging test", func(t *testing.T) { ctx := context.Background() - - assertKeys := func(expectedKeys []key.Key, infos []*database.DocInfo) { - var keys []key.Key - for _, info := range infos { - keys = append(keys, info.Key) - } - assert.EqualValues(t, expectedKeys, keys) - } - pageSize := 5 totalSize := 9 + clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) + docInfos := make([]*database.DocInfo, 0, totalSize) for i := 0; i < totalSize; i++ { - _, err 
:= db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, key.Key(fmt.Sprintf("%d", i)), true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, key.Key(fmt.Sprintf("%d", i)), true) assert.NoError(t, err) + docInfos = append(docInfos, docInfo) + } + + // NOTE(sejongk): sorting is required because doc_id may not sequentially increase in a sharded DB cluster. + SortDocInfos(docInfos) + docKeys := make([]key.Key, 0, totalSize) + docKeysInReverse := make([]key.Key, 0, totalSize) + for _, docInfo := range docInfos { + docKeys = append(docKeys, docInfo.Key) + docKeysInReverse = append([]key.Key{docInfo.Key}, docKeysInReverse...) } // initial page, offset is empty infos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{PageSize: pageSize}) assert.NoError(t, err) - assertKeys([]key.Key{"8", "7", "6", "5", "4"}, infos) + AssertKeys(t, docKeysInReverse[:pageSize], infos) // backward infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ @@ -457,7 +465,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t PageSize: pageSize, }) assert.NoError(t, err) - assertKeys([]key.Key{"3", "2", "1", "0"}, infos) + AssertKeys(t, docKeysInReverse[pageSize:], infos) // backward again emptyInfos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ @@ -468,7 +476,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t PageSize: pageSize, }) assert.NoError(t, err) - assertKeys(nil, emptyInfos) + AssertKeys(t, nil, emptyInfos) // forward infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ @@ -480,7 +488,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t IsForward: true, }) assert.NoError(t, err) - assertKeys([]key.Key{"4", "5", "6", "7", "8"}, infos) + AssertKeys(t, docKeys[totalSize-pageSize:], infos) // forward again emptyInfos, err = 
db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ @@ -492,7 +500,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t IsForward: true, }) assert.NoError(t, err) - assertKeys(nil, emptyInfos) + AssertKeys(t, nil, emptyInfos) }) t.Run("complex FindDocInfosByPaging test", func(t *testing.T) { @@ -512,6 +520,9 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t dummyDocInfos = append(dummyDocInfos, docInfo) } + // NOTE(sejongk): sorting is required because doc_id may not sequentially increase in a sharded DB cluster. + SortDocInfos(dummyDocInfos) + cases := []struct { name string offset database.DocOffset @@ -624,7 +635,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t }) t.Run("FindDocInfosByPaging with docInfoRemovedAt test", func(t *testing.T) { - const testDocCnt = 3 + const testDocCnt = 5 ctx := context.Background() // 01. Initialize a project and create documents. @@ -639,6 +650,13 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t docInfos = append(docInfos, docInfo) } + // NOTE(sejongk): sorting is required because doc_id may not sequentially increase in a sharded DB cluster. + SortDocInfos(docInfos) + docKeysInReverse := make([]key.Key, 0, testDocCnt) + for _, docInfo := range docInfos { + docKeysInReverse = append([]key.Key{docInfo.Key}, docKeysInReverse...) + } + // 02. List the documents. result, err := db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[database.DocOffset]{ PageSize: 10, @@ -646,9 +664,12 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t }) assert.NoError(t, err) assert.Len(t, result, len(docInfos)) + AssertKeys(t, docKeysInReverse, result) - // 03. Remove a document. - err = db.CreateChangeInfos(ctx, docInfos[0], 0, []*change.Change{}, true) + // 03. Remove some documents. 
+ err = db.CreateChangeInfos(ctx, docInfos[1], 0, []*change.Change{}, true) + assert.NoError(t, err) + err = db.CreateChangeInfos(ctx, docInfos[3], 0, []*change.Change{}, true) assert.NoError(t, err) // 04. List the documents again and check the filtered result. @@ -657,7 +678,8 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t IsForward: false, }) assert.NoError(t, err) - assert.Len(t, result, len(docInfos)-1) + assert.Len(t, result, len(docInfos)-2) + AssertKeys(t, []key.Key{docKeysInReverse[0], docKeysInReverse[2], docKeysInReverse[4]}, result) }) } @@ -727,7 +749,7 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type assert.Equal(t, false, docInfo.RemovedAt.IsZero()) }) - t.Run("reuse same key to create docInfo test ", func(t *testing.T) { + t.Run("reuse same key to create docInfo test", func(t *testing.T) { ctx := context.Background() docKey := helper.TestDocKey(t) @@ -750,6 +772,57 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type assert.NotEqual(t, docInfo1.ID, docInfo2.ID) }) + t.Run("correct document references of changes when reusing same key test", func(t *testing.T) { + ctx := context.Background() + docKey := helper.TestDocKey(t) + changeCnt := 5 + + // 01. Create a client and a document then attach the document to the client. + clientInfo1, _ := db.ActivateClient(ctx, projectID, t.Name()) + docInfo1, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.ID, docKey, true) + assert.NoError(t, clientInfo1.AttachDocument(docInfo1.Key, docInfo1.ID)) + assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo1)) + + // 02. Generate changes at the document. 
+ bytesID, _ := clientInfo1.ID.Bytes() + actorID, _ := time.ActorIDFromBytes(bytesID) + doc1 := document.New(docKey) + doc1.SetActor(actorID) + assert.NoError(t, doc1.Update(func(root *json.Object, _ *presence.Presence) error { + root.SetNewArray("array") + return nil + })) + for idx := 0; idx < changeCnt; idx++ { + assert.NoError(t, doc1.Update(func(root *json.Object, _ *presence.Presence) error { + root.GetArray("array").AddString("A") + return nil + })) + } + pack1 := doc1.CreateChangePack() + + // 03. Store changes and remove the document. + assert.NoError(t, clientInfo1.RemoveDocument(docInfo1.Key, docInfo1.ID)) + err := db.CreateChangeInfos(ctx, docInfo1, 0, pack1.Changes, true) + assert.NoError(t, err) + + // 04. Create a document with same key and check they have same key but different id. + docInfo2, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.ID, docKey, true) + assert.NoError(t, clientInfo1.AttachDocument(docInfo2.Key, docInfo2.ID)) + assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo2)) + assert.Equal(t, docInfo1.Key, docInfo2.Key) + assert.NotEqual(t, docInfo1.ID, docInfo2.ID) + + // 05. Check whether the changes of the removed document are referencing the removed document. + changeInfos1, err := db.FindChangeInfosBetweenServerSeqs(ctx, docKey, docInfo1.ID, 0, 1) + assert.NoError(t, err) + assert.Len(t, changeInfos1, 1) + + // 06. Check whether the changes of the removed document aren't referencing the active document. 
+ changeInfos2, err := db.FindChangeInfosBetweenServerSeqs(ctx, docKey, docInfo2.ID, 0, 1) + assert.NoError(t, err) + assert.Len(t, changeInfos2, 0) + }) + t.Run("set removed_at in docInfo test", func(t *testing.T) { ctx := context.Background() docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) @@ -1111,3 +1184,22 @@ func RunIsDocumentAttachedTest(t *testing.T, db database.Database, projectID typ assert.False(t, attached) }) } + +// SortDocInfos sorts the given docInfo slice using the (doc_id, doc_key) ascending order. +func SortDocInfos(docInfos []*database.DocInfo) { + sort.Slice(docInfos, func(i, j int) bool { + if docInfos[i].ID != docInfos[j].ID { + return docInfos[i].ID < docInfos[j].ID + } + return docInfos[i].Key < docInfos[j].Key + }) +} + +// AssertKeys checks the equivalence between the provided expectedKeys and the keys in the given infos. +func AssertKeys(t *testing.T, expectedKeys []key.Key, infos []*database.DocInfo) { + var keys []key.Key + for _, info := range infos { + keys = append(keys, info.Key) + } + assert.EqualValues(t, expectedKeys, keys) +} diff --git a/server/documents/documents.go b/server/documents/documents.go index b3d4fbc97..60ca64de0 100644 --- a/server/documents/documents.go +++ b/server/documents/documents.go @@ -195,7 +195,7 @@ func FindDocInfoByKey( ) } -// FindDocInfo returns a document for the given document ID. +// FindDocInfoByKeyAndID returns a document for the given document ID. 
func FindDocInfoByKeyAndID( ctx context.Context, be *backend.Backend, diff --git a/server/rpc/server_test.go b/server/rpc/server_test.go index b96b8c742..890be55ec 100644 --- a/server/rpc/server_test.go +++ b/server/rpc/server_test.go @@ -18,19 +18,14 @@ package rpc_test import ( "context" - "encoding/hex" "fmt" "log" "os" "testing" - "time" - "github.com/gogo/protobuf/types" "github.com/stretchr/testify/assert" "google.golang.org/grpc" - "google.golang.org/grpc/codes" "google.golang.org/grpc/credentials/insecure" - "google.golang.org/grpc/status" "github.com/yorkie-team/yorkie/admin" api "github.com/yorkie-team/yorkie/api/yorkie/v1" @@ -41,27 +36,16 @@ import ( "github.com/yorkie-team/yorkie/server/backend/housekeeping" "github.com/yorkie-team/yorkie/server/profiling/prometheus" "github.com/yorkie-team/yorkie/server/rpc" + "github.com/yorkie-team/yorkie/server/rpc/testcases" "github.com/yorkie-team/yorkie/test/helper" ) var ( - defaultProjectName = "default" - invalidSlugName = "@#$%^&*()_+" - - nilClientID = "000000000000000000000000" - emptyClientID = "" - invalidClientID = "invalid" - testRPCServer *rpc.Server testRPCAddr = fmt.Sprintf("localhost:%d", helper.RPCPort) testClient api.YorkieServiceClient testAdminAuthInterceptor *admin.AuthInterceptor testAdminClient api.AdminServiceClient - - invalidChangePack = &api.ChangePack{ - DocumentKey: "invalid", - Checkpoint: nil, - } ) func TestMain(m *testing.M) { @@ -151,881 +135,73 @@ func TestMain(m *testing.M) { func TestSDKRPCServerBackend(t *testing.T) { t.Run("activate/deactivate client test", func(t *testing.T) { - activateResp, err := testClient.ActivateClient( - context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, - ) - assert.NoError(t, err) - - _, err = testClient.DeactivateClient( - context.Background(), - &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, - ) - assert.NoError(t, err) - - // invalid argument - _, err = testClient.ActivateClient( - context.Background(), - 
&api.ActivateClientRequest{ClientKey: ""}, - ) - assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) - - _, err = testClient.DeactivateClient( - context.Background(), - &api.DeactivateClientRequest{ClientId: emptyClientID}, - ) - assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) - - // client not found - _, err = testClient.DeactivateClient( - context.Background(), - &api.DeactivateClientRequest{ClientId: nilClientID}, - ) - assert.Equal(t, codes.NotFound, status.Convert(err).Code()) + testcases.RunActivateAndDeactivateClientTest(t, testClient) }) t.Run("attach/detach document test", func(t *testing.T) { - activateResp, err := testClient.ActivateClient( - context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, - ) - assert.NoError(t, err) - - packWithNoChanges := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - } - - resPack, err := testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - // try to attach with invalid client ID - _, err = testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: invalidClientID, - ChangePack: packWithNoChanges, - }, - ) - assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) - - // try to attach with invalid client - _, err = testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: nilClientID, - ChangePack: packWithNoChanges, - }, - ) - assert.Equal(t, codes.NotFound, status.Convert(err).Code()) - - // try to attach already attached document - _, err = testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) - - // try to attach 
invalid change pack - _, err = testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: invalidChangePack, - }, - ) - assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) - - _, err = testClient.DetachDocument( - context.Background(), - &api.DetachDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - // try to detach already detached document - _, err = testClient.DetachDocument( - context.Background(), - &api.DetachDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithNoChanges, - }, - ) - assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) - - _, err = testClient.DetachDocument( - context.Background(), - &api.DetachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: invalidChangePack, - }, - ) - assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) - - // document not found - _, err = testClient.DetachDocument( - context.Background(), - &api.DetachDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: "000000000000000000000000", - ChangePack: &api.ChangePack{ - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - }, - }, - ) - assert.Equal(t, codes.NotFound, status.Convert(err).Code()) - - _, err = testClient.DeactivateClient( - context.Background(), - &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, - ) - assert.NoError(t, err) - - // try to attach the document with a deactivated client - _, err = testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) + testcases.RunAttachAndDetachDocumentTest(t, testClient) }) t.Run("attach/detach on removed document test", func(t *testing.T) 
{ - activateResp, err := testClient.ActivateClient( - context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, - ) - assert.NoError(t, err) - - packWithNoChanges := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - } - - packWithRemoveRequest := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - IsRemoved: true, - } - - resPack, err := testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - _, err = testClient.RemoveDocument( - context.Background(), - &api.RemoveDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithRemoveRequest, - }, - ) - assert.NoError(t, err) - - // try to detach document with same ID as removed document - // FailedPrecondition because document is not attached. 
- _, err = testClient.DetachDocument( - context.Background(), - &api.DetachDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithNoChanges, - }, - ) - assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) - - // try to create new document with same key as removed document - resPack, err = testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - _, err = testClient.RemoveDocument( - context.Background(), - &api.RemoveDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithRemoveRequest, - }, - ) - assert.NoError(t, err) + testcases.RunAttachAndDetachRemovedDocumentTest(t, testClient) }) t.Run("push/pull changes test", func(t *testing.T) { - packWithNoChanges := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - } - - activateResp, err := testClient.ActivateClient( - context.Background(), - &api.ActivateClientRequest{ClientKey: helper.TestDocKey(t).String()}, - ) - assert.NoError(t, err) - - actorID, _ := hex.DecodeString(activateResp.ClientId) - resPack, err := testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 1}, - Changes: []*api.Change{{ - Id: &api.ChangeID{ - ClientSeq: 1, - Lamport: 1, - ActorId: actorID, - }, - }}, - }, - }, - ) - assert.NoError(t, err) - - _, err = testClient.PushPullChanges( - context.Background(), - &api.PushPullChangesRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 
2}, - Changes: []*api.Change{{ - Id: &api.ChangeID{ - ClientSeq: 2, - Lamport: 2, - ActorId: actorID, - }, - }}, - }, - }, - ) - assert.NoError(t, err) - - _, err = testClient.DetachDocument( - context.Background(), - &api.DetachDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 3}, - Changes: []*api.Change{{ - Id: &api.ChangeID{ - ClientSeq: 3, - Lamport: 3, - ActorId: actorID, - }, - }}, - }, - }, - ) - assert.NoError(t, err) - - // try to push/pull with detached document - _, err = testClient.PushPullChanges( - context.Background(), - &api.PushPullChangesRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithNoChanges, - }, - ) - assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) - - // try to push/pull with invalid pack - _, err = testClient.PushPullChanges( - context.Background(), - &api.PushPullChangesRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: invalidChangePack, - }, - ) - assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) - - _, err = testClient.DeactivateClient( - context.Background(), - &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, - ) - assert.NoError(t, err) - - // try to push/pull with deactivated client - _, err = testClient.PushPullChanges( - context.Background(), - &api.PushPullChangesRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithNoChanges, - }, - ) - assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) + testcases.RunPushPullChangeTest(t, testClient) }) t.Run("push/pull on removed document test", func(t *testing.T) { - packWithNoChanges := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - } - - 
packWithRemoveRequest := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - IsRemoved: true, - } - - activateResp, err := testClient.ActivateClient( - context.Background(), - &api.ActivateClientRequest{ClientKey: helper.TestDocKey(t).String()}, - ) - assert.NoError(t, err) - - resPack, err := testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - _, err = testClient.RemoveDocument( - context.Background(), - &api.RemoveDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithRemoveRequest, - }, - ) - assert.NoError(t, err) - - // try to push/pull on removed document - _, err = testClient.PushPullChanges( - context.Background(), - &api.PushPullChangesRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithNoChanges, - }, - ) - assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) + testcases.RunPushPullChangeOnRemovedDocumentTest(t, testClient) }) t.Run("remove document test", func(t *testing.T) { - activateResp, err := testClient.ActivateClient( - context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, - ) - assert.NoError(t, err) - - packWithNoChanges := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - } - - packWithRemoveRequest := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - IsRemoved: true, - } - - resPack, err := testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - _, err = testClient.RemoveDocument( - context.Background(), - 
&api.RemoveDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithRemoveRequest, - }, - ) - assert.NoError(t, err) - - // try to remove removed document - _, err = testClient.RemoveDocument( - context.Background(), - &api.RemoveDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithRemoveRequest, - }, - ) - assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) + testcases.RunRemoveDocumentTest(t, testClient) }) t.Run("remove document with invalid client state test", func(t *testing.T) { - activateResp, err := testClient.ActivateClient( - context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, - ) - assert.NoError(t, err) - - packWithNoChanges := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - } - - packWithRemoveRequest := &api.ChangePack{ - DocumentKey: helper.TestDocKey(t).String(), - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - IsRemoved: true, - } - - resPack, err := testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - _, err = testClient.DetachDocument( - context.Background(), - &api.DetachDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - // try to remove detached document - _, err = testClient.RemoveDocument( - context.Background(), - &api.RemoveDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithRemoveRequest, - }, - ) - assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) - - _, err = testClient.DeactivateClient( - context.Background(), - &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, - ) - assert.NoError(t, 
err) - - // try to remove document with a deactivated client - _, err = testClient.RemoveDocument( - context.Background(), - &api.RemoveDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentId: resPack.DocumentId, - ChangePack: packWithRemoveRequest, - }, - ) - assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) + testcases.RunRemoveDocumentWithInvalidClientStateTest(t, testClient) }) t.Run("watch document test", func(t *testing.T) { - activateResp, err := testClient.ActivateClient( - context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, - ) - assert.NoError(t, err) - - docKey := helper.TestDocKey(t).String() - - packWithNoChanges := &api.ChangePack{ - DocumentKey: docKey, - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - } - - resPack, err := testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - // watch document - watchResp, err := testClient.WatchDocument( - context.Background(), - &api.WatchDocumentRequest{ - ClientId: activateResp.ClientId, - DocumentKey: docKey, - DocumentId: resPack.DocumentId, - }, - ) - assert.NoError(t, err) - - // check if stream is open - _, err = watchResp.Recv() - assert.NoError(t, err) - - // wait for MaxConnectionAge + MaxConnectionAgeGrace - time.Sleep(helper.RPCMaxConnectionAge + helper.RPCMaxConnectionAgeGrace) - - // check if stream has closed by server (EOF) - _, err = watchResp.Recv() - assert.Equal(t, codes.Unavailable, status.Code(err)) - assert.Contains(t, err.Error(), "EOF") + testcases.RunWatchDocumentTest(t, testClient) }) } func TestAdminRPCServerBackend(t *testing.T) { t.Run("admin signup test", func(t *testing.T) { - adminUser := helper.TestSlugName(t) - adminPassword := helper.AdminPassword + "123!" 
- - _, err := testAdminClient.SignUp( - context.Background(), - &api.SignUpRequest{ - Username: adminUser, - Password: adminPassword, - }, - ) - assert.NoError(t, err) - - // try to sign up with existing username - _, err = testAdminClient.SignUp( - context.Background(), - &api.SignUpRequest{ - Username: adminUser, - Password: adminPassword, - }, - ) - assert.Equal(t, codes.AlreadyExists, status.Convert(err).Code()) + testcases.RunAdminSignUpTest(t, testAdminClient) }) t.Run("admin login test", func(t *testing.T) { - _, err := testAdminClient.LogIn( - context.Background(), - &api.LogInRequest{ - Username: helper.AdminUser, - Password: helper.AdminPassword, - }, - ) - assert.NoError(t, err) - - // try to log in with invalid password - _, err = testAdminClient.LogIn( - context.Background(), - &api.LogInRequest{ - Username: helper.AdminUser, - Password: invalidSlugName, - }, - ) - assert.Equal(t, codes.Unauthenticated, status.Convert(err).Code()) + testcases.RunAdminLoginTest(t, testAdminClient) }) t.Run("admin create project test", func(t *testing.T) { - projectName := helper.TestSlugName(t) - - resp, err := testAdminClient.LogIn( - context.Background(), - &api.LogInRequest{ - Username: helper.AdminUser, - Password: helper.AdminPassword, - }, - ) - assert.NoError(t, err) - - testAdminAuthInterceptor.SetToken(resp.Token) - - _, err = testAdminClient.CreateProject( - context.Background(), - &api.CreateProjectRequest{ - Name: projectName, - }, - ) - assert.NoError(t, err) - - // try to create project with existing name - _, err = testAdminClient.CreateProject( - context.Background(), - &api.CreateProjectRequest{ - Name: projectName, - }, - ) - assert.Equal(t, codes.AlreadyExists, status.Convert(err).Code()) + testcases.RunAdminCreateProjectTest(t, testAdminClient, testAdminAuthInterceptor) }) t.Run("admin list projects test", func(t *testing.T) { - resp, err := testAdminClient.LogIn( - context.Background(), - &api.LogInRequest{ - Username: helper.AdminUser, - Password: 
helper.AdminPassword, - }, - ) - assert.NoError(t, err) - - testAdminAuthInterceptor.SetToken(resp.Token) - - _, err = testAdminClient.CreateProject( - context.Background(), - &api.CreateProjectRequest{ - Name: helper.TestSlugName(t), - }, - ) - assert.NoError(t, err) - - _, err = testAdminClient.ListProjects( - context.Background(), - &api.ListProjectsRequest{}, - ) - assert.NoError(t, err) + testcases.RunAdminListProjectsTest(t, testAdminClient, testAdminAuthInterceptor) }) t.Run("admin get project test", func(t *testing.T) { - projectName := helper.TestSlugName(t) - - resp, err := testAdminClient.LogIn( - context.Background(), - &api.LogInRequest{ - Username: helper.AdminUser, - Password: helper.AdminPassword, - }, - ) - assert.NoError(t, err) - - testAdminAuthInterceptor.SetToken(resp.Token) - - _, err = testAdminClient.CreateProject( - context.Background(), - &api.CreateProjectRequest{ - Name: projectName, - }, - ) - assert.NoError(t, err) - - _, err = testAdminClient.GetProject( - context.Background(), - &api.GetProjectRequest{ - Name: projectName, - }, - ) - assert.NoError(t, err) - - // try to get project with non-existing name - _, err = testAdminClient.GetProject( - context.Background(), - &api.GetProjectRequest{ - Name: invalidSlugName, - }, - ) - assert.Equal(t, codes.NotFound, status.Convert(err).Code()) + testcases.RunAdminGetProjectTest(t, testAdminClient, testAdminAuthInterceptor) }) t.Run("admin update project test", func(t *testing.T) { - projectName := helper.TestSlugName(t) - - resp, err := testAdminClient.LogIn( - context.Background(), - &api.LogInRequest{ - Username: helper.AdminUser, - Password: helper.AdminPassword, - }, - ) - assert.NoError(t, err) - - testAdminAuthInterceptor.SetToken(resp.Token) - - createResp, err := testAdminClient.CreateProject( - context.Background(), - &api.CreateProjectRequest{ - Name: projectName, - }, - ) - assert.NoError(t, err) - - _, err = testAdminClient.UpdateProject( - context.Background(), - 
&api.UpdateProjectRequest{ - Id: createResp.Project.Id, - Fields: &api.UpdatableProjectFields{ - Name: &types.StringValue{Value: "updated"}, - }, - }, - ) - assert.NoError(t, err) - - // try to update project with invalid field - _, err = testAdminClient.UpdateProject( - context.Background(), - &api.UpdateProjectRequest{ - Id: projectName, - Fields: &api.UpdatableProjectFields{ - Name: &types.StringValue{Value: invalidSlugName}, - }, - }, - ) - assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) + testcases.RunAdminUpdateProjectTest(t, testAdminClient, testAdminAuthInterceptor) }) t.Run("admin list documents test", func(t *testing.T) { - resp, err := testAdminClient.LogIn( - context.Background(), - &api.LogInRequest{ - Username: helper.AdminUser, - Password: helper.AdminPassword, - }, - ) - assert.NoError(t, err) - - testAdminAuthInterceptor.SetToken(resp.Token) - - _, err = testAdminClient.ListDocuments( - context.Background(), - &api.ListDocumentsRequest{ - ProjectName: defaultProjectName, - }, - ) - assert.NoError(t, err) - - // try to list documents with non-existing project name - _, err = testAdminClient.ListDocuments( - context.Background(), - &api.ListDocumentsRequest{ - ProjectName: invalidSlugName, - }, - ) - assert.Equal(t, codes.NotFound, status.Convert(err).Code()) + testcases.RunAdminListDocumentsTest(t, testAdminClient, testAdminAuthInterceptor) }) t.Run("admin get document test", func(t *testing.T) { - testDocumentKey := helper.TestDocKey(t).String() - - resp, err := testAdminClient.LogIn( - context.Background(), - &api.LogInRequest{ - Username: helper.AdminUser, - Password: helper.AdminPassword, - }, - ) - assert.NoError(t, err) - - testAdminAuthInterceptor.SetToken(resp.Token) - - activateResp, err := testClient.ActivateClient( - context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, - ) - assert.NoError(t, err) - - packWithNoChanges := &api.ChangePack{ - DocumentKey: testDocumentKey, - Checkpoint: 
&api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - } - - _, err = testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - _, err = testAdminClient.GetDocument( - context.Background(), - &api.GetDocumentRequest{ - ProjectName: defaultProjectName, - DocumentKey: testDocumentKey, - }, - ) - assert.NoError(t, err) - - // try to get document with non-existing document name - _, err = testAdminClient.GetDocument( - context.Background(), - &api.GetDocumentRequest{ - ProjectName: defaultProjectName, - DocumentKey: invalidChangePack.DocumentKey, - }, - ) - assert.Equal(t, codes.NotFound, status.Convert(err).Code()) + testcases.RunAdminGetDocumentTest(t, testClient, testAdminClient, testAdminAuthInterceptor) }) t.Run("admin list changes test", func(t *testing.T) { - testDocumentKey := helper.TestDocKey(t).String() - - resp, err := testAdminClient.LogIn( - context.Background(), - &api.LogInRequest{ - Username: helper.AdminUser, - Password: helper.AdminPassword, - }, - ) - assert.NoError(t, err) - - testAdminAuthInterceptor.SetToken(resp.Token) - - activateResp, err := testClient.ActivateClient( - context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, - ) - assert.NoError(t, err) - - packWithNoChanges := &api.ChangePack{ - DocumentKey: testDocumentKey, - Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, - } - - _, err = testClient.AttachDocument( - context.Background(), - &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, - ChangePack: packWithNoChanges, - }, - ) - assert.NoError(t, err) - - _, err = testAdminClient.ListChanges( - context.Background(), - &api.ListChangesRequest{ - ProjectName: defaultProjectName, - DocumentKey: testDocumentKey, - }, - ) - assert.NoError(t, err) - - // try to list changes with non-existing document name - _, err = testAdminClient.ListChanges( - context.Background(), - 
&api.ListChangesRequest{ - ProjectName: defaultProjectName, - DocumentKey: invalidChangePack.DocumentKey, - }, - ) - assert.Equal(t, codes.NotFound, status.Convert(err).Code()) + testcases.RunAdminListChangesTest(t, testClient, testAdminClient, testAdminAuthInterceptor) }) } diff --git a/server/rpc/testcases/testcases.go b/server/rpc/testcases/testcases.go new file mode 100644 index 000000000..5c1cbed2b --- /dev/null +++ b/server/rpc/testcases/testcases.go @@ -0,0 +1,1002 @@ +/* + * Copyright 2023 The Yorkie Authors. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Package testcases contains testcases for server +package testcases + +import ( + "context" + "encoding/hex" + "testing" + "time" + + "github.com/gogo/protobuf/types" + "github.com/stretchr/testify/assert" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "github.com/yorkie-team/yorkie/admin" + api "github.com/yorkie-team/yorkie/api/yorkie/v1" + "github.com/yorkie-team/yorkie/test/helper" +) + +var ( + defaultProjectName = "default" + invalidSlugName = "@#$%^&*()_+" + + nilClientID = "000000000000000000000000" + emptyClientID = "" + invalidClientID = "invalid" + + invalidChangePack = &api.ChangePack{ + DocumentKey: "invalid", + Checkpoint: nil, + } +) + +// RunActivateAndDeactivateClientTest runs the ActivateClient and DeactivateClient test. 
+func RunActivateAndDeactivateClientTest( + t *testing.T, + testClient api.YorkieServiceClient, +) { + activateResp, err := testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: t.Name()}, + ) + assert.NoError(t, err) + + _, err = testClient.DeactivateClient( + context.Background(), + &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, + ) + assert.NoError(t, err) + + // invalid argument + _, err = testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: ""}, + ) + assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) + + _, err = testClient.DeactivateClient( + context.Background(), + &api.DeactivateClientRequest{ClientId: emptyClientID}, + ) + assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) + + // client not found + _, err = testClient.DeactivateClient( + context.Background(), + &api.DeactivateClientRequest{ClientId: nilClientID}, + ) + assert.Equal(t, codes.NotFound, status.Convert(err).Code()) + +} + +// RunAttachAndDetachDocumentTest runs the AttachDocument and DetachDocument test. 
+func RunAttachAndDetachDocumentTest( + t *testing.T, + testClient api.YorkieServiceClient, +) { + activateResp, err := testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: t.Name()}, + ) + assert.NoError(t, err) + + packWithNoChanges := &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + } + + resPack, err := testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + // try to attach with invalid client ID + _, err = testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: invalidClientID, + ChangePack: packWithNoChanges, + }, + ) + assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) + + // try to attach with invalid client + _, err = testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: nilClientID, + ChangePack: packWithNoChanges, + }, + ) + assert.Equal(t, codes.NotFound, status.Convert(err).Code()) + + // try to attach already attached document + _, err = testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) + + // try to attach invalid change pack + _, err = testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: invalidChangePack, + }, + ) + assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) + + _, err = testClient.DetachDocument( + context.Background(), + &api.DetachDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + // try to detach already detached document + _, err = 
testClient.DetachDocument( + context.Background(), + &api.DetachDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithNoChanges, + }, + ) + assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) + + _, err = testClient.DetachDocument( + context.Background(), + &api.DetachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: invalidChangePack, + }, + ) + assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) + + // document not found + _, err = testClient.DetachDocument( + context.Background(), + &api.DetachDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: "000000000000000000000000", + ChangePack: &api.ChangePack{ + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + }, + }, + ) + assert.Equal(t, codes.NotFound, status.Convert(err).Code()) + + _, err = testClient.DeactivateClient( + context.Background(), + &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, + ) + assert.NoError(t, err) + + // try to attach the document with a deactivated client + _, err = testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) +} + +// RunAttachAndDetachRemovedDocumentTest runs the AttachDocument and DetachDocument test on a removed document. 
+func RunAttachAndDetachRemovedDocumentTest( + t *testing.T, + testClient api.YorkieServiceClient, +) { + activateResp, err := testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: t.Name()}, + ) + assert.NoError(t, err) + + packWithNoChanges := &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + } + + packWithRemoveRequest := &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + IsRemoved: true, + } + + resPack, err := testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + _, err = testClient.RemoveDocument( + context.Background(), + &api.RemoveDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithRemoveRequest, + }, + ) + assert.NoError(t, err) + + // try to detach document with same ID as removed document + // FailedPrecondition because document is not attached. + _, err = testClient.DetachDocument( + context.Background(), + &api.DetachDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithNoChanges, + }, + ) + assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) + + // try to create new document with same key as removed document + resPack, err = testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + _, err = testClient.RemoveDocument( + context.Background(), + &api.RemoveDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithRemoveRequest, + }, + ) + assert.NoError(t, err) +} + +// RunPushPullChangeTest runs the PushChange and PullChange test. 
+func RunPushPullChangeTest( + t *testing.T, + testClient api.YorkieServiceClient, +) { + packWithNoChanges := &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + } + + activateResp, err := testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: helper.TestDocKey(t).String()}, + ) + assert.NoError(t, err) + + actorID, _ := hex.DecodeString(activateResp.ClientId) + resPack, err := testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 1}, + Changes: []*api.Change{{ + Id: &api.ChangeID{ + ClientSeq: 1, + Lamport: 1, + ActorId: actorID, + }, + }}, + }, + }, + ) + assert.NoError(t, err) + + _, err = testClient.PushPullChanges( + context.Background(), + &api.PushPullChangesRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 2}, + Changes: []*api.Change{{ + Id: &api.ChangeID{ + ClientSeq: 2, + Lamport: 2, + ActorId: actorID, + }, + }}, + }, + }, + ) + assert.NoError(t, err) + + _, err = testClient.DetachDocument( + context.Background(), + &api.DetachDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 3}, + Changes: []*api.Change{{ + Id: &api.ChangeID{ + ClientSeq: 3, + Lamport: 3, + ActorId: actorID, + }, + }}, + }, + }, + ) + assert.NoError(t, err) + + // try to push/pull with detached document + _, err = testClient.PushPullChanges( + context.Background(), + &api.PushPullChangesRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + 
ChangePack: packWithNoChanges, + }, + ) + assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) + + // try to push/pull with invalid pack + _, err = testClient.PushPullChanges( + context.Background(), + &api.PushPullChangesRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: invalidChangePack, + }, + ) + assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) + + _, err = testClient.DeactivateClient( + context.Background(), + &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, + ) + assert.NoError(t, err) + + // try to push/pull with deactivated client + _, err = testClient.PushPullChanges( + context.Background(), + &api.PushPullChangesRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithNoChanges, + }, + ) + assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) +} + +// RunPushPullChangeOnRemovedDocumentTest runs the PushChange and PullChange test on a removed document. 
+func RunPushPullChangeOnRemovedDocumentTest( + t *testing.T, + testClient api.YorkieServiceClient, +) { + packWithNoChanges := &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + } + + packWithRemoveRequest := &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + IsRemoved: true, + } + + activateResp, err := testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: helper.TestDocKey(t).String()}, + ) + assert.NoError(t, err) + + resPack, err := testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + _, err = testClient.RemoveDocument( + context.Background(), + &api.RemoveDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithRemoveRequest, + }, + ) + assert.NoError(t, err) + + // try to push/pull on removed document + _, err = testClient.PushPullChanges( + context.Background(), + &api.PushPullChangesRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithNoChanges, + }, + ) + assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) +} + +// RunRemoveDocumentTest runs the RemoveDocument test. 
+func RunRemoveDocumentTest( + t *testing.T, + testClient api.YorkieServiceClient, +) { + activateResp, err := testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: t.Name()}, + ) + assert.NoError(t, err) + + packWithNoChanges := &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + } + + packWithRemoveRequest := &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + IsRemoved: true, + } + + resPack, err := testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + _, err = testClient.RemoveDocument( + context.Background(), + &api.RemoveDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithRemoveRequest, + }, + ) + assert.NoError(t, err) + + // try to remove removed document + _, err = testClient.RemoveDocument( + context.Background(), + &api.RemoveDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithRemoveRequest, + }, + ) + assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) +} + +// RunRemoveDocumentWithInvalidClientStateTest runs the RemoveDocument test with an invalid client state. 
+func RunRemoveDocumentWithInvalidClientStateTest( + t *testing.T, + testClient api.YorkieServiceClient, +) { + activateResp, err := testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: t.Name()}, + ) + assert.NoError(t, err) + + packWithNoChanges := &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + } + + packWithRemoveRequest := &api.ChangePack{ + DocumentKey: helper.TestDocKey(t).String(), + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + IsRemoved: true, + } + + resPack, err := testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + _, err = testClient.DetachDocument( + context.Background(), + &api.DetachDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + // try to remove detached document + _, err = testClient.RemoveDocument( + context.Background(), + &api.RemoveDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithRemoveRequest, + }, + ) + assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) + + _, err = testClient.DeactivateClient( + context.Background(), + &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, + ) + assert.NoError(t, err) + + // try to remove document with a deactivated client + _, err = testClient.RemoveDocument( + context.Background(), + &api.RemoveDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentId: resPack.DocumentId, + ChangePack: packWithRemoveRequest, + }, + ) + assert.Equal(t, codes.FailedPrecondition, status.Convert(err).Code()) +} + +// RunWatchDocumentTest runs the WatchDocument test. 
+func RunWatchDocumentTest( + t *testing.T, + testClient api.YorkieServiceClient, +) { + activateResp, err := testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: t.Name()}, + ) + assert.NoError(t, err) + + docKey := helper.TestDocKey(t).String() + + packWithNoChanges := &api.ChangePack{ + DocumentKey: docKey, + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + } + + resPack, err := testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + // watch document + watchResp, err := testClient.WatchDocument( + context.Background(), + &api.WatchDocumentRequest{ + ClientId: activateResp.ClientId, + DocumentKey: docKey, + DocumentId: resPack.DocumentId, + }, + ) + assert.NoError(t, err) + + // check if stream is open + _, err = watchResp.Recv() + assert.NoError(t, err) + + // wait for MaxConnectionAge + MaxConnectionAgeGrace + time.Sleep(helper.RPCMaxConnectionAge + helper.RPCMaxConnectionAgeGrace) + + // check if stream has closed by server (EOF) + _, err = watchResp.Recv() + assert.Equal(t, codes.Unavailable, status.Code(err)) + assert.Contains(t, err.Error(), "EOF") +} + +// RunAdminSignUpTest runs the SignUp test in admin. +func RunAdminSignUpTest( + t *testing.T, + testAdminClient api.AdminServiceClient, +) { + adminUser := helper.TestSlugName(t) + adminPassword := helper.AdminPassword + "123!" + + _, err := testAdminClient.SignUp( + context.Background(), + &api.SignUpRequest{ + Username: adminUser, + Password: adminPassword, + }, + ) + assert.NoError(t, err) + + // try to sign up with existing username + _, err = testAdminClient.SignUp( + context.Background(), + &api.SignUpRequest{ + Username: adminUser, + Password: adminPassword, + }, + ) + assert.Equal(t, codes.AlreadyExists, status.Convert(err).Code()) +} + +// RunAdminLoginTest runs the Admin Login test. 
+func RunAdminLoginTest( + t *testing.T, + testAdminClient api.AdminServiceClient, +) { + _, err := testAdminClient.LogIn( + context.Background(), + &api.LogInRequest{ + Username: helper.AdminUser, + Password: helper.AdminPassword, + }, + ) + assert.NoError(t, err) + + // try to log in with invalid password + _, err = testAdminClient.LogIn( + context.Background(), + &api.LogInRequest{ + Username: helper.AdminUser, + Password: invalidSlugName, + }, + ) + assert.Equal(t, codes.Unauthenticated, status.Convert(err).Code()) +} + +// RunAdminCreateProjectTest runs the CreateProject test in admin. +func RunAdminCreateProjectTest( + t *testing.T, + testAdminClient api.AdminServiceClient, + testAdminAuthInterceptor *admin.AuthInterceptor, +) { + projectName := helper.TestSlugName(t) + + resp, err := testAdminClient.LogIn( + context.Background(), + &api.LogInRequest{ + Username: helper.AdminUser, + Password: helper.AdminPassword, + }, + ) + assert.NoError(t, err) + + testAdminAuthInterceptor.SetToken(resp.Token) + + _, err = testAdminClient.CreateProject( + context.Background(), + &api.CreateProjectRequest{ + Name: projectName, + }, + ) + assert.NoError(t, err) + + // try to create project with existing name + _, err = testAdminClient.CreateProject( + context.Background(), + &api.CreateProjectRequest{ + Name: projectName, + }, + ) + assert.Equal(t, codes.AlreadyExists, status.Convert(err).Code()) +} + +// RunAdminListProjectsTest runs the ListProjects test in admin. 
+func RunAdminListProjectsTest( + t *testing.T, + testAdminClient api.AdminServiceClient, + testAdminAuthInterceptor *admin.AuthInterceptor, +) { + resp, err := testAdminClient.LogIn( + context.Background(), + &api.LogInRequest{ + Username: helper.AdminUser, + Password: helper.AdminPassword, + }, + ) + assert.NoError(t, err) + + testAdminAuthInterceptor.SetToken(resp.Token) + + _, err = testAdminClient.CreateProject( + context.Background(), + &api.CreateProjectRequest{ + Name: helper.TestSlugName(t), + }, + ) + assert.NoError(t, err) + + _, err = testAdminClient.ListProjects( + context.Background(), + &api.ListProjectsRequest{}, + ) + assert.NoError(t, err) +} + +// RunAdminGetProjectTest runs the GetProject test in admin. +func RunAdminGetProjectTest( + t *testing.T, + testAdminClient api.AdminServiceClient, + testAdminAuthInterceptor *admin.AuthInterceptor, +) { + projectName := helper.TestSlugName(t) + + resp, err := testAdminClient.LogIn( + context.Background(), + &api.LogInRequest{ + Username: helper.AdminUser, + Password: helper.AdminPassword, + }, + ) + assert.NoError(t, err) + + testAdminAuthInterceptor.SetToken(resp.Token) + + _, err = testAdminClient.CreateProject( + context.Background(), + &api.CreateProjectRequest{ + Name: projectName, + }, + ) + assert.NoError(t, err) + + _, err = testAdminClient.GetProject( + context.Background(), + &api.GetProjectRequest{ + Name: projectName, + }, + ) + assert.NoError(t, err) + + // try to get project with non-existing name + _, err = testAdminClient.GetProject( + context.Background(), + &api.GetProjectRequest{ + Name: invalidSlugName, + }, + ) + assert.Equal(t, codes.NotFound, status.Convert(err).Code()) +} + +// RunAdminUpdateProjectTest runs the UpdateProject test in admin. 
+func RunAdminUpdateProjectTest( + t *testing.T, + testAdminClient api.AdminServiceClient, + testAdminAuthInterceptor *admin.AuthInterceptor, +) { + projectName := helper.TestSlugName(t) + + resp, err := testAdminClient.LogIn( + context.Background(), + &api.LogInRequest{ + Username: helper.AdminUser, + Password: helper.AdminPassword, + }, + ) + assert.NoError(t, err) + + testAdminAuthInterceptor.SetToken(resp.Token) + + createResp, err := testAdminClient.CreateProject( + context.Background(), + &api.CreateProjectRequest{ + Name: projectName, + }, + ) + assert.NoError(t, err) + + _, err = testAdminClient.UpdateProject( + context.Background(), + &api.UpdateProjectRequest{ + Id: createResp.Project.Id, + Fields: &api.UpdatableProjectFields{ + Name: &types.StringValue{Value: "updated"}, + }, + }, + ) + assert.NoError(t, err) + + // try to update project with invalid field + _, err = testAdminClient.UpdateProject( + context.Background(), + &api.UpdateProjectRequest{ + Id: projectName, + Fields: &api.UpdatableProjectFields{ + Name: &types.StringValue{Value: invalidSlugName}, + }, + }, + ) + assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) +} + +// RunAdminListDocumentsTest runs the ListDocuments test in admin. 
+func RunAdminListDocumentsTest( + t *testing.T, + testAdminClient api.AdminServiceClient, + testAdminAuthInterceptor *admin.AuthInterceptor, +) { + resp, err := testAdminClient.LogIn( + context.Background(), + &api.LogInRequest{ + Username: helper.AdminUser, + Password: helper.AdminPassword, + }, + ) + assert.NoError(t, err) + + testAdminAuthInterceptor.SetToken(resp.Token) + + _, err = testAdminClient.ListDocuments( + context.Background(), + &api.ListDocumentsRequest{ + ProjectName: defaultProjectName, + }, + ) + assert.NoError(t, err) + + // try to list documents with non-existing project name + _, err = testAdminClient.ListDocuments( + context.Background(), + &api.ListDocumentsRequest{ + ProjectName: invalidSlugName, + }, + ) + assert.Equal(t, codes.NotFound, status.Convert(err).Code()) +} + +// RunAdminGetDocumentTest runs the GetDocument test in admin. +func RunAdminGetDocumentTest( + t *testing.T, + testClient api.YorkieServiceClient, + testAdminClient api.AdminServiceClient, + testAdminAuthInterceptor *admin.AuthInterceptor, +) { + testDocumentKey := helper.TestDocKey(t).String() + + resp, err := testAdminClient.LogIn( + context.Background(), + &api.LogInRequest{ + Username: helper.AdminUser, + Password: helper.AdminPassword, + }, + ) + assert.NoError(t, err) + + testAdminAuthInterceptor.SetToken(resp.Token) + + activateResp, err := testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: t.Name()}, + ) + assert.NoError(t, err) + + packWithNoChanges := &api.ChangePack{ + DocumentKey: testDocumentKey, + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + } + + _, err = testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + _, err = testAdminClient.GetDocument( + context.Background(), + &api.GetDocumentRequest{ + ProjectName: defaultProjectName, + DocumentKey: testDocumentKey, + }, + ) + 
assert.NoError(t, err) + + // try to get document with non-existing document name + _, err = testAdminClient.GetDocument( + context.Background(), + &api.GetDocumentRequest{ + ProjectName: defaultProjectName, + DocumentKey: invalidChangePack.DocumentKey, + }, + ) + assert.Equal(t, codes.NotFound, status.Convert(err).Code()) +} + +// RunAdminListChangesTest runs the ListChanges test in admin. +func RunAdminListChangesTest( + t *testing.T, + testClient api.YorkieServiceClient, + testAdminClient api.AdminServiceClient, + testAdminAuthInterceptor *admin.AuthInterceptor, +) { + testDocumentKey := helper.TestDocKey(t).String() + + resp, err := testAdminClient.LogIn( + context.Background(), + &api.LogInRequest{ + Username: helper.AdminUser, + Password: helper.AdminPassword, + }, + ) + assert.NoError(t, err) + + testAdminAuthInterceptor.SetToken(resp.Token) + + activateResp, err := testClient.ActivateClient( + context.Background(), + &api.ActivateClientRequest{ClientKey: t.Name()}, + ) + assert.NoError(t, err) + + packWithNoChanges := &api.ChangePack{ + DocumentKey: testDocumentKey, + Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, + } + + _, err = testClient.AttachDocument( + context.Background(), + &api.AttachDocumentRequest{ + ClientId: activateResp.ClientId, + ChangePack: packWithNoChanges, + }, + ) + assert.NoError(t, err) + + _, err = testAdminClient.ListChanges( + context.Background(), + &api.ListChangesRequest{ + ProjectName: defaultProjectName, + DocumentKey: testDocumentKey, + }, + ) + assert.NoError(t, err) + + // try to list changes with non-existing document name + _, err = testAdminClient.ListChanges( + context.Background(), + &api.ListChangesRequest{ + ProjectName: defaultProjectName, + DocumentKey: invalidChangePack.DocumentKey, + }, + ) + assert.Equal(t, codes.NotFound, status.Convert(err).Code()) +} diff --git a/test/helper/helper.go b/test/helper/helper.go index 745b5bb06..e3dea2aaf 100644 --- a/test/helper/helper.go +++ b/test/helper/helper.go 
@@ -26,8 +26,13 @@ import ( gotime "time" "github.com/stretchr/testify/assert" + "go.mongodb.org/mongo-driver/bson" + gomongo "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + "go.mongodb.org/mongo-driver/mongo/readpref" adminClient "github.com/yorkie-team/yorkie/admin" + "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/internal/validation" "github.com/yorkie-team/yorkie/pkg/document" "github.com/yorkie-team/yorkie/pkg/document/change" @@ -39,8 +44,10 @@ import ( "github.com/yorkie-team/yorkie/pkg/index" "github.com/yorkie-team/yorkie/server" "github.com/yorkie-team/yorkie/server/backend" + "github.com/yorkie-team/yorkie/server/backend/database" "github.com/yorkie-team/yorkie/server/backend/database/mongo" "github.com/yorkie-team/yorkie/server/backend/housekeeping" + "github.com/yorkie-team/yorkie/server/logging" "github.com/yorkie-team/yorkie/server/profiling" "github.com/yorkie-team/yorkie/server/rpc" ) @@ -322,3 +329,129 @@ func NewRangeSlice(start, end int) []int { } return slice } + +// setupRawMongoClient returns the raw mongo client. +func setupRawMongoClient(databaseName string) (*gomongo.Client, error) { + conf := &mongo.Config{ + ConnectionTimeout: "5s", + ConnectionURI: "mongodb://localhost:27017", + YorkieDatabase: databaseName, + PingTimeout: "5s", + } + + ctx, cancel := context.WithTimeout(context.Background(), conf.ParseConnectionTimeout()) + defer cancel() + + client, err := gomongo.Connect( + ctx, + options.Client(). + ApplyURI(conf.ConnectionURI). 
+ SetRegistry(mongo.NewRegistryBuilder().Build()), + ) + if err != nil { + return nil, fmt.Errorf("connect to mongo: %w", err) + } + + pingTimeout := conf.ParsePingTimeout() + ctxPing, cancel := context.WithTimeout(ctx, pingTimeout) + defer cancel() + + if err := client.Ping(ctxPing, readpref.Primary()); err != nil { + return nil, fmt.Errorf("ping mongo: %w", err) + } + + logging.DefaultLogger().Infof("MongoDB connected, URI: %s, DB: %s", conf.ConnectionURI, conf.YorkieDatabase) + + return client, nil +} + +// CleanUpAllCollections removes all data in every collection. +func CleanUpAllCollections(databaseName string) error { + cli, err := setupRawMongoClient(databaseName) + if err != nil { + return err + } + + collections := []string{mongo.ColProjects, mongo.ColUsers, mongo.ColClients, + mongo.ColDocuments, mongo.ColChanges, mongo.ColSnapshots, mongo.ColSyncedSeqs} + + for _, col := range collections { + _, err := cli.Database(databaseName).Collection(col).DeleteMany(context.Background(), bson.D{}) + if err != nil { + return err + } + } + return nil +} + +// CreateDummyDocumentWithID creates a new dummy document with the given ID and key. +func CreateDummyDocumentWithID( + databaseName string, + projectID types.ID, + docID types.ID, + docKey key.Key, +) error { + encodedProjectID, err := mongo.EncodeID(projectID) + if err != nil { + return err + } + encodedDocID, err := mongo.EncodeID(docID) + if err != nil { + return err + } + cli, err := setupRawMongoClient(databaseName) + if err != nil { + return err + } + _, err = cli.Database(databaseName).Collection(mongo.ColDocuments).InsertOne( + context.Background(), + bson.M{ + "_id": encodedDocID, + "project_id": encodedProjectID, + "key": docKey, + }, + ) + if err != nil { + return err + } + + return nil +} + +// FindDocInfosWithID finds the docInfo of the given projectID and docID. 
+func FindDocInfosWithID( + databaseName string, + projectID types.ID, + docID types.ID, +) ([]*database.DocInfo, error) { + ctx := context.Background() + encodedProjectID, err := mongo.EncodeID(projectID) + if err != nil { + return nil, err + } + encodedDocID, err := mongo.EncodeID(docID) + if err != nil { + return nil, err + } + cli, err := setupRawMongoClient(databaseName) + if err != nil { + return nil, err + } + + cursor, err := cli.Database(databaseName).Collection(mongo.ColDocuments).Find( + ctx, + bson.M{ + "_id": encodedDocID, + "project_id": encodedProjectID, + }, options.Find()) + if err != nil { + return nil, err + } + + var infos []*database.DocInfo + if err := cursor.All(ctx, &infos); err != nil { + return nil, err + } + + return infos, nil +} diff --git a/test/shard/mongo_client_test.go b/test/shard/mongo_client_test.go new file mode 100644 index 000000000..abaa0cecf --- /dev/null +++ b/test/shard/mongo_client_test.go @@ -0,0 +1,222 @@ +//go:build shard + +/* + * Copyright 2023 The Yorkie Authors. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package shard + +import ( + "context" + "fmt" + "strconv" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" + "github.com/yorkie-team/yorkie/server/backend/database" + "github.com/yorkie-team/yorkie/server/backend/database/mongo" + "github.com/yorkie-team/yorkie/server/backend/database/testcases" + "github.com/yorkie-team/yorkie/test/helper" +) + +const ( + shardedDBNameForMongoClient = "yorkie-meta-1" + dummyProjectID = types.ID("000000000000000000000000") + projectOneID = types.ID("000000000000000000000001") + projectTwoID = types.ID("000000000000000000000002") + dummyOwnerName = "dummy" + dummyClientID = types.ID("000000000000000000000000") + clientDeactivateThreshold = "1h" +) + +func setupMongoClient(databaseName string) (*mongo.Client, error) { + config := &mongo.Config{ + ConnectionTimeout: "5s", + ConnectionURI: "mongodb://localhost:27017", + YorkieDatabase: databaseName, + PingTimeout: "5s", + } + if err := config.Validate(); err != nil { + return nil, err + } + + cli, err := mongo.Dial(config) + if err != nil { + return nil, err + } + + return cli, nil +} + +func TestClientWithShardedDB(t *testing.T) { + // Cleanup the previous data in DB + assert.NoError(t, helper.CleanUpAllCollections(shardedDBNameForMongoClient)) + + cli, err := setupMongoClient(shardedDBNameForMongoClient) + assert.NoError(t, err) + + t.Run("RunFindDocInfo test", func(t *testing.T) { + testcases.RunFindDocInfoTest(t, cli, dummyProjectID) + }) + + t.Run("RunFindDocInfosByQuery test", func(t *testing.T) { + t.Skip("TODO(hackerwins): the order of docInfos is different with memDB") + testcases.RunFindDocInfosByQueryTest(t, cli, projectOneID) + }) + + t.Run("RunFindChangesBetweenServerSeqs test", func(t *testing.T) { + testcases.RunFindChangesBetweenServerSeqsTest(t, cli, dummyProjectID) + }) + + t.Run("RunFindClosestSnapshotInfo test", func(t *testing.T) { + 
testcases.RunFindClosestSnapshotInfoTest(t, cli, dummyProjectID) + }) + + t.Run("ListUserInfos test", func(t *testing.T) { + t.Skip("TODO(hackerwins): time is returned as Local") + testcases.RunListUserInfosTest(t, cli) + }) + + t.Run("FindProjectInfoByName test", func(t *testing.T) { + testcases.RunFindProjectInfoByNameTest(t, cli) + }) + + t.Run("ActivateClientDeactivateClient test", func(t *testing.T) { + testcases.RunActivateClientDeactivateClientTest(t, cli, dummyProjectID) + }) + + t.Run("UpdateProjectInfo test", func(t *testing.T) { + testcases.RunUpdateProjectInfoTest(t, cli) + }) + + t.Run("FindDocInfosByPaging test", func(t *testing.T) { + testcases.RunFindDocInfosByPagingTest(t, cli, projectTwoID) + }) + + t.Run("CreateChangeInfo test", func(t *testing.T) { + testcases.RunCreateChangeInfosTest(t, cli, dummyProjectID) + }) + + t.Run("UpdateClientInfoAfterPushPull test", func(t *testing.T) { + testcases.RunUpdateClientInfoAfterPushPullTest(t, cli, dummyProjectID) + }) + + t.Run("IsDocumentAttached test", func(t *testing.T) { + testcases.RunIsDocumentAttachedTest(t, cli, dummyProjectID) + }) + + t.Run("FindDeactivateCandidates test", func(t *testing.T) { + testcases.RunFindDeactivateCandidates(t, cli) + }) + + t.Run("FindDocInfoByKeyAndID with duplicate ID test", func(t *testing.T) { + ctx := context.Background() + + // 01. Initialize a project and create a document. + projectInfo, err := cli.CreateProjectInfo(ctx, t.Name(), dummyOwnerName, clientDeactivateThreshold) + assert.NoError(t, err) + + docKey1 := key.Key(fmt.Sprintf("%s%d", "duplicateIDTestDocKey", 0)) + docInfo1, err := cli.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientID, docKey1, true) + assert.NoError(t, err) + + // 02. Create an extra document with duplicate ID. 
+ docKey2 := key.Key(fmt.Sprintf("%s%d", "duplicateIDTestDocKey", 5)) + err = helper.CreateDummyDocumentWithID( + shardedDBNameForMongoClient, + projectInfo.ID, + docInfo1.ID, + docKey2, + ) + assert.NoError(t, err) + + // 03. Check if there are two documents with the same ID. + infos, err := helper.FindDocInfosWithID( + shardedDBNameForMongoClient, + projectInfo.ID, + docInfo1.ID, + ) + assert.NoError(t, err) + assert.Len(t, infos, 2) + + // 04. Check if the document is correctly found using docKey and docID. + result, err := cli.FindDocInfoByKeyAndID( + ctx, + docKey1, + docInfo1.ID, + ) + assert.NoError(t, err) + assert.Equal(t, docInfo1.Key, result.Key) + assert.Equal(t, docInfo1.ID, result.ID) + }) + + t.Run("FindDocInfosByPaging with duplicate ID test", func(t *testing.T) { + const totalDocCnt = 10 + const duplicateIDDocCnt = 1 + ctx := context.Background() + + // 01. Initialize a project and create documents. + projectInfo, err := cli.CreateProjectInfo(ctx, t.Name(), dummyOwnerName, clientDeactivateThreshold) + assert.NoError(t, err) + + var docInfos []*database.DocInfo + var duplicateID types.ID + for i := 0; i < totalDocCnt-duplicateIDDocCnt; i++ { + testDocKey := key.Key("duplicateIDTestDocKey" + strconv.Itoa(i)) + docInfo, err := cli.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientID, testDocKey, true) + assert.NoError(t, err) + docInfos = append(docInfos, docInfo) + + if i == 0 { + duplicateID = docInfo.ID + } + } + // NOTE(sejongk): sorting is required because doc_id may not sequentially increase in a sharded DB cluster. + testcases.SortDocInfos(docInfos) + + // 02. Create an extra document with duplicate ID. 
+ for i := totalDocCnt - duplicateIDDocCnt; i < totalDocCnt; i++ { + testDocKey := key.Key("duplicateIDTestDocKey" + strconv.Itoa(i)) + err = helper.CreateDummyDocumentWithID( + shardedDBNameForMongoClient, + projectInfo.ID, + duplicateID, + testDocKey, + ) + assert.NoError(t, err) + docInfos = append(docInfos, &database.DocInfo{ + ID: duplicateID, + Key: testDocKey, + }) + } + testcases.SortDocInfos(docInfos) + + docKeysInReverse := make([]key.Key, 0, totalDocCnt) + for _, docInfo := range docInfos { + docKeysInReverse = append([]key.Key{docInfo.Key}, docKeysInReverse...) + } + + // 03. List the documents. + result, err := cli.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[database.DocOffset]{ + PageSize: 10, + IsForward: false, + }) + assert.NoError(t, err) + testcases.AssertKeys(t, docKeysInReverse, result) + }) +} diff --git a/test/shard/server_test.go b/test/shard/server_test.go new file mode 100644 index 000000000..f49d54dc5 --- /dev/null +++ b/test/shard/server_test.go @@ -0,0 +1,213 @@ +//go:build shard + +/* + * Copyright 2023 The Yorkie Authors. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package shard + +import ( + "context" + "fmt" + "log" + "os" + "testing" + + "github.com/yorkie-team/yorkie/admin" + api "github.com/yorkie-team/yorkie/api/yorkie/v1" + "github.com/yorkie-team/yorkie/client" + "github.com/yorkie-team/yorkie/server/backend" + "github.com/yorkie-team/yorkie/server/backend/database" + "github.com/yorkie-team/yorkie/server/backend/database/mongo" + "github.com/yorkie-team/yorkie/server/backend/housekeeping" + "github.com/yorkie-team/yorkie/server/profiling/prometheus" + "github.com/yorkie-team/yorkie/server/rpc" + "github.com/yorkie-team/yorkie/server/rpc/testcases" + "github.com/yorkie-team/yorkie/test/helper" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" +) + +var ( + shardedDBNameForServer = "yorkie-meta-2" + testRPCServer *rpc.Server + testRPCAddr = fmt.Sprintf("localhost:%d", helper.RPCPort) + testClient api.YorkieServiceClient + testAdminAuthInterceptor *admin.AuthInterceptor + testAdminClient api.AdminServiceClient +) + +func TestMain(m *testing.M) { + // Cleanup the previous data in DB + err := helper.CleanUpAllCollections(shardedDBNameForServer) + if err != nil { + log.Fatal(err) + } + + met, err := prometheus.NewMetrics() + if err != nil { + log.Fatal(err) + } + + be, err := backend.New(&backend.Config{ + AdminUser: helper.AdminUser, + AdminPassword: helper.AdminPassword, + ClientDeactivateThreshold: helper.ClientDeactivateThreshold, + SnapshotThreshold: helper.SnapshotThreshold, + AuthWebhookCacheSize: helper.AuthWebhookSize, + ProjectInfoCacheSize: helper.ProjectInfoCacheSize, + ProjectInfoCacheTTL: helper.ProjectInfoCacheTTL.String(), + AdminTokenDuration: helper.AdminTokenDuration, + }, &mongo.Config{ + ConnectionURI: helper.MongoConnectionURI, + YorkieDatabase: shardedDBNameForServer, + ConnectionTimeout: helper.MongoConnectionTimeout, + PingTimeout: helper.MongoPingTimeout, + }, &housekeeping.Config{ + Interval: helper.HousekeepingInterval.String(), + CandidatesLimitPerProject: 
helper.HousekeepingCandidatesLimitPerProject, + ProjectFetchSize: helper.HousekeepingProjectFetchSize, + }, met) + if err != nil { + log.Fatal(err) + } + + project, err := be.DB.FindProjectInfoByID( + context.Background(), + database.DefaultProjectID, + ) + if err != nil { + log.Fatal(err) + } + + testRPCServer, err = rpc.NewServer(&rpc.Config{ + Port: helper.RPCPort, + MaxRequestBytes: helper.RPCMaxRequestBytes, + MaxConnectionAge: helper.RPCMaxConnectionAge.String(), + MaxConnectionAgeGrace: helper.RPCMaxConnectionAgeGrace.String(), + }, be) + if err != nil { + log.Fatal(err) + } + + if err := testRPCServer.Start(); err != nil { + log.Fatalf("failed rpc listen: %s\n", err) + } + + var dialOptions []grpc.DialOption + authInterceptor := client.NewAuthInterceptor(project.PublicKey, "") + dialOptions = append(dialOptions, grpc.WithUnaryInterceptor(authInterceptor.Unary())) + dialOptions = append(dialOptions, grpc.WithStreamInterceptor(authInterceptor.Stream())) + dialOptions = append(dialOptions, grpc.WithTransportCredentials(insecure.NewCredentials())) + + conn, err := grpc.Dial(testRPCAddr, dialOptions...) + if err != nil { + log.Fatal(err) + } + testClient = api.NewYorkieServiceClient(conn) + + credentials := grpc.WithTransportCredentials(insecure.NewCredentials()) + dialOptions = []grpc.DialOption{credentials} + + testAdminAuthInterceptor = admin.NewAuthInterceptor("") + dialOptions = append(dialOptions, grpc.WithUnaryInterceptor(testAdminAuthInterceptor.Unary())) + dialOptions = append(dialOptions, grpc.WithStreamInterceptor(testAdminAuthInterceptor.Stream())) + + adminConn, err := grpc.Dial(testRPCAddr, dialOptions...) 
+ if err != nil { + log.Fatal(err) + } + testAdminClient = api.NewAdminServiceClient(adminConn) + + code := m.Run() + + if err := be.Shutdown(); err != nil { + log.Fatal(err) + } + testRPCServer.Shutdown(true) + os.Exit(code) +} + +func TestSDKRPCServerBackendWithShardedDB(t *testing.T) { + t.Run("activate/deactivate client test", func(t *testing.T) { + testcases.RunActivateAndDeactivateClientTest(t, testClient) + }) + + t.Run("attach/detach document test", func(t *testing.T) { + testcases.RunAttachAndDetachDocumentTest(t, testClient) + }) + + t.Run("attach/detach on removed document test", func(t *testing.T) { + testcases.RunAttachAndDetachRemovedDocumentTest(t, testClient) + }) + + t.Run("push/pull changes test", func(t *testing.T) { + testcases.RunPushPullChangeTest(t, testClient) + }) + + t.Run("push/pull on removed document test", func(t *testing.T) { + testcases.RunPushPullChangeOnRemovedDocumentTest(t, testClient) + }) + + t.Run("remove document test", func(t *testing.T) { + testcases.RunRemoveDocumentTest(t, testClient) + }) + + t.Run("remove document with invalid client state test", func(t *testing.T) { + testcases.RunRemoveDocumentWithInvalidClientStateTest(t, testClient) + }) + + t.Run("watch document test", func(t *testing.T) { + testcases.RunWatchDocumentTest(t, testClient) + }) +} + +func TestAdminRPCServerBackendWithShardedDB(t *testing.T) { + t.Run("admin signup test", func(t *testing.T) { + testcases.RunAdminSignUpTest(t, testAdminClient) + }) + + t.Run("admin login test", func(t *testing.T) { + testcases.RunAdminLoginTest(t, testAdminClient) + }) + + t.Run("admin create project test", func(t *testing.T) { + testcases.RunAdminCreateProjectTest(t, testAdminClient, testAdminAuthInterceptor) + }) + + t.Run("admin list projects test", func(t *testing.T) { + testcases.RunAdminListProjectsTest(t, testAdminClient, testAdminAuthInterceptor) + }) + + t.Run("admin get project test", func(t *testing.T) { + testcases.RunAdminGetProjectTest(t, testAdminClient, 
testAdminAuthInterceptor) + }) + + t.Run("admin update project test", func(t *testing.T) { + testcases.RunAdminUpdateProjectTest(t, testAdminClient, testAdminAuthInterceptor) + }) + + t.Run("admin list documents test", func(t *testing.T) { + testcases.RunAdminListDocumentsTest(t, testAdminClient, testAdminAuthInterceptor) + }) + + t.Run("admin get document test", func(t *testing.T) { + testcases.RunAdminGetDocumentTest(t, testClient, testAdminClient, testAdminAuthInterceptor) + }) + + t.Run("admin list changes test", func(t *testing.T) { + testcases.RunAdminListChangesTest(t, testClient, testAdminClient, testAdminAuthInterceptor) + }) +} From af4c73d7bb17ad2689c9adf934fe5db5b1fc8850 Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Sat, 25 Nov 2023 21:27:19 +0900 Subject: [PATCH 05/11] Apply interface changes --- .github/workflows/ci.yml | 2 +- .../backend/database/memory/housekeeping_test.go | 9 +++++++-- test/bench/push_pull_bench_test.go | 15 ++++++++------- test/integration/retention_test.go | 2 ++ 4 files changed, 18 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f5ef29432..1db5358eb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -114,4 +114,4 @@ jobs: run: sleep 30s - name: Run the tests with shard tag - run: go test -tags shard -race -v ./... + run: go test -tags shard -race -v ./test/shard/... 
diff --git a/server/backend/database/memory/housekeeping_test.go b/server/backend/database/memory/housekeeping_test.go index 5c1aeaeb0..d8c0ef894 100644 --- a/server/backend/database/memory/housekeeping_test.go +++ b/server/backend/database/memory/housekeeping_test.go @@ -43,7 +43,12 @@ func TestHousekeeping(t *testing.T) { userInfo, err := memdb.CreateUserInfo(ctx, "test", "test") assert.NoError(t, err) - project, err := memdb.CreateProjectInfo(ctx, database.DefaultProjectName, userInfo.ID, clientDeactivateThreshold) + project, err := memdb.CreateProjectInfo( + ctx, + database.DefaultProjectName, + userInfo.Username, + clientDeactivateThreshold, + ) assert.NoError(t, err) yesterday := gotime.Now().Add(-24 * gotime.Hour) @@ -123,7 +128,7 @@ func createDBandProjects(t *testing.T) (*memory.DB, []*database.ProjectInfo) { projects := make([]*database.ProjectInfo, 0) for i := 0; i < 10; i++ { - p, err := memdb.CreateProjectInfo(ctx, fmt.Sprintf("%d project", i), userInfo.ID, clientDeactivateThreshold) + p, err := memdb.CreateProjectInfo(ctx, fmt.Sprintf("%d project", i), userInfo.Username, clientDeactivateThreshold) assert.NoError(t, err) projects = append(projects, p) diff --git a/test/bench/push_pull_bench_test.go b/test/bench/push_pull_bench_test.go index cab46ea8d..b822d3152 100644 --- a/test/bench/push_pull_bench_test.go +++ b/test/bench/push_pull_bench_test.go @@ -25,6 +25,7 @@ import ( gotime "time" "github.com/stretchr/testify/assert" + "github.com/yorkie-team/yorkie/api/converter" "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/pkg/document" @@ -89,7 +90,7 @@ func setUpClientsAndDocs( assert.NoError(b, err) docInfo, err := be.DB.FindDocInfoByKeyAndOwner(ctx, database.DefaultProjectID, clientInfo.ID, docKey, true) assert.NoError(b, err) - assert.NoError(b, clientInfo.AttachDocument(docInfo.ID)) + assert.NoError(b, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(b, be.DB.UpdateClientInfoAfterPushPull(ctx, clientInfo, 
docInfo)) bytesID, _ := clientInfo.ID.Bytes() @@ -136,7 +137,7 @@ func benchmarkPushChanges( docKey := getDocKey(b, i) clientInfos, docID, docs := setUpClientsAndDocs(ctx, 1, docKey, b, be) pack := createChangePack(changeCnt, docs[0], b) - docInfo, err := documents.FindDocInfo(ctx, be, project, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) assert.NoError(b, err) b.StartTimer() @@ -161,12 +162,12 @@ func benchmarkPullChanges( pushPack := createChangePack(changeCnt, pusherDoc, b) pullPack := createChangePack(0, pullerDoc, b) - docInfo, err := documents.FindDocInfo(ctx, be, project, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) assert.NoError(b, err) _, err = packs.PushPull(ctx, be, project, pusherClientInfo, docInfo, pushPack, types.SyncModePushPull) assert.NoError(b, err) - docInfo, err = documents.FindDocInfo(ctx, be, project, docID) + docInfo, err = documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) assert.NoError(b, err) b.StartTimer() @@ -192,7 +193,7 @@ func benchmarkPushSnapshots( for j := 0; j < snapshotCnt; j++ { b.StopTimer() pushPack := createChangePack(changeCnt, docs[0], b) - docInfo, err := documents.FindDocInfo(ctx, be, project, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) assert.NoError(b, err) b.StartTimer() @@ -226,12 +227,12 @@ func benchmarkPullSnapshot( pushPack := createChangePack(changeCnt, pusherDoc, b) pullPack := createChangePack(0, pullerDoc, b) - docInfo, err := documents.FindDocInfo(ctx, be, project, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) assert.NoError(b, err) _, err = packs.PushPull(ctx, be, project, pusherClientInfo, docInfo, pushPack, types.SyncModePushPull) assert.NoError(b, err) - docInfo, err = documents.FindDocInfo(ctx, be, project, docID) + docInfo, err = documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) assert.NoError(b, err) b.StartTimer() diff --git 
a/test/integration/retention_test.go b/test/integration/retention_test.go index fc36fa2df..70732062c 100644 --- a/test/integration/retention_test.go +++ b/test/integration/retention_test.go @@ -191,6 +191,7 @@ func TestRetention(t *testing.T) { changes, err := mongoCli.FindChangesBetweenServerSeqs( ctx, + docInfo.Key, docInfo.ID, change.InitialServerSeq, change.MaxServerSeq, @@ -229,6 +230,7 @@ func TestRetention(t *testing.T) { changes, err = mongoCli.FindChangesBetweenServerSeqs( ctx, + docInfo.Key, docInfo.ID, change.InitialServerSeq, change.MaxServerSeq, From 81bebc4e6aa8f98b9750d5ebb3419eec2a76a180 Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Tue, 28 Nov 2023 14:33:09 +0900 Subject: [PATCH 06/11] Change ref key of Clients from _id into (key, _id) --- api/yorkie/v1/yorkie.pb.go | 453 ++++++++++++++++-- api/yorkie/v1/yorkie.proto | 7 + .../sharding/test/scripts/init-mongos1.js | 34 +- client/client.go | 9 +- server/backend/database/database.go | 6 +- server/backend/database/memory/database.go | 54 ++- server/backend/database/memory/indexes.go | 15 +- server/backend/database/mongo/client.go | 49 +- server/backend/database/mongo/indexes.go | 2 +- server/backend/database/synced_seq_info.go | 1 + .../backend/database/testcases/testcases.go | 30 +- server/backend/housekeeping/housekeeping.go | 2 +- server/clients/clients.go | 14 +- server/rpc/testcases/testcases.go | 102 +++- server/rpc/yorkie_server.go | 22 +- test/helper/helper.go | 74 ++- test/shard/mongo_client_test.go | 41 ++ 17 files changed, 762 insertions(+), 153 deletions(-) diff --git a/api/yorkie/v1/yorkie.pb.go b/api/yorkie/v1/yorkie.pb.go index 67a52c275..e859f1689 100644 --- a/api/yorkie/v1/yorkie.pb.go +++ b/api/yorkie/v1/yorkie.pb.go @@ -122,6 +122,7 @@ func (m *ActivateClientResponse) GetClientId() string { type DeactivateClientRequest struct { ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` + ClientKey string 
`protobuf:"bytes,2,opt,name=client_key,json=clientKey,proto3" json:"client_key,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -167,6 +168,13 @@ func (m *DeactivateClientRequest) GetClientId() string { return "" } +func (m *DeactivateClientRequest) GetClientKey() string { + if m != nil { + return m.ClientKey + } + return "" +} + type DeactivateClientResponse struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -209,6 +217,7 @@ var xxx_messageInfo_DeactivateClientResponse proto.InternalMessageInfo type AttachDocumentRequest struct { ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` ChangePack *ChangePack `protobuf:"bytes,2,opt,name=change_pack,json=changePack,proto3" json:"change_pack,omitempty"` + ClientKey string `protobuf:"bytes,3,opt,name=client_key,json=clientKey,proto3" json:"client_key,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -261,6 +270,13 @@ func (m *AttachDocumentRequest) GetChangePack() *ChangePack { return nil } +func (m *AttachDocumentRequest) GetClientKey() string { + if m != nil { + return m.ClientKey + } + return "" +} + type AttachDocumentResponse struct { DocumentId string `protobuf:"bytes,1,opt,name=document_id,json=documentId,proto3" json:"document_id,omitempty"` ChangePack *ChangePack `protobuf:"bytes,2,opt,name=change_pack,json=changePack,proto3" json:"change_pack,omitempty"` @@ -321,6 +337,7 @@ type DetachDocumentRequest struct { DocumentId string `protobuf:"bytes,2,opt,name=document_id,json=documentId,proto3" json:"document_id,omitempty"` ChangePack *ChangePack `protobuf:"bytes,3,opt,name=change_pack,json=changePack,proto3" json:"change_pack,omitempty"` RemoveIfNotAttached bool `protobuf:"varint,4,opt,name=remove_if_not_attached,json=removeIfNotAttached,proto3" json:"remove_if_not_attached,omitempty"` + 
ClientKey string `protobuf:"bytes,5,opt,name=client_key,json=clientKey,proto3" json:"client_key,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -387,6 +404,13 @@ func (m *DetachDocumentRequest) GetRemoveIfNotAttached() bool { return false } +func (m *DetachDocumentRequest) GetClientKey() string { + if m != nil { + return m.ClientKey + } + return "" +} + type DetachDocumentResponse struct { ChangePack *ChangePack `protobuf:"bytes,2,opt,name=change_pack,json=changePack,proto3" json:"change_pack,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -438,6 +462,7 @@ type WatchDocumentRequest struct { ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` DocumentId string `protobuf:"bytes,2,opt,name=document_id,json=documentId,proto3" json:"document_id,omitempty"` DocumentKey string `protobuf:"bytes,3,opt,name=document_key,json=documentKey,proto3" json:"document_key,omitempty"` + ClientKey string `protobuf:"bytes,4,opt,name=client_key,json=clientKey,proto3" json:"client_key,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -497,6 +522,13 @@ func (m *WatchDocumentRequest) GetDocumentKey() string { return "" } +func (m *WatchDocumentRequest) GetClientKey() string { + if m != nil { + return m.ClientKey + } + return "" +} + type WatchDocumentResponse struct { // Types that are valid to be assigned to Body: // *WatchDocumentResponse_Initialization_ @@ -636,6 +668,7 @@ type RemoveDocumentRequest struct { ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` DocumentId string `protobuf:"bytes,2,opt,name=document_id,json=documentId,proto3" json:"document_id,omitempty"` ChangePack *ChangePack `protobuf:"bytes,3,opt,name=change_pack,json=changePack,proto3" json:"change_pack,omitempty"` + ClientKey string 
`protobuf:"bytes,4,opt,name=client_key,json=clientKey,proto3" json:"client_key,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -695,6 +728,13 @@ func (m *RemoveDocumentRequest) GetChangePack() *ChangePack { return nil } +func (m *RemoveDocumentRequest) GetClientKey() string { + if m != nil { + return m.ClientKey + } + return "" +} + type RemoveDocumentResponse struct { ChangePack *ChangePack `protobuf:"bytes,1,opt,name=change_pack,json=changePack,proto3" json:"change_pack,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -747,6 +787,7 @@ type PushPullChangesRequest struct { DocumentId string `protobuf:"bytes,2,opt,name=document_id,json=documentId,proto3" json:"document_id,omitempty"` ChangePack *ChangePack `protobuf:"bytes,3,opt,name=change_pack,json=changePack,proto3" json:"change_pack,omitempty"` PushOnly bool `protobuf:"varint,4,opt,name=push_only,json=pushOnly,proto3" json:"push_only,omitempty"` + ClientKey string `protobuf:"bytes,5,opt,name=client_key,json=clientKey,proto3" json:"client_key,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -813,6 +854,13 @@ func (m *PushPullChangesRequest) GetPushOnly() bool { return false } +func (m *PushPullChangesRequest) GetClientKey() string { + if m != nil { + return m.ClientKey + } + return "" +} + type PushPullChangesResponse struct { ChangePack *ChangePack `protobuf:"bytes,1,opt,name=change_pack,json=changePack,proto3" json:"change_pack,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -866,6 +914,7 @@ type BroadcastRequest struct { Topic string `protobuf:"bytes,3,opt,name=topic,proto3" json:"topic,omitempty"` Payload []byte `protobuf:"bytes,4,opt,name=payload,proto3" json:"payload,omitempty"` DocumentKey string `protobuf:"bytes,5,opt,name=document_key,json=documentKey,proto3" json:"document_key,omitempty"` + ClientKey string 
`protobuf:"bytes,6,opt,name=client_key,json=clientKey,proto3" json:"client_key,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -939,6 +988,13 @@ func (m *BroadcastRequest) GetDocumentKey() string { return "" } +func (m *BroadcastRequest) GetClientKey() string { + if m != nil { + return m.ClientKey + } + return "" +} + type BroadcastResponse struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -1001,53 +1057,55 @@ func init() { func init() { proto.RegisterFile("yorkie/v1/yorkie.proto", fileDescriptor_40070c858814ab24) } var fileDescriptor_40070c858814ab24 = []byte{ - // 730 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0x4d, 0x4f, 0xdb, 0x4c, - 0x10, 0xce, 0x02, 0xe1, 0x25, 0x13, 0xe0, 0x85, 0x85, 0x84, 0x34, 0xb4, 0x21, 0xb8, 0x17, 0x24, - 0xa4, 0xa4, 0x01, 0x95, 0x4b, 0x4f, 0x40, 0x2a, 0x11, 0x55, 0x6a, 0x53, 0x57, 0x2a, 0x02, 0xa9, - 0x8a, 0x16, 0x7b, 0x69, 0x56, 0x31, 0x5e, 0x63, 0x6f, 0x5c, 0xb9, 0xff, 0xa1, 0xf7, 0xde, 0x7b, - 0xec, 0xbf, 0xe8, 0xa9, 0x47, 0x8e, 0x3d, 0x56, 0xf4, 0x8f, 0x54, 0xf1, 0x3a, 0xc6, 0x76, 0xdc, - 0x40, 0x0b, 0x52, 0x7b, 0xcb, 0xce, 0xc7, 0x33, 0x1f, 0x9e, 0x67, 0x26, 0x50, 0xf4, 0xb8, 0xdd, - 0x63, 0xb4, 0xee, 0x36, 0xea, 0xf2, 0x57, 0xcd, 0xb2, 0xb9, 0xe0, 0x38, 0x17, 0xbc, 0xdc, 0x46, - 0xf9, 0xde, 0x95, 0x89, 0x4d, 0x1d, 0xde, 0xb7, 0x35, 0xea, 0x48, 0x2b, 0x65, 0x07, 0x0a, 0xbb, - 0x9a, 0x60, 0x2e, 0x11, 0x74, 0xdf, 0x60, 0xd4, 0x14, 0x2a, 0x3d, 0xef, 0x53, 0x47, 0xe0, 0x07, - 0x00, 0x9a, 0x2f, 0xe8, 0xf4, 0xa8, 0x57, 0x42, 0x55, 0xb4, 0x91, 0x53, 0x73, 0x52, 0xf2, 0x8c, - 0x7a, 0xca, 0x63, 0x28, 0x26, 0xfd, 0x1c, 0x8b, 0x9b, 0x0e, 0xc5, 0xab, 0x10, 0x98, 0x75, 0x98, - 0x1e, 0xf8, 0xcd, 0x48, 0x41, 0x4b, 0x57, 0x76, 0x60, 0xa5, 0x49, 0x49, 0x6a, 0xc0, 0xb1, 0x7e, - 0x65, 0x28, 0x8d, 0xfa, 0xc9, 0x80, 0x8a, 0x01, 0x85, 0x5d, 0x21, 0x88, 0xd6, 0x6d, 0x72, 
0xad, - 0x7f, 0x76, 0x43, 0x44, 0xbc, 0x03, 0x79, 0xad, 0x4b, 0xcc, 0xb7, 0xb4, 0x63, 0x11, 0xad, 0x57, - 0x9a, 0xa8, 0xa2, 0x8d, 0xfc, 0x56, 0xa1, 0x16, 0x36, 0xad, 0xb6, 0xef, 0x6b, 0xdb, 0x44, 0xeb, - 0xa9, 0xa0, 0x85, 0xbf, 0x95, 0x73, 0x28, 0x26, 0xa3, 0x05, 0x85, 0xaf, 0x41, 0x5e, 0x0f, 0x64, - 0x57, 0x01, 0x61, 0x28, 0xba, 0x45, 0xc8, 0x2f, 0x08, 0x0a, 0x4d, 0xfa, 0xdb, 0x15, 0x26, 0xf2, - 0x99, 0xb8, 0x2e, 0x9f, 0xc9, 0x1b, 0xe6, 0x83, 0xb7, 0xa1, 0x68, 0xd3, 0x33, 0xee, 0xd2, 0x0e, - 0x3b, 0xed, 0x98, 0x5c, 0x74, 0x88, 0xdf, 0x10, 0xaa, 0x97, 0xa6, 0xaa, 0x68, 0x63, 0x46, 0x5d, - 0x92, 0xda, 0xd6, 0xe9, 0x73, 0x2e, 0x76, 0x03, 0x95, 0xd2, 0x86, 0x62, 0xb2, 0x86, 0xa0, 0x6f, - 0x7f, 0xda, 0x96, 0x77, 0xb0, 0x7c, 0x48, 0xc4, 0x5d, 0x37, 0x65, 0x1d, 0x66, 0x43, 0x83, 0xc1, - 0xe4, 0x4f, 0xfa, 0x16, 0xa1, 0xd3, 0x60, 0xf6, 0xbf, 0x21, 0x28, 0x24, 0x22, 0x07, 0xa5, 0x1c, - 0xc1, 0x3c, 0x33, 0x99, 0x60, 0xc4, 0x60, 0xef, 0x89, 0x60, 0xdc, 0xf4, 0xe3, 0xe7, 0xb7, 0xea, - 0x91, 0x6a, 0x52, 0x3d, 0x6b, 0xad, 0x98, 0xdb, 0x41, 0x46, 0x4d, 0x00, 0xe1, 0x4d, 0xc8, 0x52, - 0x97, 0x9a, 0x22, 0xe8, 0xcf, 0x52, 0x04, 0xb1, 0xc9, 0xb5, 0xa7, 0x03, 0xd5, 0x41, 0x46, 0x95, - 0x36, 0xe5, 0x3a, 0xcc, 0xc7, 0x01, 0x23, 0x74, 0x66, 0xba, 0x53, 0x42, 0xd5, 0xc9, 0x2b, 0x3a, - 0xb7, 0x74, 0x67, 0x6f, 0x1a, 0xa6, 0x4e, 0xb8, 0xee, 0x29, 0x1f, 0x10, 0x14, 0x54, 0xff, 0xeb, - 0xfd, 0x13, 0xa3, 0x36, 0x98, 0x9a, 0x64, 0x3a, 0xe9, 0x53, 0x83, 0x6e, 0x8a, 0xf8, 0x19, 0x41, - 0xb1, 0xdd, 0x77, 0xba, 0xed, 0xbe, 0x61, 0x48, 0x13, 0xe7, 0xef, 0xb2, 0x69, 0x15, 0x72, 0x56, - 0xdf, 0xe9, 0x76, 0xb8, 0x69, 0x78, 0x01, 0x81, 0x66, 0x06, 0x82, 0x17, 0xa6, 0xe1, 0x29, 0x2f, - 0x61, 0x65, 0x24, 0xd9, 0x5b, 0x36, 0xe0, 0x13, 0x82, 0x85, 0x3d, 0x9b, 0x13, 0x5d, 0x23, 0xce, - 0x1d, 0x7d, 0xdd, 0x65, 0xc8, 0x0a, 0x6e, 0x31, 0x2d, 0x20, 0x8b, 0x7c, 0xe0, 0x12, 0xfc, 0x67, - 0x11, 0xcf, 0xe0, 0x44, 0xee, 0x85, 0x59, 0x75, 0xf8, 0x1c, 0xe1, 0x58, 0x76, 0x94, 0x63, 0x4b, - 0xb0, 0x18, 0x49, 0x52, 0x96, 
0xbc, 0x75, 0x91, 0x85, 0xb9, 0x23, 0xbf, 0xbe, 0x57, 0xd4, 0x76, - 0x99, 0x46, 0xf1, 0x21, 0xcc, 0xc7, 0xcf, 0x10, 0xae, 0x46, 0x3a, 0x90, 0x7a, 0xd9, 0xca, 0xeb, - 0x63, 0x2c, 0x82, 0x93, 0x92, 0xc1, 0x6f, 0x60, 0x21, 0x79, 0x70, 0xb0, 0x12, 0xe5, 0x5c, 0xfa, - 0x15, 0x2b, 0x3f, 0x1c, 0x6b, 0x13, 0xc2, 0x0f, 0xf2, 0x8e, 0x5d, 0x91, 0x78, 0xde, 0x69, 0xe7, - 0x2c, 0x9e, 0x77, 0xea, 0x09, 0x92, 0xc0, 0xf1, 0x35, 0x1b, 0x03, 0x4e, 0xbd, 0x22, 0x31, 0xe0, - 0xf4, 0x1d, 0x2d, 0x81, 0xe3, 0x4c, 0x8c, 0x01, 0xa7, 0xee, 0x8c, 0x18, 0x70, 0x3a, 0x8d, 0x95, - 0x0c, 0x3e, 0x86, 0xff, 0x13, 0x23, 0x8e, 0xa3, 0x7e, 0xe9, 0x5c, 0x2d, 0x2b, 0xe3, 0x4c, 0x42, - 0xec, 0xd7, 0x30, 0x17, 0x5b, 0xb7, 0x78, 0xed, 0xd7, 0x8b, 0x58, 0xe2, 0x56, 0xaf, 0xdb, 0xd4, - 0x4a, 0xe6, 0x11, 0xc2, 0x07, 0x90, 0x0b, 0xa7, 0x13, 0xaf, 0x46, 0x5c, 0x92, 0xc4, 0x2a, 0xdf, - 0x4f, 0x57, 0x0e, 0xb1, 0xf6, 0x36, 0xbf, 0x5e, 0x56, 0xd0, 0xc5, 0x65, 0x05, 0x7d, 0xbf, 0xac, - 0xa0, 0x8f, 0x3f, 0x2a, 0x19, 0x58, 0xd4, 0xa9, 0x3b, 0x74, 0x22, 0x16, 0xab, 0xb9, 0x8d, 0x36, - 0x3a, 0x9e, 0xaa, 0x3d, 0x71, 0x1b, 0x27, 0xd3, 0xfe, 0x7f, 0xb6, 0xed, 0x9f, 0x01, 0x00, 0x00, - 0xff, 0xff, 0x39, 0x50, 0xcd, 0x53, 0xf3, 0x09, 0x00, 0x00, + // 755 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0xdf, 0x4e, 0x13, 0x4d, + 0x14, 0xef, 0x40, 0xcb, 0x47, 0x4f, 0x81, 0x0f, 0x06, 0x5a, 0xfa, 0x95, 0xcf, 0x52, 0xd6, 0x1b, + 0x12, 0x92, 0xd6, 0x42, 0xe4, 0xc6, 0x2b, 0xa0, 0x26, 0x34, 0x24, 0x5a, 0xd7, 0x28, 0x81, 0xc4, + 0x34, 0xc3, 0xee, 0x60, 0x27, 0x2d, 0x3b, 0xa5, 0x3b, 0xdd, 0x64, 0x7d, 0x05, 0xef, 0x8d, 0x0f, + 0xe1, 0x53, 0x78, 0x65, 0xbc, 0xe2, 0xd2, 0x4b, 0xc5, 0x17, 0x31, 0xec, 0x6c, 0xb7, 0xbb, 0xc3, + 0xda, 0x22, 0x92, 0xe8, 0x5d, 0xe7, 0xcc, 0x39, 0xbf, 0xf9, 0x9d, 0xdf, 0x9e, 0x3f, 0x85, 0x9c, + 0xcb, 0x7b, 0x6d, 0x46, 0x2b, 0x4e, 0xb5, 0x22, 0x7f, 0x95, 0xbb, 0x3d, 0x2e, 0x38, 0x4e, 0xfb, + 0x27, 0xa7, 0x5a, 0xf8, 0x6f, 0xe8, 0xd2, 0xa3, 0x36, 
0xef, 0xf7, 0x0c, 0x6a, 0x4b, 0x2f, 0x6d, + 0x1b, 0xb2, 0x3b, 0x86, 0x60, 0x0e, 0x11, 0x74, 0xaf, 0xc3, 0xa8, 0x25, 0x74, 0x7a, 0xde, 0xa7, + 0xb6, 0xc0, 0xf7, 0x00, 0x0c, 0xcf, 0xd0, 0x6c, 0x53, 0x37, 0x8f, 0x4a, 0x68, 0x3d, 0xad, 0xa7, + 0xa5, 0xe5, 0x80, 0xba, 0xda, 0x43, 0xc8, 0xa9, 0x71, 0x76, 0x97, 0x5b, 0x36, 0xc5, 0x2b, 0xe0, + 0xbb, 0x35, 0x99, 0xe9, 0xc7, 0x4d, 0x4b, 0x43, 0xdd, 0xd4, 0x5e, 0xc0, 0x72, 0x8d, 0x92, 0xd8, + 0x07, 0x47, 0xc5, 0x29, 0x6c, 0x26, 0x54, 0x36, 0x05, 0xc8, 0x5f, 0x87, 0x95, 0x7c, 0xb4, 0xb7, + 0x08, 0xb2, 0x3b, 0x42, 0x10, 0xa3, 0x55, 0xe3, 0x46, 0xff, 0xec, 0xa6, 0x2f, 0x6e, 0x43, 0xc6, + 0x68, 0x11, 0xeb, 0x35, 0x6d, 0x76, 0x89, 0xd1, 0xf6, 0x9e, 0xcc, 0x6c, 0x66, 0xcb, 0x81, 0xa8, + 0xe5, 0x3d, 0xef, 0xb6, 0x41, 0x8c, 0xb6, 0x0e, 0x46, 0xf0, 0x5b, 0x61, 0x3a, 0xa9, 0x32, 0x3d, + 0x87, 0x9c, 0x4a, 0xc6, 0xd7, 0x6d, 0x15, 0x32, 0xa6, 0x6f, 0x1b, 0xf2, 0x81, 0x81, 0xe9, 0xf6, + 0x8c, 0xb4, 0x6f, 0x08, 0xb2, 0x35, 0xfa, 0xcb, 0x02, 0x28, 0x7c, 0x26, 0xc6, 0xf1, 0x99, 0xbc, + 0xa9, 0x42, 0x5b, 0x90, 0xeb, 0xd1, 0x33, 0xee, 0xd0, 0x26, 0x3b, 0x6d, 0x5a, 0x5c, 0x34, 0x89, + 0x27, 0x08, 0x35, 0xf3, 0xc9, 0x12, 0x5a, 0x9f, 0xd6, 0x17, 0xe5, 0x6d, 0xfd, 0xf4, 0x09, 0x17, + 0x3b, 0xfe, 0x95, 0x22, 0x6b, 0x4a, 0x95, 0xb5, 0x01, 0x39, 0x35, 0x45, 0x5f, 0xd6, 0xdb, 0xaa, + 0xf6, 0x0e, 0xc1, 0xd2, 0x21, 0x11, 0x77, 0x2d, 0xda, 0x1a, 0xcc, 0x04, 0x0e, 0xc3, 0x02, 0x09, + 0x82, 0x0e, 0xa8, 0xab, 0xa4, 0x9a, 0x54, 0x53, 0xfd, 0x82, 0x20, 0xab, 0x10, 0xf3, 0x53, 0x3d, + 0x82, 0x39, 0x66, 0x31, 0xc1, 0x48, 0x87, 0xbd, 0x21, 0x82, 0x71, 0xcb, 0xa3, 0x97, 0xd9, 0xac, + 0x84, 0xb2, 0x8d, 0x8d, 0x2c, 0xd7, 0x23, 0x61, 0xfb, 0x09, 0x5d, 0x01, 0xc2, 0x1b, 0x90, 0xa2, + 0x0e, 0xb5, 0x84, 0xaf, 0xdf, 0x62, 0x08, 0xb1, 0xc6, 0x8d, 0xc7, 0x57, 0x57, 0xfb, 0x09, 0x5d, + 0xfa, 0x14, 0x2a, 0x30, 0x17, 0x05, 0x0c, 0xa5, 0xc4, 0x4c, 0x3b, 0x8f, 0x4a, 0x93, 0xc3, 0x94, + 0xea, 0xa6, 0xbd, 0x3b, 0x05, 0xc9, 0x13, 0x6e, 0xba, 0xda, 0x07, 0x04, 0x59, 0xdd, 0xfb, 
0xf8, + 0x7f, 0x47, 0xa5, 0x8e, 0xf9, 0x12, 0x0d, 0xc8, 0xa9, 0x6c, 0xe3, 0x8b, 0x0e, 0xdd, 0xb4, 0xe8, + 0x3e, 0x23, 0xc8, 0x35, 0xfa, 0x76, 0xab, 0xd1, 0xef, 0x74, 0xa4, 0x8b, 0xfd, 0x67, 0x15, 0x58, + 0x81, 0x74, 0xb7, 0x6f, 0xb7, 0x9a, 0xdc, 0xea, 0xb8, 0x7e, 0x7b, 0x4e, 0x5f, 0x19, 0x9e, 0x5a, + 0x1d, 0x77, 0x5c, 0x4f, 0x3e, 0x83, 0xe5, 0x6b, 0xb9, 0xfc, 0xa6, 0x3e, 0x1f, 0x11, 0xcc, 0xef, + 0xf6, 0x38, 0x31, 0x0d, 0x62, 0xdf, 0x51, 0x6d, 0x2c, 0x41, 0x4a, 0xf0, 0x2e, 0x33, 0xfc, 0x4e, + 0x94, 0x07, 0x9c, 0x87, 0x7f, 0xba, 0xc4, 0xed, 0x70, 0x22, 0x87, 0xd2, 0x8c, 0x3e, 0x38, 0x5e, + 0x6b, 0xe0, 0xd4, 0xb8, 0x06, 0x9e, 0x52, 0x75, 0x59, 0x84, 0x85, 0x50, 0x0e, 0x52, 0x91, 0xcd, + 0x8b, 0x14, 0xcc, 0x1e, 0x79, 0xe9, 0x3f, 0xa7, 0x3d, 0x87, 0x19, 0x14, 0x1f, 0xc2, 0x5c, 0x74, + 0xc3, 0xe2, 0x52, 0x48, 0xa0, 0xd8, 0xa5, 0x5d, 0x58, 0x1b, 0xe1, 0xe1, 0xaf, 0xc3, 0x04, 0x7e, + 0x05, 0xf3, 0xea, 0xb2, 0xc4, 0x5a, 0xb8, 0xa1, 0xe3, 0x17, 0x74, 0xe1, 0xfe, 0x48, 0x9f, 0x00, + 0xfe, 0x8a, 0x77, 0x64, 0xc3, 0x45, 0x79, 0xc7, 0x6d, 0xe2, 0x28, 0xef, 0xd8, 0xf5, 0x28, 0x81, + 0xa3, 0x33, 0x3e, 0x02, 0x1c, 0xbb, 0xe1, 0x22, 0xc0, 0xf1, 0x0b, 0x42, 0x02, 0x47, 0xfb, 0x38, + 0x02, 0x1c, 0x3b, 0x90, 0x22, 0xc0, 0xf1, 0x43, 0x40, 0x4b, 0xe0, 0x63, 0xf8, 0x57, 0xe9, 0x00, + 0x1c, 0x8e, 0x8b, 0xef, 0xf4, 0x82, 0x36, 0xca, 0x25, 0xc0, 0x7e, 0x09, 0xb3, 0x91, 0x59, 0x8e, + 0x57, 0x7f, 0x3e, 0xe5, 0x25, 0x6e, 0x69, 0xdc, 0x1a, 0xd0, 0x12, 0x0f, 0x10, 0xde, 0x87, 0x74, + 0x50, 0x9d, 0x78, 0x25, 0x14, 0xa2, 0xf6, 0x5d, 0xe1, 0xff, 0xf8, 0xcb, 0x01, 0xd6, 0xee, 0xc6, + 0xa7, 0xcb, 0x22, 0xba, 0xb8, 0x2c, 0xa2, 0xaf, 0x97, 0x45, 0xf4, 0xfe, 0x7b, 0x31, 0x01, 0x0b, + 0x26, 0x75, 0x06, 0x41, 0xa4, 0xcb, 0xca, 0x4e, 0xb5, 0x81, 0x8e, 0x93, 0xe5, 0x47, 0x4e, 0xf5, + 0x64, 0xca, 0xfb, 0x3b, 0xba, 0xf5, 0x23, 0x00, 0x00, 0xff, 0xff, 0xf6, 0x59, 0xb6, 0x04, 0xce, + 0x0a, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -1502,6 +1560,13 @@ func (m *DeactivateClientRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.ClientKey) > 0 { + i -= len(m.ClientKey) + copy(dAtA[i:], m.ClientKey) + i = encodeVarintYorkie(dAtA, i, uint64(len(m.ClientKey))) + i-- + dAtA[i] = 0x12 + } if len(m.ClientId) > 0 { i -= len(m.ClientId) copy(dAtA[i:], m.ClientId) @@ -1563,6 +1628,13 @@ func (m *AttachDocumentRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.ClientKey) > 0 { + i -= len(m.ClientKey) + copy(dAtA[i:], m.ClientKey) + i = encodeVarintYorkie(dAtA, i, uint64(len(m.ClientKey))) + i-- + dAtA[i] = 0x1a + } if m.ChangePack != nil { { size, err := m.ChangePack.MarshalToSizedBuffer(dAtA[:i]) @@ -1655,6 +1727,13 @@ func (m *DetachDocumentRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.ClientKey) > 0 { + i -= len(m.ClientKey) + copy(dAtA[i:], m.ClientKey) + i = encodeVarintYorkie(dAtA, i, uint64(len(m.ClientKey))) + i-- + dAtA[i] = 0x2a + } if m.RemoveIfNotAttached { i-- if m.RemoveIfNotAttached { @@ -1757,6 +1836,13 @@ func (m *WatchDocumentRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.ClientKey) > 0 { + i -= len(m.ClientKey) + copy(dAtA[i:], m.ClientKey) + i = encodeVarintYorkie(dAtA, i, uint64(len(m.ClientKey))) + i-- + dAtA[i] = 0x22 + } if len(m.DocumentKey) > 0 { i -= len(m.DocumentKey) copy(dAtA[i:], m.DocumentKey) @@ -1919,6 +2005,13 @@ func (m *RemoveDocumentRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.ClientKey) > 0 { + i -= len(m.ClientKey) + copy(dAtA[i:], m.ClientKey) + i = encodeVarintYorkie(dAtA, i, uint64(len(m.ClientKey))) + i-- + dAtA[i] = 0x22 + } if 
m.ChangePack != nil { { size, err := m.ChangePack.MarshalToSizedBuffer(dAtA[:i]) @@ -2011,6 +2104,13 @@ func (m *PushPullChangesRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.ClientKey) > 0 { + i -= len(m.ClientKey) + copy(dAtA[i:], m.ClientKey) + i = encodeVarintYorkie(dAtA, i, uint64(len(m.ClientKey))) + i-- + dAtA[i] = 0x2a + } if m.PushOnly { i-- if m.PushOnly { @@ -2113,6 +2213,13 @@ func (m *BroadcastRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.ClientKey) > 0 { + i -= len(m.ClientKey) + copy(dAtA[i:], m.ClientKey) + i = encodeVarintYorkie(dAtA, i, uint64(len(m.ClientKey))) + i-- + dAtA[i] = 0x32 + } if len(m.DocumentKey) > 0 { i -= len(m.DocumentKey) copy(dAtA[i:], m.DocumentKey) @@ -2231,6 +2338,10 @@ func (m *DeactivateClientRequest) Size() (n int) { if l > 0 { n += 1 + l + sovYorkie(uint64(l)) } + l = len(m.ClientKey) + if l > 0 { + n += 1 + l + sovYorkie(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2263,6 +2374,10 @@ func (m *AttachDocumentRequest) Size() (n int) { l = m.ChangePack.Size() n += 1 + l + sovYorkie(uint64(l)) } + l = len(m.ClientKey) + if l > 0 { + n += 1 + l + sovYorkie(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2310,6 +2425,10 @@ func (m *DetachDocumentRequest) Size() (n int) { if m.RemoveIfNotAttached { n += 2 } + l = len(m.ClientKey) + if l > 0 { + n += 1 + l + sovYorkie(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2350,6 +2469,10 @@ func (m *WatchDocumentRequest) Size() (n int) { if l > 0 { n += 1 + l + sovYorkie(uint64(l)) } + l = len(m.ClientKey) + if l > 0 { + n += 1 + l + sovYorkie(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2431,6 +2554,10 @@ func (m *RemoveDocumentRequest) Size() (n int) { l = m.ChangePack.Size() n 
+= 1 + l + sovYorkie(uint64(l)) } + l = len(m.ClientKey) + if l > 0 { + n += 1 + l + sovYorkie(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2474,6 +2601,10 @@ func (m *PushPullChangesRequest) Size() (n int) { if m.PushOnly { n += 2 } + l = len(m.ClientKey) + if l > 0 { + n += 1 + l + sovYorkie(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2522,6 +2653,10 @@ func (m *BroadcastRequest) Size() (n int) { if l > 0 { n += 1 + l + sovYorkie(uint64(l)) } + l = len(m.ClientKey) + if l > 0 { + n += 1 + l + sovYorkie(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2773,6 +2908,38 @@ func (m *DeactivateClientRequest) Unmarshal(dAtA []byte) error { } m.ClientId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientKey", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowYorkie + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthYorkie + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthYorkie + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientKey = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipYorkie(dAtA[iNdEx:]) @@ -2943,6 +3110,38 @@ func (m *AttachDocumentRequest) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientKey", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowYorkie + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + 
stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthYorkie + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthYorkie + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientKey = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipYorkie(dAtA[iNdEx:]) @@ -3233,6 +3432,38 @@ func (m *DetachDocumentRequest) Unmarshal(dAtA []byte) error { } } m.RemoveIfNotAttached = bool(v != 0) + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientKey", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowYorkie + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthYorkie + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthYorkie + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientKey = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipYorkie(dAtA[iNdEx:]) @@ -3467,6 +3698,38 @@ func (m *WatchDocumentRequest) Unmarshal(dAtA []byte) error { } m.DocumentKey = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientKey", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowYorkie + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthYorkie + } + postIndex := iNdEx + intStringLen + if postIndex < 
0 { + return ErrInvalidLengthYorkie + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientKey = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipYorkie(dAtA[iNdEx:]) @@ -3822,6 +4085,38 @@ func (m *RemoveDocumentRequest) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientKey", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowYorkie + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthYorkie + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthYorkie + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientKey = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipYorkie(dAtA[iNdEx:]) @@ -4080,6 +4375,38 @@ func (m *PushPullChangesRequest) Unmarshal(dAtA []byte) error { } } m.PushOnly = bool(v != 0) + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientKey", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowYorkie + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthYorkie + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthYorkie + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientKey = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipYorkie(dAtA[iNdEx:]) @@ -4380,6 +4707,38 @@ func (m 
*BroadcastRequest) Unmarshal(dAtA []byte) error { } m.DocumentKey = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 6: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientKey", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowYorkie + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthYorkie + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthYorkie + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientKey = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipYorkie(dAtA[iNdEx:]) diff --git a/api/yorkie/v1/yorkie.proto b/api/yorkie/v1/yorkie.proto index ced9b559e..00cc5f09d 100644 --- a/api/yorkie/v1/yorkie.proto +++ b/api/yorkie/v1/yorkie.proto @@ -49,6 +49,7 @@ message ActivateClientResponse { message DeactivateClientRequest { string client_id = 1; + string client_key = 2; } message DeactivateClientResponse { @@ -57,6 +58,7 @@ message DeactivateClientResponse { message AttachDocumentRequest { string client_id = 1; ChangePack change_pack = 2; + string client_key = 3; } message AttachDocumentResponse { @@ -69,6 +71,7 @@ message DetachDocumentRequest { string document_id = 2; ChangePack change_pack = 3; bool remove_if_not_attached = 4; + string client_key = 5; } message DetachDocumentResponse { @@ -79,6 +82,7 @@ message WatchDocumentRequest { string client_id = 1; string document_id = 2; string document_key = 3; + string client_key = 4; } message WatchDocumentResponse { @@ -96,6 +100,7 @@ message RemoveDocumentRequest { string client_id = 1; string document_id = 2; ChangePack change_pack = 3; + string client_key = 4; } message RemoveDocumentResponse { @@ -107,6 +112,7 @@ message 
PushPullChangesRequest { string document_id = 2; ChangePack change_pack = 3; bool push_only = 4; + string client_key = 5; } message PushPullChangesResponse { @@ -119,6 +125,7 @@ message BroadcastRequest { string topic = 3; bytes payload = 4; string document_key = 5; + string client_key = 6; } message BroadcastResponse { diff --git a/build/docker/sharding/test/scripts/init-mongos1.js b/build/docker/sharding/test/scripts/init-mongos1.js index 01f4fe750..f6e0bf90b 100644 --- a/build/docker/sharding/test/scripts/init-mongos1.js +++ b/build/docker/sharding/test/scripts/init-mongos1.js @@ -4,28 +4,44 @@ sh.addShard("shard-rs-2/shard2-1:27017") // The DB 'yorkie-meta-1' is for the mongo client test. sh.enableSharding("yorkie-meta-1") sh.shardCollection("yorkie-meta-1.users", { username: 1 }, true) -// sh.shardCollection("yorkie-meta-1.clients", { _id: 1 }, true) +sh.shardCollection("yorkie-meta-1.clients", { key: 1 }) sh.shardCollection("yorkie-meta-1.documents", { key: 1 }) sh.shardCollection("yorkie-meta-1.changes", { doc_key: 1 }) sh.shardCollection("yorkie-meta-1.snapshots", { doc_key: 1 }) sh.shardCollection("yorkie-meta-1.syncedseqs", { doc_key: 1 }) + +const docSplitKey = "duplicateIDTestDocKey5" +const clientSplitKey = "duplicateIDTestClientKey5" + // Split the inital range at "duplicateIDTestDocKey5" to allow doc_ids duplicate in different shards. -sh.splitAt("yorkie-meta-1.documents", { key: "duplicateIDTestDocKey5" }) +sh.splitAt("yorkie-meta-1.documents", { key: docSplitKey }) +// Move the chunk to another shard. +const currentDocShard = db.getSiblingDB("config").chunks.findOne({ min: { key: docSplitKey } }).shard +var nextDocShard = "" +if (currentDocShard == "shard-rs-1") { + nextDocShard = "shard-rs-2" +} else { + nextDocShard = "shard-rs-1" +} +db.adminCommand({ moveChunk: "yorkie-meta-1.documents", find: { key: docSplitKey }, to: nextDocShard }) + +// Split the inital range at "duplicateIDTestClientKey5" to allow client_ids duplicate in different shards. 
+sh.splitAt("yorkie-meta-1.clients", { key: clientSplitKey }) // Move the chunk to another shard. -const currentShard = db.getSiblingDB("config").chunks.findOne({ min: { key: 'duplicateIDTestDocKey5' } }).shard -var nextShard = "" -if (currentShard == "shard-rs-1") { - nextShard = "shard-rs-2" +const currentClientShard = db.getSiblingDB("config").chunks.findOne({ min: { key: clientSplitKey } }).shard +var nextClientShard = "" +if (currentClientShard == "shard-rs-1") { + nextClientShard = "shard-rs-2" } else { - nextShard = "shard-rs-1" + nextClientShard = "shard-rs-1" } -db.adminCommand({ moveChunk: "yorkie-meta-1.documents", find: { key: "duplicateIDTestDocKey5" }, to: nextShard }) +db.adminCommand({ moveChunk: "yorkie-meta-1.clients", find: { key: clientSplitKey }, to: nextClientShard }) // The DB 'yorkie-meta-2' is for the server test. sh.enableSharding("yorkie-meta-2") sh.shardCollection("yorkie-meta-2.users", { username: 1 }, true) -// sh.shardCollection("yorkie-meta-2.clients", { _id: 1 }, true) +sh.shardCollection("yorkie-meta-2.clients", { key: 1 }) sh.shardCollection("yorkie-meta-2.documents", { key: 1 }) sh.shardCollection("yorkie-meta-2.changes", { doc_key: 1 }) sh.shardCollection("yorkie-meta-2.snapshots", { doc_key: 1 }) diff --git a/client/client.go b/client/client.go index cc7ea686c..932be9351 100644 --- a/client/client.go +++ b/client/client.go @@ -233,7 +233,8 @@ func (c *Client) Deactivate(ctx context.Context) error { } _, err := c.client.DeactivateClient(withShardKey(ctx, c.options.APIKey), &api.DeactivateClientRequest{ - ClientId: c.id.String(), + ClientKey: c.key, + ClientId: c.id.String(), }) if err != nil { return err @@ -277,6 +278,7 @@ func (c *Client) Attach(ctx context.Context, doc *document.Document, options ... 
res, err := c.client.AttachDocument( withShardKey(ctx, c.options.APIKey, doc.Key().String()), &api.AttachDocumentRequest{ + ClientKey: c.key, ClientId: c.id.String(), ChangePack: pbChangePack, }, @@ -350,6 +352,7 @@ func (c *Client) Detach(ctx context.Context, doc *document.Document, options ... res, err := c.client.DetachDocument( withShardKey(ctx, c.options.APIKey, doc.Key().String()), &api.DetachDocumentRequest{ + ClientKey: c.key, ClientId: c.id.String(), DocumentId: attachment.docID.String(), ChangePack: pbChangePack, @@ -413,6 +416,7 @@ func (c *Client) Watch( stream, err := c.client.WatchDocument( withShardKey(ctx, c.options.APIKey, doc.Key().String()), &api.WatchDocumentRequest{ + ClientKey: c.key, ClientId: c.id.String(), DocumentKey: doc.Key().String(), DocumentId: attachment.docID.String(), @@ -600,6 +604,7 @@ func (c *Client) pushPullChanges(ctx context.Context, opt SyncOptions) error { res, err := c.client.PushPullChanges( withShardKey(ctx, c.options.APIKey, opt.key.String()), &api.PushPullChangesRequest{ + ClientKey: c.key, ClientId: c.id.String(), DocumentId: attachment.docID.String(), ChangePack: pbChangePack, @@ -645,6 +650,7 @@ func (c *Client) Remove(ctx context.Context, doc *document.Document) error { res, err := c.client.RemoveDocument( withShardKey(ctx, c.options.APIKey, doc.Key().String()), &api.RemoveDocumentRequest{ + ClientKey: c.key, ClientId: c.id.String(), DocumentId: attachment.docID.String(), ChangePack: pbChangePack, @@ -682,6 +688,7 @@ func (c *Client) broadcast(ctx context.Context, doc *document.Document, topic st _, err := c.client.Broadcast( withShardKey(ctx, c.options.APIKey, doc.Key().String()), &api.BroadcastRequest{ + ClientKey: c.key, ClientId: c.id.String(), DocumentKey: doc.Key().String(), DocumentId: attachment.docID.String(), diff --git a/server/backend/database/database.go b/server/backend/database/database.go index ad5608f18..83ead1be2 100644 --- a/server/backend/database/database.go +++ 
b/server/backend/database/database.go @@ -127,10 +127,10 @@ type Database interface { ActivateClient(ctx context.Context, projectID types.ID, key string) (*ClientInfo, error) // DeactivateClient deactivates the client of the given ID. - DeactivateClient(ctx context.Context, projectID, clientID types.ID) (*ClientInfo, error) + DeactivateClient(ctx context.Context, clientKey string, clientID types.ID) (*ClientInfo, error) - // FindClientInfoByID finds the client of the given ID. - FindClientInfoByID(ctx context.Context, projectID, clientID types.ID) (*ClientInfo, error) + // FindClientInfoByKeyAndID finds the client of the given ID. + FindClientInfoByKeyAndID(ctx context.Context, clientKey string, clientID types.ID) (*ClientInfo, error) // UpdateClientInfoAfterPushPull updates the client from the given clientInfo // after handling PushPull. diff --git a/server/backend/database/memory/database.go b/server/backend/database/memory/database.go index 225cac1e7..76e07cbbb 100644 --- a/server/backend/database/memory/database.go +++ b/server/backend/database/memory/database.go @@ -442,7 +442,11 @@ func (d *DB) ActivateClient( } // DeactivateClient deactivates a client. 
-func (d *DB) DeactivateClient(_ context.Context, projectID, clientID types.ID) (*database.ClientInfo, error) { +func (d *DB) DeactivateClient( + _ context.Context, + clientKey string, + clientID types.ID, +) (*database.ClientInfo, error) { if err := clientID.Validate(); err != nil { return nil, err } @@ -450,9 +454,14 @@ func (d *DB) DeactivateClient(_ context.Context, projectID, clientID types.ID) ( txn := d.db.Txn(true) defer txn.Abort() - raw, err := txn.First(tblClients, "id", clientID.String()) + raw, err := txn.First( + tblClients, + "key_id", + clientKey, + clientID.String(), + ) if err != nil { - return nil, fmt.Errorf("find client by id: %w", err) + return nil, fmt.Errorf("find client by key and id: %w", err) } if raw == nil { @@ -460,9 +469,6 @@ func (d *DB) DeactivateClient(_ context.Context, projectID, clientID types.ID) ( } clientInfo := raw.(*database.ClientInfo) - if err := clientInfo.CheckIfInProject(projectID); err != nil { - return nil, err - } // NOTE(hackerwins): When retrieving objects from go-memdb, references to // the stored objects are returned instead of new objects. This can cause @@ -477,8 +483,12 @@ func (d *DB) DeactivateClient(_ context.Context, projectID, clientID types.ID) ( return clientInfo, nil } -// FindClientInfoByID finds a client by ID. -func (d *DB) FindClientInfoByID(_ context.Context, projectID, clientID types.ID) (*database.ClientInfo, error) { +// FindClientInfoByKeyAndID finds a client by the given key and ID. 
+func (d *DB) FindClientInfoByKeyAndID( + _ context.Context, + clientKey string, + clientID types.ID, +) (*database.ClientInfo, error) { if err := clientID.Validate(); err != nil { return nil, err } @@ -486,19 +496,20 @@ func (d *DB) FindClientInfoByID(_ context.Context, projectID, clientID types.ID) txn := d.db.Txn(false) defer txn.Abort() - raw, err := txn.First(tblClients, "id", clientID.String()) + raw, err := txn.First( + tblClients, + "key_id", + clientKey, + clientID.String(), + ) if err != nil { - return nil, fmt.Errorf("find client by id: %w", err) + return nil, fmt.Errorf("find client by key and id: %w", err) } if raw == nil { return nil, fmt.Errorf("%s: %w", clientID, database.ErrClientNotFound) } clientInfo := raw.(*database.ClientInfo) - if err := clientInfo.CheckIfInProject(projectID); err != nil { - return nil, err - } - return clientInfo.DeepCopy(), nil } @@ -518,9 +529,14 @@ func (d *DB) UpdateClientInfoAfterPushPull( txn := d.db.Txn(true) defer txn.Abort() - raw, err := txn.First(tblClients, "id", clientInfo.ID.String()) + raw, err := txn.First( + tblClients, + "key_id", + clientInfo.Key, + clientInfo.ID.String(), + ) if err != nil { - return fmt.Errorf("find client by id: %w", err) + return fmt.Errorf("find client by key and id: %w", err) } if raw == nil { return fmt.Errorf("%s: %w", clientInfo.ID, database.ErrClientNotFound) @@ -1171,9 +1187,10 @@ func (d *DB) UpdateSyncedSeq( if !isAttached { if _, err = txn.DeleteAll( tblSyncedSeqs, - "doc_key_doc_id_client_id", + "doc_key_doc_id_client_key_client_id", docKey.String(), docID.String(), + clientInfo.Key, clientInfo.ID.String(), ); err != nil { return fmt.Errorf("delete syncedseqs of the document (%s.%s): %w", @@ -1190,9 +1207,10 @@ func (d *DB) UpdateSyncedSeq( raw, err := txn.First( tblSyncedSeqs, - "doc_key_doc_id_client_id", + "doc_key_doc_id_client_key_client_id", docKey.String(), docID.String(), + clientInfo.Key, clientInfo.ID.String(), ) if err != nil { diff --git 
a/server/backend/database/memory/indexes.go b/server/backend/database/memory/indexes.go index 72ed0ae84..235f104e2 100644 --- a/server/backend/database/memory/indexes.go +++ b/server/backend/database/memory/indexes.go @@ -83,6 +83,16 @@ var schema = &memdb.DBSchema{ Unique: true, Indexer: &memdb.StringFieldIndex{Field: "ID"}, }, + "key_id": { + Name: "key_id", + Unique: true, + Indexer: &memdb.CompoundIndex{ + Indexes: []memdb.Indexer{ + &memdb.StringFieldIndex{Field: "Key"}, + &memdb.StringFieldIndex{Field: "ID"}, + }, + }, + }, "project_id": { Name: "project_id", Indexer: &memdb.StringFieldIndex{Field: "ProjectID"}, @@ -207,13 +217,14 @@ var schema = &memdb.DBSchema{ Unique: true, Indexer: &memdb.StringFieldIndex{Field: "ID"}, }, - "doc_key_doc_id_client_id": { - Name: "doc_key_doc_id_client_id", + "doc_key_doc_id_client_key_client_id": { + Name: "doc_key_doc_id_client_key_client_id", Unique: true, Indexer: &memdb.CompoundIndex{ Indexes: []memdb.Indexer{ &memdb.StringFieldIndex{Field: "DocKey"}, &memdb.StringFieldIndex{Field: "DocID"}, + &memdb.StringFieldIndex{Field: "ClientKey"}, &memdb.StringFieldIndex{Field: "ClientID"}, }, }, diff --git a/server/backend/database/mongo/client.go b/server/backend/database/mongo/client.go index 531a07dd2..30b6018f9 100644 --- a/server/backend/database/mongo/client.go +++ b/server/backend/database/mongo/client.go @@ -469,6 +469,7 @@ func (c *Client) ActivateClient(ctx context.Context, projectID types.ID, key str var result *mongo.SingleResult if res.UpsertedCount > 0 { result = c.collection(ColClients).FindOneAndUpdate(ctx, bson.M{ + "key": key, "_id": res.UpsertedID, }, bson.M{ "$set": bson.M{ @@ -477,7 +478,8 @@ func (c *Client) ActivateClient(ctx context.Context, projectID types.ID, key str }) } else { result = c.collection(ColClients).FindOne(ctx, bson.M{ - "key": key, + "project_id": encodedProjectID, + "key": key, }) } @@ -490,19 +492,19 @@ func (c *Client) ActivateClient(ctx context.Context, projectID types.ID, key str } 
// DeactivateClient deactivates the client of the given ID. -func (c *Client) DeactivateClient(ctx context.Context, projectID, clientID types.ID) (*database.ClientInfo, error) { - encodedProjectID, err := EncodeID(projectID) - if err != nil { - return nil, err - } +func (c *Client) DeactivateClient( + ctx context.Context, + clientKey string, + clientID types.ID, +) (*database.ClientInfo, error) { encodedClientID, err := EncodeID(clientID) if err != nil { return nil, err } res := c.collection(ColClients).FindOneAndUpdate(ctx, bson.M{ - "_id": encodedClientID, - "project_id": encodedProjectID, + "key": clientKey, + "_id": encodedClientID, }, bson.M{ "$set": bson.M{ "status": database.ClientDeactivated, @@ -521,20 +523,20 @@ func (c *Client) DeactivateClient(ctx context.Context, projectID, clientID types return &clientInfo, nil } -// FindClientInfoByID finds the client of the given ID. -func (c *Client) FindClientInfoByID(ctx context.Context, projectID, clientID types.ID) (*database.ClientInfo, error) { - encodedProjectID, err := EncodeID(projectID) - if err != nil { - return nil, err - } +// FindClientInfoByKeyAndID finds the client of the given key and ID. 
+func (c *Client) FindClientInfoByKeyAndID( + ctx context.Context, + clientKey string, + clientID types.ID, +) (*database.ClientInfo, error) { encodedClientID, err := EncodeID(clientID) if err != nil { return nil, err } result := c.collection(ColClients).FindOneAndUpdate(ctx, bson.M{ - "_id": encodedClientID, - "project_id": encodedProjectID, + "key": clientKey, + "_id": encodedClientID, }, bson.M{ "$set": bson.M{ "updated_at": gotime.Now(), @@ -597,6 +599,7 @@ func (c *Client) UpdateClientInfoAfterPushPull( } result := c.collection(ColClients).FindOneAndUpdate(ctx, bson.M{ + "key": clientInfo.Key, "_id": encodedClientID, }, updater) @@ -1332,9 +1335,10 @@ func (c *Client) UpdateSyncedSeq( if !isAttached { if _, err = c.collection(ColSyncedSeqs).DeleteOne(ctx, bson.M{ - "doc_key": docKey, - "doc_id": encodedDocID, - "client_id": encodedClientID, + "doc_key": docKey, + "doc_id": encodedDocID, + "client_key": clientInfo.Key, + "client_id": encodedClientID, }, options.Delete()); err != nil { return fmt.Errorf("delete synced seq: %w", err) } @@ -1347,9 +1351,10 @@ func (c *Client) UpdateSyncedSeq( } if _, err = c.collection(ColSyncedSeqs).UpdateOne(ctx, bson.M{ - "doc_key": docKey, - "doc_id": encodedDocID, - "client_id": encodedClientID, + "doc_key": docKey, + "doc_id": encodedDocID, + "client_key": clientInfo.Key, + "client_id": encodedClientID, }, bson.M{ "$set": bson.M{ "lamport": ticket.Lamport(), diff --git a/server/backend/database/mongo/indexes.go b/server/backend/database/mongo/indexes.go index 5512ca80a..2eb425096 100644 --- a/server/backend/database/mongo/indexes.go +++ b/server/backend/database/mongo/indexes.go @@ -76,8 +76,8 @@ var collectionInfos = []collectionInfo{ name: ColClients, indexes: []mongo.IndexModel{{ Keys: bsonx.Doc{ - {Key: "project_id", Value: bsonx.Int32(1)}, {Key: "key", Value: bsonx.Int32(1)}, + {Key: "project_id", Value: bsonx.Int32(1)}, }, Options: options.Index().SetUnique(true), }, { diff --git 
a/server/backend/database/synced_seq_info.go b/server/backend/database/synced_seq_info.go index 9cfc87822..b860fff9d 100644 --- a/server/backend/database/synced_seq_info.go +++ b/server/backend/database/synced_seq_info.go @@ -27,6 +27,7 @@ type SyncedSeqInfo struct { ID types.ID `bson:"_id"` DocKey key.Key `bson:"doc_key"` DocID types.ID `bson:"doc_id"` + ClientKey key.Key `bson:"client_key"` ClientID types.ID `bson:"client_id"` Lamport int64 `bson:"lamport"` ActorID types.ID `bson:"actor_id"` diff --git a/server/backend/database/testcases/testcases.go b/server/backend/database/testcases/testcases.go index e14749fa3..7702a2293 100644 --- a/server/backend/database/testcases/testcases.go +++ b/server/backend/database/testcases/testcases.go @@ -44,6 +44,7 @@ import ( const ( dummyOwnerName = "dummy" otherOwnerName = "other" + dummyClientKey = "dummy" dummyClientID = types.ID("000000000000000000000000") clientDeactivateThreshold = "1h" ) @@ -284,13 +285,13 @@ func RunListUserInfosTest(t *testing.T, db database.Database) { func RunActivateClientDeactivateClientTest(t *testing.T, db database.Database, projectID types.ID) { t.Run("activate and find client test", func(t *testing.T) { ctx := context.Background() - _, err := db.FindClientInfoByID(ctx, projectID, dummyClientID) + _, err := db.FindClientInfoByKeyAndID(ctx, dummyClientKey, dummyClientID) assert.ErrorIs(t, err, database.ErrClientNotFound) clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) assert.NoError(t, err) - found, err := db.FindClientInfoByID(ctx, projectID, clientInfo.ID) + found, err := db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) assert.NoError(t, err) assert.Equal(t, clientInfo.Key, found.Key) }) @@ -299,7 +300,7 @@ func RunActivateClientDeactivateClientTest(t *testing.T, db database.Database, p ctx := context.Background() // try to deactivate the client with not exists ID. 
- _, err := db.DeactivateClient(ctx, projectID, dummyClientID) + _, err := db.DeactivateClient(ctx, dummyClientKey, dummyClientID) assert.ErrorIs(t, err, database.ErrClientNotFound) clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) @@ -314,15 +315,16 @@ func RunActivateClientDeactivateClientTest(t *testing.T, db database.Database, p assert.Equal(t, t.Name(), clientInfo.Key) assert.Equal(t, database.ClientActivated, clientInfo.Status) + clientKey := clientInfo.Key clientID := clientInfo.ID - clientInfo, err = db.DeactivateClient(ctx, projectID, clientID) + clientInfo, err = db.DeactivateClient(ctx, clientKey, clientID) assert.NoError(t, err) assert.Equal(t, t.Name(), clientInfo.Key) assert.Equal(t, database.ClientDeactivated, clientInfo.Status) // try to deactivate the client twice. - clientInfo, err = db.DeactivateClient(ctx, projectID, clientID) + clientInfo, err = db.DeactivateClient(ctx, clientKey, clientID) assert.NoError(t, err) assert.Equal(t, t.Name(), clientInfo.Key) assert.Equal(t, database.ClientDeactivated, clientInfo.Status) @@ -846,7 +848,7 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type assert.NotEqual(t, gotime.Time{}, docInfo.RemovedAt) // Check whether DocumentRemoved status is set in clientInfo - clientInfo, err = db.FindClientInfoByID(ctx, projectID, clientInfo.ID) + clientInfo, err = db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) assert.NoError(t, err) assert.NotEqual(t, database.DocumentRemoved, clientInfo.Documents[docKey][docInfo.ID].Status) }) @@ -882,7 +884,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err := db.FindClientInfoByID(ctx, projectID, clientInfo.ID) + result, err := db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) assert.Equal(t, 
result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(0)) assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(0)) @@ -902,7 +904,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 1 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err := db.FindClientInfoByID(ctx, projectID, clientInfo.ID) + result, err := db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(1)) assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(1)) @@ -913,7 +915,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 5 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err = db.FindClientInfoByID(ctx, projectID, clientInfo.ID) + result, err = db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(3)) assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(5)) @@ -924,7 +926,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 3 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err = db.FindClientInfoByID(ctx, projectID, clientInfo.ID) + result, err = db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, 
int64(3)) assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(5)) @@ -944,7 +946,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 1 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err := db.FindClientInfoByID(ctx, projectID, clientInfo.ID) + result, err := db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(1)) assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(1)) @@ -953,7 +955,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr assert.NoError(t, clientInfo.DetachDocument(docKey, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err = db.FindClientInfoByID(ctx, projectID, clientInfo.ID) + result, err = db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentDetached) assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(0)) assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(0)) @@ -973,7 +975,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 1 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err := db.FindClientInfoByID(ctx, projectID, clientInfo.ID) + result, err := db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(1)) assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(1)) @@ -982,7 +984,7 @@ func 
RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr assert.NoError(t, clientInfo.RemoveDocument(docKey, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err = db.FindClientInfoByID(ctx, projectID, clientInfo.ID) + result, err = db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentRemoved) assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(0)) assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(0)) diff --git a/server/backend/housekeeping/housekeeping.go b/server/backend/housekeeping/housekeeping.go index 2672ab182..23475b9be 100644 --- a/server/backend/housekeeping/housekeeping.go +++ b/server/backend/housekeeping/housekeeping.go @@ -163,7 +163,7 @@ func (h *Housekeeping) deactivateCandidates( if _, err := clients.Deactivate( ctx, h.database, - clientInfo.ProjectID, + clientInfo.Key, clientInfo.ID, ); err != nil { return database.DefaultProjectID, err diff --git a/server/clients/clients.go b/server/clients/clients.go index 7f75be99b..a5455e868 100644 --- a/server/clients/clients.go +++ b/server/clients/clients.go @@ -48,12 +48,12 @@ func Activate( func Deactivate( ctx context.Context, db database.Database, - projectID types.ID, + clientKey string, clientID types.ID, ) (*database.ClientInfo, error) { - clientInfo, err := db.FindClientInfoByID( + clientInfo, err := db.FindClientInfoByKeyAndID( ctx, - projectID, + clientKey, clientID, ) if err != nil { @@ -91,19 +91,19 @@ func Deactivate( } } - return db.DeactivateClient(ctx, projectID, clientID) + return db.DeactivateClient(ctx, clientKey, clientID) } // FindClientInfo finds the client with the given id. 
func FindClientInfo( ctx context.Context, db database.Database, - project *types.Project, + clientKey string, clientID *time.ActorID, ) (*database.ClientInfo, error) { - return db.FindClientInfoByID( + return db.FindClientInfoByKeyAndID( ctx, - project.ID, + clientKey, types.IDFromActorID(clientID), ) } diff --git a/server/rpc/testcases/testcases.go b/server/rpc/testcases/testcases.go index 5c1cbed2b..b169fb66c 100644 --- a/server/rpc/testcases/testcases.go +++ b/server/rpc/testcases/testcases.go @@ -52,15 +52,19 @@ func RunActivateAndDeactivateClientTest( t *testing.T, testClient api.YorkieServiceClient, ) { + clientKey := t.Name() activateResp, err := testClient.ActivateClient( context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, + &api.ActivateClientRequest{ClientKey: clientKey}, ) assert.NoError(t, err) _, err = testClient.DeactivateClient( context.Background(), - &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, + &api.DeactivateClientRequest{ + ClientKey: clientKey, + ClientId: activateResp.ClientId, + }, ) assert.NoError(t, err) @@ -73,14 +77,20 @@ func RunActivateAndDeactivateClientTest( _, err = testClient.DeactivateClient( context.Background(), - &api.DeactivateClientRequest{ClientId: emptyClientID}, + &api.DeactivateClientRequest{ + ClientKey: clientKey, + ClientId: emptyClientID, + }, ) assert.Equal(t, codes.InvalidArgument, status.Convert(err).Code()) // client not found _, err = testClient.DeactivateClient( context.Background(), - &api.DeactivateClientRequest{ClientId: nilClientID}, + &api.DeactivateClientRequest{ + ClientKey: clientKey, + ClientId: nilClientID, + }, ) assert.Equal(t, codes.NotFound, status.Convert(err).Code()) @@ -91,9 +101,10 @@ func RunAttachAndDetachDocumentTest( t *testing.T, testClient api.YorkieServiceClient, ) { + clientKey := t.Name() activateResp, err := testClient.ActivateClient( context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, + &api.ActivateClientRequest{ClientKey: 
clientKey}, ) assert.NoError(t, err) @@ -105,6 +116,7 @@ func RunAttachAndDetachDocumentTest( resPack, err := testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, @@ -115,6 +127,7 @@ func RunAttachAndDetachDocumentTest( _, err = testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: invalidClientID, ChangePack: packWithNoChanges, }, @@ -125,6 +138,7 @@ func RunAttachAndDetachDocumentTest( _, err = testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: nilClientID, ChangePack: packWithNoChanges, }, @@ -135,6 +149,7 @@ func RunAttachAndDetachDocumentTest( _, err = testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, @@ -145,6 +160,7 @@ func RunAttachAndDetachDocumentTest( _, err = testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: invalidChangePack, }, @@ -154,6 +170,7 @@ func RunAttachAndDetachDocumentTest( _, err = testClient.DetachDocument( context.Background(), &api.DetachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithNoChanges, @@ -165,6 +182,7 @@ func RunAttachAndDetachDocumentTest( _, err = testClient.DetachDocument( context.Background(), &api.DetachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithNoChanges, @@ -175,6 +193,7 @@ func RunAttachAndDetachDocumentTest( _, err = testClient.DetachDocument( context.Background(), &api.DetachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: invalidChangePack, }, @@ -185,6 +204,7 @@ func 
RunAttachAndDetachDocumentTest( _, err = testClient.DetachDocument( context.Background(), &api.DetachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: "000000000000000000000000", ChangePack: &api.ChangePack{ @@ -196,7 +216,10 @@ func RunAttachAndDetachDocumentTest( _, err = testClient.DeactivateClient( context.Background(), - &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, + &api.DeactivateClientRequest{ + ClientKey: clientKey, + ClientId: activateResp.ClientId, + }, ) assert.NoError(t, err) @@ -204,6 +227,7 @@ func RunAttachAndDetachDocumentTest( _, err = testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, @@ -216,9 +240,11 @@ func RunAttachAndDetachRemovedDocumentTest( t *testing.T, testClient api.YorkieServiceClient, ) { + clientKey := t.Name() + activateResp, err := testClient.ActivateClient( context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, + &api.ActivateClientRequest{ClientKey: clientKey}, ) assert.NoError(t, err) @@ -236,6 +262,7 @@ func RunAttachAndDetachRemovedDocumentTest( resPack, err := testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, @@ -245,6 +272,7 @@ func RunAttachAndDetachRemovedDocumentTest( _, err = testClient.RemoveDocument( context.Background(), &api.RemoveDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithRemoveRequest, @@ -257,6 +285,7 @@ func RunAttachAndDetachRemovedDocumentTest( _, err = testClient.DetachDocument( context.Background(), &api.DetachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithNoChanges, @@ -268,6 +297,7 @@ func RunAttachAndDetachRemovedDocumentTest( resPack, err = 
testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, @@ -277,6 +307,7 @@ func RunAttachAndDetachRemovedDocumentTest( _, err = testClient.RemoveDocument( context.Background(), &api.RemoveDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithRemoveRequest, @@ -290,6 +321,8 @@ func RunPushPullChangeTest( t *testing.T, testClient api.YorkieServiceClient, ) { + clientKey := helper.TestDocKey(t).String() + packWithNoChanges := &api.ChangePack{ DocumentKey: helper.TestDocKey(t).String(), Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, @@ -297,7 +330,7 @@ func RunPushPullChangeTest( activateResp, err := testClient.ActivateClient( context.Background(), - &api.ActivateClientRequest{ClientKey: helper.TestDocKey(t).String()}, + &api.ActivateClientRequest{ClientKey: clientKey}, ) assert.NoError(t, err) @@ -305,7 +338,8 @@ func RunPushPullChangeTest( resPack, err := testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ - ClientId: activateResp.ClientId, + ClientKey: clientKey, + ClientId: activateResp.ClientId, ChangePack: &api.ChangePack{ DocumentKey: helper.TestDocKey(t).String(), Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 1}, @@ -324,6 +358,7 @@ func RunPushPullChangeTest( _, err = testClient.PushPullChanges( context.Background(), &api.PushPullChangesRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: &api.ChangePack{ @@ -344,6 +379,7 @@ func RunPushPullChangeTest( _, err = testClient.DetachDocument( context.Background(), &api.DetachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: &api.ChangePack{ @@ -365,6 +401,7 @@ func RunPushPullChangeTest( _, err = testClient.PushPullChanges( context.Background(), 
&api.PushPullChangesRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithNoChanges, @@ -376,6 +413,7 @@ func RunPushPullChangeTest( _, err = testClient.PushPullChanges( context.Background(), &api.PushPullChangesRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: invalidChangePack, @@ -385,7 +423,10 @@ func RunPushPullChangeTest( _, err = testClient.DeactivateClient( context.Background(), - &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, + &api.DeactivateClientRequest{ + ClientKey: clientKey, + ClientId: activateResp.ClientId, + }, ) assert.NoError(t, err) @@ -393,6 +434,7 @@ func RunPushPullChangeTest( _, err = testClient.PushPullChanges( context.Background(), &api.PushPullChangesRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithNoChanges, @@ -406,6 +448,7 @@ func RunPushPullChangeOnRemovedDocumentTest( t *testing.T, testClient api.YorkieServiceClient, ) { + clientKey := helper.TestDocKey(t).String() packWithNoChanges := &api.ChangePack{ DocumentKey: helper.TestDocKey(t).String(), Checkpoint: &api.Checkpoint{ServerSeq: 0, ClientSeq: 0}, @@ -419,13 +462,14 @@ func RunPushPullChangeOnRemovedDocumentTest( activateResp, err := testClient.ActivateClient( context.Background(), - &api.ActivateClientRequest{ClientKey: helper.TestDocKey(t).String()}, + &api.ActivateClientRequest{ClientKey: clientKey}, ) assert.NoError(t, err) resPack, err := testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, @@ -435,6 +479,7 @@ func RunPushPullChangeOnRemovedDocumentTest( _, err = testClient.RemoveDocument( context.Background(), &api.RemoveDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: 
packWithRemoveRequest, @@ -446,6 +491,7 @@ func RunPushPullChangeOnRemovedDocumentTest( _, err = testClient.PushPullChanges( context.Background(), &api.PushPullChangesRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithNoChanges, @@ -459,9 +505,11 @@ func RunRemoveDocumentTest( t *testing.T, testClient api.YorkieServiceClient, ) { + clientKey := t.Name() + activateResp, err := testClient.ActivateClient( context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, + &api.ActivateClientRequest{ClientKey: clientKey}, ) assert.NoError(t, err) @@ -479,6 +527,7 @@ func RunRemoveDocumentTest( resPack, err := testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, @@ -488,6 +537,7 @@ func RunRemoveDocumentTest( _, err = testClient.RemoveDocument( context.Background(), &api.RemoveDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithRemoveRequest, @@ -499,6 +549,7 @@ func RunRemoveDocumentTest( _, err = testClient.RemoveDocument( context.Background(), &api.RemoveDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithRemoveRequest, @@ -512,9 +563,11 @@ func RunRemoveDocumentWithInvalidClientStateTest( t *testing.T, testClient api.YorkieServiceClient, ) { + clientKey := t.Name() + activateResp, err := testClient.ActivateClient( context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, + &api.ActivateClientRequest{ClientKey: clientKey}, ) assert.NoError(t, err) @@ -532,6 +585,7 @@ func RunRemoveDocumentWithInvalidClientStateTest( resPack, err := testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, @@ -541,6 +595,7 @@ func 
RunRemoveDocumentWithInvalidClientStateTest( _, err = testClient.DetachDocument( context.Background(), &api.DetachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithNoChanges, @@ -552,6 +607,7 @@ func RunRemoveDocumentWithInvalidClientStateTest( _, err = testClient.RemoveDocument( context.Background(), &api.RemoveDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithRemoveRequest, @@ -561,7 +617,10 @@ func RunRemoveDocumentWithInvalidClientStateTest( _, err = testClient.DeactivateClient( context.Background(), - &api.DeactivateClientRequest{ClientId: activateResp.ClientId}, + &api.DeactivateClientRequest{ + ClientKey: clientKey, + ClientId: activateResp.ClientId, + }, ) assert.NoError(t, err) @@ -569,6 +628,7 @@ func RunRemoveDocumentWithInvalidClientStateTest( _, err = testClient.RemoveDocument( context.Background(), &api.RemoveDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentId: resPack.DocumentId, ChangePack: packWithRemoveRequest, @@ -582,9 +642,11 @@ func RunWatchDocumentTest( t *testing.T, testClient api.YorkieServiceClient, ) { + clientKey := t.Name() + activateResp, err := testClient.ActivateClient( context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, + &api.ActivateClientRequest{ClientKey: clientKey}, ) assert.NoError(t, err) @@ -598,6 +660,7 @@ func RunWatchDocumentTest( resPack, err := testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, @@ -608,6 +671,7 @@ func RunWatchDocumentTest( watchResp, err := testClient.WatchDocument( context.Background(), &api.WatchDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, DocumentKey: docKey, DocumentId: resPack.DocumentId, @@ -888,6 +952,7 @@ func RunAdminGetDocumentTest( 
testAdminClient api.AdminServiceClient, testAdminAuthInterceptor *admin.AuthInterceptor, ) { + clientKey := t.Name() testDocumentKey := helper.TestDocKey(t).String() resp, err := testAdminClient.LogIn( @@ -903,7 +968,7 @@ func RunAdminGetDocumentTest( activateResp, err := testClient.ActivateClient( context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, + &api.ActivateClientRequest{ClientKey: clientKey}, ) assert.NoError(t, err) @@ -915,6 +980,7 @@ func RunAdminGetDocumentTest( _, err = testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, @@ -948,6 +1014,7 @@ func RunAdminListChangesTest( testAdminClient api.AdminServiceClient, testAdminAuthInterceptor *admin.AuthInterceptor, ) { + clientKey := t.Name() testDocumentKey := helper.TestDocKey(t).String() resp, err := testAdminClient.LogIn( @@ -963,7 +1030,7 @@ func RunAdminListChangesTest( activateResp, err := testClient.ActivateClient( context.Background(), - &api.ActivateClientRequest{ClientKey: t.Name()}, + &api.ActivateClientRequest{ClientKey: clientKey}, ) assert.NoError(t, err) @@ -975,6 +1042,7 @@ func RunAdminListChangesTest( _, err = testClient.AttachDocument( context.Background(), &api.AttachDocumentRequest{ + ClientKey: clientKey, ClientId: activateResp.ClientId, ChangePack: packWithNoChanges, }, diff --git a/server/rpc/yorkie_server.go b/server/rpc/yorkie_server.go index f41789dc1..800c4fa4a 100644 --- a/server/rpc/yorkie_server.go +++ b/server/rpc/yorkie_server.go @@ -90,8 +90,7 @@ func (s *yorkieServer) DeactivateClient( return nil, err } - project := projects.From(ctx) - _, err = clients.Deactivate(ctx, s.backend.DB, project.ID, types.IDFromActorID(actorID)) + _, err = clients.Deactivate(ctx, s.backend.DB, req.ClientKey, types.IDFromActorID(actorID)) if err != nil { return nil, err } @@ -139,7 +138,7 @@ func (s *yorkieServer) AttachDocument( } }() - clientInfo, err := 
clients.FindClientInfo(ctx, s.backend.DB, project, actorID) + clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, req.ClientKey, actorID) if err != nil { return nil, err } @@ -209,7 +208,7 @@ func (s *yorkieServer) DetachDocument( } }() - clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, project, actorID) + clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, req.ClientKey, actorID) if err != nil { return nil, err } @@ -305,7 +304,7 @@ func (s *yorkieServer) PushPullChanges( syncMode = types.SyncModePushOnly } - clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, project, actorID) + clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, req.ClientKey, actorID) if err != nil { return nil, err } @@ -367,8 +366,12 @@ func (s *yorkieServer) WatchDocument( return err } - project := projects.From(stream.Context()) - if _, err = clients.FindClientInfo(stream.Context(), s.backend.DB, project, clientID); err != nil { + if _, err = clients.FindClientInfo( + stream.Context(), + s.backend.DB, + req.ClientKey, + clientID, + ); err != nil { return err } @@ -484,7 +487,7 @@ func (s *yorkieServer) RemoveDocument( }() } - clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, project, actorID) + clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, req.ClientKey, actorID) if err != nil { return nil, err } @@ -590,8 +593,7 @@ func (s *yorkieServer) Broadcast( return nil, err } - project := projects.From(ctx) - if _, err = clients.FindClientInfo(ctx, s.backend.DB, project, clientID); err != nil { + if _, err = clients.FindClientInfo(ctx, s.backend.DB, req.ClientKey, clientID); err != nil { return nil, err } diff --git a/test/helper/helper.go b/test/helper/helper.go index e3dea2aaf..95049f274 100644 --- a/test/helper/helper.go +++ b/test/helper/helper.go @@ -418,7 +418,7 @@ func CreateDummyDocumentWithID( return nil } -// FindDocInfosWithID finds the docInfo of the given projectID and docID. 
+// FindDocInfosWithID finds the docInfos of the given projectID and docID. func FindDocInfosWithID( databaseName string, projectID types.ID, @@ -455,3 +455,75 @@ func FindDocInfosWithID( return infos, nil } + +// CreateDummyClientWithID creates a new dummy client with the given ID and key. +func CreateDummyClientWithID( + databaseName string, + projectID types.ID, + clientKey string, + clientID types.ID, +) error { + encodedProjectID, err := mongo.EncodeID(projectID) + if err != nil { + return err + } + encodedclientID, err := mongo.EncodeID(clientID) + if err != nil { + return err + } + cli, err := setupRawMongoClient(databaseName) + if err != nil { + return err + } + _, err = cli.Database(databaseName).Collection(mongo.ColClients).InsertOne( + context.Background(), + bson.M{ + "_id": encodedclientID, + "project_id": encodedProjectID, + "key": clientKey, + }, + ) + if err != nil { + return err + } + + return nil +} + +// FindClientInfosWithID finds the clientInfos of the given projectID and clientID. 
+func FindClientInfosWithID( + databaseName string, + projectID types.ID, + clientID types.ID, +) ([]*database.DocInfo, error) { + ctx := context.Background() + encodedProjectID, err := mongo.EncodeID(projectID) + if err != nil { + return nil, err + } + encodedclientID, err := mongo.EncodeID(clientID) + if err != nil { + return nil, err + } + cli, err := setupRawMongoClient(databaseName) + if err != nil { + return nil, err + } + + cursor, err := cli.Database(databaseName).Collection(mongo.ColClients).Find( + ctx, + bson.M{ + "_id": encodedclientID, + "project_id": encodedProjectID, + }, options.Find()) + if err != nil { + return nil, err + } + + var infos []*database.DocInfo + if err := cursor.All(ctx, &infos); err != nil { + return nil, err + } + + return infos, nil +} diff --git a/test/shard/mongo_client_test.go b/test/shard/mongo_client_test.go index abaa0cecf..d081c6567 100644 --- a/test/shard/mongo_client_test.go +++ b/test/shard/mongo_client_test.go @@ -219,4 +219,45 @@ func TestClientWithShardedDB(t *testing.T) { assert.NoError(t, err) testcases.AssertKeys(t, docKeysInReverse, result) }) + + t.Run("FindClientInfoByKeyAndID with duplicate ID test", func(t *testing.T) { + ctx := context.Background() + + // 01. Initialize a project and activate a client. + projectInfo, err := cli.CreateProjectInfo(ctx, t.Name(), dummyOwnerName, clientDeactivateThreshold) + assert.NoError(t, err) + + clientKey1 := fmt.Sprintf("%s%d", "duplicateIDTestClientKey", 0) + clientInfo1, err := cli.ActivateClient(ctx, projectInfo.ID, clientKey1) + assert.NoError(t, err) + + // 02. Create an extra client with duplicate ID. + clientKey2 := fmt.Sprintf("%s%d", "duplicateIDTestClientKey", 5) + err = helper.CreateDummyClientWithID( + shardedDBNameForMongoClient, + projectInfo.ID, + clientKey2, + clientInfo1.ID, + ) + assert.NoError(t, err) + + // 03. Check if there are two clients with the same ID. 
+ infos, err := helper.FindClientInfosWithID( + shardedDBNameForMongoClient, + projectInfo.ID, + clientInfo1.ID, + ) + assert.NoError(t, err) + assert.Len(t, infos, 2) + + // 04. Check if the client is correctly found using clientKey and clientID. + result, err := cli.FindClientInfoByKeyAndID( + ctx, + clientKey1, + clientInfo1.ID, + ) + assert.NoError(t, err) + assert.Equal(t, clientInfo1.Key, result.Key) + assert.Equal(t, clientInfo1.ID, result.ID) + }) } From 5b9e445a8b486540227722159c2b6291a2933d18 Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Tue, 28 Nov 2023 17:06:25 +0900 Subject: [PATCH 07/11] Add owner_key at Documents --- server/backend/database/database.go | 1 + server/backend/database/doc_info.go | 10 ++-- server/backend/database/memory/database.go | 4 +- server/backend/database/mongo/client.go | 4 +- .../backend/database/testcases/testcases.go | 48 +++++++++---------- server/documents/documents.go | 3 ++ test/bench/push_pull_bench_test.go | 2 +- test/shard/mongo_client_test.go | 5 +- 8 files changed, 45 insertions(+), 32 deletions(-) diff --git a/server/backend/database/database.go b/server/backend/database/database.go index 83ead1be2..5932a4ca5 100644 --- a/server/backend/database/database.go +++ b/server/backend/database/database.go @@ -157,6 +157,7 @@ type Database interface { FindDocInfoByKeyAndOwner( ctx context.Context, projectID types.ID, + clientKey string, clientID types.ID, docKey key.Key, createDocIfNotExist bool, diff --git a/server/backend/database/doc_info.go b/server/backend/database/doc_info.go index 847458458..11b18a7fc 100644 --- a/server/backend/database/doc_info.go +++ b/server/backend/database/doc_info.go @@ -37,8 +37,11 @@ type DocInfo struct { // ServerSeq is the sequence number of the last change of the document on the server. ServerSeq int64 `bson:"server_seq"` - // Owner is the owner(ID of the client) of the document. - Owner types.ID `bson:"owner"` + // OwnerKey is the key of the document owner. 
+ OwnerKey string `bson:"owner_key"` + + // OwnerID is the ID of the document owner. + OwnerID types.ID `bson:"owner_id"` // CreatedAt is the time when the document is created. CreatedAt time.Time `bson:"created_at"` @@ -75,7 +78,8 @@ func (info *DocInfo) DeepCopy() *DocInfo { ProjectID: info.ProjectID, Key: info.Key, ServerSeq: info.ServerSeq, - Owner: info.Owner, + OwnerKey: info.OwnerKey, + OwnerID: info.OwnerID, CreatedAt: info.CreatedAt, AccessedAt: info.AccessedAt, UpdatedAt: info.UpdatedAt, diff --git a/server/backend/database/memory/database.go b/server/backend/database/memory/database.go index 76e07cbbb..1bdcfd6e7 100644 --- a/server/backend/database/memory/database.go +++ b/server/backend/database/memory/database.go @@ -662,6 +662,7 @@ func (d *DB) FindDeactivateCandidates( func (d *DB) FindDocInfoByKeyAndOwner( _ context.Context, projectID types.ID, + clientKey string, clientID types.ID, key key.Key, createDocIfNotExist bool, @@ -703,7 +704,8 @@ func (d *DB) FindDocInfoByKeyAndOwner( ID: newID(), ProjectID: projectID, Key: key, - Owner: clientID, + OwnerKey: clientKey, + OwnerID: clientID, ServerSeq: 0, CreatedAt: now, AccessedAt: now, diff --git a/server/backend/database/mongo/client.go b/server/backend/database/mongo/client.go index 30b6018f9..f71824be5 100644 --- a/server/backend/database/mongo/client.go +++ b/server/backend/database/mongo/client.go @@ -686,6 +686,7 @@ func (c *Client) FindDocInfoByKeyAndOwner( ctx context.Context, projectID types.ID, + clientKey string, clientID types.ID, docKey key.Key, createDocIfNotExist bool, @@ -723,7 +724,8 @@ func (c *Client) FindDocInfoByKeyAndOwner( "_id": res.UpsertedID, }, bson.M{ "$set": bson.M{ - "owner": encodedOwnerID, + "owner_key": clientKey, + "owner_id": encodedOwnerID, "server_seq": 0, "created_at": now, }, diff --git a/server/backend/database/testcases/testcases.go b/server/backend/database/testcases/testcases.go index 7702a2293..6252dae78 --- 
a/server/backend/database/testcases/testcases.go +++ b/server/backend/database/testcases/testcases.go @@ -64,10 +64,10 @@ func RunFindDocInfoTest( assert.ErrorIs(t, err, database.ErrDocumentNotFound) docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - _, err = db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, false) + _, err = db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, false) assert.ErrorIs(t, err, database.ErrDocumentNotFound) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(t, err) assert.Equal(t, docKey, docInfo.Key) }) @@ -145,7 +145,7 @@ func RunFindDocInfosByQueryTest( "test0", "test1", "test2", "test3", "test10", "test11", "test20", "test21", "test22", "test23"} for _, docKey := range docKeys { - _, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, key.Key(docKey), true) + _, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, key.Key(docKey), true) assert.NoError(t, err) } @@ -176,7 +176,7 @@ func RunFindChangesBetweenServerSeqsTest( docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) - docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) + docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) @@ -225,7 +225,7 @@ func RunFindClosestSnapshotInfoTest(t *testing.T, db database.Database, projectI clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) bytesID, _ := clientInfo.ID.Bytes() actorID, _ := time.ActorIDFromBytes(bytesID) - docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, 
docKey, true) + docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) doc := document.New(key.Key(t.Name())) doc.SetActor(actorID) @@ -439,7 +439,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) docInfos := make([]*database.DocInfo, 0, totalSize) for i := 0; i < totalSize; i++ { - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, key.Key(fmt.Sprintf("%d", i)), true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, key.Key(fmt.Sprintf("%d", i)), true) assert.NoError(t, err) docInfos = append(docInfos, docInfo) } @@ -517,7 +517,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t var dummyDocInfos []*database.DocInfo for i := 0; i <= testDocCnt; i++ { testDocKey := key.Key(fmt.Sprintf("%s%02d", "testdockey", i)) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, testProjectInfo.ID, dummyClientID, testDocKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, testProjectInfo.ID, dummyClientKey, dummyClientID, testDocKey, true) assert.NoError(t, err) dummyDocInfos = append(dummyDocInfos, docInfo) } @@ -647,7 +647,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t var docInfos []*database.DocInfo for i := 0; i < testDocCnt; i++ { testDocKey := key.Key("key" + strconv.Itoa(i)) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientID, testDocKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientKey, dummyClientID, testDocKey, true) assert.NoError(t, err) docInfos = append(docInfos, docInfo) } @@ -739,7 +739,7 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type // 01. Create a client and a document then attach the document to the client. 
clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) - docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) + docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) @@ -757,7 +757,7 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type // 01. Create a client and a document then attach the document to the client. clientInfo1, _ := db.ActivateClient(ctx, projectID, t.Name()) - docInfo1, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.ID, docKey, true) + docInfo1, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.Key, clientInfo1.ID, docKey, true) assert.NoError(t, clientInfo1.AttachDocument(docInfo1.Key, docInfo1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo1)) @@ -767,7 +767,7 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type assert.NoError(t, err) // 03. Create a document with same key and check they have same key but different id. - docInfo2, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.ID, docKey, true) + docInfo2, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.Key, clientInfo1.ID, docKey, true) assert.NoError(t, clientInfo1.AttachDocument(docInfo2.Key, docInfo2.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo2)) assert.Equal(t, docInfo1.Key, docInfo2.Key) @@ -781,7 +781,7 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type // 01. Create a client and a document then attach the document to the client. 
clientInfo1, _ := db.ActivateClient(ctx, projectID, t.Name()) - docInfo1, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.ID, docKey, true) + docInfo1, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.Key, clientInfo1.ID, docKey, true) assert.NoError(t, clientInfo1.AttachDocument(docInfo1.Key, docInfo1.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo1)) @@ -808,7 +808,7 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type assert.NoError(t, err) // 04. Create a document with same key and check they have same key but different id. - docInfo2, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.ID, docKey, true) + docInfo2, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.Key, clientInfo1.ID, docKey, true) assert.NoError(t, clientInfo1.AttachDocument(docInfo2.Key, docInfo2.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo2)) assert.Equal(t, docInfo1.Key, docInfo2.Key) @@ -830,7 +830,7 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) - docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) + docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) @@ -864,7 +864,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr assert.NoError(t, err) docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(t, err) err = 
db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo) @@ -878,7 +878,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr assert.NoError(t, err) docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(t, err) assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) @@ -896,7 +896,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr assert.NoError(t, err) docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(t, err) assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) @@ -938,7 +938,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr assert.NoError(t, err) docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(t, err) assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) @@ -967,7 +967,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr assert.NoError(t, err) docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(t, err) assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) @@ -996,7 +996,7 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db 
database.Database, pr assert.NoError(t, err) docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(t, err) assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) @@ -1020,7 +1020,7 @@ func RunIsDocumentAttachedTest(t *testing.T, db database.Database, projectID typ assert.NoError(t, err) c2, err := db.ActivateClient(ctx, projectID, t.Name()+"2") assert.NoError(t, err) - d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.ID, helper.TestDocKey(t), true) + d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.Key, c1.ID, helper.TestDocKey(t), true) assert.NoError(t, err) // 01. Check if document is attached without attaching @@ -1072,9 +1072,9 @@ func RunIsDocumentAttachedTest(t *testing.T, db database.Database, projectID typ // 00. Create a client and two documents c1, err := db.ActivateClient(ctx, projectID, t.Name()+"1") assert.NoError(t, err) - d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.ID, helper.TestDocKey(t)+"1", true) + d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.Key, c1.ID, helper.TestDocKey(t)+"1", true) assert.NoError(t, err) - d2, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.ID, helper.TestDocKey(t)+"2", true) + d2, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.Key, c1.ID, helper.TestDocKey(t)+"2", true) assert.NoError(t, err) // 01. 
Check if documents are attached after attaching @@ -1116,7 +1116,7 @@ func RunIsDocumentAttachedTest(t *testing.T, db database.Database, projectID typ assert.NoError(t, err) c2, err := db.ActivateClient(ctx, projectID, t.Name()+"2") assert.NoError(t, err) - d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.ID, helper.TestDocKey(t), true) + d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.Key, c1.ID, helper.TestDocKey(t), true) assert.NoError(t, err) // 01. Check if document is attached without attaching diff --git a/server/documents/documents.go b/server/documents/documents.go index 60ca64de0..cba2c6389 100644 --- a/server/documents/documents.go +++ b/server/documents/documents.go @@ -101,6 +101,7 @@ func GetDocumentSummary( docInfo, err := be.DB.FindDocInfoByKeyAndOwner( ctx, project.ID, + "", types.IDFromActorID(time.InitialActorID), k, false, @@ -135,6 +136,7 @@ func GetDocumentByServerSeq( docInfo, err := be.DB.FindDocInfoByKeyAndOwner( ctx, project.ID, + "", types.IDFromActorID(time.InitialActorID), k, false, @@ -218,6 +220,7 @@ func FindDocInfoByKeyAndOwner( return be.DB.FindDocInfoByKeyAndOwner( ctx, project.ID, + clientInfo.Key, clientInfo.ID, docKey, createDocIfNotExist, diff --git a/test/bench/push_pull_bench_test.go b/test/bench/push_pull_bench_test.go index b822d3152..c9d4780f2 100644 --- a/test/bench/push_pull_bench_test.go +++ b/test/bench/push_pull_bench_test.go @@ -88,7 +88,7 @@ func setUpClientsAndDocs( for i := 0; i < n; i++ { clientInfo, err := be.DB.ActivateClient(ctx, database.DefaultProjectID, fmt.Sprintf("client-%d", i)) assert.NoError(b, err) - docInfo, err := be.DB.FindDocInfoByKeyAndOwner(ctx, database.DefaultProjectID, clientInfo.ID, docKey, true) + docInfo, err := be.DB.FindDocInfoByKeyAndOwner(ctx, database.DefaultProjectID, clientInfo.Key, clientInfo.ID, docKey, true) assert.NoError(b, err) assert.NoError(b, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) assert.NoError(b, be.DB.UpdateClientInfoAfterPushPull(ctx, 
clientInfo, docInfo)) diff --git a/test/shard/mongo_client_test.go b/test/shard/mongo_client_test.go index d081c6567..f5ade74b0 100644 --- a/test/shard/mongo_client_test.go +++ b/test/shard/mongo_client_test.go @@ -40,6 +40,7 @@ const ( projectOneID = types.ID("000000000000000000000001") projectTwoID = types.ID("000000000000000000000002") dummyOwnerName = "dummy" + dummyClientKey = "dummy" dummyClientID = types.ID("000000000000000000000000") clientDeactivateThreshold = "1h" ) @@ -132,7 +133,7 @@ func TestClientWithShardedDB(t *testing.T) { assert.NoError(t, err) docKey1 := key.Key(fmt.Sprintf("%s%d", "duplicateIDTestDocKey", 0)) - docInfo1, err := cli.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientID, docKey1, true) + docInfo1, err := cli.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientKey, dummyClientID, docKey1, true) assert.NoError(t, err) // 02. Create an extra document with duplicate ID. @@ -178,7 +179,7 @@ func TestClientWithShardedDB(t *testing.T) { var duplicateID types.ID for i := 0; i < totalDocCnt-duplicateIDDocCnt; i++ { testDocKey := key.Key("duplicateIDTestDocKey" + strconv.Itoa(i)) - docInfo, err := cli.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientID, testDocKey, true) + docInfo, err := cli.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientKey, dummyClientID, testDocKey, true) assert.NoError(t, err) docInfos = append(docInfos, docInfo) From 2a0c2bcefc90109fd116f8554f2cdfb7765ee230 Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Wed, 29 Nov 2023 14:37:37 +0900 Subject: [PATCH 08/11] Introduce types DocRefKey and ClientRefKey --- admin/client.go | 3 +- api/types/resource_ref_key.go | 64 +++ cmd/yorkie/document/list.go | 4 +- server/backend/database/client_info.go | 109 ++-- server/backend/database/client_info_test.go | 64 ++- server/backend/database/database.go | 84 +-- server/backend/database/memory/database.go | 208 ++++--- server/backend/database/mongo/client.go | 171 +++--- 
server/backend/database/mongo/registry.go | 60 ++ .../backend/database/testcases/testcases.go | 520 ++++++++++++------ server/backend/housekeeping/housekeeping.go | 6 +- server/clients/clients.go | 69 +-- server/documents/documents.go | 43 +- server/packs/history.go | 21 +- server/packs/packs.go | 22 +- server/packs/pushpull.go | 11 +- server/packs/snapshots.go | 37 +- server/rpc/admin_server.go | 10 +- server/rpc/yorkie_server.go | 102 ++-- test/bench/push_pull_bench_test.go | 40 +- test/integration/document_test.go | 4 +- test/integration/retention_test.go | 11 +- test/shard/mongo_client_test.go | 24 +- 23 files changed, 998 insertions(+), 689 deletions(-) create mode 100644 api/types/resource_ref_key.go diff --git a/admin/client.go b/admin/client.go index a75d35372..fc261bfed 100644 --- a/admin/client.go +++ b/admin/client.go @@ -34,7 +34,6 @@ import ( api "github.com/yorkie-team/yorkie/api/yorkie/v1" "github.com/yorkie-team/yorkie/pkg/document" "github.com/yorkie-team/yorkie/pkg/document/key" - "github.com/yorkie-team/yorkie/server/backend/database" ) // Option configures Options. @@ -249,7 +248,7 @@ func (c *Client) UpdateProject( func (c *Client) ListDocuments( ctx context.Context, projectName string, - previousOffset database.DocOffset, + previousOffset types.DocRefKey, pageSize int32, isForward bool, includeSnapshot bool, diff --git a/api/types/resource_ref_key.go b/api/types/resource_ref_key.go new file mode 100644 index 000000000..c05cd63ce --- /dev/null +++ b/api/types/resource_ref_key.go @@ -0,0 +1,64 @@ +/* + * Copyright 2023 The Yorkie Authors. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package types + +import ( + "errors" + "fmt" + "strings" + + "github.com/yorkie-team/yorkie/pkg/document/key" +) + +// DocRefKey represents an identifier used to reference a document. +type DocRefKey struct { + Key key.Key + ID ID +} + +// String returns the string representation of the given DocRefKey. +func (r *DocRefKey) String() string { + return fmt.Sprintf("Doc (%s.%s)", r.Key, r.ID) +} + +// Set parses the given string (format: `{docKey},{docID}`) and assigns the values +// to the given DocRefKey. +func (r *DocRefKey) Set(v string) error { + parsed := strings.Split(v, ",") + if len(parsed) != 2 { + return errors.New("use the format 'docKey,docID' for the input") + } + r.Key = key.Key(parsed[0]) + r.ID = ID(parsed[1]) + return nil +} + +// Type returns the type string of the given DocRefKey, used in cli help text. +func (r *DocRefKey) Type() string { + return "DocumentRefKey" +} + +// ClientRefKey represents an identifier used to reference a client. +type ClientRefKey struct { + Key string + ID ID +} + +// String returns the string representation of the given ClientRefKey. 
+func (r *ClientRefKey) String() string { + return fmt.Sprintf("Client (%s.%s)", r.Key, r.ID) +} diff --git a/cmd/yorkie/document/list.go b/cmd/yorkie/document/list.go index e1e10a8a8..50a3c16a9 100644 --- a/cmd/yorkie/document/list.go +++ b/cmd/yorkie/document/list.go @@ -26,13 +26,13 @@ import ( "github.com/spf13/viper" "github.com/yorkie-team/yorkie/admin" + "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/cmd/yorkie/config" "github.com/yorkie-team/yorkie/pkg/units" - "github.com/yorkie-team/yorkie/server/backend/database" ) var ( - previousOffset database.DocOffset + previousOffset types.DocRefKey pageSize int32 isForward bool ) diff --git a/server/backend/database/client_info.go b/server/backend/database/client_info.go index 0cf3cec7b..08aec072f 100644 --- a/server/backend/database/client_info.go +++ b/server/backend/database/client_info.go @@ -23,7 +23,6 @@ import ( "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/pkg/document/change" - "github.com/yorkie-team/yorkie/pkg/document/key" ) // Below are the errors may occur depending on the document and client status. @@ -55,6 +54,9 @@ type ClientDocInfo struct { ClientSeq uint32 `bson:"client_seq"` } +// ClientDocInfoMap is a map that associates DocRefKey with ClientDocInfo instances. +type ClientDocInfoMap map[types.DocRefKey]*ClientDocInfo + // ClientInfo is a structure representing information of a client. type ClientInfo struct { // ID is the unique ID of the client. @@ -70,7 +72,7 @@ type ClientInfo struct { Status string `bson:"status"` // Documents is a map of document which is attached to the client. - Documents map[key.Key]map[types.ID]*ClientDocInfo `bson:"documents"` + Documents ClientDocInfoMap `bson:"documents"` // CreatedAt is the time when the client was created. CreatedAt time.Time `bson:"created_at"` @@ -102,26 +104,22 @@ func (i *ClientInfo) Deactivate() { } // AttachDocument attaches the given document to this client. 
-func (i *ClientInfo) AttachDocument(docKey key.Key, docID types.ID) error { +func (i *ClientInfo) AttachDocument(docRef types.DocRefKey) error { if i.Status != ClientActivated { - return fmt.Errorf("client(%s) attaches document(%s.%s): %w", - i.ID.String(), docKey.String(), docID.String(), ErrClientNotActivated) + return fmt.Errorf("client(%s) attaches %s: %w", + i.ID.String(), docRef, ErrClientNotActivated) } if i.Documents == nil { - i.Documents = make(map[key.Key]map[types.ID]*ClientDocInfo) - } - - if i.Documents[docKey] == nil { - i.Documents[docKey] = make(map[types.ID]*ClientDocInfo) + i.Documents = make(map[types.DocRefKey]*ClientDocInfo) } - if i.hasDocument(docKey, docID) && i.Documents[docKey][docID].Status == DocumentAttached { - return fmt.Errorf("client(%s) attaches document(%s.%s): %w", - i.ID.String(), docKey.String(), docID.String(), ErrDocumentAlreadyAttached) + if i.hasDocument(docRef) && i.Documents[docRef].Status == DocumentAttached { + return fmt.Errorf("client(%s) attaches %s: %w", + i.ID.String(), docRef, ErrDocumentAlreadyAttached) } - i.Documents[docKey][docID] = &ClientDocInfo{ + i.Documents[docRef] = &ClientDocInfo{ Status: DocumentAttached, ServerSeq: 0, ClientSeq: 0, @@ -132,46 +130,46 @@ func (i *ClientInfo) AttachDocument(docKey key.Key, docID types.ID) error { } // DetachDocument detaches the given document from this client. 
-func (i *ClientInfo) DetachDocument(docKey key.Key, docID types.ID) error { - if err := i.EnsureDocumentAttached(docKey, docID); err != nil { +func (i *ClientInfo) DetachDocument(docRef types.DocRefKey) error { + if err := i.EnsureDocumentAttached(docRef); err != nil { return err } - i.Documents[docKey][docID].Status = DocumentDetached - i.Documents[docKey][docID].ClientSeq = 0 - i.Documents[docKey][docID].ServerSeq = 0 + i.Documents[docRef].Status = DocumentDetached + i.Documents[docRef].ClientSeq = 0 + i.Documents[docRef].ServerSeq = 0 i.UpdatedAt = time.Now() return nil } // RemoveDocument removes the given document from this client. -func (i *ClientInfo) RemoveDocument(docKey key.Key, docID types.ID) error { - if err := i.EnsureDocumentAttached(docKey, docID); err != nil { +func (i *ClientInfo) RemoveDocument(docRef types.DocRefKey) error { + if err := i.EnsureDocumentAttached(docRef); err != nil { return err } - i.Documents[docKey][docID].Status = DocumentRemoved - i.Documents[docKey][docID].ClientSeq = 0 - i.Documents[docKey][docID].ServerSeq = 0 + i.Documents[docRef].Status = DocumentRemoved + i.Documents[docRef].ClientSeq = 0 + i.Documents[docRef].ServerSeq = 0 i.UpdatedAt = time.Now() return nil } // IsAttached returns whether the given document is attached to this client. -func (i *ClientInfo) IsAttached(docKey key.Key, docID types.ID) (bool, error) { - if !i.hasDocument(docKey, docID) { - return false, fmt.Errorf("check document(%s.%s) is attached: %w", - docKey.String(), docID.String(), ErrDocumentNeverAttached) +func (i *ClientInfo) IsAttached(docRef types.DocRefKey) (bool, error) { + if !i.hasDocument(docRef) { + return false, fmt.Errorf("check %s is attached: %w", + docRef, ErrDocumentNeverAttached) } - return i.Documents[docKey][docID].Status == DocumentAttached, nil + return i.Documents[docRef].Status == DocumentAttached, nil } // Checkpoint returns the checkpoint of the given document. 
-func (i *ClientInfo) Checkpoint(docKey key.Key, docID types.ID) change.Checkpoint { - clientDocInfo := i.Documents[docKey][docID] +func (i *ClientInfo) Checkpoint(docRef types.DocRefKey) change.Checkpoint { + clientDocInfo := i.Documents[docRef] if clientDocInfo == nil { return change.InitialCheckpoint } @@ -181,38 +179,30 @@ func (i *ClientInfo) Checkpoint(docKey key.Key, docID types.ID) change.Checkpoin // UpdateCheckpoint updates the checkpoint of the given document. func (i *ClientInfo) UpdateCheckpoint( - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, cp change.Checkpoint, ) error { - if !i.hasDocument(docKey, docID) { - return fmt.Errorf("update checkpoint in document(%s.%s): %w", - docKey.String(), docID.String(), ErrDocumentNeverAttached) + if !i.hasDocument(docRef) { + return fmt.Errorf("update checkpoint in %s: %w", docRef, ErrDocumentNeverAttached) } - i.Documents[docKey][docID].ServerSeq = cp.ServerSeq - i.Documents[docKey][docID].ClientSeq = cp.ClientSeq + i.Documents[docRef].ServerSeq = cp.ServerSeq + i.Documents[docRef].ClientSeq = cp.ClientSeq i.UpdatedAt = time.Now() return nil } // EnsureDocumentAttached ensures the given document is attached. 
-func (i *ClientInfo) EnsureDocumentAttached(docKey key.Key, docID types.ID) error { +func (i *ClientInfo) EnsureDocumentAttached(docRef types.DocRefKey) error { if i.Status != ClientActivated { - return fmt.Errorf("ensure attached document(%s.%s) in client(%s): %w", - docKey.String(), docID.String(), - i.ID.String(), - ErrClientNotActivated, - ) + return fmt.Errorf("ensure attached %s in client(%s): %w", + docRef, i.ID.String(), ErrClientNotActivated) } - if !i.hasDocument(docKey, docID) || i.Documents[docKey][docID].Status != DocumentAttached { - return fmt.Errorf("ensure attached document(%s.%s) in client(%s): %w", - docKey.String(), docID.String(), - i.ID.String(), - ErrDocumentNotAttached, - ) + if !i.hasDocument(docRef) || i.Documents[docRef].Status != DocumentAttached { + return fmt.Errorf("ensure attached %s in client(%s): %w", + docRef, i.ID.String(), ErrDocumentNotAttached) } return nil @@ -224,15 +214,12 @@ func (i *ClientInfo) DeepCopy() *ClientInfo { return nil } - documents := make(map[key.Key]map[types.ID]*ClientDocInfo, len(i.Documents)) - for docKey, v := range i.Documents { - documents[docKey] = make(map[types.ID]*ClientDocInfo, len(i.Documents[docKey])) - for docID, docInfo := range v { - documents[docKey][docID] = &ClientDocInfo{ - Status: docInfo.Status, - ServerSeq: docInfo.ServerSeq, - ClientSeq: docInfo.ClientSeq, - } + documents := make(map[types.DocRefKey]*ClientDocInfo, len(i.Documents)) + for docRef, docInfo := range i.Documents { + documents[docRef] = &ClientDocInfo{ + Status: docInfo.Status, + ServerSeq: docInfo.ServerSeq, + ClientSeq: docInfo.ClientSeq, } } @@ -247,6 +234,6 @@ func (i *ClientInfo) DeepCopy() *ClientInfo { } } -func (i *ClientInfo) hasDocument(docKey key.Key, docID types.ID) bool { - return i.Documents != nil && i.Documents[docKey][docID] != nil +func (i *ClientInfo) hasDocument(docRef types.DocRefKey) bool { + return i.Documents != nil && i.Documents[docRef] != nil } diff --git 
a/server/backend/database/client_info_test.go b/server/backend/database/client_info_test.go index e8a6b4548..e7c586d26 100644 --- a/server/backend/database/client_info_test.go +++ b/server/backend/database/client_info_test.go @@ -35,28 +35,32 @@ func TestClientInfo(t *testing.T) { clientInfo := database.ClientInfo{ Status: database.ClientActivated, } + dummyDocRef := types.DocRefKey{ + Key: dummyDocKey, + ID: dummyDocID, + } - err := clientInfo.AttachDocument(dummyDocKey, dummyDocID) + err := clientInfo.AttachDocument(dummyDocRef) assert.NoError(t, err) - isAttached, err := clientInfo.IsAttached(dummyDocKey, dummyDocID) + isAttached, err := clientInfo.IsAttached(dummyDocRef) assert.NoError(t, err) assert.True(t, isAttached) - err = clientInfo.UpdateCheckpoint(dummyDocKey, dummyDocID, change.MaxCheckpoint) + err = clientInfo.UpdateCheckpoint(dummyDocRef, change.MaxCheckpoint) assert.NoError(t, err) - err = clientInfo.EnsureDocumentAttached(dummyDocKey, dummyDocID) + err = clientInfo.EnsureDocumentAttached(dummyDocRef) assert.NoError(t, err) - err = clientInfo.DetachDocument(dummyDocKey, dummyDocID) + err = clientInfo.DetachDocument(dummyDocRef) assert.NoError(t, err) - isAttached, err = clientInfo.IsAttached(dummyDocKey, dummyDocID) + isAttached, err = clientInfo.IsAttached(dummyDocRef) assert.NoError(t, err) assert.False(t, isAttached) - err = clientInfo.AttachDocument(dummyDocKey, dummyDocID) + err = clientInfo.AttachDocument(dummyDocRef) assert.NoError(t, err) - isAttached, err = clientInfo.IsAttached(dummyDocKey, dummyDocID) + isAttached, err = clientInfo.IsAttached(dummyDocRef) assert.NoError(t, err) assert.True(t, isAttached) @@ -87,16 +91,20 @@ func TestClientInfo(t *testing.T) { clientInfo := database.ClientInfo{ Status: database.ClientActivated, } + dummyDocRef := types.DocRefKey{ + Key: dummyDocKey, + ID: dummyDocID, + } - err := clientInfo.AttachDocument(dummyDocKey, dummyDocID) + err := clientInfo.AttachDocument(dummyDocRef) assert.NoError(t, err) - 
isAttached, err := clientInfo.IsAttached(dummyDocKey, dummyDocID) + isAttached, err := clientInfo.IsAttached(dummyDocRef) assert.NoError(t, err) assert.True(t, isAttached) clientInfo.Deactivate() - err = clientInfo.EnsureDocumentAttached(dummyDocKey, dummyDocID) + err = clientInfo.EnsureDocumentAttached(dummyDocRef) assert.ErrorIs(t, err, database.ErrClientNotActivated) }) @@ -104,14 +112,18 @@ func TestClientInfo(t *testing.T) { clientInfo := database.ClientInfo{ Status: database.ClientDeactivated, } + dummyDocRef := types.DocRefKey{ + Key: dummyDocKey, + ID: dummyDocID, + } - err := clientInfo.AttachDocument(dummyDocKey, dummyDocID) + err := clientInfo.AttachDocument(dummyDocRef) assert.ErrorIs(t, err, database.ErrClientNotActivated) - err = clientInfo.EnsureDocumentAttached(dummyDocKey, dummyDocID) + err = clientInfo.EnsureDocumentAttached(dummyDocRef) assert.ErrorIs(t, err, database.ErrClientNotActivated) - err = clientInfo.DetachDocument(dummyDocKey, dummyDocID) + err = clientInfo.DetachDocument(dummyDocRef) assert.ErrorIs(t, err, database.ErrClientNotActivated) }) @@ -119,8 +131,12 @@ func TestClientInfo(t *testing.T) { clientInfo := database.ClientInfo{ Status: database.ClientActivated, } + dummyDocRef := types.DocRefKey{ + Key: dummyDocKey, + ID: dummyDocID, + } - err := clientInfo.DetachDocument(dummyDocKey, dummyDocID) + err := clientInfo.DetachDocument(dummyDocRef) assert.ErrorIs(t, err, database.ErrDocumentNotAttached) }) @@ -128,11 +144,15 @@ func TestClientInfo(t *testing.T) { clientInfo := database.ClientInfo{ Status: database.ClientActivated, } + dummyDocRef := types.DocRefKey{ + Key: dummyDocKey, + ID: dummyDocID, + } - _, err := clientInfo.IsAttached(dummyDocKey, dummyDocID) + _, err := clientInfo.IsAttached(dummyDocRef) assert.ErrorIs(t, err, database.ErrDocumentNeverAttached) - err = clientInfo.UpdateCheckpoint(dummyDocKey, dummyDocID, change.MaxCheckpoint) + err = clientInfo.UpdateCheckpoint(dummyDocRef, change.MaxCheckpoint) assert.ErrorIs(t, 
err, database.ErrDocumentNeverAttached) }) @@ -140,14 +160,18 @@ func TestClientInfo(t *testing.T) { clientInfo := database.ClientInfo{ Status: database.ClientActivated, } + dummyDocRef := types.DocRefKey{ + Key: dummyDocKey, + ID: dummyDocID, + } - err := clientInfo.AttachDocument(dummyDocKey, dummyDocID) + err := clientInfo.AttachDocument(dummyDocRef) assert.NoError(t, err) - isAttached, err := clientInfo.IsAttached(dummyDocKey, dummyDocID) + isAttached, err := clientInfo.IsAttached(dummyDocRef) assert.NoError(t, err) assert.True(t, isAttached) - err = clientInfo.AttachDocument(dummyDocKey, dummyDocID) + err = clientInfo.AttachDocument(dummyDocRef) assert.ErrorIs(t, err, database.ErrDocumentAlreadyAttached) }) } diff --git a/server/backend/database/database.go b/server/backend/database/database.go index 5932a4ca5..28835f3d1 100644 --- a/server/backend/database/database.go +++ b/server/backend/database/database.go @@ -20,8 +20,6 @@ package database import ( "context" "errors" - "fmt" - "strings" "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/pkg/document" @@ -126,11 +124,11 @@ type Database interface { // ActivateClient activates the client of the given key. ActivateClient(ctx context.Context, projectID types.ID, key string) (*ClientInfo, error) - // DeactivateClient deactivates the client of the given ID. - DeactivateClient(ctx context.Context, clientKey string, clientID types.ID) (*ClientInfo, error) + // DeactivateClient deactivates the client of the given key and ID. + DeactivateClient(ctx context.Context, clientRef types.ClientRefKey) (*ClientInfo, error) - // FindClientInfoByKeyAndID finds the client of the given ID. - FindClientInfoByKeyAndID(ctx context.Context, clientKey string, clientID types.ID) (*ClientInfo, error) + // FindClientInfoByKeyAndID finds the client of the given key and ID. 
+ FindClientInfoByKeyAndID(ctx context.Context, clientRef types.ClientRefKey) (*ClientInfo, error) // UpdateClientInfoAfterPushPull updates the client from the given clientInfo // after handling PushPull. @@ -157,24 +155,21 @@ type Database interface { FindDocInfoByKeyAndOwner( ctx context.Context, projectID types.ID, - clientKey string, - clientID types.ID, docKey key.Key, + ownerRef types.ClientRefKey, createDocIfNotExist bool, ) (*DocInfo, error) // FindDocInfoByKeyAndID finds the document of the given key and ID. FindDocInfoByKeyAndID( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) (*DocInfo, error) // UpdateDocInfoStatusToRemoved updates the document status to removed. UpdateDocInfoStatusToRemoved( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) error // CreateChangeInfos stores the given changes then updates the given docInfo. @@ -190,15 +185,13 @@ type Database interface { // save storage. PurgeStaleChanges( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) error // FindChangesBetweenServerSeqs returns the changes between two server sequences. FindChangesBetweenServerSeqs( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, from int64, to int64, ) ([]*change.Change, error) @@ -206,8 +199,7 @@ type Database interface { // FindChangeInfosBetweenServerSeqs returns the changeInfos between two server sequences. FindChangeInfosBetweenServerSeqs( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, from int64, to int64, ) ([]*ChangeInfo, error) @@ -215,24 +207,21 @@ type Database interface { // CreateSnapshotInfo stores the snapshot of the given document. CreateSnapshotInfo( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, doc *document.InternalDocument, ) error - // FindSnapshotInfoByID returns the snapshot by the given id. 
- FindSnapshotInfoByID( + // FindSnapshotInfo returns the snapshot by the given doc_key, doc_id and server_seq. + FindSnapshotInfo( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) (*SnapshotInfo, error) // FindClosestSnapshotInfo finds the closest snapshot info in a given serverSeq. FindClosestSnapshotInfo( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, includeSnapshot bool, ) (*SnapshotInfo, error) @@ -240,8 +229,7 @@ type Database interface { // FindMinSyncedSeqInfo finds the minimum synced sequence info. FindMinSyncedSeqInfo( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) (*SyncedSeqInfo, error) // UpdateAndFindMinSyncedTicket updates the given serverSeq of the given client @@ -249,8 +237,7 @@ type Database interface { UpdateAndFindMinSyncedTicket( ctx context.Context, clientInfo *ClientInfo, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) (*time.Ticket, error) @@ -258,8 +245,7 @@ type Database interface { UpdateSyncedSeq( ctx context.Context, clientInfo *ClientInfo, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) error @@ -267,7 +253,7 @@ type Database interface { FindDocInfosByPaging( ctx context.Context, projectID types.ID, - paging types.Paging[DocOffset], + paging types.Paging[types.DocRefKey], ) ([]*DocInfo, error) // FindDocInfosByQuery returns the documentInfos which match the given query. @@ -282,35 +268,7 @@ type Database interface { IsDocumentAttached( ctx context.Context, projectID types.ID, - docKey key.Key, - docID types.ID, - excludeClientID types.ID, + docRef types.DocRefKey, + excludeClientRef types.ClientRefKey, ) (bool, error) } - -// DocOffset represents a paging offset when listing documents. 
-type DocOffset struct { - Key key.Key - ID types.ID -} - -// String is used both by fmt.Print and by Cobra in help text -func (o *DocOffset) String() string { - return fmt.Sprintf("%s.%s", o.Key, o.ID) -} - -// Set must have pointer receiver so it doesn't change the value of a copy -func (o *DocOffset) Set(v string) error { - parsed := strings.Split(v, ",") - if len(parsed) != 2 { - return errors.New("use the format 'docKey,docID' for the input") - } - o.Key = key.Key(parsed[0]) - o.ID = types.ID(parsed[1]) - return nil -} - -// Type is only used in help text -func (o *DocOffset) Type() string { - return "DocumentOffset" -} diff --git a/server/backend/database/memory/database.go b/server/backend/database/memory/database.go index 1bdcfd6e7..3b3827d70 100644 --- a/server/backend/database/memory/database.go +++ b/server/backend/database/memory/database.go @@ -444,10 +444,9 @@ func (d *DB) ActivateClient( // DeactivateClient deactivates a client. func (d *DB) DeactivateClient( _ context.Context, - clientKey string, - clientID types.ID, + clientRef types.ClientRefKey, ) (*database.ClientInfo, error) { - if err := clientID.Validate(); err != nil { + if err := clientRef.ID.Validate(); err != nil { return nil, err } @@ -457,15 +456,15 @@ func (d *DB) DeactivateClient( raw, err := txn.First( tblClients, "key_id", - clientKey, - clientID.String(), + clientRef.Key, + clientRef.ID.String(), ) if err != nil { return nil, fmt.Errorf("find client by key and id: %w", err) } if raw == nil { - return nil, fmt.Errorf("%s: %w", clientID, database.ErrClientNotFound) + return nil, fmt.Errorf("%s: %w", clientRef, database.ErrClientNotFound) } clientInfo := raw.(*database.ClientInfo) @@ -486,10 +485,9 @@ func (d *DB) DeactivateClient( // FindClientInfoByKeyAndID finds a client by the given key and ID. 
func (d *DB) FindClientInfoByKeyAndID( _ context.Context, - clientKey string, - clientID types.ID, + clientRef types.ClientRefKey, ) (*database.ClientInfo, error) { - if err := clientID.Validate(); err != nil { + if err := clientRef.ID.Validate(); err != nil { return nil, err } @@ -499,14 +497,14 @@ func (d *DB) FindClientInfoByKeyAndID( raw, err := txn.First( tblClients, "key_id", - clientKey, - clientID.String(), + clientRef.Key, + clientRef.ID.String(), ) if err != nil { return nil, fmt.Errorf("find client by key and id: %w", err) } if raw == nil { - return nil, fmt.Errorf("%s: %w", clientID, database.ErrClientNotFound) + return nil, fmt.Errorf("%s: %w", clientRef, database.ErrClientNotFound) } clientInfo := raw.(*database.ClientInfo) @@ -520,8 +518,12 @@ func (d *DB) UpdateClientInfoAfterPushPull( clientInfo *database.ClientInfo, docInfo *database.DocInfo, ) error { - clientDocInfo := clientInfo.Documents[docInfo.Key][docInfo.ID] - attached, err := clientInfo.IsAttached(docInfo.Key, docInfo.ID) + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + clientDocInfo := clientInfo.Documents[docRef] + attached, err := clientInfo.IsAttached(docRef) if err != nil { return err } @@ -545,20 +547,16 @@ func (d *DB) UpdateClientInfoAfterPushPull( loaded := raw.(*database.ClientInfo).DeepCopy() if !attached { - loaded.Documents[docInfo.Key][docInfo.ID] = &database.ClientDocInfo{ + loaded.Documents[docRef] = &database.ClientDocInfo{ Status: clientDocInfo.Status, } loaded.UpdatedAt = gotime.Now() } else { - if _, ok := loaded.Documents[docInfo.Key]; !ok { - loaded.Documents[docInfo.Key] = make(map[types.ID]*database.ClientDocInfo) - } - - if _, ok := loaded.Documents[docInfo.Key][docInfo.ID]; !ok { - loaded.Documents[docInfo.Key][docInfo.ID] = &database.ClientDocInfo{} + if _, ok := loaded.Documents[docRef]; !ok { + loaded.Documents[docRef] = &database.ClientDocInfo{} } - loadedClientDocInfo := loaded.Documents[docInfo.Key][docInfo.ID] + loadedClientDocInfo 
:= loaded.Documents[docRef] serverSeq := loadedClientDocInfo.ServerSeq if clientDocInfo.ServerSeq > loadedClientDocInfo.ServerSeq { serverSeq = clientDocInfo.ServerSeq @@ -567,7 +565,7 @@ func (d *DB) UpdateClientInfoAfterPushPull( if clientDocInfo.ClientSeq > loadedClientDocInfo.ClientSeq { clientSeq = clientDocInfo.ClientSeq } - loaded.Documents[docInfo.Key][docInfo.ID] = &database.ClientDocInfo{ + loaded.Documents[docRef] = &database.ClientDocInfo{ ServerSeq: serverSeq, ClientSeq: clientSeq, Status: clientDocInfo.Status, @@ -662,9 +660,8 @@ func (d *DB) FindDeactivateCandidates( func (d *DB) FindDocInfoByKeyAndOwner( _ context.Context, projectID types.ID, - clientKey string, - clientID types.ID, key key.Key, + clientRef types.ClientRefKey, createDocIfNotExist bool, ) (*database.DocInfo, error) { txn := d.db.Txn(true) @@ -704,8 +701,8 @@ func (d *DB) FindDocInfoByKeyAndOwner( ID: newID(), ProjectID: projectID, Key: key, - OwnerKey: clientKey, - OwnerID: clientID, + OwnerKey: clientRef.Key, + OwnerID: clientRef.ID, ServerSeq: 0, CreatedAt: now, AccessedAt: now, @@ -744,26 +741,23 @@ func (d *DB) FindDocInfoByKey( // FindDocInfoByKeyAndID finds a docInfo of the given ID. 
func (d *DB) FindDocInfoByKeyAndID( _ context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) (*database.DocInfo, error) { txn := d.db.Txn(true) defer txn.Abort() - raw, err := txn.First(tblDocuments, "key_id", docKey.String(), docID.String()) + raw, err := txn.First(tblDocuments, "key_id", docRef.Key.String(), docRef.ID.String()) if err != nil { return nil, fmt.Errorf("find document by key and ID: %w", err) } if raw == nil { - return nil, fmt.Errorf("finding doc info by key and ID(%s.%s): %w", - docKey, docID, database.ErrDocumentNotFound) + return nil, fmt.Errorf("finding doc info by %s: %w", docRef, database.ErrDocumentNotFound) } docInfo := raw.(*database.DocInfo) - if docInfo.Key != docKey && docInfo.ID != docID { - return nil, fmt.Errorf("finding doc info by key and ID(%s.%s): %w", - docKey, docID, database.ErrDocumentNotFound) + if docInfo.Key != docRef.Key && docInfo.ID != docRef.ID { + return nil, fmt.Errorf("finding doc info by %s: %w", docRef, database.ErrDocumentNotFound) } return docInfo.DeepCopy(), nil @@ -772,26 +766,23 @@ func (d *DB) FindDocInfoByKeyAndID( // UpdateDocInfoStatusToRemoved updates the status of the document to removed. 
func (d *DB) UpdateDocInfoStatusToRemoved( _ context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) error { txn := d.db.Txn(true) defer txn.Abort() - raw, err := txn.First(tblDocuments, "key_id", docKey.String(), docID.String()) + raw, err := txn.First(tblDocuments, "key_id", docRef.Key.String(), docRef.ID.String()) if err != nil { return fmt.Errorf("find document by key and ID: %w", err) } if raw == nil { - return fmt.Errorf("finding doc info by key and ID(%s.%s): %w", - docKey, docID, database.ErrDocumentNotFound) + return fmt.Errorf("finding doc info by %s: %w", docRef, database.ErrDocumentNotFound) } docInfo := raw.(*database.DocInfo) - if docInfo.Key != docKey && docInfo.ID != docID { - return fmt.Errorf("finding doc info by key and ID(%s.%s): %w", - docKey, docID, database.ErrDocumentNotFound) + if docInfo.Key != docRef.Key && docInfo.ID != docRef.ID { + return fmt.Errorf("finding doc info by %s: %w", docRef, database.ErrDocumentNotFound) } docInfo.RemovedAt = gotime.Now() @@ -885,8 +876,7 @@ func (d *DB) CreateChangeInfos( // save storage. 
func (d *DB) PurgeStaleChanges( _ context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) error { txn := d.db.Txn(true) defer txn.Abort() @@ -901,7 +891,7 @@ func (d *DB) PurgeStaleChanges( minSyncedServerSeq := change.MaxServerSeq for raw := it.Next(); raw != nil; raw = it.Next() { info := raw.(*database.SyncedSeqInfo) - if info.DocKey == docKey && info.DocID == docID && + if info.DocKey == docRef.Key && info.DocID == docRef.ID && info.ServerSeq < minSyncedServerSeq { minSyncedServerSeq = info.ServerSeq } @@ -914,8 +904,8 @@ func (d *DB) PurgeStaleChanges( iterator, err := txn.ReverseLowerBound( tblChanges, "doc_key_doc_id_server_seq", - docKey.String(), - docID.String(), + docRef.Key.String(), + docRef.ID.String(), minSyncedServerSeq, ) if err != nil { @@ -935,12 +925,11 @@ func (d *DB) PurgeStaleChanges( // FindChangesBetweenServerSeqs returns the changes between two server sequences. func (d *DB) FindChangesBetweenServerSeqs( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, from int64, to int64, ) ([]*change.Change, error) { - infos, err := d.FindChangeInfosBetweenServerSeqs(ctx, docKey, docID, from, to) + infos, err := d.FindChangeInfosBetweenServerSeqs(ctx, docRef, from, to) if err != nil { return nil, err } @@ -961,8 +950,7 @@ func (d *DB) FindChangesBetweenServerSeqs( // FindChangeInfosBetweenServerSeqs returns the changeInfos between two server sequences. 
func (d *DB) FindChangeInfosBetweenServerSeqs( _ context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, from int64, to int64, ) ([]*database.ChangeInfo, error) { @@ -974,8 +962,8 @@ func (d *DB) FindChangeInfosBetweenServerSeqs( iterator, err := txn.LowerBound( tblChanges, "doc_key_doc_id_server_seq", - docKey.String(), - docID.String(), + docRef.Key.String(), + docRef.ID.String(), from, ) if err != nil { @@ -984,7 +972,7 @@ func (d *DB) FindChangeInfosBetweenServerSeqs( for raw := iterator.Next(); raw != nil; raw = iterator.Next() { info := raw.(*database.ChangeInfo) - if info.DocKey != docKey || info.DocID != docID || info.ServerSeq > to { + if info.DocKey != docRef.Key || info.DocID != docRef.ID || info.ServerSeq > to { break } infos = append(infos, info.DeepCopy()) @@ -995,8 +983,7 @@ func (d *DB) FindChangeInfosBetweenServerSeqs( // CreateSnapshotInfo stores the snapshot of the given document. func (d *DB) CreateSnapshotInfo( _ context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, doc *document.InternalDocument, ) error { snapshot, err := converter.SnapshotToBytes(doc.RootObject(), doc.AllPresences()) @@ -1009,8 +996,8 @@ func (d *DB) CreateSnapshotInfo( if err := txn.Insert(tblSnapshots, &database.SnapshotInfo{ ID: newID(), - DocKey: docKey, - DocID: docID, + DocKey: docRef.Key, + DocID: docRef.ID, ServerSeq: doc.Checkpoint().ServerSeq, Lamport: doc.Lamport(), Snapshot: snapshot, @@ -1022,23 +1009,26 @@ func (d *DB) CreateSnapshotInfo( return nil } -// FindSnapshotInfoByID returns the snapshot by the given id. -func (d *DB) FindSnapshotInfoByID( +// FindSnapshotInfo returns the snapshot by the given doc_key, doc_id and server_seq. 
+func (d *DB) FindSnapshotInfo( _ context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) (*database.SnapshotInfo, error) { txn := d.db.Txn(false) defer txn.Abort() raw, err := txn.First( - tblSnapshots, "doc_key_doc_id_server_seq", docKey.String(), docID.String(), serverSeq) + tblSnapshots, + "doc_key_doc_id_server_seq", + docRef.Key.String(), + docRef.ID.String(), + serverSeq, + ) if err != nil { return nil, fmt.Errorf("find snapshot by (docKey, docID, serverSeq): %w", err) } if raw == nil { - return nil, fmt.Errorf("(%s.%s.%d): %w", - docKey, docID, serverSeq, database.ErrSnapshotNotFound) + return nil, fmt.Errorf("(%s.%d): %w", docRef, serverSeq, database.ErrSnapshotNotFound) } return raw.(*database.SnapshotInfo).DeepCopy(), nil @@ -1047,16 +1037,20 @@ func (d *DB) FindSnapshotInfoByID( // FindClosestSnapshotInfo finds the last snapshot of the given document. func (d *DB) FindClosestSnapshotInfo( _ context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, includeSnapshot bool, ) (*database.SnapshotInfo, error) { txn := d.db.Txn(false) defer txn.Abort() - iterator, err := txn.ReverseLowerBound(tblSnapshots, "doc_key_doc_id_server_seq", - docKey.String(), docID.String(), serverSeq) + iterator, err := txn.ReverseLowerBound( + tblSnapshots, + "doc_key_doc_id_server_seq", + docRef.Key.String(), + docRef.ID.String(), + serverSeq, + ) if err != nil { return nil, fmt.Errorf("fetch snapshots before %d: %w", serverSeq, err) } @@ -1064,7 +1058,7 @@ func (d *DB) FindClosestSnapshotInfo( var snapshotInfo *database.SnapshotInfo for raw := iterator.Next(); raw != nil; raw = iterator.Next() { info := raw.(*database.SnapshotInfo) - if info.DocKey == docKey && info.DocID == docID { + if info.DocKey == docRef.Key && info.DocID == docRef.ID { snapshotInfo = &database.SnapshotInfo{ ID: info.ID, DocKey: info.DocKey, @@ -1090,8 +1084,7 @@ func (d *DB) FindClosestSnapshotInfo( // FindMinSyncedSeqInfo finds 
the minimum synced sequence info. func (d *DB) FindMinSyncedSeqInfo( _ context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) (*database.SyncedSeqInfo, error) { txn := d.db.Txn(false) defer txn.Abort() @@ -1105,7 +1098,7 @@ func (d *DB) FindMinSyncedSeqInfo( minSyncedServerSeq := change.MaxServerSeq for raw := it.Next(); raw != nil; raw = it.Next() { info := raw.(*database.SyncedSeqInfo) - if info.DocKey == docKey && info.DocID == docID && info.ServerSeq < minSyncedServerSeq { + if info.DocKey == docRef.Key && info.DocID == docRef.ID && info.ServerSeq < minSyncedServerSeq { minSyncedServerSeq = info.ServerSeq syncedSeqInfo = info } @@ -1122,11 +1115,10 @@ func (d *DB) FindMinSyncedSeqInfo( func (d *DB) UpdateAndFindMinSyncedTicket( ctx context.Context, clientInfo *database.ClientInfo, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) (*time.Ticket, error) { - if err := d.UpdateSyncedSeq(ctx, clientInfo, docKey, docID, serverSeq); err != nil { + if err := d.UpdateSyncedSeq(ctx, clientInfo, docRef, serverSeq); err != nil { return nil, err } @@ -1136,20 +1128,19 @@ func (d *DB) UpdateAndFindMinSyncedTicket( iterator, err := txn.LowerBound( tblSyncedSeqs, "doc_key_doc_id_lamport_actor_id", - docKey.String(), - docID.String(), + docRef.Key.String(), + docRef.ID.String(), int64(0), time.InitialActorID.String(), ) if err != nil { - return nil, fmt.Errorf("fetch smallest syncedseq of the document (%s.%s): %w", - docKey.String(), docID.String(), err) + return nil, fmt.Errorf("fetch smallest syncedseq of %s: %w", docRef, err) } var syncedSeqInfo *database.SyncedSeqInfo if raw := iterator.Next(); raw != nil { info := raw.(*database.SyncedSeqInfo) - if info.DocKey == docKey && info.DocID == docID { + if info.DocKey == docRef.Key && info.DocID == docRef.ID { syncedSeqInfo = info } } @@ -1174,14 +1165,13 @@ func (d *DB) UpdateAndFindMinSyncedTicket( func (d *DB) UpdateSyncedSeq( _ context.Context, clientInfo 
*database.ClientInfo, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) error { txn := d.db.Txn(true) defer txn.Abort() - isAttached, err := clientInfo.IsAttached(docKey, docID) + isAttached, err := clientInfo.IsAttached(docRef) if err != nil { return err } @@ -1190,19 +1180,18 @@ func (d *DB) UpdateSyncedSeq( if _, err = txn.DeleteAll( tblSyncedSeqs, "doc_key_doc_id_client_key_client_id", - docKey.String(), - docID.String(), + docRef.Key.String(), + docRef.ID.String(), clientInfo.Key, clientInfo.ID.String(), ); err != nil { - return fmt.Errorf("delete syncedseqs of the document (%s.%s): %w", - docKey.String(), docID.String(), err) + return fmt.Errorf("delete syncedseqs of %s: %w", docRef, err) } txn.Commit() return nil } - ticket, err := d.findTicketByServerSeq(txn, docKey, docID, serverSeq) + ticket, err := d.findTicketByServerSeq(txn, docRef, serverSeq) if err != nil { return err } @@ -1210,19 +1199,18 @@ func (d *DB) UpdateSyncedSeq( raw, err := txn.First( tblSyncedSeqs, "doc_key_doc_id_client_key_client_id", - docKey.String(), - docID.String(), + docRef.Key.String(), + docRef.ID.String(), clientInfo.Key, clientInfo.ID.String(), ) if err != nil { - return fmt.Errorf("fetch syncedseqs of the document (%s.%s): %w", - docKey.String(), docID.String(), err) + return fmt.Errorf("fetch syncedseqs of %s: %w", docRef, err) } syncedSeqInfo := &database.SyncedSeqInfo{ - DocKey: docKey, - DocID: docID, + DocKey: docRef.Key, + DocID: docRef.ID, ClientID: clientInfo.ID, Lamport: ticket.Lamport(), ActorID: types.ID(ticket.ActorID().String()), @@ -1235,8 +1223,7 @@ func (d *DB) UpdateSyncedSeq( } if err := txn.Insert(tblSyncedSeqs, syncedSeqInfo); err != nil { - return fmt.Errorf("insert syncedseqs of the document (%s.%s): %w", - docKey.String(), docID.String(), err) + return fmt.Errorf("insert syncedseqs of %s: %w", docRef, err) } txn.Commit() @@ -1247,7 +1234,7 @@ func (d *DB) UpdateSyncedSeq( func (d *DB) FindDocInfosByPaging( _ 
context.Context, projectID types.ID, - paging types.Paging[database.DocOffset], + paging types.Paging[types.DocRefKey], ) ([]*database.DocInfo, error) { txn := d.db.Txn(false) defer txn.Abort() @@ -1336,9 +1323,8 @@ func (d *DB) FindDocInfosByQuery( func (d *DB) IsDocumentAttached( _ context.Context, projectID types.ID, - docKey key.Key, - docID types.ID, - excludeClientID types.ID, + docRef types.DocRefKey, + excludeClientRef types.ClientRefKey, ) (bool, error) { txn := d.db.Txn(false) defer txn.Abort() @@ -1353,10 +1339,10 @@ func (d *DB) IsDocumentAttached( for raw := it.Next(); raw != nil; raw = it.Next() { clientInfo := raw.(*database.ClientInfo) - if clientInfo.ID == excludeClientID { + if clientInfo.ID == excludeClientRef.ID && clientInfo.Key == excludeClientRef.Key { continue } - clientDocInfo := clientInfo.Documents[docKey][docID] + clientDocInfo := clientInfo.Documents[docRef] if clientDocInfo == nil { continue } @@ -1370,8 +1356,7 @@ func (d *DB) IsDocumentAttached( func (d *DB) findTicketByServerSeq( txn *memdb.Txn, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) (*time.Ticket, error) { if serverSeq == change.InitialServerSeq { @@ -1381,18 +1366,17 @@ func (d *DB) findTicketByServerSeq( raw, err := txn.First( tblChanges, "doc_key_doc_id_server_seq", - docKey.String(), - docID.String(), + docRef.Key.String(), + docRef.ID.String(), serverSeq, ) if err != nil { - return nil, fmt.Errorf("fetch change of the document (%s.%s): %w", - docKey.String(), docID.String(), err) + return nil, fmt.Errorf("fetch change of %s: %w", docRef, err) } if raw == nil { return nil, fmt.Errorf( "docKey %s, docID %s, serverSeq %d: %w", - docKey.String(), docID.String(), + docRef.Key.String(), docRef.ID.String(), serverSeq, database.ErrDocumentNotFound, ) diff --git a/server/backend/database/mongo/client.go b/server/backend/database/mongo/client.go index f71824be5..ee3121440 100644 --- a/server/backend/database/mongo/client.go +++ 
b/server/backend/database/mongo/client.go @@ -494,16 +494,15 @@ func (c *Client) ActivateClient(ctx context.Context, projectID types.ID, key str // DeactivateClient deactivates the client of the given ID. func (c *Client) DeactivateClient( ctx context.Context, - clientKey string, - clientID types.ID, + clientRef types.ClientRefKey, ) (*database.ClientInfo, error) { - encodedClientID, err := EncodeID(clientID) + encodedClientID, err := EncodeID(clientRef.ID) if err != nil { return nil, err } res := c.collection(ColClients).FindOneAndUpdate(ctx, bson.M{ - "key": clientKey, + "key": clientRef.Key, "_id": encodedClientID, }, bson.M{ "$set": bson.M{ @@ -515,7 +514,7 @@ func (c *Client) DeactivateClient( clientInfo := database.ClientInfo{} if err := res.Decode(&clientInfo); err != nil { if err == mongo.ErrNoDocuments { - return nil, fmt.Errorf("%s: %w", clientID, database.ErrClientNotFound) + return nil, fmt.Errorf("%s: %w", clientRef, database.ErrClientNotFound) } return nil, fmt.Errorf("decode client info: %w", err) } @@ -526,16 +525,15 @@ func (c *Client) DeactivateClient( // FindClientInfoByKeyAndID finds the client of the given key and ID. 
func (c *Client) FindClientInfoByKeyAndID( ctx context.Context, - clientKey string, - clientID types.ID, + clientRef types.ClientRefKey, ) (*database.ClientInfo, error) { - encodedClientID, err := EncodeID(clientID) + encodedClientID, err := EncodeID(clientRef.ID) if err != nil { return nil, err } result := c.collection(ColClients).FindOneAndUpdate(ctx, bson.M{ - "key": clientKey, + "key": clientRef.Key, "_id": encodedClientID, }, bson.M{ "$set": bson.M{ @@ -546,7 +544,7 @@ func (c *Client) FindClientInfoByKeyAndID( clientInfo := database.ClientInfo{} if err := result.Decode(&clientInfo); err != nil { if err == mongo.ErrNoDocuments { - return nil, fmt.Errorf("%s: %w", clientID, database.ErrClientNotFound) + return nil, fmt.Errorf("%s: %w", clientRef, database.ErrClientNotFound) } } @@ -565,8 +563,13 @@ func (c *Client) UpdateClientInfoAfterPushPull( return err } - clientDocInfoKey := getClientDocInfoKey(docInfo.Key, docInfo.ID) - clientDocInfo, ok := clientInfo.Documents[docInfo.Key][docInfo.ID] + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + + clientDocInfoKey := getClientDocInfoKey(docRef) + clientDocInfo, ok := clientInfo.Documents[docRef] if !ok { return fmt.Errorf("client doc info: %w", database.ErrDocumentNeverAttached) } @@ -582,7 +585,7 @@ func (c *Client) UpdateClientInfoAfterPushPull( }, } - attached, err := clientInfo.IsAttached(docInfo.Key, docInfo.ID) + attached, err := clientInfo.IsAttached(docRef) if err != nil { return err } @@ -605,7 +608,7 @@ func (c *Client) UpdateClientInfoAfterPushPull( if result.Err() != nil { if result.Err() == mongo.ErrNoDocuments { - return fmt.Errorf("%s: %w", clientInfo.Key, database.ErrClientNotFound) + return fmt.Errorf("%s.%s: %w", clientInfo.Key, clientInfo.ID, database.ErrClientNotFound) } return fmt.Errorf("update client info: %w", result.Err()) } @@ -686,16 +689,15 @@ func (c *Client) FindDeactivateCandidates( func (c *Client) FindDocInfoByKeyAndOwner( ctx context.Context, projectID 
types.ID, - clientKey string, - clientID types.ID, docKey key.Key, + ownerRef types.ClientRefKey, createDocIfNotExist bool, ) (*database.DocInfo, error) { encodedProjectID, err := EncodeID(projectID) if err != nil { return nil, err } - encodedOwnerID, err := EncodeID(clientID) + encodedOwnerID, err := EncodeID(ownerRef.ID) if err != nil { return nil, err } @@ -724,7 +726,7 @@ func (c *Client) FindDocInfoByKeyAndOwner( "_id": res.UpsertedID, }, bson.M{ "$set": bson.M{ - "owner_key": clientKey, + "owner_key": ownerRef.Key, "owner_id": encodedOwnerID, "server_seq": 0, "created_at": now, @@ -784,20 +786,19 @@ func (c *Client) FindDocInfoByKey( // FindDocInfoByKeyAndID finds a docInfo of the given ID. func (c *Client) FindDocInfoByKeyAndID( ctx context.Context, - key key.Key, - id types.ID, + docRef types.DocRefKey, ) (*database.DocInfo, error) { - encodedDocID, err := EncodeID(id) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return nil, err } result := c.collection(ColDocuments).FindOne(ctx, bson.M{ - "key": key, + "key": docRef.Key, "_id": encodedDocID, }) if result.Err() == mongo.ErrNoDocuments { - return nil, fmt.Errorf("%s: %w", id, database.ErrDocumentNotFound) + return nil, fmt.Errorf("%s: %w", docRef, database.ErrDocumentNotFound) } if result.Err() != nil { return nil, fmt.Errorf("find document: %w", result.Err()) @@ -814,16 +815,15 @@ func (c *Client) FindDocInfoByKeyAndID( // UpdateDocInfoStatusToRemoved updates the document status to removed. 
func (c *Client) UpdateDocInfoStatusToRemoved( ctx context.Context, - key key.Key, - id types.ID, + docRef types.DocRefKey, ) error { - encodedDocID, err := EncodeID(id) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return err } result := c.collection(ColDocuments).FindOneAndUpdate(ctx, bson.M{ - "key": key, + "key": docRef.Key, "_id": encodedDocID, }, bson.M{ "$set": bson.M{ @@ -832,7 +832,7 @@ func (c *Client) UpdateDocInfoStatusToRemoved( }, options.FindOneAndUpdate().SetReturnDocument(options.After)) if result.Err() == mongo.ErrNoDocuments { - return fmt.Errorf("%s.%s: %w", key, id, database.ErrDocumentNotFound) + return fmt.Errorf("%s: %w", docRef, database.ErrDocumentNotFound) } if result.Err() != nil { return fmt.Errorf("update document info status to removed: %w", result.Err()) @@ -911,7 +911,7 @@ func (c *Client) CreateChangeInfos( return fmt.Errorf("update document: %w", err) } if res.MatchedCount == 0 { - return fmt.Errorf("%s: %w", docInfo.ID, database.ErrConflictOnUpdate) + return fmt.Errorf("%s.%s: %w", docInfo.Key, docInfo.ID, database.ErrConflictOnUpdate) } if isRemoved { docInfo.RemovedAt = now @@ -924,10 +924,9 @@ func (c *Client) CreateChangeInfos( // save storage. 
func (c *Client) PurgeStaleChanges( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) error { - encodedDocID, err := EncodeID(docID) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return err } @@ -937,7 +936,7 @@ func (c *Client) PurgeStaleChanges( result := c.collection(ColSyncedSeqs).FindOne( ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, }, options.FindOne().SetSort(bson.M{"server_seq": 1}), @@ -957,7 +956,7 @@ func (c *Client) PurgeStaleChanges( if _, err := c.collection(ColChanges).DeleteMany( ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, "server_seq": bson.M{"$lt": minSyncedSeqInfo.ServerSeq}, }, @@ -972,12 +971,11 @@ func (c *Client) PurgeStaleChanges( // FindChangesBetweenServerSeqs returns the changes between two server sequences. func (c *Client) FindChangesBetweenServerSeqs( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, from int64, to int64, ) ([]*change.Change, error) { - infos, err := c.FindChangeInfosBetweenServerSeqs(ctx, docKey, docID, from, to) + infos, err := c.FindChangeInfosBetweenServerSeqs(ctx, docRef, from, to) if err != nil { return nil, err } @@ -997,18 +995,17 @@ func (c *Client) FindChangesBetweenServerSeqs( // FindChangeInfosBetweenServerSeqs returns the changeInfos between two server sequences. 
func (c *Client) FindChangeInfosBetweenServerSeqs( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, from int64, to int64, ) ([]*database.ChangeInfo, error) { - encodedDocID, err := EncodeID(docID) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return nil, err } cursor, err := c.collection(ColChanges).Find(ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, "server_seq": bson.M{ "$gte": from, @@ -1030,11 +1027,10 @@ func (c *Client) FindChangeInfosBetweenServerSeqs( // CreateSnapshotInfo stores the snapshot of the given document. func (c *Client) CreateSnapshotInfo( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, doc *document.InternalDocument, ) error { - encodedDocID, err := EncodeID(docID) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return err } @@ -1044,7 +1040,7 @@ func (c *Client) CreateSnapshotInfo( } if _, err := c.collection(ColSnapshots).InsertOne(ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, "server_seq": doc.Checkpoint().ServerSeq, "lamport": doc.Lamport(), @@ -1057,20 +1053,19 @@ func (c *Client) CreateSnapshotInfo( return nil } -// FindSnapshotInfoByID returns the snapshot by the given id. -func (c *Client) FindSnapshotInfoByID( +// FindSnapshotInfo returns the snapshot by the given doc_key, doc_id and server_seq. +func (c *Client) FindSnapshotInfo( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) (*database.SnapshotInfo, error) { - encodedDocID, err := EncodeID(docID) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return nil, err } result := c.collection(ColSnapshots).FindOne(ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, "server_seq": serverSeq, }) @@ -1093,12 +1088,11 @@ func (c *Client) FindSnapshotInfoByID( // FindClosestSnapshotInfo finds the last snapshot of the given document. 
func (c *Client) FindClosestSnapshotInfo( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, includeSnapshot bool, ) (*database.SnapshotInfo, error) { - encodedDocID, err := EncodeID(docID) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return nil, err } @@ -1112,7 +1106,7 @@ func (c *Client) FindClosestSnapshotInfo( } result := c.collection(ColSnapshots).FindOne(ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, "server_seq": bson.M{ "$lte": serverSeq, @@ -1137,16 +1131,15 @@ func (c *Client) FindClosestSnapshotInfo( // FindMinSyncedSeqInfo finds the minimum synced sequence info. func (c *Client) FindMinSyncedSeqInfo( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) (*database.SyncedSeqInfo, error) { - encodedDocID, err := EncodeID(docID) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return nil, err } syncedSeqResult := c.collection(ColSyncedSeqs).FindOne(ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, }, options.FindOne().SetSort(bson.D{ {Key: "server_seq", Value: 1}, @@ -1172,22 +1165,21 @@ func (c *Client) FindMinSyncedSeqInfo( func (c *Client) UpdateAndFindMinSyncedTicket( ctx context.Context, clientInfo *database.ClientInfo, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) (*time.Ticket, error) { - if err := c.UpdateSyncedSeq(ctx, clientInfo, docKey, docID, serverSeq); err != nil { + if err := c.UpdateSyncedSeq(ctx, clientInfo, docRef, serverSeq); err != nil { return nil, err } - encodedDocID, err := EncodeID(docID) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return nil, err } // 02. find min synced seq of the given document. 
result := c.collection(ColSyncedSeqs).FindOne(ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, }, options.FindOne().SetSort(bson.D{ {Key: "lamport", Value: 1}, @@ -1224,7 +1216,7 @@ func (c *Client) UpdateAndFindMinSyncedTicket( func (c *Client) FindDocInfosByPaging( ctx context.Context, projectID types.ID, - paging types.Paging[database.DocOffset], + paging types.Paging[types.DocRefKey], ) ([]*database.DocInfo, error) { encodedProjectID, err := EncodeID(projectID) if err != nil { @@ -1316,11 +1308,10 @@ func (c *Client) FindDocInfosByQuery( func (c *Client) UpdateSyncedSeq( ctx context.Context, clientInfo *database.ClientInfo, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) error { - encodedDocID, err := EncodeID(docID) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return err } @@ -1330,14 +1321,14 @@ func (c *Client) UpdateSyncedSeq( } // 01. update synced seq of the given client. - isAttached, err := clientInfo.IsAttached(docKey, docID) + isAttached, err := clientInfo.IsAttached(docRef) if err != nil { return err } if !isAttached { if _, err = c.collection(ColSyncedSeqs).DeleteOne(ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, "client_key": clientInfo.Key, "client_id": encodedClientID, @@ -1347,13 +1338,13 @@ func (c *Client) UpdateSyncedSeq( return nil } - ticket, err := c.findTicketByServerSeq(ctx, docKey, docID, serverSeq) + ticket, err := c.findTicketByServerSeq(ctx, docRef, serverSeq) if err != nil { return err } if _, err = c.collection(ColSyncedSeqs).UpdateOne(ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, "client_key": clientInfo.Key, "client_id": encodedClientID, @@ -1374,28 +1365,30 @@ func (c *Client) UpdateSyncedSeq( func (c *Client) IsDocumentAttached( ctx context.Context, projectID types.ID, - docKey key.Key, - docID types.ID, - excludeClientID types.ID, + docRef types.DocRefKey, + excludeClientRef 
types.ClientRefKey, ) (bool, error) { encodedProjectID, err := EncodeID(projectID) if err != nil { return false, err } - clientDocInfoKey := getClientDocInfoKey(docKey, docID) + clientDocInfoKey := getClientDocInfoKey(docRef) filter := bson.M{ - "project_id": encodedProjectID, - clientDocInfoKey + "status": database.DocumentAttached, + "project_id": encodedProjectID, + clientDocInfoKey + StatusKey: database.DocumentAttached, } - if excludeClientID != "" { - encodedExcludeClientID, err := EncodeID(excludeClientID) + if excludeClientRef.Key != "" && excludeClientRef.ID != "" { + encodedExcludeClientID, err := EncodeID(excludeClientRef.ID) if err != nil { return false, err } - filter["_id"] = bson.M{"$ne": encodedExcludeClientID} + filter["$and"] = []bson.M{ + {"_id": bson.M{"$ne": encodedExcludeClientID}}, + {"key": bson.M{"$ne": excludeClientRef.Key}}, + } } result := c.collection(ColClients).FindOne(ctx, filter) @@ -1408,28 +1401,27 @@ func (c *Client) IsDocumentAttached( func (c *Client) findTicketByServerSeq( ctx context.Context, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, serverSeq int64, ) (*time.Ticket, error) { if serverSeq == change.InitialServerSeq { return time.InitialTicket, nil } - encodedDocID, err := EncodeID(docID) + encodedDocID, err := EncodeID(docRef.ID) if err != nil { return nil, err } result := c.collection(ColChanges).FindOne(ctx, bson.M{ - "doc_key": docKey, + "doc_key": docRef.Key, "doc_id": encodedDocID, "server_seq": serverSeq, }) if result.Err() == mongo.ErrNoDocuments { return nil, fmt.Errorf( - "change docID=%s serverSeq=%d: %w", - docID.String(), + "change doc=%s serverSeq=%d: %w", + docRef, serverSeq, database.ErrDocumentNotFound, ) @@ -1482,8 +1474,7 @@ func escapeRegex(str string) string { return buf.String() } func getClientDocInfoKey( - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) string { - return fmt.Sprintf("documents.%s.%s.", docKey, docID.String()) + return fmt.Sprintf("documents.%s.%s.", 
docRef.Key, docRef.ID) } diff --git a/server/backend/database/mongo/registry.go b/server/backend/database/mongo/registry.go index 6c27983ec..f49e326b8 100644 --- a/server/backend/database/mongo/registry.go +++ b/server/backend/database/mongo/registry.go @@ -17,13 +17,17 @@ package mongo import ( + "fmt" "reflect" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson/bsoncodec" "go.mongodb.org/mongo-driver/bson/bsonoptions" + "go.mongodb.org/mongo-driver/bson/bsonrw" "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" + "github.com/yorkie-team/yorkie/server/backend/database" ) // NewRegistryBuilder returns a new registry builder with the default encoder and decoder. @@ -39,5 +43,61 @@ func NewRegistryBuilder() *bsoncodec.RegistryBuilder { bsoncodec.NewStringCodec(bsonoptions.StringCodec().SetDecodeObjectIDAsHex(true)), ) + // Register a decoder that converts the `documents` field in the clients collection + // into `database.ClientDocInfo.Documents`. The `documents` field is a two level map + // containing a number of `doc_key`.`doc_id`.{`client_seq`, `server_seq`, `status`}s. 
+ rb.RegisterTypeDecoder( + reflect.TypeOf(make(database.ClientDocInfoMap)), + bsoncodec.ValueDecoderFunc(func(_ bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + docs, err := vr.ReadDocument() + if err != nil { + return fmt.Errorf("read documents: %w", err) + } + if val.IsNil() { + val.Set(reflect.MakeMap(val.Type())) + } + + for { + docKey, docInfoByDocIDMapReader, err := docs.ReadElement() + if err != nil { + if err == bsonrw.ErrEOD { + break + } + return fmt.Errorf("read the element in documents: %w", err) + } + docInfoByDocIDMap, err := docInfoByDocIDMapReader.ReadDocument() + if err != nil { + return fmt.Errorf("read docInfoByDocID: %w", err) + } + for { + docID, docInfoReader, err := docInfoByDocIDMap.ReadElement() + if err != nil { + if err == bsonrw.ErrEOD { + break + } + return fmt.Errorf("read the element in docInfoByDocID: %w", err) + } + + docInfo := &database.ClientDocInfo{} + docInfoDecoder, err := bson.NewDecoder(docInfoReader) + if err != nil { + return fmt.Errorf("create docInfoDecoder: %w", err) + } + err = docInfoDecoder.Decode(docInfo) + if err != nil { + return fmt.Errorf("decode docInfo: %w", err) + } + + docRef := reflect.ValueOf(types.DocRefKey{ + Key: key.Key(docKey), + ID: types.ID(docID), + }) + val.SetMapIndex(docRef, reflect.ValueOf(docInfo)) + } + } + + return nil + })) + return rb } diff --git a/server/backend/database/testcases/testcases.go b/server/backend/database/testcases/testcases.go index 6252dae78..7be415416 100644 --- a/server/backend/database/testcases/testcases.go +++ b/server/backend/database/testcases/testcases.go @@ -60,14 +60,22 @@ func RunFindDocInfoTest( clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) assert.NoError(t, err) - _, err = db.FindDocInfoByKeyAndID(context.Background(), "dummy", dummyClientID) + _, err = db.FindDocInfoByKeyAndID(context.Background(), types.DocRefKey{ + Key: "dummy", ID: dummyClientID, + }) assert.ErrorIs(t, err, database.ErrDocumentNotFound) docKey 
:= key.Key(fmt.Sprintf("tests$%s", t.Name())) - _, err = db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, false) + _, err = db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }, false) assert.ErrorIs(t, err, database.ErrDocumentNotFound) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }, true) assert.NoError(t, err) assert.Equal(t, docKey, docInfo.Key) }) @@ -145,7 +153,10 @@ func RunFindDocInfosByQueryTest( "test0", "test1", "test2", "test3", "test10", "test11", "test20", "test21", "test22", "test23"} for _, docKey := range docKeys { - _, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, key.Key(docKey), true) + _, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, key.Key(docKey), types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }, true) assert.NoError(t, err) } @@ -176,8 +187,15 @@ func RunFindChangesBetweenServerSeqsTest( docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) - docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) - assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) + docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }, true) + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + assert.NoError(t, clientInfo.AttachDocument(docRef)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) bytesID, _ := clientInfo.ID.Bytes() @@ -206,8 +224,7 @@ func RunFindChangesBetweenServerSeqsTest( // Find changes loadedChanges, err := db.FindChangesBetweenServerSeqs( ctx, - 
docInfo.Key, - docInfo.ID, + docRef, 6, 10, ) @@ -225,36 +242,43 @@ func RunFindClosestSnapshotInfoTest(t *testing.T, db database.Database, projectI clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) bytesID, _ := clientInfo.ID.Bytes() actorID, _ := time.ActorIDFromBytes(bytesID) - docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) + docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }, true) doc := document.New(key.Key(t.Name())) doc.SetActor(actorID) - assert.NoError(t, doc.Update(func(root *json.Object, p *presence.Presence) error { root.SetNewArray("array") return nil })) - assert.NoError(t, db.CreateSnapshotInfo(ctx, docKey, docInfo.ID, doc.InternalDocument())) - snapshot, err := db.FindClosestSnapshotInfo(ctx, docKey, docInfo.ID, change.MaxCheckpoint.ServerSeq, true) + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + + assert.NoError(t, db.CreateSnapshotInfo(ctx, docRef, doc.InternalDocument())) + snapshot, err := db.FindClosestSnapshotInfo(ctx, docRef, change.MaxCheckpoint.ServerSeq, true) assert.NoError(t, err) assert.Equal(t, int64(0), snapshot.ServerSeq) pack := change.NewPack(doc.Key(), doc.Checkpoint().NextServerSeq(1), nil, nil) assert.NoError(t, doc.ApplyChangePack(pack)) - assert.NoError(t, db.CreateSnapshotInfo(ctx, docKey, docInfo.ID, doc.InternalDocument())) - snapshot, err = db.FindClosestSnapshotInfo(ctx, docKey, docInfo.ID, change.MaxCheckpoint.ServerSeq, true) + assert.NoError(t, db.CreateSnapshotInfo(ctx, docRef, doc.InternalDocument())) + snapshot, err = db.FindClosestSnapshotInfo(ctx, docRef, change.MaxCheckpoint.ServerSeq, true) assert.NoError(t, err) assert.Equal(t, int64(1), snapshot.ServerSeq) pack = change.NewPack(doc.Key(), doc.Checkpoint().NextServerSeq(2), nil, nil) assert.NoError(t, doc.ApplyChangePack(pack)) - assert.NoError(t, db.CreateSnapshotInfo(ctx, docKey, 
docInfo.ID, doc.InternalDocument())) - snapshot, err = db.FindClosestSnapshotInfo(ctx, docKey, docInfo.ID, change.MaxCheckpoint.ServerSeq, true) + assert.NoError(t, db.CreateSnapshotInfo(ctx, docRef, doc.InternalDocument())) + snapshot, err = db.FindClosestSnapshotInfo(ctx, docRef, change.MaxCheckpoint.ServerSeq, true) assert.NoError(t, err) assert.Equal(t, int64(2), snapshot.ServerSeq) - snapshot, err = db.FindClosestSnapshotInfo(ctx, docKey, docInfo.ID, 1, true) + snapshot, err = db.FindClosestSnapshotInfo(ctx, docRef, 1, true) assert.NoError(t, err) assert.Equal(t, int64(1), snapshot.ServerSeq) }) @@ -285,13 +309,19 @@ func RunListUserInfosTest(t *testing.T, db database.Database) { func RunActivateClientDeactivateClientTest(t *testing.T, db database.Database, projectID types.ID) { t.Run("activate and find client test", func(t *testing.T) { ctx := context.Background() - _, err := db.FindClientInfoByKeyAndID(ctx, dummyClientKey, dummyClientID) + _, err := db.FindClientInfoByKeyAndID(ctx, types.ClientRefKey{ + Key: dummyClientKey, + ID: dummyClientID, + }) assert.ErrorIs(t, err, database.ErrClientNotFound) clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) assert.NoError(t, err) - found, err := db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) + found, err := db.FindClientInfoByKeyAndID(ctx, types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }) assert.NoError(t, err) assert.Equal(t, clientInfo.Key, found.Key) }) @@ -300,7 +330,10 @@ func RunActivateClientDeactivateClientTest(t *testing.T, db database.Database, p ctx := context.Background() // try to deactivate the client with not exists ID. 
- _, err := db.DeactivateClient(ctx, dummyClientKey, dummyClientID) + _, err := db.DeactivateClient(ctx, types.ClientRefKey{ + Key: dummyClientKey, + ID: dummyClientID, + }) assert.ErrorIs(t, err, database.ErrClientNotFound) clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) @@ -315,16 +348,19 @@ func RunActivateClientDeactivateClientTest(t *testing.T, db database.Database, p assert.Equal(t, t.Name(), clientInfo.Key) assert.Equal(t, database.ClientActivated, clientInfo.Status) - clientKey := clientInfo.Key - clientID := clientInfo.ID - - clientInfo, err = db.DeactivateClient(ctx, clientKey, clientID) + clientInfo, err = db.DeactivateClient(ctx, types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }) assert.NoError(t, err) assert.Equal(t, t.Name(), clientInfo.Key) assert.Equal(t, database.ClientDeactivated, clientInfo.Status) // try to deactivate the client twice. - clientInfo, err = db.DeactivateClient(ctx, clientKey, clientID) + clientInfo, err = db.DeactivateClient(ctx, types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }) assert.NoError(t, err) assert.Equal(t, t.Name(), clientInfo.Key) assert.Equal(t, database.ClientDeactivated, clientInfo.Status) @@ -439,7 +475,10 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) docInfos := make([]*database.DocInfo, 0, totalSize) for i := 0; i < totalSize; i++ { - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, key.Key(fmt.Sprintf("%d", i)), true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, key.Key(fmt.Sprintf("%d", i)), types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }, true) assert.NoError(t, err) docInfos = append(docInfos, docInfo) } @@ -454,13 +493,13 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t } // initial page, offset is empty - infos, err := db.FindDocInfosByPaging(ctx, 
projectID, types.Paging[database.DocOffset]{PageSize: pageSize}) + infos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[types.DocRefKey]{PageSize: pageSize}) assert.NoError(t, err) AssertKeys(t, docKeysInReverse[:pageSize], infos) // backward - infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ - Offset: database.DocOffset{ + infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[types.DocRefKey]{ + Offset: types.DocRefKey{ Key: infos[len(infos)-1].Key, ID: infos[len(infos)-1].ID, }, @@ -470,8 +509,8 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t AssertKeys(t, docKeysInReverse[pageSize:], infos) // backward again - emptyInfos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ - Offset: database.DocOffset{ + emptyInfos, err := db.FindDocInfosByPaging(ctx, projectID, types.Paging[types.DocRefKey]{ + Offset: types.DocRefKey{ Key: infos[len(infos)-1].Key, ID: infos[len(infos)-1].ID, }, @@ -481,8 +520,8 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t AssertKeys(t, nil, emptyInfos) // forward - infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ - Offset: database.DocOffset{ + infos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[types.DocRefKey]{ + Offset: types.DocRefKey{ Key: infos[0].Key, ID: infos[0].ID, }, @@ -493,8 +532,8 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t AssertKeys(t, docKeys[totalSize-pageSize:], infos) // forward again - emptyInfos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[database.DocOffset]{ - Offset: database.DocOffset{ + emptyInfos, err = db.FindDocInfosByPaging(ctx, projectID, types.Paging[types.DocRefKey]{ + Offset: types.DocRefKey{ Key: infos[len(infos)-1].Key, ID: infos[len(infos)-1].ID, }, @@ -517,7 +556,10 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, 
projectID t var dummyDocInfos []*database.DocInfo for i := 0; i <= testDocCnt; i++ { testDocKey := key.Key(fmt.Sprintf("%s%02d", "testdockey", i)) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, testProjectInfo.ID, dummyClientKey, dummyClientID, testDocKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, testProjectInfo.ID, testDocKey, types.ClientRefKey{ + Key: dummyClientKey, + ID: dummyClientID, + }, true) assert.NoError(t, err) dummyDocInfos = append(dummyDocInfos, docInfo) } @@ -527,14 +569,14 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t cases := []struct { name string - offset database.DocOffset + offset types.DocRefKey pageSize int isForward bool testResult []int }{ { name: "FindDocInfosByPaging no flag test", - offset: database.DocOffset{ + offset: types.DocRefKey{ Key: "", ID: "", }, @@ -544,7 +586,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t }, { name: "FindDocInfosByPaging --forward test", - offset: database.DocOffset{ + offset: types.DocRefKey{ Key: "", ID: "", }, @@ -554,7 +596,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t }, { name: "FindDocInfosByPaging --size test", - offset: database.DocOffset{ + offset: types.DocRefKey{ Key: "", ID: "", }, @@ -564,7 +606,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t }, { name: "FindDocInfosByPaging --size --forward test", - offset: database.DocOffset{ + offset: types.DocRefKey{ Key: "", ID: "", }, @@ -574,7 +616,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t }, { name: "FindDocInfosByPaging --offset test", - offset: database.DocOffset{ + offset: types.DocRefKey{ Key: dummyDocInfos[13].Key, ID: dummyDocInfos[13].ID, }, @@ -584,7 +626,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t }, { name: "FindDocInfosByPaging --forward --offset test", - offset: database.DocOffset{ + 
offset: types.DocRefKey{ Key: dummyDocInfos[13].Key, ID: dummyDocInfos[13].ID, }, @@ -594,7 +636,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t }, { name: "FindDocInfosByPaging --size --offset test", - offset: database.DocOffset{ + offset: types.DocRefKey{ Key: dummyDocInfos[13].Key, ID: dummyDocInfos[13].ID, }, @@ -604,7 +646,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t }, { name: "FindDocInfosByPaging --size --forward --offset test", - offset: database.DocOffset{ + offset: types.DocRefKey{ Key: dummyDocInfos[13].Key, ID: dummyDocInfos[13].ID, }, @@ -617,7 +659,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t for _, c := range cases { t.Run(c.name, func(t *testing.T) { ctx := context.Background() - testPaging := types.Paging[database.DocOffset]{ + testPaging := types.Paging[types.DocRefKey]{ Offset: c.offset, PageSize: c.pageSize, IsForward: c.isForward, @@ -647,7 +689,10 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t var docInfos []*database.DocInfo for i := 0; i < testDocCnt; i++ { testDocKey := key.Key("key" + strconv.Itoa(i)) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientKey, dummyClientID, testDocKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, testDocKey, types.ClientRefKey{ + Key: dummyClientKey, + ID: dummyClientID, + }, true) assert.NoError(t, err) docInfos = append(docInfos, docInfo) } @@ -660,7 +705,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t } // 02. List the documents. 
- result, err := db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[database.DocOffset]{ + result, err := db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[types.DocRefKey]{ PageSize: 10, IsForward: false, }) @@ -675,7 +720,7 @@ func RunFindDocInfosByPagingTest(t *testing.T, db database.Database, projectID t assert.NoError(t, err) // 04. List the documents again and check the filtered result. - result, err = db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[database.DocOffset]{ + result, err = db.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[types.DocRefKey]{ PageSize: 10, IsForward: false, }) @@ -739,14 +784,21 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type // 01. Create a client and a document then attach the document to the client. clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) - docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) - assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) + docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }, true) + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + assert.NoError(t, clientInfo.AttachDocument(docRef)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) // 02. Remove the document and check the document is removed. err := db.CreateChangeInfos(ctx, docInfo, 0, []*change.Change{}, true) assert.NoError(t, err) - docInfo, err = db.FindDocInfoByKeyAndID(ctx, docInfo.Key, docInfo.ID) + docInfo, err = db.FindDocInfoByKeyAndID(ctx, docRef) assert.NoError(t, err) assert.Equal(t, false, docInfo.RemovedAt.IsZero()) }) @@ -757,18 +809,30 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type // 01. Create a client and a document then attach the document to the client. 
clientInfo1, _ := db.ActivateClient(ctx, projectID, t.Name()) - docInfo1, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.Key, clientInfo1.ID, docKey, true) - assert.NoError(t, clientInfo1.AttachDocument(docInfo1.Key, docInfo1.ID)) + clientRef1 := types.ClientRefKey{ + Key: clientInfo1.Key, + ID: clientInfo1.ID, + } + docInfo1, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef1, true) + docRef1 := types.DocRefKey{ + Key: docInfo1.Key, + ID: docInfo1.ID, + } + assert.NoError(t, clientInfo1.AttachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo1)) // 02. Remove the document. - assert.NoError(t, clientInfo1.RemoveDocument(docInfo1.Key, docInfo1.ID)) + assert.NoError(t, clientInfo1.RemoveDocument(docRef1)) err := db.CreateChangeInfos(ctx, docInfo1, 0, []*change.Change{}, true) assert.NoError(t, err) // 03. Create a document with same key and check they have same key but different id. - docInfo2, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.Key, clientInfo1.ID, docKey, true) - assert.NoError(t, clientInfo1.AttachDocument(docInfo2.Key, docInfo2.ID)) + docInfo2, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef1, true) + docRef2 := types.DocRefKey{ + Key: docInfo2.Key, + ID: docInfo2.ID, + } + assert.NoError(t, clientInfo1.AttachDocument(docRef2)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo2)) assert.Equal(t, docInfo1.Key, docInfo2.Key) assert.NotEqual(t, docInfo1.ID, docInfo2.ID) @@ -781,8 +845,16 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type // 01. Create a client and a document then attach the document to the client. 
clientInfo1, _ := db.ActivateClient(ctx, projectID, t.Name()) - docInfo1, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.Key, clientInfo1.ID, docKey, true) - assert.NoError(t, clientInfo1.AttachDocument(docInfo1.Key, docInfo1.ID)) + clientRef1 := types.ClientRefKey{ + Key: clientInfo1.Key, + ID: clientInfo1.ID, + } + docInfo1, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef1, true) + docRef1 := types.DocRefKey{ + Key: docInfo1.Key, + ID: docInfo1.ID, + } + assert.NoError(t, clientInfo1.AttachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo1)) // 02. Generate changes at the document. @@ -803,24 +875,28 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type pack1 := doc1.CreateChangePack() // 03. Store changes and remove the document. - assert.NoError(t, clientInfo1.RemoveDocument(docInfo1.Key, docInfo1.ID)) + assert.NoError(t, clientInfo1.RemoveDocument(docRef1)) err := db.CreateChangeInfos(ctx, docInfo1, 0, pack1.Changes, true) assert.NoError(t, err) // 04. Create a document with same key and check they have same key but different id. - docInfo2, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo1.Key, clientInfo1.ID, docKey, true) - assert.NoError(t, clientInfo1.AttachDocument(docInfo2.Key, docInfo2.ID)) + docInfo2, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef1, true) + docRef2 := types.DocRefKey{ + Key: docInfo2.Key, + ID: docInfo2.ID, + } + assert.NoError(t, clientInfo1.AttachDocument(docRef2)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo1, docInfo2)) assert.Equal(t, docInfo1.Key, docInfo2.Key) assert.NotEqual(t, docInfo1.ID, docInfo2.ID) // 05. Check whether the changes of the removed document are referencing the removed document. 
- changeInfos1, err := db.FindChangeInfosBetweenServerSeqs(ctx, docKey, docInfo1.ID, 0, 1) + changeInfos1, err := db.FindChangeInfosBetweenServerSeqs(ctx, docRef1, 0, 1) assert.NoError(t, err) assert.Len(t, changeInfos1, 1) // 06. Check whether the changes of the removed document aren't referencing the active document. - changeInfos2, err := db.FindChangeInfosBetweenServerSeqs(ctx, docKey, docInfo2.ID, 0, 1) + changeInfos2, err := db.FindChangeInfosBetweenServerSeqs(ctx, docRef2, 0, 1) assert.NoError(t, err) assert.Len(t, changeInfos2, 0) }) @@ -830,27 +906,35 @@ func RunCreateChangeInfosTest(t *testing.T, db database.Database, projectID type docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) clientInfo, _ := db.ActivateClient(ctx, projectID, t.Name()) - docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) - assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) + clientRef := types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + } + docInfo, _ := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef, true) + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + assert.NoError(t, clientInfo.AttachDocument(docRef)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) doc := document.New(key.Key(t.Name())) pack := doc.CreateChangePack() // Set removed_at in docInfo and store changes - assert.NoError(t, clientInfo.RemoveDocument(docInfo.Key, docInfo.ID)) + assert.NoError(t, clientInfo.RemoveDocument(docRef)) err := db.CreateChangeInfos(ctx, docInfo, 0, pack.Changes, true) assert.NoError(t, err) // Check whether removed_at is set in docInfo - docInfo, err = db.FindDocInfoByKeyAndID(ctx, docInfo.Key, docInfo.ID) + docInfo, err = db.FindDocInfoByKeyAndID(ctx, docRef) assert.NoError(t, err) assert.NotEqual(t, gotime.Time{}, docInfo.RemovedAt) // Check whether DocumentRemoved status is set in clientInfo - clientInfo, err = 
db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) + clientInfo, err = db.FindClientInfoByKeyAndID(ctx, clientRef) assert.NoError(t, err) - assert.NotEqual(t, database.DocumentRemoved, clientInfo.Documents[docKey][docInfo.ID].Status) + assert.NotEqual(t, database.DocumentRemoved, clientInfo.Documents[docRef].Status) }) } @@ -863,13 +947,21 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) assert.NoError(t, err) + clientRef := types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + } docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef, true) assert.NoError(t, err) + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } err = db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo) assert.ErrorIs(t, err, database.ErrDocumentNeverAttached) - assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) + assert.NoError(t, clientInfo.AttachDocument(docRef)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) }) @@ -877,17 +969,25 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) assert.NoError(t, err) + clientRef := types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + } docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef, true) assert.NoError(t, err) - assert.NoError(t, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + assert.NoError(t, 
clientInfo.AttachDocument(docRef)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err := db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) - assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(0)) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(0)) + result, err := db.FindClientInfoByKeyAndID(ctx, clientRef) + assert.Equal(t, result.Documents[docRef].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docRef].ServerSeq, int64(0)) + assert.Equal(t, result.Documents[docRef].ClientSeq, uint32(0)) assert.NoError(t, err) }) @@ -895,41 +995,49 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) assert.NoError(t, err) + clientRef := types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + } docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef, true) assert.NoError(t, err) - assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) - clientInfo.Documents[docKey][docInfo.ID].ServerSeq = 1 - clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 1 + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + assert.NoError(t, clientInfo.AttachDocument(docRef)) + clientInfo.Documents[docRef].ServerSeq = 1 + clientInfo.Documents[docRef].ClientSeq = 1 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err := db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) - assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(1)) - 
assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(1)) + result, err := db.FindClientInfoByKeyAndID(ctx, clientRef) + assert.Equal(t, result.Documents[docRef].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docRef].ServerSeq, int64(1)) + assert.Equal(t, result.Documents[docRef].ClientSeq, uint32(1)) assert.NoError(t, err) // update with larger seq - clientInfo.Documents[docKey][docInfo.ID].ServerSeq = 3 - clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 5 + clientInfo.Documents[docRef].ServerSeq = 3 + clientInfo.Documents[docRef].ClientSeq = 5 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err = db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) - assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(3)) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(5)) + result, err = db.FindClientInfoByKeyAndID(ctx, clientRef) + assert.Equal(t, result.Documents[docRef].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docRef].ServerSeq, int64(3)) + assert.Equal(t, result.Documents[docRef].ClientSeq, uint32(5)) assert.NoError(t, err) // update with smaller seq(should be ignored) - clientInfo.Documents[docKey][docInfo.ID].ServerSeq = 2 - clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 3 + clientInfo.Documents[docRef].ServerSeq = 2 + clientInfo.Documents[docRef].ClientSeq = 3 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err = db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) - assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(3)) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(5)) + result, err = db.FindClientInfoByKeyAndID(ctx, clientRef) + 
assert.Equal(t, result.Documents[docRef].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docRef].ServerSeq, int64(3)) + assert.Equal(t, result.Documents[docRef].ClientSeq, uint32(5)) assert.NoError(t, err) }) @@ -937,28 +1045,36 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) assert.NoError(t, err) + clientRef := types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + } docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef, true) assert.NoError(t, err) - assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) - clientInfo.Documents[docKey][docInfo.ID].ServerSeq = 1 - clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 1 + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + assert.NoError(t, clientInfo.AttachDocument(docRef)) + clientInfo.Documents[docRef].ServerSeq = 1 + clientInfo.Documents[docRef].ClientSeq = 1 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err := db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) - assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(1)) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(1)) + result, err := db.FindClientInfoByKeyAndID(ctx, clientRef) + assert.Equal(t, result.Documents[docRef].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docRef].ServerSeq, int64(1)) + assert.Equal(t, result.Documents[docRef].ClientSeq, uint32(1)) assert.NoError(t, err) - assert.NoError(t, clientInfo.DetachDocument(docKey, docInfo.ID)) + assert.NoError(t, clientInfo.DetachDocument(docRef)) 
assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err = db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) - assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentDetached) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(0)) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(0)) + result, err = db.FindClientInfoByKeyAndID(ctx, clientRef) + assert.Equal(t, result.Documents[docRef].Status, database.DocumentDetached) + assert.Equal(t, result.Documents[docRef].ServerSeq, int64(0)) + assert.Equal(t, result.Documents[docRef].ClientSeq, uint32(0)) assert.NoError(t, err) }) @@ -966,28 +1082,36 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) assert.NoError(t, err) + clientRef := types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + } docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef, true) assert.NoError(t, err) - assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) - clientInfo.Documents[docKey][docInfo.ID].ServerSeq = 1 - clientInfo.Documents[docKey][docInfo.ID].ClientSeq = 1 + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + assert.NoError(t, clientInfo.AttachDocument(docRef)) + clientInfo.Documents[docRef].ServerSeq = 1 + clientInfo.Documents[docRef].ClientSeq = 1 assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err := db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) - assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentAttached) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(1)) - assert.Equal(t, 
result.Documents[docKey][docInfo.ID].ClientSeq, uint32(1)) + result, err := db.FindClientInfoByKeyAndID(ctx, clientRef) + assert.Equal(t, result.Documents[docRef].Status, database.DocumentAttached) + assert.Equal(t, result.Documents[docRef].ServerSeq, int64(1)) + assert.Equal(t, result.Documents[docRef].ClientSeq, uint32(1)) assert.NoError(t, err) - assert.NoError(t, clientInfo.RemoveDocument(docKey, docInfo.ID)) + assert.NoError(t, clientInfo.RemoveDocument(docRef)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) - result, err = db.FindClientInfoByKeyAndID(ctx, clientInfo.Key, clientInfo.ID) - assert.Equal(t, result.Documents[docKey][docInfo.ID].Status, database.DocumentRemoved) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ServerSeq, int64(0)) - assert.Equal(t, result.Documents[docKey][docInfo.ID].ClientSeq, uint32(0)) + result, err = db.FindClientInfoByKeyAndID(ctx, clientRef) + assert.Equal(t, result.Documents[docRef].Status, database.DocumentRemoved) + assert.Equal(t, result.Documents[docRef].ServerSeq, int64(0)) + assert.Equal(t, result.Documents[docRef].ClientSeq, uint32(0)) assert.NoError(t, err) }) @@ -995,11 +1119,19 @@ func RunUpdateClientInfoAfterPushPullTest(t *testing.T, db database.Database, pr clientInfo, err := db.ActivateClient(ctx, projectID, t.Name()) assert.NoError(t, err) + clientRef := types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + } docKey := key.Key(fmt.Sprintf("tests$%s", t.Name())) - docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, clientInfo.Key, clientInfo.ID, docKey, true) + docInfo, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, docKey, clientRef, true) assert.NoError(t, err) - assert.NoError(t, clientInfo.AttachDocument(docKey, docInfo.ID)) + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + assert.NoError(t, clientInfo.AttachDocument(docRef)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) clientInfo.ID = 
"invalid clientInfo id" @@ -1020,48 +1152,59 @@ func RunIsDocumentAttachedTest(t *testing.T, db database.Database, projectID typ assert.NoError(t, err) c2, err := db.ActivateClient(ctx, projectID, t.Name()+"2") assert.NoError(t, err) - d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.Key, c1.ID, helper.TestDocKey(t), true) + d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, helper.TestDocKey(t), types.ClientRefKey{ + Key: c1.Key, + ID: c1.ID, + }, true) assert.NoError(t, err) + docRef1 := types.DocRefKey{ + Key: d1.Key, + ID: d1.ID, + } + emptyClientRef := types.ClientRefKey{ + Key: "", ID: "", + } + // 01. Check if document is attached without attaching - attached, err := db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err := db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.False(t, attached) // 02. Check if document is attached after attaching - assert.NoError(t, c1.AttachDocument(d1.Key, d1.ID)) + assert.NoError(t, c1.AttachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.True(t, attached) // 03. Check if document is attached after detaching - assert.NoError(t, c1.DetachDocument(d1.Key, d1.ID)) + assert.NoError(t, c1.DetachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.False(t, attached) // 04. 
Check if document is attached after two clients attaching - assert.NoError(t, c1.AttachDocument(d1.Key, d1.ID)) + assert.NoError(t, c1.AttachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - assert.NoError(t, c2.AttachDocument(d1.Key, d1.ID)) + assert.NoError(t, c2.AttachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c2, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.True(t, attached) // 05. Check if document is attached after a client detaching - assert.NoError(t, c1.DetachDocument(d1.Key, d1.ID)) + assert.NoError(t, c1.DetachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.True(t, attached) // 06. Check if document is attached after another client detaching - assert.NoError(t, c2.DetachDocument(d1.Key, d1.ID)) + assert.NoError(t, c2.DetachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c2, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.False(t, attached) }) @@ -1072,38 +1215,56 @@ func RunIsDocumentAttachedTest(t *testing.T, db database.Database, projectID typ // 00. 
Create a client and two documents c1, err := db.ActivateClient(ctx, projectID, t.Name()+"1") assert.NoError(t, err) - d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.Key, c1.ID, helper.TestDocKey(t)+"1", true) + d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, helper.TestDocKey(t)+"1", types.ClientRefKey{ + Key: c1.Key, + ID: c1.ID, + }, true) assert.NoError(t, err) - d2, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.Key, c1.ID, helper.TestDocKey(t)+"2", true) + d2, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, helper.TestDocKey(t)+"2", types.ClientRefKey{ + Key: c1.Key, + ID: c1.ID, + }, true) assert.NoError(t, err) + docRef1 := types.DocRefKey{ + Key: d1.Key, + ID: d1.ID, + } + docRef2 := types.DocRefKey{ + Key: d2.Key, + ID: d2.ID, + } + emptyClientRef := types.ClientRefKey{ + Key: "", ID: "", + } + // 01. Check if documents are attached after attaching - assert.NoError(t, c1.AttachDocument(d1.Key, d1.ID)) + assert.NoError(t, c1.AttachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err := db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err := db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.True(t, attached) - assert.NoError(t, c1.AttachDocument(d2.Key, d2.ID)) + assert.NoError(t, c1.AttachDocument(docRef2)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d2)) - attached, err = db.IsDocumentAttached(ctx, projectID, d2.Key, d2.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef2, emptyClientRef) assert.NoError(t, err) assert.True(t, attached) // 02. 
Check if a document is attached after detaching another document - assert.NoError(t, c1.DetachDocument(d2.Key, d2.ID)) + assert.NoError(t, c1.DetachDocument(docRef2)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d2)) - attached, err = db.IsDocumentAttached(ctx, projectID, d2.Key, d2.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef2, emptyClientRef) assert.NoError(t, err) assert.False(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.True(t, attached) // 03. Check if a document is attached after detaching remaining document - assert.NoError(t, c1.DetachDocument(d1.Key, d1.ID)) + assert.NoError(t, c1.DetachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.False(t, attached) }) @@ -1116,72 +1277,89 @@ func RunIsDocumentAttachedTest(t *testing.T, db database.Database, projectID typ assert.NoError(t, err) c2, err := db.ActivateClient(ctx, projectID, t.Name()+"2") assert.NoError(t, err) - d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, c1.Key, c1.ID, helper.TestDocKey(t), true) + + clientRef1 := types.ClientRefKey{ + Key: c1.Key, + ID: c1.ID, + } + clientRef2 := types.ClientRefKey{ + Key: c2.Key, + ID: c2.ID, + } + + d1, err := db.FindDocInfoByKeyAndOwner(ctx, projectID, helper.TestDocKey(t), clientRef1, true) assert.NoError(t, err) + docRef1 := types.DocRefKey{ + Key: d1.Key, + ID: d1.ID, + } + emptyClientRef := types.ClientRefKey{ + Key: "", ID: "", + } // 01. 
Check if document is attached without attaching - attached, err := db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err := db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.False(t, attached) // 02. Check if document is attached after attaching - assert.NoError(t, c1.AttachDocument(d1.Key, d1.ID)) + assert.NoError(t, c1.AttachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.True(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c1.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, clientRef1) assert.NoError(t, err) assert.False(t, attached) // 03. Check if document is attached after detaching - assert.NoError(t, c1.DetachDocument(d1.Key, d1.ID)) + assert.NoError(t, c1.DetachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.False(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c1.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, clientRef1) assert.NoError(t, err) assert.False(t, attached) // 04. 
Check if document is attached after two clients attaching - assert.NoError(t, c1.AttachDocument(d1.Key, d1.ID)) + assert.NoError(t, c1.AttachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - assert.NoError(t, c2.AttachDocument(d1.Key, d1.ID)) + assert.NoError(t, c2.AttachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c2, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.True(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c1.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, clientRef1) assert.NoError(t, err) assert.True(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c2.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, clientRef2) assert.NoError(t, err) assert.True(t, attached) // 05. Check if document is attached after a client detaching - assert.NoError(t, c1.DetachDocument(d1.Key, d1.ID)) + assert.NoError(t, c1.DetachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c1, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.True(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c1.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, clientRef1) assert.NoError(t, err) assert.True(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c2.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, clientRef2) assert.NoError(t, err) assert.False(t, attached) // 06. 
Check if document is attached after another client detaching - assert.NoError(t, c2.DetachDocument(d1.Key, d1.ID)) + assert.NoError(t, c2.DetachDocument(docRef1)) assert.NoError(t, db.UpdateClientInfoAfterPushPull(ctx, c2, d1)) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, "") + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, emptyClientRef) assert.NoError(t, err) assert.False(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c1.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, clientRef1) assert.NoError(t, err) assert.False(t, attached) - attached, err = db.IsDocumentAttached(ctx, projectID, d1.Key, d1.ID, c2.ID) + attached, err = db.IsDocumentAttached(ctx, projectID, docRef1, clientRef2) assert.NoError(t, err) assert.False(t, attached) }) diff --git a/server/backend/housekeeping/housekeeping.go b/server/backend/housekeeping/housekeeping.go index 23475b9be..cc1569739 100644 --- a/server/backend/housekeeping/housekeeping.go +++ b/server/backend/housekeeping/housekeeping.go @@ -163,8 +163,10 @@ func (h *Housekeeping) deactivateCandidates( if _, err := clients.Deactivate( ctx, h.database, - clientInfo.Key, - clientInfo.ID, + types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }, ); err != nil { return database.DefaultProjectID, err } diff --git a/server/clients/clients.go b/server/clients/clients.go index a5455e868..8518065ba 100644 --- a/server/clients/clients.go +++ b/server/clients/clients.go @@ -22,7 +22,6 @@ import ( "errors" "github.com/yorkie-team/yorkie/api/types" - "github.com/yorkie-team/yorkie/pkg/document/time" "github.com/yorkie-team/yorkie/server/backend/database" ) @@ -48,62 +47,50 @@ func Activate( func Deactivate( ctx context.Context, db database.Database, - clientKey string, - clientID types.ID, + clientRef types.ClientRefKey, ) (*database.ClientInfo, error) { - clientInfo, err := db.FindClientInfoByKeyAndID( - ctx, - clientKey, - clientID, - ) 
+ clientInfo, err := db.FindClientInfoByKeyAndID(ctx, clientRef) if err != nil { return nil, err } - for docKey, v := range clientInfo.Documents { - for docID, clientDocInfo := range v { - isAttached, err := clientInfo.IsAttached(docKey, docID) - if err != nil { - return nil, err - } - if !isAttached { - continue - } + for docRef, clientDocInfo := range clientInfo.Documents { + isAttached, err := clientInfo.IsAttached(docRef) + if err != nil { + return nil, err + } + if !isAttached { + continue + } - if err := clientInfo.DetachDocument(docKey, docID); err != nil { - return nil, err - } + if err := clientInfo.DetachDocument(docRef); err != nil { + return nil, err + } - // TODO(hackerwins): We need to remove the presence of the client from the document. - // Be careful that housekeeping is executed by the leader. And documents are sharded - // by the servers in the cluster. So, we need to consider the case where the leader is - // not the same as the server that handles the document. + // TODO(hackerwins): We need to remove the presence of the client from the document. + // Be careful that housekeeping is executed by the leader. And documents are sharded + // by the servers in the cluster. So, we need to consider the case where the leader is + // not the same as the server that handles the document. - if err := db.UpdateSyncedSeq( - ctx, - clientInfo, - docKey, - docID, - clientDocInfo.ServerSeq, - ); err != nil { - return nil, err - } + if err := db.UpdateSyncedSeq( + ctx, + clientInfo, + docRef, + clientDocInfo.ServerSeq, + ); err != nil { + return nil, err } + } - return db.DeactivateClient(ctx, clientKey, clientID) + return db.DeactivateClient(ctx, clientRef) } // FindClientInfo finds the client with the given id. 
func FindClientInfo( ctx context.Context, db database.Database, - clientKey string, - clientID *time.ActorID, + clientRef types.ClientRefKey, ) (*database.ClientInfo, error) { - return db.FindClientInfoByKeyAndID( - ctx, - clientKey, - types.IDFromActorID(clientID), - ) + return db.FindClientInfoByKeyAndID(ctx, clientRef) } diff --git a/server/documents/documents.go b/server/documents/documents.go index cba2c6389..e1a4d905b 100644 --- a/server/documents/documents.go +++ b/server/documents/documents.go @@ -48,7 +48,7 @@ func ListDocumentSummaries( ctx context.Context, be *backend.Backend, project *types.Project, - paging types.Paging[database.DocOffset], + paging types.Paging[types.DocRefKey], includeSnapshot bool, ) ([]*types.DocumentSummary, error) { if paging.PageSize > pageSizeLimit { @@ -101,9 +101,11 @@ func GetDocumentSummary( docInfo, err := be.DB.FindDocInfoByKeyAndOwner( ctx, project.ID, - "", - types.IDFromActorID(time.InitialActorID), k, + types.ClientRefKey{ + Key: "", + ID: types.IDFromActorID(time.InitialActorID), + }, false, ) if err != nil { @@ -136,9 +138,11 @@ func GetDocumentByServerSeq( docInfo, err := be.DB.FindDocInfoByKeyAndOwner( ctx, project.ID, - "", - types.IDFromActorID(time.InitialActorID), k, + types.ClientRefKey{ + Key: "", + ID: types.IDFromActorID(time.InitialActorID), + }, false, ) if err != nil { @@ -201,10 +205,9 @@ func FindDocInfoByKey( func FindDocInfoByKeyAndID( ctx context.Context, be *backend.Backend, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, ) (*database.DocInfo, error) { - return be.DB.FindDocInfoByKeyAndID(ctx, docKey, docID) + return be.DB.FindDocInfoByKeyAndID(ctx, docRef) } // FindDocInfoByKeyAndOwner returns a document for the given document key. 
If @@ -220,9 +223,11 @@ func FindDocInfoByKeyAndOwner( return be.DB.FindDocInfoByKeyAndOwner( ctx, project.ID, - clientInfo.Key, - clientInfo.ID, docKey, + types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }, createDocIfNotExist, ) } @@ -233,15 +238,16 @@ func RemoveDocument( ctx context.Context, be *backend.Backend, project *types.Project, - docKey key.Key, - docID types.ID, + docRef types.DocRefKey, force bool, ) error { if force { - return be.DB.UpdateDocInfoStatusToRemoved(ctx, docKey, docID) + return be.DB.UpdateDocInfoStatusToRemoved(ctx, docRef) } - isAttached, err := be.DB.IsDocumentAttached(ctx, project.ID, docKey, docID, "") + isAttached, err := be.DB.IsDocumentAttached(ctx, project.ID, docRef, types.ClientRefKey{ + Key: "", ID: "", + }) if err != nil { return err } @@ -249,7 +255,7 @@ func RemoveDocument( return ErrDocumentAttached } - return be.DB.UpdateDocInfoStatusToRemoved(ctx, docKey, docID) + return be.DB.UpdateDocInfoStatusToRemoved(ctx, docRef) } // IsDocumentAttached returns true if the given document is attached to any client. 
@@ -257,9 +263,8 @@ func IsDocumentAttached( ctx context.Context, be *backend.Backend, project *types.Project, - docKey key.Key, - docID types.ID, - excludeClientID types.ID, + docRef types.DocRefKey, + excludeClientRef types.ClientRefKey, ) (bool, error) { - return be.DB.IsDocumentAttached(ctx, project.ID, docKey, docID, excludeClientID) + return be.DB.IsDocumentAttached(ctx, project.ID, docRef, excludeClientRef) } diff --git a/server/packs/history.go b/server/packs/history.go index 109ee90a3..a879c93bc 100644 --- a/server/packs/history.go +++ b/server/packs/history.go @@ -19,6 +19,7 @@ package packs import ( "context" + "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/pkg/document/change" "github.com/yorkie-team/yorkie/server/backend" "github.com/yorkie-team/yorkie/server/backend/database" @@ -32,19 +33,20 @@ func FindChanges( from int64, to int64, ) ([]*change.Change, error) { + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + if be.Config.SnapshotWithPurgingChanges { - minSyncedSeqInfo, err := be.DB.FindMinSyncedSeqInfo( - ctx, - docInfo.Key, - docInfo.ID, - ) + minSyncedSeqInfo, err := be.DB.FindMinSyncedSeqInfo(ctx, docRef) if err != nil { return nil, err } snapshotInfo, err := be.DB.FindClosestSnapshotInfo( ctx, - docInfo.Key, docInfo.ID, + docRef, minSyncedSeqInfo.ServerSeq+be.Config.SnapshotInterval, false, ) @@ -57,11 +59,6 @@ func FindChanges( } } - changes, err := be.DB.FindChangesBetweenServerSeqs( - ctx, - docInfo.Key, docInfo.ID, - from, - to, - ) + changes, err := be.DB.FindChangesBetweenServerSeqs(ctx, docRef, from, to) return changes, err } diff --git a/server/packs/packs.go b/server/packs/packs.go index f97d9c718..2801beac4 100644 --- a/server/packs/packs.go +++ b/server/packs/packs.go @@ -66,6 +66,11 @@ func PushPull( be.Metrics.ObservePushPullResponseSeconds(gotime.Since(start).Seconds()) }() + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + // TODO: Changes may be reordered 
or missing during communication on the network. // We should check the change.pack with checkpoint to make sure the changes are in the correct order. initialServerSeq := docInfo.ServerSeq @@ -84,7 +89,7 @@ func PushPull( be.Metrics.AddPushPullSentOperations(respPack.OperationsLen()) be.Metrics.AddPushPullSnapshotBytes(respPack.SnapshotLen()) - if err := clientInfo.UpdateCheckpoint(docInfo.Key, docInfo.ID, respPack.Checkpoint); err != nil { + if err := clientInfo.UpdateCheckpoint(docRef, respPack.Checkpoint); err != nil { return nil, err } @@ -111,7 +116,7 @@ func PushPull( minSyncedTicket, err := be.DB.UpdateAndFindMinSyncedTicket( ctx, clientInfo, - docInfo.Key, docInfo.ID, + docRef, reqPack.Checkpoint.ServerSeq, ) if err != nil { @@ -196,12 +201,11 @@ func BuildDocumentForServerSeq( docInfo *database.DocInfo, serverSeq int64, ) (*document.InternalDocument, error) { - snapshotInfo, err := be.DB.FindClosestSnapshotInfo( - ctx, - docInfo.Key, docInfo.ID, - serverSeq, - true, - ) + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } + snapshotInfo, err := be.DB.FindClosestSnapshotInfo(ctx, docRef, serverSeq, true) if err != nil { return nil, err } @@ -221,7 +225,7 @@ func BuildDocumentForServerSeq( // certain size (e.g. 100) and read and gradually reflect it into the document. 
changes, err := be.DB.FindChangesBetweenServerSeqs( ctx, - docInfo.Key, docInfo.ID, + docRef, snapshotInfo.ServerSeq+1, serverSeq, ) diff --git a/server/packs/pushpull.go b/server/packs/pushpull.go index 7788d68be..c2911687d 100644 --- a/server/packs/pushpull.go +++ b/server/packs/pushpull.go @@ -43,7 +43,10 @@ func pushChanges( reqPack *change.Pack, initialServerSeq int64, ) (change.Checkpoint, []*change.Change) { - cp := clientInfo.Checkpoint(docInfo.Key, docInfo.ID) + cp := clientInfo.Checkpoint(types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + }) var pushedChanges []*change.Change for _, cn := range reqPack.Changes { @@ -185,8 +188,10 @@ func pullChangeInfos( ) (change.Checkpoint, []*database.ChangeInfo, error) { pulledChanges, err := be.DB.FindChangeInfosBetweenServerSeqs( ctx, - docInfo.Key, - docInfo.ID, + types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + }, reqPack.Checkpoint.ServerSeq+1, initialServerSeq, ) diff --git a/server/packs/snapshots.go b/server/packs/snapshots.go index 77c304ad2..3467387b3 100644 --- a/server/packs/snapshots.go +++ b/server/packs/snapshots.go @@ -19,6 +19,7 @@ package packs import ( "context" + "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/pkg/document" "github.com/yorkie-team/yorkie/pkg/document/change" "github.com/yorkie-team/yorkie/pkg/document/time" @@ -33,13 +34,12 @@ func storeSnapshot( docInfo *database.DocInfo, minSyncedTicket *time.Ticket, ) error { + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } // 01. get the closest snapshot's metadata of this docInfo - snapshotMetadata, err := be.DB.FindClosestSnapshotInfo( - ctx, - docInfo.Key, - docInfo.ID, - docInfo.ServerSeq, - false) + snapshotMetadata, err := be.DB.FindClosestSnapshotInfo(ctx, docRef, docInfo.ServerSeq, false) if err != nil { return err } @@ -53,8 +53,7 @@ func storeSnapshot( // 02. 
retrieve the changes between last snapshot and current docInfo changes, err := be.DB.FindChangesBetweenServerSeqs( ctx, - docInfo.Key, - docInfo.ID, + docRef, snapshotMetadata.ServerSeq+1, docInfo.ServerSeq, ) @@ -64,13 +63,8 @@ func storeSnapshot( // 03. create document instance of the docInfo snapshotInfo := snapshotMetadata - if snapshotMetadata.ID != "" { - snapshotInfo, err = be.DB.FindSnapshotInfoByID( - ctx, - snapshotInfo.DocKey, - snapshotInfo.DocID, - snapshotInfo.ServerSeq, - ) + if snapshotMetadata.DocKey != "" && snapshotInfo.DocID != "" { + snapshotInfo, err = be.DB.FindSnapshotInfo(ctx, docRef, snapshotInfo.ServerSeq) if err != nil { return err } @@ -99,22 +93,13 @@ func storeSnapshot( } // 04. save the snapshot of the docInfo - if err := be.DB.CreateSnapshotInfo( - ctx, - docInfo.Key, - docInfo.ID, - doc, - ); err != nil { + if err := be.DB.CreateSnapshotInfo(ctx, docRef, doc); err != nil { return err } // 05. delete changes before the smallest in `syncedseqs` to save storage. 
if be.Config.SnapshotWithPurgingChanges { - if err := be.DB.PurgeStaleChanges( - ctx, - docInfo.Key, - docInfo.ID, - ); err != nil { + if err := be.DB.PurgeStaleChanges(ctx, docRef); err != nil { logging.From(ctx).Error(err) } } diff --git a/server/rpc/admin_server.go b/server/rpc/admin_server.go index 84126f0a6..0f7319e44 100644 --- a/server/rpc/admin_server.go +++ b/server/rpc/admin_server.go @@ -26,7 +26,6 @@ import ( "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" "github.com/yorkie-team/yorkie/server/backend" - "github.com/yorkie-team/yorkie/server/backend/database" "github.com/yorkie-team/yorkie/server/backend/sync" "github.com/yorkie-team/yorkie/server/documents" "github.com/yorkie-team/yorkie/server/logging" @@ -281,8 +280,8 @@ func (s *adminServer) ListDocuments( ctx, s.backend, project, - types.Paging[database.DocOffset]{ - Offset: database.DocOffset{ + types.Paging[types.DocRefKey]{ + Offset: types.DocRefKey{ Key: key.Key(req.PreviousKey), ID: types.ID(req.PreviousId), }, @@ -371,7 +370,10 @@ func (s *adminServer) RemoveDocumentByAdmin( if err := documents.RemoveDocument( ctx, s.backend, project, - docInfo.Key, docInfo.ID, + types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + }, req.Force, ); err != nil { return nil, err diff --git a/server/rpc/yorkie_server.go b/server/rpc/yorkie_server.go index 800c4fa4a..35f0b070f 100644 --- a/server/rpc/yorkie_server.go +++ b/server/rpc/yorkie_server.go @@ -90,7 +90,10 @@ func (s *yorkieServer) DeactivateClient( return nil, err } - _, err = clients.Deactivate(ctx, s.backend.DB, req.ClientKey, types.IDFromActorID(actorID)) + _, err = clients.Deactivate(ctx, s.backend.DB, types.ClientRefKey{ + Key: req.ClientKey, + ID: types.IDFromActorID(actorID), + }) if err != nil { return nil, err } @@ -138,7 +141,10 @@ func (s *yorkieServer) AttachDocument( } }() - clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, req.ClientKey, actorID) + clientInfo, err := 
clients.FindClientInfo(ctx, s.backend.DB, types.ClientRefKey{ + Key: req.ClientKey, + ID: types.IDFromActorID(actorID), + }) if err != nil { return nil, err } @@ -147,7 +153,10 @@ func (s *yorkieServer) AttachDocument( return nil, err } - if err := clientInfo.AttachDocument(docInfo.Key, docInfo.ID); err != nil { + if err := clientInfo.AttachDocument(types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + }); err != nil { return nil, err } @@ -208,31 +217,37 @@ func (s *yorkieServer) DetachDocument( } }() - clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, req.ClientKey, actorID) + clientRef := types.ClientRefKey{ + Key: req.ClientKey, + ID: types.IDFromActorID(actorID), + } + docRef := types.DocRefKey{ + Key: pack.DocumentKey, + ID: docID, + } + + clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, clientRef) if err != nil { return nil, err } - docInfo, err := documents.FindDocInfoByKeyAndID(ctx, s.backend, pack.DocumentKey, docID) + + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, s.backend, docRef) if err != nil { return nil, err } - isAttached, err := documents.IsDocumentAttached( - ctx, s.backend, project, - docInfo.Key, docInfo.ID, - clientInfo.ID, - ) + isAttached, err := documents.IsDocumentAttached(ctx, s.backend, project, docRef, clientRef) if err != nil { return nil, err } if req.RemoveIfNotAttached && !isAttached { pack.IsRemoved = true - if err := clientInfo.RemoveDocument(docInfo.Key, docInfo.ID); err != nil { + if err := clientInfo.RemoveDocument(docRef); err != nil { return nil, err } } else { - if err := clientInfo.DetachDocument(docInfo.Key, docInfo.ID); err != nil { + if err := clientInfo.DetachDocument(docRef); err != nil { return nil, err } } @@ -304,16 +319,24 @@ func (s *yorkieServer) PushPullChanges( syncMode = types.SyncModePushOnly } - clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, req.ClientKey, actorID) + clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, types.ClientRefKey{ + Key: 
req.ClientKey, + ID: types.IDFromActorID(actorID), + }) if err != nil { return nil, err } - docInfo, err := documents.FindDocInfoByKeyAndID(ctx, s.backend, pack.DocumentKey, docID) + + docRef := types.DocRefKey{ + Key: pack.DocumentKey, + ID: docID, + } + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, s.backend, docRef) if err != nil { return nil, err } - if err := clientInfo.EnsureDocumentAttached(docInfo.Key, docInfo.ID); err != nil { + if err := clientInfo.EnsureDocumentAttached(docRef); err != nil { return nil, err } @@ -338,7 +361,7 @@ func (s *yorkieServer) WatchDocument( req *api.WatchDocumentRequest, stream api.YorkieService_WatchDocumentServer, ) error { - clientID, err := time.ActorIDFromHex(req.ClientId) + actorID, err := time.ActorIDFromHex(req.ClientId) if err != nil { return err } @@ -352,8 +375,10 @@ func (s *yorkieServer) WatchDocument( docInfo, err := documents.FindDocInfoByKeyAndID( stream.Context(), s.backend, - docKey, - docID, + types.DocRefKey{ + Key: docKey, + ID: docID, + }, ) if err != nil { return nil @@ -369,15 +394,17 @@ func (s *yorkieServer) WatchDocument( if _, err = clients.FindClientInfo( stream.Context(), s.backend.DB, - req.ClientKey, - clientID, + types.ClientRefKey{ + Key: req.ClientKey, + ID: types.IDFromActorID(actorID), + }, ); err != nil { return err } locker, err := s.backend.Coordinator.NewLocker( stream.Context(), - sync.NewKey(fmt.Sprintf("watchdoc-%s-%s", clientID.String(), docID)), + sync.NewKey(fmt.Sprintf("watchdoc-%s-%s", actorID.String(), docID)), ) if err != nil { return err @@ -391,7 +418,7 @@ func (s *yorkieServer) WatchDocument( } }() - subscription, clientIDs, err := s.watchDoc(stream.Context(), clientID, docKey, docID) + subscription, clientIDs, err := s.watchDoc(stream.Context(), actorID, docKey, docID) if err != nil { logging.From(stream.Context()).Error(err) return err @@ -487,16 +514,24 @@ func (s *yorkieServer) RemoveDocument( }() } - clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, 
req.ClientKey, actorID) + clientInfo, err := clients.FindClientInfo(ctx, s.backend.DB, types.ClientRefKey{ + Key: req.ClientKey, + ID: types.IDFromActorID(actorID), + }) if err != nil { return nil, err } - docInfo, err := documents.FindDocInfoByKeyAndID(ctx, s.backend, pack.DocumentKey, docID) + + docRef := types.DocRefKey{ + Key: pack.DocumentKey, + ID: docID, + } + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, s.backend, docRef) if err != nil { return nil, err } - if err := clientInfo.RemoveDocument(docInfo.Key, docInfo.ID); err != nil { + if err := clientInfo.RemoveDocument(docRef); err != nil { return nil, err } @@ -564,7 +599,7 @@ func (s *yorkieServer) Broadcast( ctx context.Context, req *api.BroadcastRequest, ) (*api.BroadcastResponse, error) { - clientID, err := time.ActorIDFromHex(req.ClientId) + actorID, err := time.ActorIDFromHex(req.ClientId) if err != nil { return nil, err } @@ -578,8 +613,10 @@ func (s *yorkieServer) Broadcast( docInfo, err := documents.FindDocInfoByKeyAndID( ctx, s.backend, - docKey, - docID, + types.DocRefKey{ + Key: docKey, + ID: docID, + }, ) if err != nil { return nil, err @@ -593,16 +630,19 @@ func (s *yorkieServer) Broadcast( return nil, err } - if _, err = clients.FindClientInfo(ctx, s.backend.DB, req.ClientKey, clientID); err != nil { + if _, err = clients.FindClientInfo(ctx, s.backend.DB, types.ClientRefKey{ + Key: req.ClientKey, + ID: types.IDFromActorID(actorID), + }); err != nil { return nil, err } s.backend.Coordinator.Publish( ctx, - clientID, + actorID, sync.DocEvent{ Type: types.DocumentBroadcastEvent, - Publisher: clientID, + Publisher: actorID, DocumentKey: docKey, DocumentID: docID, Body: types.DocEventBody{ diff --git a/test/bench/push_pull_bench_test.go b/test/bench/push_pull_bench_test.go index c9d4780f2..64713bf2e 100644 --- a/test/bench/push_pull_bench_test.go +++ b/test/bench/push_pull_bench_test.go @@ -88,9 +88,15 @@ func setUpClientsAndDocs( for i := 0; i < n; i++ { clientInfo, err := 
be.DB.ActivateClient(ctx, database.DefaultProjectID, fmt.Sprintf("client-%d", i)) assert.NoError(b, err) - docInfo, err := be.DB.FindDocInfoByKeyAndOwner(ctx, database.DefaultProjectID, clientInfo.Key, clientInfo.ID, docKey, true) + docInfo, err := be.DB.FindDocInfoByKeyAndOwner(ctx, database.DefaultProjectID, docKey, types.ClientRefKey{ + Key: clientInfo.Key, + ID: clientInfo.ID, + }, true) assert.NoError(b, err) - assert.NoError(b, clientInfo.AttachDocument(docInfo.Key, docInfo.ID)) + assert.NoError(b, clientInfo.AttachDocument(types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + })) assert.NoError(b, be.DB.UpdateClientInfoAfterPushPull(ctx, clientInfo, docInfo)) bytesID, _ := clientInfo.ID.Bytes() @@ -137,7 +143,10 @@ func benchmarkPushChanges( docKey := getDocKey(b, i) clientInfos, docID, docs := setUpClientsAndDocs(ctx, 1, docKey, b, be) pack := createChangePack(changeCnt, docs[0], b) - docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, types.DocRefKey{ + Key: docKey, + ID: docID, + }) assert.NoError(b, err) b.StartTimer() @@ -162,12 +171,18 @@ func benchmarkPullChanges( pushPack := createChangePack(changeCnt, pusherDoc, b) pullPack := createChangePack(0, pullerDoc, b) - docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, types.DocRefKey{ + Key: docKey, + ID: docID, + }) assert.NoError(b, err) _, err = packs.PushPull(ctx, be, project, pusherClientInfo, docInfo, pushPack, types.SyncModePushPull) assert.NoError(b, err) - docInfo, err = documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) + docInfo, err = documents.FindDocInfoByKeyAndID(ctx, be, types.DocRefKey{ + Key: docKey, + ID: docID, + }) assert.NoError(b, err) b.StartTimer() @@ -193,7 +208,10 @@ func benchmarkPushSnapshots( for j := 0; j < snapshotCnt; j++ { b.StopTimer() pushPack := createChangePack(changeCnt, docs[0], b) - docInfo, err := 
documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, types.DocRefKey{ + Key: docKey, + ID: docID, + }) assert.NoError(b, err) b.StartTimer() @@ -227,12 +245,18 @@ func benchmarkPullSnapshot( pushPack := createChangePack(changeCnt, pusherDoc, b) pullPack := createChangePack(0, pullerDoc, b) - docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) + docInfo, err := documents.FindDocInfoByKeyAndID(ctx, be, types.DocRefKey{ + Key: docKey, + ID: docID, + }) assert.NoError(b, err) _, err = packs.PushPull(ctx, be, project, pusherClientInfo, docInfo, pushPack, types.SyncModePushPull) assert.NoError(b, err) - docInfo, err = documents.FindDocInfoByKeyAndID(ctx, be, docKey, docID) + docInfo, err = documents.FindDocInfoByKeyAndID(ctx, be, types.DocRefKey{ + Key: docKey, + ID: docID, + }) assert.NoError(b, err) b.StartTimer() diff --git a/test/integration/document_test.go b/test/integration/document_test.go index 8ea7cfed2..6ac60d5d4 100644 --- a/test/integration/document_test.go +++ b/test/integration/document_test.go @@ -29,12 +29,12 @@ import ( "github.com/stretchr/testify/assert" + "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/client" "github.com/yorkie-team/yorkie/pkg/document" "github.com/yorkie-team/yorkie/pkg/document/innerpresence" "github.com/yorkie-team/yorkie/pkg/document/json" "github.com/yorkie-team/yorkie/pkg/document/presence" - "github.com/yorkie-team/yorkie/server/backend/database" "github.com/yorkie-team/yorkie/test/helper" ) @@ -807,7 +807,7 @@ func TestDocumentWithProjects(t *testing.T) { assert.NoError(t, cli.Sync(ctx)) - offset := database.DocOffset{Key: "", ID: ""} + offset := types.DocRefKey{Key: "", ID: ""} docs, err := adminCli.ListDocuments(ctx, "default", offset, 0, true, false) assert.NoError(t, err) assert.Equal(t, "", docs[0].Snapshot) diff --git a/test/integration/retention_test.go b/test/integration/retention_test.go index 
70732062c..c027e1d9d 100644 --- a/test/integration/retention_test.go +++ b/test/integration/retention_test.go @@ -27,6 +27,7 @@ import ( "github.com/stretchr/testify/assert" monkey "github.com/undefinedlabs/go-mpatch" + "github.com/yorkie-team/yorkie/api/types" "github.com/yorkie-team/yorkie/client" "github.com/yorkie-team/yorkie/pkg/document" "github.com/yorkie-team/yorkie/pkg/document/change" @@ -189,10 +190,13 @@ func TestRetention(t *testing.T) { ) assert.NoError(t, err) + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } changes, err := mongoCli.FindChangesBetweenServerSeqs( ctx, - docInfo.Key, - docInfo.ID, + docRef, change.InitialServerSeq, change.MaxServerSeq, ) @@ -230,8 +234,7 @@ func TestRetention(t *testing.T) { changes, err = mongoCli.FindChangesBetweenServerSeqs( ctx, - docInfo.Key, - docInfo.ID, + docRef, change.InitialServerSeq, change.MaxServerSeq, ) diff --git a/test/shard/mongo_client_test.go b/test/shard/mongo_client_test.go index f5ade74b0..dfb9ef2a5 100644 --- a/test/shard/mongo_client_test.go +++ b/test/shard/mongo_client_test.go @@ -133,7 +133,10 @@ func TestClientWithShardedDB(t *testing.T) { assert.NoError(t, err) docKey1 := key.Key(fmt.Sprintf("%s%d", "duplicateIDTestDocKey", 0)) - docInfo1, err := cli.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientKey, dummyClientID, docKey1, true) + docInfo1, err := cli.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, docKey1, types.ClientRefKey{ + Key: dummyClientKey, + ID: dummyClientID, + }, true) assert.NoError(t, err) // 02. Create an extra document with duplicate ID. @@ -158,8 +161,10 @@ func TestClientWithShardedDB(t *testing.T) { // 04. Check if the document is correctly found using docKey and docID. 
result, err := cli.FindDocInfoByKeyAndID( ctx, - docKey1, - docInfo1.ID, + types.DocRefKey{ + Key: docKey1, + ID: docInfo1.ID, + }, ) assert.NoError(t, err) assert.Equal(t, docInfo1.Key, result.Key) @@ -179,7 +184,10 @@ func TestClientWithShardedDB(t *testing.T) { var duplicateID types.ID for i := 0; i < totalDocCnt-duplicateIDDocCnt; i++ { testDocKey := key.Key("duplicateIDTestDocKey" + strconv.Itoa(i)) - docInfo, err := cli.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, dummyClientKey, dummyClientID, testDocKey, true) + docInfo, err := cli.FindDocInfoByKeyAndOwner(ctx, projectInfo.ID, testDocKey, types.ClientRefKey{ + Key: dummyClientKey, + ID: dummyClientID, + }, true) assert.NoError(t, err) docInfos = append(docInfos, docInfo) @@ -213,7 +221,7 @@ func TestClientWithShardedDB(t *testing.T) { } // 03. List the documents. - result, err := cli.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[database.DocOffset]{ + result, err := cli.FindDocInfosByPaging(ctx, projectInfo.ID, types.Paging[types.DocRefKey]{ PageSize: 10, IsForward: false, }) @@ -254,8 +262,10 @@ func TestClientWithShardedDB(t *testing.T) { // 04. Check if the client is correctly found using clientKey and clientID. 
result, err := cli.FindClientInfoByKeyAndID( ctx, - clientKey1, - clientInfo1.ID, + types.ClientRefKey{ + Key: clientKey1, + ID: clientInfo1.ID, + }, ) assert.NoError(t, err) assert.Equal(t, clientInfo1.Key, result.Key) From 25565a8e0f90a2ff3ac11154ebd5442c4a0360fc Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Wed, 29 Nov 2023 15:17:16 +0900 Subject: [PATCH 09/11] Use DocRefKey type in Pubsub --- admin/client.go | 2 +- api/types/resource_ref_key.go | 2 +- cmd/yorkie/document/list.go | 2 +- pkg/document/change/id.go | 2 +- server/backend/database/doc_info.go | 2 +- server/backend/sync/coordinator.go | 7 +- server/backend/sync/memory/coordinator.go | 13 +- .../backend/sync/memory/coordinator_test.go | 6 +- server/backend/sync/memory/pubsub.go | 126 ++++++++---------- server/backend/sync/memory/pubsub_test.go | 11 +- server/backend/sync/pubsub.go | 4 +- server/documents/documents.go | 4 +- server/packs/packs.go | 3 +- server/rpc/admin_server.go | 12 +- server/rpc/yorkie_server.go | 41 +++--- 15 files changed, 110 insertions(+), 127 deletions(-) diff --git a/admin/client.go b/admin/client.go index fc261bfed..a92fee0b5 100644 --- a/admin/client.go +++ b/admin/client.go @@ -257,11 +257,11 @@ func (c *Client) ListDocuments( ctx, &api.ListDocumentsRequest{ ProjectName: projectName, + PreviousKey: previousOffset.Key.String(), PreviousId: previousOffset.ID.String(), PageSize: pageSize, IsForward: isForward, IncludeSnapshot: includeSnapshot, - PreviousKey: previousOffset.Key.String(), }, ) if err != nil { diff --git a/api/types/resource_ref_key.go b/api/types/resource_ref_key.go index c05cd63ce..a4410a95e 100644 --- a/api/types/resource_ref_key.go +++ b/api/types/resource_ref_key.go @@ -35,7 +35,7 @@ func (r *DocRefKey) String() string { return fmt.Sprintf("Doc (%s.%s)", r.Key, r.ID) } -// Set parses the given string (format: `docKey},{docID}`) and assigns the values +// Set parses the given string (format: `{docKey},{docID}`) and assigns the values // to the given 
DocRefKey. func (r *DocRefKey) Set(v string) error { parsed := strings.Split(v, ",") diff --git a/cmd/yorkie/document/list.go b/cmd/yorkie/document/list.go index 50a3c16a9..206ed9f2d 100644 --- a/cmd/yorkie/document/list.go +++ b/cmd/yorkie/document/list.go @@ -104,7 +104,7 @@ func init() { cmd.Flags().Var( &previousOffset, "previous-offset", - "The previous document offset to start from", + "The previous document offset to start from. Use the format 'docKey,docID' for the input.", ) cmd.Flags().Int32Var( &pageSize, diff --git a/pkg/document/change/id.go b/pkg/document/change/id.go index 43631db95..e5f15d88a 100644 --- a/pkg/document/change/id.go +++ b/pkg/document/change/id.go @@ -38,7 +38,7 @@ type ID struct { clientSeq uint32 // serverSeq is the sequence of the change on the server. We can find the - // change with serverSeq and documentID in the server. If the change is not + // change with serverSeq, documentKey and documentID in the server. If the change is not // stored on the server, serverSeq is 0. serverSeq int64 diff --git a/server/backend/database/doc_info.go b/server/backend/database/doc_info.go index 11b18a7fc..03671cfaf 100644 --- a/server/backend/database/doc_info.go +++ b/server/backend/database/doc_info.go @@ -40,7 +40,7 @@ type DocInfo struct { // OwnerKey is the key of the document owner. OwnerKey string `bson:"owner_key"` - // OwnerKey is the ID of the document owner. + // OwnerID is the ID of the document owner. OwnerID types.ID `bson:"owner_id"` // CreatedAt is the time when the document is created. 
diff --git a/server/backend/sync/coordinator.go b/server/backend/sync/coordinator.go index 110e81a7e..3d24f01c0 100644 --- a/server/backend/sync/coordinator.go +++ b/server/backend/sync/coordinator.go @@ -22,7 +22,6 @@ import ( gotime "time" "github.com/yorkie-team/yorkie/api/types" - "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" ) @@ -42,15 +41,13 @@ type Coordinator interface { Subscribe( ctx context.Context, subscriber *time.ActorID, - documentKey key.Key, - documentID types.ID, + documentRef types.DocRefKey, ) (*Subscription, []*time.ActorID, error) // Unsubscribe unsubscribes from the given documents. Unsubscribe( ctx context.Context, - documentKey key.Key, - documentID types.ID, + documentRef types.DocRefKey, sub *Subscription, ) error diff --git a/server/backend/sync/memory/coordinator.go b/server/backend/sync/memory/coordinator.go index 0bc52b55a..fe04b7341 100644 --- a/server/backend/sync/memory/coordinator.go +++ b/server/backend/sync/memory/coordinator.go @@ -21,7 +21,6 @@ import ( "context" "github.com/yorkie-team/yorkie/api/types" - "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" "github.com/yorkie-team/yorkie/pkg/locker" "github.com/yorkie-team/yorkie/server/backend/sync" @@ -59,26 +58,24 @@ func (c *Coordinator) NewLocker( func (c *Coordinator) Subscribe( ctx context.Context, subscriber *time.ActorID, - documentKey key.Key, - documentID types.ID, + documentRef types.DocRefKey, ) (*sync.Subscription, []*time.ActorID, error) { - sub, err := c.pubSub.Subscribe(ctx, subscriber, documentKey, documentID) + sub, err := c.pubSub.Subscribe(ctx, subscriber, documentRef) if err != nil { return nil, nil, err } - ids := c.pubSub.ClientIDs(documentKey, documentID) + ids := c.pubSub.ClientIDs(documentRef) return sub, ids, nil } // Unsubscribe unsubscribes the given documents. 
func (c *Coordinator) Unsubscribe( ctx context.Context, - documentKey key.Key, - documentID types.ID, + documentRef types.DocRefKey, sub *sync.Subscription, ) error { - c.pubSub.Unsubscribe(ctx, documentKey, documentID, sub) + c.pubSub.Unsubscribe(ctx, documentRef, sub) return nil } diff --git a/server/backend/sync/memory/coordinator_test.go b/server/backend/sync/memory/coordinator_test.go index e32bbfe25..159e5f318 100644 --- a/server/backend/sync/memory/coordinator_test.go +++ b/server/backend/sync/memory/coordinator_test.go @@ -33,13 +33,17 @@ func TestCoordinator(t *testing.T) { coordinator := memory.NewCoordinator(nil) docKey := key.Key(t.Name() + "key") docID := types.ID(t.Name() + "id") + docRef := types.DocRefKey{ + Key: docKey, + ID: docID, + } ctx := context.Background() for i := 0; i < 5; i++ { id, err := time.ActorIDFromBytes([]byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, byte(i)}) assert.NoError(t, err) - _, clientIDs, err := coordinator.Subscribe(ctx, id, docKey, docID) + _, clientIDs, err := coordinator.Subscribe(ctx, id, docRef) assert.NoError(t, err) assert.Len(t, clientIDs, i+1) } diff --git a/server/backend/sync/memory/pubsub.go b/server/backend/sync/memory/pubsub.go index 06d1c1411..696bbbde1 100644 --- a/server/backend/sync/memory/pubsub.go +++ b/server/backend/sync/memory/pubsub.go @@ -24,7 +24,6 @@ import ( "go.uber.org/zap" "github.com/yorkie-team/yorkie/api/types" - "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" "github.com/yorkie-team/yorkie/server/backend/sync" "github.com/yorkie-team/yorkie/server/logging" @@ -67,14 +66,14 @@ func (s *subscriptions) Len() int { // PubSub is the memory implementation of PubSub, used for single server. type PubSub struct { subscriptionsMapMu *gosync.RWMutex - subscriptionsMapByDoc map[key.Key]map[types.ID]*subscriptions + subscriptionsMapByDoc map[types.DocRefKey]*subscriptions } // NewPubSub creates an instance of PubSub. 
func NewPubSub() *PubSub { return &PubSub{ subscriptionsMapMu: &gosync.RWMutex{}, - subscriptionsMapByDoc: make(map[key.Key]map[types.ID]*subscriptions), + subscriptionsMapByDoc: make(map[types.DocRefKey]*subscriptions), } } @@ -82,13 +81,12 @@ func NewPubSub() *PubSub { func (m *PubSub) Subscribe( ctx context.Context, subscriber *time.ActorID, - documentKey key.Key, - documentID types.ID, + documentRef types.DocRefKey, ) (*sync.Subscription, error) { if logging.Enabled(zap.DebugLevel) { logging.From(ctx).Debugf( - `Subscribe(%s.%s,%s) Start`, - documentKey, documentID, + `Subscribe(%s,%s) Start`, + documentRef, subscriber.String(), ) } @@ -97,18 +95,15 @@ func (m *PubSub) Subscribe( defer m.subscriptionsMapMu.Unlock() sub := sync.NewSubscription(subscriber) - if _, ok := m.subscriptionsMapByDoc[documentKey]; !ok { - m.subscriptionsMapByDoc[documentKey] = make(map[types.ID]*subscriptions) + if _, ok := m.subscriptionsMapByDoc[documentRef]; !ok { + m.subscriptionsMapByDoc[documentRef] = newSubscriptions() } - if _, ok := m.subscriptionsMapByDoc[documentKey][documentID]; !ok { - m.subscriptionsMapByDoc[documentKey][documentID] = newSubscriptions() - } - m.subscriptionsMapByDoc[documentKey][documentID].Add(sub) + m.subscriptionsMapByDoc[documentRef].Add(sub) if logging.Enabled(zap.DebugLevel) { logging.From(ctx).Debugf( - `Subscribe(%s.%s,%s) End`, - documentKey, documentID, + `Subscribe(%s,%s) End`, + documentRef, subscriber.String(), ) } @@ -118,8 +113,7 @@ func (m *PubSub) Subscribe( // Unsubscribe unsubscribes the given docKeys. 
func (m *PubSub) Unsubscribe( ctx context.Context, - documentKey key.Key, - documentID types.ID, + documentRef types.DocRefKey, sub *sync.Subscription, ) { m.subscriptionsMapMu.Lock() @@ -127,31 +121,26 @@ func (m *PubSub) Unsubscribe( if logging.Enabled(zap.DebugLevel) { logging.From(ctx).Debugf( - `Unsubscribe(%s.%s,%s) Start`, - documentKey, documentID, + `Unsubscribe(%s,%s) Start`, + documentRef, sub.Subscriber().String(), ) } sub.Close() - if subsByDocID, ok := m.subscriptionsMapByDoc[documentKey]; ok { - if subs, ok := subsByDocID[documentID]; ok { - subs.Delete(sub.ID()) + if subs, ok := m.subscriptionsMapByDoc[documentRef]; ok { + subs.Delete(sub.ID()) - if subs.Len() == 0 { - delete(m.subscriptionsMapByDoc[documentKey], documentID) - } - } - if len(subsByDocID) == 0 { - delete(m.subscriptionsMapByDoc, documentKey) + if subs.Len() == 0 { + delete(m.subscriptionsMapByDoc, documentRef) } } if logging.Enabled(zap.DebugLevel) { logging.From(ctx).Debugf( - `Unsubscribe(%s.%s,%s) End`, - documentKey, documentID, + `Unsubscribe(%s,%s) End`, + documentRef, sub.Subscriber().String(), ) } @@ -166,60 +155,61 @@ func (m *PubSub) Publish( m.subscriptionsMapMu.RLock() defer m.subscriptionsMapMu.RUnlock() - documentKey := event.DocumentKey - documentID := event.DocumentID + documentRef := event.DocumentRef if logging.Enabled(zap.DebugLevel) { - logging.From(ctx).Debugf(`Publish(%s.%s,%s) Start`, - documentKey, documentID, - publisherID.String()) + logging.From(ctx).Debugf( + `Publish(%s,%s) Start`, + documentRef, + publisherID.String(), + ) } - if subsByDocID, ok := m.subscriptionsMapByDoc[documentKey]; ok { - if subs, ok := subsByDocID[documentID]; ok { - for _, sub := range subs.Map() { - if sub.Subscriber().Compare(publisherID) == 0 { - continue - } - - if logging.Enabled(zap.DebugLevel) { - logging.From(ctx).Debugf( - `Publish %s(%s.%s,%s) to %s`, - event.Type, - documentKey, documentID, - publisherID.String(), - sub.Subscriber().String(), - ) - } - - // NOTE: When a 
subscription is being closed by a subscriber, - // the subscriber may not receive messages. - select { - case sub.Events() <- event: - case <-gotime.After(100 * gotime.Millisecond): - logging.From(ctx).Warnf( - `Publish(%s.%s,%s) to %s timeout`, - documentKey, documentID, - publisherID.String(), - sub.Subscriber().String(), - ) - } + if subs, ok := m.subscriptionsMapByDoc[documentRef]; ok { + for _, sub := range subs.Map() { + if sub.Subscriber().Compare(publisherID) == 0 { + continue + } + + if logging.Enabled(zap.DebugLevel) { + logging.From(ctx).Debugf( + `Publish %s(%s,%s) to %s`, + event.Type, + documentRef, + publisherID.String(), + sub.Subscriber().String(), + ) + } + + // NOTE: When a subscription is being closed by a subscriber, + // the subscriber may not receive messages. + select { + case sub.Events() <- event: + case <-gotime.After(100 * gotime.Millisecond): + logging.From(ctx).Warnf( + `Publish(%s,%s) to %s timeout`, + documentRef, + publisherID.String(), + sub.Subscriber().String(), + ) } } } + if logging.Enabled(zap.DebugLevel) { - logging.From(ctx).Debugf(`Publish(%s.%s,%s) End`, - documentKey, documentID, + logging.From(ctx).Debugf( + `Publish(%s,%s) End`, + documentRef, publisherID.String()) } } // ClientIDs returns the clients of the given document. 
-func (m *PubSub) ClientIDs(documentKey key.Key, documentID types.ID) []*time.ActorID { +func (m *PubSub) ClientIDs(documentRef types.DocRefKey) []*time.ActorID { m.subscriptionsMapMu.RLock() defer m.subscriptionsMapMu.RUnlock() var ids []*time.ActorID - for _, sub := range m.subscriptionsMapByDoc[documentKey][documentID].Map() { + for _, sub := range m.subscriptionsMapByDoc[documentRef].Map() { ids = append(ids, sub.Subscriber()) } return ids diff --git a/server/backend/sync/memory/pubsub_test.go b/server/backend/sync/memory/pubsub_test.go index 7d5c934d9..681a560cc 100644 --- a/server/backend/sync/memory/pubsub_test.go +++ b/server/backend/sync/memory/pubsub_test.go @@ -40,19 +40,22 @@ func TestPubSub(t *testing.T) { pubSub := memory.NewPubSub() key := key.Key(t.Name() + "key") id := types.ID(t.Name() + "id") + docRef := types.DocRefKey{ + Key: key, + ID: id, + } docEvent := sync.DocEvent{ Type: types.DocumentWatchedEvent, Publisher: idB, - DocumentKey: key, - DocumentID: id, + DocumentRef: docRef, } ctx := context.Background() // subscribe the documents by actorA - subA, err := pubSub.Subscribe(ctx, idA, key, id) + subA, err := pubSub.Subscribe(ctx, idA, docRef) assert.NoError(t, err) defer func() { - pubSub.Unsubscribe(ctx, key, id, subA) + pubSub.Unsubscribe(ctx, docRef, subA) }() var wg gosync.WaitGroup diff --git a/server/backend/sync/pubsub.go b/server/backend/sync/pubsub.go index 3b6868257..1f3f00267 100644 --- a/server/backend/sync/pubsub.go +++ b/server/backend/sync/pubsub.go @@ -20,7 +20,6 @@ import ( "github.com/rs/xid" "github.com/yorkie-team/yorkie/api/types" - "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/pkg/document/time" ) @@ -50,8 +49,7 @@ func (s *Subscription) ID() string { type DocEvent struct { Type types.DocEventType Publisher *time.ActorID - DocumentKey key.Key - DocumentID types.ID + DocumentRef types.DocRefKey Body types.DocEventBody } diff --git a/server/documents/documents.go 
b/server/documents/documents.go index e1a4d905b..5e0b6381f 100644 --- a/server/documents/documents.go +++ b/server/documents/documents.go @@ -245,9 +245,7 @@ func RemoveDocument( return be.DB.UpdateDocInfoStatusToRemoved(ctx, docRef) } - isAttached, err := be.DB.IsDocumentAttached(ctx, project.ID, docRef, types.ClientRefKey{ - Key: "", ID: "", - }) + isAttached, err := be.DB.IsDocumentAttached(ctx, project.ID, docRef, types.ClientRefKey{}) if err != nil { return err } diff --git a/server/packs/packs.go b/server/packs/packs.go index 2801beac4..5530363f2 100644 --- a/server/packs/packs.go +++ b/server/packs/packs.go @@ -153,8 +153,7 @@ func PushPull( sync.DocEvent{ Type: types.DocumentChangedEvent, Publisher: publisherID, - DocumentKey: docInfo.Key, - DocumentID: docInfo.ID, + DocumentRef: docRef, }, ) diff --git a/server/rpc/admin_server.go b/server/rpc/admin_server.go index 0f7319e44..cc89a0db8 100644 --- a/server/rpc/admin_server.go +++ b/server/rpc/admin_server.go @@ -368,12 +368,13 @@ func (s *adminServer) RemoveDocumentByAdmin( } }() + docRef := types.DocRefKey{ + Key: docInfo.Key, + ID: docInfo.ID, + } if err := documents.RemoveDocument( ctx, s.backend, project, - types.DocRefKey{ - Key: docInfo.Key, - ID: docInfo.ID, - }, + docRef, req.Force, ); err != nil { return nil, err @@ -387,8 +388,7 @@ func (s *adminServer) RemoveDocumentByAdmin( sync.DocEvent{ Type: types.DocumentChangedEvent, Publisher: publisherID, - DocumentKey: docInfo.Key, - DocumentID: docInfo.ID, + DocumentRef: docRef, }, ) diff --git a/server/rpc/yorkie_server.go b/server/rpc/yorkie_server.go index 35f0b070f..e11bb7121 100644 --- a/server/rpc/yorkie_server.go +++ b/server/rpc/yorkie_server.go @@ -372,13 +372,14 @@ func (s *yorkieServer) WatchDocument( return err } + docRef := types.DocRefKey{ + Key: docKey, + ID: docID, + } docInfo, err := documents.FindDocInfoByKeyAndID( stream.Context(), s.backend, - types.DocRefKey{ - Key: docKey, - ID: docID, - }, + docRef, ) if err != nil { return nil @@ 
-418,13 +419,13 @@ func (s *yorkieServer) WatchDocument( } }() - subscription, clientIDs, err := s.watchDoc(stream.Context(), actorID, docKey, docID) + subscription, clientIDs, err := s.watchDoc(stream.Context(), actorID, docRef) if err != nil { logging.From(stream.Context()).Error(err) return err } defer func() { - s.unwatchDoc(subscription, docKey, docID) + s.unwatchDoc(subscription, docRef) }() var pbClientIDs []string @@ -553,10 +554,9 @@ func (s *yorkieServer) RemoveDocument( func (s *yorkieServer) watchDoc( ctx context.Context, clientID *time.ActorID, - documentKey key.Key, - documentID types.ID, + documentRef types.DocRefKey, ) (*sync.Subscription, []*time.ActorID, error) { - subscription, clientIDs, err := s.backend.Coordinator.Subscribe(ctx, clientID, documentKey, documentID) + subscription, clientIDs, err := s.backend.Coordinator.Subscribe(ctx, clientID, documentRef) if err != nil { logging.From(ctx).Error(err) return nil, nil, err @@ -568,8 +568,7 @@ func (s *yorkieServer) watchDoc( sync.DocEvent{ Type: types.DocumentWatchedEvent, Publisher: subscription.Subscriber(), - DocumentKey: documentKey, - DocumentID: documentID, + DocumentRef: documentRef, }, ) @@ -578,19 +577,17 @@ func (s *yorkieServer) watchDoc( func (s *yorkieServer) unwatchDoc( subscription *sync.Subscription, - documentKey key.Key, - documentID types.ID, + documentRef types.DocRefKey, ) { ctx := context.Background() - _ = s.backend.Coordinator.Unsubscribe(ctx, documentKey, documentID, subscription) + _ = s.backend.Coordinator.Unsubscribe(ctx, documentRef, subscription) s.backend.Coordinator.Publish( ctx, subscription.Subscriber(), sync.DocEvent{ Type: types.DocumentUnwatchedEvent, Publisher: subscription.Subscriber(), - DocumentKey: documentKey, - DocumentID: documentID, + DocumentRef: documentRef, }, ) } @@ -610,13 +607,14 @@ func (s *yorkieServer) Broadcast( return nil, err } + docRef := types.DocRefKey{ + Key: docKey, + ID: docID, + } docInfo, err := documents.FindDocInfoByKeyAndID( 
ctx, s.backend, - types.DocRefKey{ - Key: docKey, - ID: docID, - }, + docRef, ) if err != nil { return nil, err @@ -643,8 +641,7 @@ func (s *yorkieServer) Broadcast( sync.DocEvent{ Type: types.DocumentBroadcastEvent, Publisher: actorID, - DocumentKey: docKey, - DocumentID: docID, + DocumentRef: docRef, Body: types.DocEventBody{ Topic: req.Topic, Payload: req.Payload, From 11350c6f1e93266381227e8f3ada2c5837f81b0c Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Thu, 30 Nov 2023 15:51:37 +0900 Subject: [PATCH 10/11] Add test cases --- api/types/resource_ref_key.go | 5 +- api/types/resource_ref_key_test.go | 45 +++++++++ server/backend/database/mongo/registry.go | 93 ++++++++++--------- .../backend/database/mongo/registry_test.go | 64 +++++++++++++ 4 files changed, 161 insertions(+), 46 deletions(-) create mode 100644 api/types/resource_ref_key_test.go diff --git a/api/types/resource_ref_key.go b/api/types/resource_ref_key.go index a4410a95e..6f44fba0d 100644 --- a/api/types/resource_ref_key.go +++ b/api/types/resource_ref_key.go @@ -24,6 +24,9 @@ import ( "github.com/yorkie-team/yorkie/pkg/document/key" ) +// ErrInvalidDocRefKeySetInput is returned when the input of DocRefKey Set is invalid. +var ErrInvalidDocRefKeySetInput = errors.New("use the format 'docKey,docID' for the input") + // DocRefKey represents an identifier used to reference a document. type DocRefKey struct { Key key.Key @@ -40,7 +43,7 @@ func (r *DocRefKey) String() string { func (r *DocRefKey) Set(v string) error { parsed := strings.Split(v, ",") if len(parsed) != 2 { - return errors.New("use the format 'docKey,docID' for the input") + return ErrInvalidDocRefKeySetInput } r.Key = key.Key(parsed[0]) r.ID = ID(parsed[1]) diff --git a/api/types/resource_ref_key_test.go b/api/types/resource_ref_key_test.go new file mode 100644 index 000000000..6ae555f6d --- /dev/null +++ b/api/types/resource_ref_key_test.go @@ -0,0 +1,45 @@ +/* + * Copyright 2023 The Yorkie Authors. All rights reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package types_test + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" +) + +func TestResourceRefKey(t *testing.T) { + t.Run("DocRefKey Set test", func(t *testing.T) { + docKey := key.Key("docKey") + docID := types.ID("docID") + docRef := types.DocRefKey{} + + // 01. Give an invalid input to Set. + err := docRef.Set("abc") + assert.ErrorIs(t, err, types.ErrInvalidDocRefKeySetInput) + + // 02. Give a valid input to Set. + err = docRef.Set(fmt.Sprintf("%s,%s", docKey, docID)) + assert.NoError(t, err) + assert.Equal(t, docRef.Key, docKey) + assert.Equal(t, docRef.ID, docID) + }) +} diff --git a/server/backend/database/mongo/registry.go b/server/backend/database/mongo/registry.go index f49e326b8..ebcc54c74 100644 --- a/server/backend/database/mongo/registry.go +++ b/server/backend/database/mongo/registry.go @@ -48,56 +48,59 @@ func NewRegistryBuilder() *bsoncodec.RegistryBuilder { // containing a number of `doc_key`.`doc_id`.{`client_seq`, `server_seq`, `status`}s. 
rb.RegisterTypeDecoder( reflect.TypeOf(make(database.ClientDocInfoMap)), - bsoncodec.ValueDecoderFunc(func(_ bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { - docs, err := vr.ReadDocument() - if err != nil { - return fmt.Errorf("read documents: %w", err) - } - if val.IsNil() { - val.Set(reflect.MakeMap(val.Type())) - } + bsoncodec.ValueDecoderFunc(clientDocumentsDecoder), + ) - for { - docKey, docInfoByDocIDMapReader, err := docs.ReadElement() - if err != nil { - if err == bsonrw.ErrEOD { - break - } - return fmt.Errorf("read the element in documents: %w", err) - } - docInfoByDocIDMap, err := docInfoByDocIDMapReader.ReadDocument() - if err != nil { - return fmt.Errorf("read docInfoByDocID: %w", err) - } - for { - docID, docInfoReader, err := docInfoByDocIDMap.ReadElement() - if err != nil { - if err == bsonrw.ErrEOD { - break - } - return fmt.Errorf("read the element in docInfoByDocID: %w", err) - } + return rb +} - docInfo := &database.ClientDocInfo{} - docInfoDecoder, err := bson.NewDecoder(docInfoReader) - if err != nil { - return fmt.Errorf("create docInfoDecoder: %w", err) - } - err = docInfoDecoder.Decode(docInfo) - if err != nil { - return fmt.Errorf("decode docInfo: %w", err) - } +func clientDocumentsDecoder(_ bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + docs, err := vr.ReadDocument() + if err != nil { + return fmt.Errorf("read documents: %w", err) + } + if val.IsNil() { + val.Set(reflect.MakeMap(val.Type())) + } - docRef := reflect.ValueOf(types.DocRefKey{ - Key: key.Key(docKey), - ID: types.ID(docID), - }) - val.SetMapIndex(docRef, reflect.ValueOf(docInfo)) + for { + docKey, docInfoByDocIDMapReader, err := docs.ReadElement() + if err != nil { + if err == bsonrw.ErrEOD { + break + } + return fmt.Errorf("read the element in documents: %w", err) + } + docInfoByDocIDMap, err := docInfoByDocIDMapReader.ReadDocument() + if err != nil { + return fmt.Errorf("read docInfoByDocID: %w", err) + } + for { + 
docID, docInfoReader, err := docInfoByDocIDMap.ReadElement() + if err != nil { + if err == bsonrw.ErrEOD { + break } + return fmt.Errorf("read the element in docInfoByDocID: %w", err) + } + + docInfo := &database.ClientDocInfo{} + docInfoDecoder, err := bson.NewDecoder(docInfoReader) + if err != nil { + return fmt.Errorf("create docInfoDecoder: %w", err) + } + err = docInfoDecoder.Decode(docInfo) + if err != nil { + return fmt.Errorf("decode docInfo: %w", err) } - return nil - })) + docRef := reflect.ValueOf(types.DocRefKey{ + Key: key.Key(docKey), + ID: types.ID(docID), + }) + val.SetMapIndex(docRef, reflect.ValueOf(docInfo)) + } + } - return rb + return nil } diff --git a/server/backend/database/mongo/registry_test.go b/server/backend/database/mongo/registry_test.go index 83b7cf35c..ee67aac6d 100644 --- a/server/backend/database/mongo/registry_test.go +++ b/server/backend/database/mongo/registry_test.go @@ -17,13 +17,17 @@ package mongo import ( + "reflect" "testing" "github.com/stretchr/testify/assert" "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/bsoncodec" + "go.mongodb.org/mongo-driver/bson/bsonrw" "go.mongodb.org/mongo-driver/bson/primitive" "github.com/yorkie-team/yorkie/api/types" + "github.com/yorkie-team/yorkie/pkg/document/key" "github.com/yorkie-team/yorkie/server/backend/database" ) @@ -41,3 +45,63 @@ func TestRegistry(t *testing.T) { assert.Equal(t, id, info.ID) } + +func TestDecoder(t *testing.T) { + t.Run("clientDocumentsDecoder test", func(t *testing.T) { + docs := []struct { + docRefKey types.DocRefKey + docInfo database.ClientDocInfo + }{ + { + docRefKey: types.DocRefKey{ + Key: key.Key("test-doc-key1"), + ID: types.ID("test-doc-id1"), + }, + docInfo: database.ClientDocInfo{ + ClientSeq: 0, + ServerSeq: 0, + Status: database.DocumentAttached, + }, + }, + { + docRefKey: types.DocRefKey{ + Key: key.Key("test-doc-key2"), + ID: types.ID("test-doc-id2"), + }, + docInfo: database.ClientDocInfo{ + ClientSeq: 0, + ServerSeq: 0, 
+ Status: database.DocumentDetached, + }, + }, + } + + bsonDocs := make(bson.M) + for _, doc := range docs { + bsonDocs[doc.docRefKey.Key.String()] = bson.M{ + doc.docRefKey.ID.String(): bson.M{ + "client_seq": doc.docInfo.ClientSeq, + "server_seq": doc.docInfo.ServerSeq, + "status": doc.docInfo.Status, + }, + } + } + + marshaledDocs, err := bson.Marshal(bsonDocs) + assert.NoError(t, err) + + clientDocInfoMap := make(database.ClientDocInfoMap) + err = clientDocumentsDecoder( + bsoncodec.DecodeContext{}, + bsonrw.NewBSONDocumentReader(marshaledDocs), + reflect.ValueOf(clientDocInfoMap), + ) + assert.NoError(t, err) + assert.Len(t, clientDocInfoMap, len(docs)) + for _, doc := range docs { + assert.Equal(t, doc.docInfo.ClientSeq, clientDocInfoMap[doc.docRefKey].ClientSeq) + assert.Equal(t, doc.docInfo.ServerSeq, clientDocInfoMap[doc.docRefKey].ServerSeq) + assert.Equal(t, doc.docInfo.Status, clientDocInfoMap[doc.docRefKey].Status) + } + }) +} From 94dffbcb40bcc06229d6351a08f30d698567bedd Mon Sep 17 00:00:00 2001 From: Sejong Kim Date: Thu, 30 Nov 2023 16:34:59 +0900 Subject: [PATCH 11/11] Cleanup sharding scripts --- .github/workflows/ci.yml | 8 +-- .../sharding/prod/scripts/init-mongos1.js | 11 ++- .../sharding/test/scripts/init-mongos1.js | 72 +++++++++---------- test/{shard => sharding}/mongo_client_test.go | 6 +- test/{shard => sharding}/server_test.go | 6 +- 5 files changed, 51 insertions(+), 52 deletions(-) rename test/{shard => sharding}/mongo_client_test.go (98%) rename test/{shard => sharding}/server_test.go (98%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1db5358eb..dadeb6683 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -91,8 +91,8 @@ jobs: github-token: ${{ secrets.GITHUB_TOKEN }} comment-always: true - shard_test: - name: shard_test + sharding_test: + name: sharding_test runs-on: ubuntu-latest steps: @@ -113,5 +113,5 @@ jobs: - name: Wait for 30 seconds until the DB cluster is ready run: sleep 30s - - 
name: Run the tests with shard tag - run: go test -tags shard -race -v ./test/shard/... + - name: Run the tests with sharding tag + run: go test -tags sharding -race -v ./test/sharding/... diff --git a/build/docker/sharding/prod/scripts/init-mongos1.js b/build/docker/sharding/prod/scripts/init-mongos1.js index c7fc7cb7d..e6b30f71f 100644 --- a/build/docker/sharding/prod/scripts/init-mongos1.js +++ b/build/docker/sharding/prod/scripts/init-mongos1.js @@ -3,10 +3,9 @@ sh.addShard("shard-rs-2/shard2-1:27017,shard2-2:27017,shard2-3:27017") sh.addShard("shard-rs-3/shard3-1:27017,shard3-2:27017,shard3-3:27017") sh.enableSharding("yorkie-meta") -sh.shardCollection("yorkie-meta.projects", { owner: 1, name: 1 }, true) sh.shardCollection("yorkie-meta.users", { username: 1 }, true) -sh.shardCollection("yorkie-meta.clients", { project_id: 1, key: 1 }, true) -sh.shardCollection("yorkie-meta.documents", { project_id: 1, key: 1 }, true) -sh.shardCollection("yorkie-meta.changes", { doc_id: 1, server_seq: 1 }, true) -sh.shardCollection("yorkie-meta.snapshots", { doc_id: 1, server_seq: 1 }, true) -sh.shardCollection("yorkie-meta.syncedseqs", { doc_id: 1, client_id: 1 }, true) +sh.shardCollection("yorkie-meta.clients", { key: 1 }) +sh.shardCollection("yorkie-meta.documents", { key: 1 }) +sh.shardCollection("yorkie-meta.changes", { doc_key: 1 }) +sh.shardCollection("yorkie-meta.snapshots", { doc_key: 1 }) +sh.shardCollection("yorkie-meta.syncedseqs", { doc_key: 1 }) diff --git a/build/docker/sharding/test/scripts/init-mongos1.js b/build/docker/sharding/test/scripts/init-mongos1.js index f6e0bf90b..c2145f1ca 100644 --- a/build/docker/sharding/test/scripts/init-mongos1.js +++ b/build/docker/sharding/test/scripts/init-mongos1.js @@ -1,48 +1,48 @@ sh.addShard("shard-rs-1/shard1-1:27017") sh.addShard("shard-rs-2/shard2-1:27017") -// The DB 'yorkie-meta-1' is for the mongo client test. 
-sh.enableSharding("yorkie-meta-1") -sh.shardCollection("yorkie-meta-1.users", { username: 1 }, true) -sh.shardCollection("yorkie-meta-1.clients", { key: 1 }) -sh.shardCollection("yorkie-meta-1.documents", { key: 1 }) -sh.shardCollection("yorkie-meta-1.changes", { doc_key: 1 }) -sh.shardCollection("yorkie-meta-1.snapshots", { doc_key: 1 }) -sh.shardCollection("yorkie-meta-1.syncedseqs", { doc_key: 1 }) +function findAnotherShard(shard) { + if (shard == "shard-rs-1") { + return "shard-rs-2" + } else { + return "shard-rs-1" + } +} -const docSplitKey = "duplicateIDTestDocKey5" -const clientSplitKey = "duplicateIDTestClientKey5" +function shardOfChunk(minKeyOfChunk) { + return db.getSiblingDB("config").chunks.findOne({ min: { key: minKeyOfChunk } }).shard +} + +// Shard the database for the mongo client test +const mongoClientDB = "test-yorkie-meta-mongo-client" +sh.enableSharding(mongoClientDB) +sh.shardCollection(mongoClientDB + ".users", { username: 1 }, true) +sh.shardCollection(mongoClientDB + ".clients", { key: 1 }) +sh.shardCollection(mongoClientDB + ".documents", { key: 1 }) +sh.shardCollection(mongoClientDB + ".changes", { doc_key: 1 }) +sh.shardCollection(mongoClientDB + ".snapshots", { doc_key: 1 }) +sh.shardCollection(mongoClientDB + ".syncedseqs", { doc_key: 1 }) // Split the inital range at "duplicateIDTestDocKey5" to allow doc_ids duplicate in different shards. -sh.splitAt("yorkie-meta-1.documents", { key: docSplitKey }) +const docSplitKey = "duplicateIDTestDocKey5" +sh.splitAt(mongoClientDB + ".documents", { key: docSplitKey }) // Move the chunk to another shard. 
-const currentDocShard = db.getSiblingDB("config").chunks.findOne({ min: { key: docSplitKey } }).shard -var nextDocShard = "" -if (currentDocShard == "shard-rs-1") { - nextDocShard = "shard-rs-2" -} else { - nextDocShard = "shard-rs-1" -} -db.adminCommand({ moveChunk: "yorkie-meta-1.documents", find: { key: docSplitKey }, to: nextDocShard }) +db.adminCommand({ moveChunk: mongoClientDB + ".documents", find: { key: docSplitKey }, to: findAnotherShard(shardOfChunk(docSplitKey)) }) // Split the inital range at "duplicateIDTestClientKey5" to allow client_ids duplicate in different shards. -sh.splitAt("yorkie-meta-1.clients", { key: clientSplitKey }) +const clientSplitKey = "duplicateIDTestClientKey5" +sh.splitAt(mongoClientDB + ".clients", { key: clientSplitKey }) // Move the chunk to another shard. -const currentClientShard = db.getSiblingDB("config").chunks.findOne({ min: { key: clientSplitKey } }).shard -var nextClientShard = "" -if (currentClientShard == "shard-rs-1") { - nextClientShard = "shard-rs-2" -} else { - nextClientShard = "shard-rs-1" -} -db.adminCommand({ moveChunk: "yorkie-meta-1.clients", find: { key: clientSplitKey }, to: nextClientShard }) +db.adminCommand({ moveChunk: mongoClientDB + ".clients", find: { key: clientSplitKey }, to: findAnotherShard(shardOfChunk(clientSplitKey)) }) + +// Shard the database for the server test +const serverDB = "test-yorkie-meta-server" +sh.enableSharding(serverDB) +sh.shardCollection(serverDB + ".users", { username: 1 }, true) +sh.shardCollection(serverDB + ".clients", { key: 1 }) +sh.shardCollection(serverDB + ".documents", { key: 1 }) +sh.shardCollection(serverDB + ".changes", { doc_key: 1 }) +sh.shardCollection(serverDB + ".snapshots", { doc_key: 1 }) +sh.shardCollection(serverDB + ".syncedseqs", { doc_key: 1 }) -// The DB 'yorkie-meta-2' is for the server test. 
-sh.enableSharding("yorkie-meta-2") -sh.shardCollection("yorkie-meta-2.users", { username: 1 }, true) -sh.shardCollection("yorkie-meta-2.clients", { key: 1 }) -sh.shardCollection("yorkie-meta-2.documents", { key: 1 }) -sh.shardCollection("yorkie-meta-2.changes", { doc_key: 1 }) -sh.shardCollection("yorkie-meta-2.snapshots", { doc_key: 1 }) -sh.shardCollection("yorkie-meta-2.syncedseqs", { doc_key: 1 }) diff --git a/test/shard/mongo_client_test.go b/test/sharding/mongo_client_test.go similarity index 98% rename from test/shard/mongo_client_test.go rename to test/sharding/mongo_client_test.go index dfb9ef2a5..f5e8354b1 100644 --- a/test/shard/mongo_client_test.go +++ b/test/sharding/mongo_client_test.go @@ -1,4 +1,4 @@ -//go:build shard +//go:build sharding /* * Copyright 2023 The Yorkie Authors. All rights reserved. @@ -16,7 +16,7 @@ * limitations under the License. */ -package shard +package sharding import ( "context" @@ -35,7 +35,7 @@ import ( ) const ( - shardedDBNameForMongoClient = "yorkie-meta-1" + shardedDBNameForMongoClient = "test-yorkie-meta-mongo-client" dummyProjectID = types.ID("000000000000000000000000") projectOneID = types.ID("000000000000000000000001") projectTwoID = types.ID("000000000000000000000002") diff --git a/test/shard/server_test.go b/test/sharding/server_test.go similarity index 98% rename from test/shard/server_test.go rename to test/sharding/server_test.go index f49d54dc5..9ae8d0d1d 100644 --- a/test/shard/server_test.go +++ b/test/sharding/server_test.go @@ -1,4 +1,4 @@ -//go:build shard +//go:build sharding /* * Copyright 2023 The Yorkie Authors. All rights reserved. @@ -16,7 +16,7 @@ * limitations under the License. */ -package shard +package sharding import ( "context" @@ -41,7 +41,7 @@ import ( ) var ( - shardedDBNameForServer = "yorkie-meta-2" + shardedDBNameForServer = "test-yorkie-meta-server" testRPCServer *rpc.Server testRPCAddr = fmt.Sprintf("localhost:%d", helper.RPCPort) testClient api.YorkieServiceClient